]>
Commit | Line | Data |
---|---|---|
73f30c63 | 1 | /* Operations with affine combinations of trees. |
cbe34bb5 | 2 | Copyright (C) 2005-2017 Free Software Foundation, Inc. |
b8698a0f | 3 | |
73f30c63 | 4 | This file is part of GCC. |
b8698a0f | 5 | |
73f30c63 ZD |
6 | GCC is free software; you can redistribute it and/or modify it |
7 | under the terms of the GNU General Public License as published by the | |
9dcd6f09 | 8 | Free Software Foundation; either version 3, or (at your option) any |
73f30c63 | 9 | later version. |
b8698a0f | 10 | |
73f30c63 ZD |
11 | GCC is distributed in the hope that it will be useful, but WITHOUT |
12 | ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
13 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
14 | for more details. | |
b8698a0f | 15 | |
73f30c63 | 16 | You should have received a copy of the GNU General Public License |
9dcd6f09 NC |
17 | along with GCC; see the file COPYING3. If not see |
18 | <http://www.gnu.org/licenses/>. */ | |
73f30c63 ZD |
19 | |
20 | #include "config.h" | |
21 | #include "system.h" | |
22 | #include "coretypes.h" | |
c7131fb2 | 23 | #include "backend.h" |
957060b5 | 24 | #include "rtl.h" |
73f30c63 | 25 | #include "tree.h" |
c7131fb2 | 26 | #include "gimple.h" |
ba00284c | 27 | #include "ssa.h" |
957060b5 | 28 | #include "tree-pretty-print.h" |
c7131fb2 | 29 | #include "fold-const.h" |
73f30c63 | 30 | #include "tree-affine.h" |
45b0be94 | 31 | #include "gimplify.h" |
7ee2468b | 32 | #include "dumpfile.h" |
1fe37220 | 33 | #include "cfgexpand.h" |
73f30c63 ZD |
34 | |
35 | /* Extends CST as appropriate for the affine combinations COMB. */ | |
36 | ||
cc8bea09 | 37 | static widest_int |
5291ab73 | 38 | wide_int_ext_for_comb (const widest_int &cst, tree type) |
73f30c63 | 39 | { |
5291ab73 | 40 | return wi::sext (cst, TYPE_PRECISION (type)); |
73f30c63 ZD |
41 | } |
42 | ||
cc8bea09 RS |
43 | /* Likewise for polynomial offsets. */ |
44 | ||
45 | static poly_widest_int | |
46 | wide_int_ext_for_comb (const poly_widest_int &cst, tree type) | |
47 | { | |
48 | return wi::sext (cst, TYPE_PRECISION (type)); | |
49 | } | |
50 | ||
73f30c63 ZD |
51 | /* Initializes affine combination COMB so that its value is zero in TYPE. */ |
52 | ||
53 | static void | |
54 | aff_combination_zero (aff_tree *comb, tree type) | |
55 | { | |
807e902e | 56 | int i; |
73f30c63 | 57 | comb->type = type; |
807e902e | 58 | comb->offset = 0; |
73f30c63 | 59 | comb->n = 0; |
807e902e KZ |
60 | for (i = 0; i < MAX_AFF_ELTS; i++) |
61 | comb->elts[i].coef = 0; | |
73f30c63 ZD |
62 | comb->rest = NULL_TREE; |
63 | } | |
64 | ||
65 | /* Sets COMB to CST. */ | |
66 | ||
67 | void | |
cc8bea09 | 68 | aff_combination_const (aff_tree *comb, tree type, const poly_widest_int &cst) |
73f30c63 ZD |
69 | { |
70 | aff_combination_zero (comb, type); | |
5291ab73 | 71 | comb->offset = wide_int_ext_for_comb (cst, comb->type);; |
73f30c63 ZD |
72 | } |
73 | ||
74 | /* Sets COMB to single element ELT. */ | |
75 | ||
76 | void | |
77 | aff_combination_elt (aff_tree *comb, tree type, tree elt) | |
78 | { | |
79 | aff_combination_zero (comb, type); | |
80 | ||
81 | comb->n = 1; | |
82 | comb->elts[0].val = elt; | |
807e902e | 83 | comb->elts[0].coef = 1; |
73f30c63 ZD |
84 | } |
85 | ||
/* Scales COMB by SCALE_IN (multiplies the offset and every coefficient,
   and folds the scale into REST if one is present).  */

void
aff_combination_scale (aff_tree *comb, const widest_int &scale_in)
{
  unsigned i, j;

  /* Canonicalize the scale to the combination's precision first.  */
  widest_int scale = wide_int_ext_for_comb (scale_in, comb->type);
  if (scale == 1)
    return;

  /* Scaling by zero yields the zero combination.  */
  if (scale == 0)
    {
      aff_combination_zero (comb, comb->type);
      return;
    }

  comb->offset = wide_int_ext_for_comb (scale * comb->offset, comb->type);
  for (i = 0, j = 0; i < comb->n; i++)
    {
      widest_int new_coef
	= wide_int_ext_for_comb (scale * comb->elts[i].coef, comb->type);
      /* A coefficient may become zero due to overflow.  Remove the zero
	 elements.  */
      if (new_coef == 0)
	continue;
      comb->elts[j].coef = new_coef;
      comb->elts[j].val = comb->elts[i].val;
      j++;
    }
  comb->n = j;

  if (comb->rest)
    {
      tree type = comb->type;
      if (POINTER_TYPE_P (type))
	type = sizetype;
      /* If removing zero coefficients freed a slot, move the scaled REST
	 into the element array; otherwise multiply REST in tree form.  */
      if (comb->n < MAX_AFF_ELTS)
	{
	  comb->elts[comb->n].coef = scale;
	  comb->elts[comb->n].val = comb->rest;
	  comb->rest = NULL_TREE;
	  comb->n++;
	}
      else
	comb->rest = fold_build2 (MULT_EXPR, type, comb->rest,
				  wide_int_to_tree (type, scale));
    }
}
135 | ||
/* Adds ELT * SCALE_IN to COMB, merging with an existing occurrence of ELT
   if there is one, and spilling into REST when the element array is full.  */

void
aff_combination_add_elt (aff_tree *comb, tree elt, const widest_int &scale_in)
{
  unsigned i;
  tree type;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb->type);
  if (scale == 0)
    return;

  /* If ELT is already present, just merge the coefficients.  */
  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, elt, 0))
      {
	widest_int new_coef
	  = wide_int_ext_for_comb (comb->elts[i].coef + scale, comb->type);
	if (new_coef != 0)
	  {
	    comb->elts[i].coef = new_coef;
	    return;
	  }

	/* The coefficients cancelled: remove the element ...  */
	comb->n--;
	comb->elts[i] = comb->elts[comb->n];

	/* ... and promote REST into the slot that just became free.  */
	if (comb->rest)
	  {
	    gcc_assert (comb->n == MAX_AFF_ELTS - 1);
	    comb->elts[comb->n].coef = 1;
	    comb->elts[comb->n].val = comb->rest;
	    comb->rest = NULL_TREE;
	    comb->n++;
	  }
	return;
      }
  /* ELT is new; use a free slot if one exists.  */
  if (comb->n < MAX_AFF_ELTS)
    {
      comb->elts[comb->n].coef = scale;
      comb->elts[comb->n].val = elt;
      comb->n++;
      return;
    }

  /* No free slot: fold ELT * SCALE into the REST tree instead.  */
  type = comb->type;
  if (POINTER_TYPE_P (type))
    type = sizetype;

  if (scale == 1)
    elt = fold_convert (type, elt);
  else
    elt = fold_build2 (MULT_EXPR, type,
		       fold_convert (type, elt),
		       wide_int_to_tree (type, scale));

  if (comb->rest)
    comb->rest = fold_build2 (PLUS_EXPR, type, comb->rest,
			      elt);
  else
    comb->rest = elt;
}
197 | ||
7e2ac86c ZD |
198 | /* Adds CST to C. */ |
199 | ||
200 | static void | |
cc8bea09 | 201 | aff_combination_add_cst (aff_tree *c, const poly_widest_int &cst) |
7e2ac86c | 202 | { |
5291ab73 | 203 | c->offset = wide_int_ext_for_comb (c->offset + cst, c->type); |
7e2ac86c ZD |
204 | } |
205 | ||
73f30c63 ZD |
206 | /* Adds COMB2 to COMB1. */ |
207 | ||
208 | void | |
209 | aff_combination_add (aff_tree *comb1, aff_tree *comb2) | |
210 | { | |
211 | unsigned i; | |
212 | ||
7e2ac86c | 213 | aff_combination_add_cst (comb1, comb2->offset); |
73f30c63 ZD |
214 | for (i = 0; i < comb2->n; i++) |
215 | aff_combination_add_elt (comb1, comb2->elts[i].val, comb2->elts[i].coef); | |
216 | if (comb2->rest) | |
807e902e | 217 | aff_combination_add_elt (comb1, comb2->rest, 1); |
73f30c63 ZD |
218 | } |
219 | ||
/* Converts affine combination COMB to TYPE.  */

void
aff_combination_convert (aff_tree *comb, tree type)
{
  unsigned i, j;
  tree comb_type = comb->type;

  /* Widening cannot be done elementwise (the truncated high bits of each
     term matter), so rebuild the combination from the converted tree.  */
  if (TYPE_PRECISION (type) > TYPE_PRECISION (comb_type))
    {
      tree val = fold_convert (type, aff_combination_to_tree (comb));
      tree_to_aff_combination (val, type, comb);
      return;
    }

  comb->type = type;
  if (comb->rest && !POINTER_TYPE_P (type))
    comb->rest = fold_convert (type, comb->rest);

  /* Same precision: nothing further to renormalize.  */
  if (TYPE_PRECISION (type) == TYPE_PRECISION (comb_type))
    return;

  /* Narrowing: re-extend the offset and drop elements whose coefficient
     is zero, converting the remaining element values to TYPE.  */
  comb->offset = wide_int_ext_for_comb (comb->offset, comb->type);
  for (i = j = 0; i < comb->n; i++)
    {
      if (comb->elts[i].coef == 0)
	continue;
      comb->elts[j].coef = comb->elts[i].coef;
      comb->elts[j].val = fold_convert (type, comb->elts[i].val);
      j++;
    }

  comb->n = j;
  /* If dropping elements freed a slot, promote REST into it.  */
  if (comb->n < MAX_AFF_ELTS && comb->rest)
    {
      comb->elts[comb->n].coef = 1;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}
261 | ||
/* Splits EXPR into an affine combination of parts, recursively decomposing
   additions, subtractions, multiplications by constants, negations,
   address computations and conversions.  Anything it cannot decompose
   becomes a single opaque element with coefficient one.  */

void
tree_to_aff_combination (tree expr, tree type, aff_tree *comb)
{
  aff_tree tmp;
  enum tree_code code;
  tree cst, core, toffset;
  HOST_WIDE_INT bitpos, bitsize;
  machine_mode mode;
  int unsignedp, reversep, volatilep;

  STRIP_NOPS (expr);

  code = TREE_CODE (expr);
  switch (code)
    {
    case POINTER_PLUS_EXPR:
      /* ptr p+ off: decompose the pointer in TYPE and the offset in
	 sizetype, then sum the combinations.  */
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    case PLUS_EXPR:
    case MINUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), type, &tmp);
      if (code == MINUS_EXPR)
	aff_combination_scale (&tmp, -1);
      aff_combination_add (comb, &tmp);
      return;

    case MULT_EXPR:
      /* Only multiplication by an integer constant is affine.  */
      cst = TREE_OPERAND (expr, 1);
      if (TREE_CODE (cst) != INTEGER_CST)
	break;
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, wi::to_widest (cst));
      return;

    case NEGATE_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, -1);
      return;

    case BIT_NOT_EXPR:
      /* ~x = -x - 1 */
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, -1);
      aff_combination_add_cst (comb, -1);
      return;

    case ADDR_EXPR:
      /* Handle &MEM[ptr + CST] which is equivalent to POINTER_PLUS_EXPR.  */
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == MEM_REF)
	{
	  expr = TREE_OPERAND (expr, 0);
	  tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
	  tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
	  aff_combination_add (comb, &tmp);
	  return;
	}
      core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
				  &toffset, &mode, &unsignedp, &reversep,
				  &volatilep);
      /* Bitfield positions not on a byte boundary cannot be expressed as
	 an address offset; treat the whole expression as opaque.  */
      if (bitpos % BITS_PER_UNIT != 0)
	break;
      aff_combination_const (comb, type, bitpos / BITS_PER_UNIT);
      if (TREE_CODE (core) == MEM_REF)
	{
	  aff_combination_add_cst (comb, wi::to_widest (TREE_OPERAND (core, 1)));
	  core = TREE_OPERAND (core, 0);
	}
      else
	core = build_fold_addr_expr (core);

      if (TREE_CODE (core) == ADDR_EXPR)
	aff_combination_add_elt (comb, core, 1);
      else
	{
	  tree_to_aff_combination (core, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      if (toffset)
	{
	  tree_to_aff_combination (toffset, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      return;

    case MEM_REF:
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
	tree_to_aff_combination (TREE_OPERAND (TREE_OPERAND (expr, 0), 0),
				 type, comb);
      else if (integer_zerop (TREE_OPERAND (expr, 1)))
	{
	  aff_combination_elt (comb, type, expr);
	  return;
	}
      else
	/* Split MEM[p + off] into the zero-offset reference plus OFF.  */
	aff_combination_elt (comb, type,
			     build2 (MEM_REF, TREE_TYPE (expr),
				     TREE_OPERAND (expr, 0),
				     build_int_cst
				     (TREE_TYPE (TREE_OPERAND (expr, 1)), 0)));
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    CASE_CONVERT:
      {
	tree otype = TREE_TYPE (expr);
	tree inner = TREE_OPERAND (expr, 0);
	tree itype = TREE_TYPE (inner);
	enum tree_code icode = TREE_CODE (inner);

	/* In principle this is a valid folding, but it isn't necessarily
	   an optimization, so do it here and not in fold_unary.  */
	if ((icode == PLUS_EXPR || icode == MINUS_EXPR || icode == MULT_EXPR)
	    && TREE_CODE (itype) == INTEGER_TYPE
	    && TREE_CODE (otype) == INTEGER_TYPE
	    && TYPE_PRECISION (otype) > TYPE_PRECISION (itype))
	  {
	    tree op0 = TREE_OPERAND (inner, 0), op1 = TREE_OPERAND (inner, 1);

	    /* If inner type has undefined overflow behavior, fold conversion
	       for below two cases:
		 (T1)(X *+- CST) -> (T1)X *+- (T1)CST
		 (T1)(X + X)     -> (T1)X + (T1)X.  */
	    if (TYPE_OVERFLOW_UNDEFINED (itype)
		&& (TREE_CODE (op1) == INTEGER_CST
		    || (icode == PLUS_EXPR && operand_equal_p (op0, op1, 0))))
	      {
		op0 = fold_convert (otype, op0);
		op1 = fold_convert (otype, op1);
		expr = fold_build2 (icode, otype, op0, op1);
		tree_to_aff_combination (expr, type, comb);
		return;
	      }
	    wide_int minv, maxv;
	    /* If inner type has wrapping overflow behavior, fold conversion
	       for below case:
		 (T1)(X - CST) -> (T1)X - (T1)CST
	       if X - CST doesn't overflow by range information.  Also handle
	       (T1)(X + CST) as (T1)(X - (-CST)).  */
	    if (TYPE_UNSIGNED (itype)
		&& TYPE_OVERFLOW_WRAPS (itype)
		&& TREE_CODE (op0) == SSA_NAME
		&& TREE_CODE (op1) == INTEGER_CST
		&& icode != MULT_EXPR
		&& get_range_info (op0, &minv, &maxv) == VR_RANGE)
	      {
		if (icode == PLUS_EXPR)
		  op1 = wide_int_to_tree (itype, -wi::to_wide (op1));
		/* minv >= CST (unsigned) guarantees X - CST does not wrap.  */
		if (wi::geu_p (minv, wi::to_wide (op1)))
		  {
		    op0 = fold_convert (otype, op0);
		    op1 = fold_convert (otype, op1);
		    expr = fold_build2 (MINUS_EXPR, otype, op0, op1);
		    tree_to_aff_combination (expr, type, comb);
		    return;
		  }
	      }
	  }
      }
      break;

    default:
      {
	if (poly_int_tree_p (expr))
	  {
	    aff_combination_const (comb, type, wi::to_poly_widest (expr));
	    return;
	  }
	break;
      }
    }

  /* EXPR is not decomposable: represent it as a single opaque element
     with coefficient one.  */
  aff_combination_elt (comb, type, expr);
}
442 | ||
/* Creates EXPR + ELT * SCALE in TYPE.  EXPR is taken from affine
   combination COMB.  EXPR may be NULL, in which case only ELT * SCALE
   is built.  */

static tree
add_elt_to_tree (tree expr, tree type, tree elt, const widest_int &scale_in)
{
  enum tree_code code;

  widest_int scale = wide_int_ext_for_comb (scale_in, type);

  elt = fold_convert (type, elt);
  if (scale == 1)
    {
      if (!expr)
	return elt;

      return fold_build2 (PLUS_EXPR, type, expr, elt);
    }

  if (scale == -1)
    {
      if (!expr)
	return fold_build1 (NEGATE_EXPR, type, elt);

      return fold_build2 (MINUS_EXPR, type, expr, elt);
    }

  if (!expr)
    return fold_build2 (MULT_EXPR, type, elt, wide_int_to_tree (type, scale));

  /* For a negative scale build EXPR - ELT * |SCALE| so the result reads
     as a subtraction rather than an addition of a huge unsigned value.  */
  if (wi::neg_p (scale))
    {
      code = MINUS_EXPR;
      scale = -scale;
    }
  else
    code = PLUS_EXPR;

  elt = fold_build2 (MULT_EXPR, type, elt, wide_int_to_tree (type, scale));
  return fold_build2 (code, type, expr, elt);
}
484 | ||
/* Makes tree from the affine combination COMB.  */

tree
aff_combination_to_tree (aff_tree *comb)
{
  tree type = comb->type, base = NULL_TREE, expr = NULL_TREE;
  unsigned i;
  poly_widest_int off;
  int sgn;

  /* REST may only be set when all element slots are occupied.  */
  gcc_assert (comb->n == MAX_AFF_ELTS || comb->rest == NULL_TREE);

  i = 0;
  /* For pointer combinations, peel off a pointer base (an element with
     coefficient one and pointer type) and build the remainder in
     sizetype; the base is re-attached with POINTER_PLUS at the end.  */
  if (POINTER_TYPE_P (type))
    {
      type = sizetype;
      if (comb->n > 0 && comb->elts[0].coef == 1
	  && POINTER_TYPE_P (TREE_TYPE (comb->elts[0].val)))
	{
	  base = comb->elts[0].val;
	  ++i;
	}
    }

  for (; i < comb->n; i++)
    expr = add_elt_to_tree (expr, type, comb->elts[i].val, comb->elts[i].coef);

  if (comb->rest)
    expr = add_elt_to_tree (expr, type, comb->rest, 1);

  /* Ensure that we get x - 1, not x + (-1) or x + 0xff..f if x is
     unsigned.  */
  if (known_lt (comb->offset, 0))
    {
      off = -comb->offset;
      sgn = -1;
    }
  else
    {
      off = comb->offset;
      sgn = 1;
    }
  expr = add_elt_to_tree (expr, type, wide_int_to_tree (type, off), sgn);

  if (base)
    return fold_build_pointer_plus (base, expr);
  else
    return fold_convert (comb->type, expr);
}
534 | ||
535 | /* Copies the tree elements of COMB to ensure that they are not shared. */ | |
536 | ||
537 | void | |
538 | unshare_aff_combination (aff_tree *comb) | |
539 | { | |
540 | unsigned i; | |
541 | ||
542 | for (i = 0; i < comb->n; i++) | |
543 | comb->elts[i].val = unshare_expr (comb->elts[i].val); | |
544 | if (comb->rest) | |
545 | comb->rest = unshare_expr (comb->rest); | |
546 | } | |
547 | ||
548 | /* Remove M-th element from COMB. */ | |
549 | ||
550 | void | |
551 | aff_combination_remove_elt (aff_tree *comb, unsigned m) | |
552 | { | |
553 | comb->n--; | |
554 | if (m <= comb->n) | |
555 | comb->elts[m] = comb->elts[comb->n]; | |
556 | if (comb->rest) | |
557 | { | |
807e902e | 558 | comb->elts[comb->n].coef = 1; |
73f30c63 ZD |
559 | comb->elts[comb->n].val = comb->rest; |
560 | comb->rest = NULL_TREE; | |
561 | comb->n++; | |
562 | } | |
563 | } | |
7e2ac86c ZD |
/* Adds C * COEF * VAL to R.  VAL may be NULL, in that case only
   C * COEF is added to R.  */

static void
aff_combination_add_product (aff_tree *c, const widest_int &coef, tree val,
			     aff_tree *r)
{
  unsigned i;
  tree aval, type;

  /* Distribute over the elements: each term AVAL * ECOEF of C contributes
     (AVAL * VAL) with coefficient ECOEF * COEF.  */
  for (i = 0; i < c->n; i++)
    {
      aval = c->elts[i].val;
      if (val)
	{
	  type = TREE_TYPE (aval);
	  aval = fold_build2 (MULT_EXPR, type, aval,
			      fold_convert (type, val));
	}

      aff_combination_add_elt (r, aval, coef * c->elts[i].coef);
    }

  /* REST has implicit coefficient one, so it contributes REST * VAL
     with coefficient COEF.  */
  if (c->rest)
    {
      aval = c->rest;
      if (val)
	{
	  type = TREE_TYPE (aval);
	  aval = fold_build2 (MULT_EXPR, type, aval,
			      fold_convert (type, val));
	}

      aff_combination_add_elt (r, aval, coef);
    }

  if (val)
    {
      if (c->offset.is_constant ())
	/* Access coeffs[0] directly, for efficiency.  */
	aff_combination_add_elt (r, val, coef * c->offset.coeffs[0]);
      else
	{
	  /* c->offset is polynomial, so multiply VAL rather than COEF
	     by it.  */
	  tree offset = wide_int_to_tree (TREE_TYPE (val), c->offset);
	  val = fold_build2 (MULT_EXPR, TREE_TYPE (val), val, offset);
	  aff_combination_add_elt (r, val, coef);
	}
    }
  else
    aff_combination_add_cst (r, coef * c->offset);
}
619 | ||
620 | /* Multiplies C1 by C2, storing the result to R */ | |
621 | ||
622 | void | |
623 | aff_combination_mult (aff_tree *c1, aff_tree *c2, aff_tree *r) | |
624 | { | |
625 | unsigned i; | |
626 | gcc_assert (TYPE_PRECISION (c1->type) == TYPE_PRECISION (c2->type)); | |
627 | ||
628 | aff_combination_zero (r, c1->type); | |
629 | ||
630 | for (i = 0; i < c2->n; i++) | |
631 | aff_combination_add_product (c1, c2->elts[i].coef, c2->elts[i].val, r); | |
632 | if (c2->rest) | |
807e902e | 633 | aff_combination_add_product (c1, 1, c2->rest, r); |
cc8bea09 RS |
634 | if (c2->offset.is_constant ()) |
635 | /* Access coeffs[0] directly, for efficiency. */ | |
636 | aff_combination_add_product (c1, c2->offset.coeffs[0], NULL, r); | |
637 | else | |
638 | { | |
639 | /* c2->offset is polynomial, so do the multiplication in tree form. */ | |
640 | tree offset = wide_int_to_tree (c2->type, c2->offset); | |
641 | aff_combination_add_product (c1, 1, offset, r); | |
642 | } | |
7e2ac86c | 643 | } |
bbc8a8dc ZD |
644 | |
645 | /* Returns the element of COMB whose value is VAL, or NULL if no such | |
646 | element exists. If IDX is not NULL, it is set to the index of VAL in | |
647 | COMB. */ | |
b8698a0f | 648 | |
bbc8a8dc ZD |
649 | static struct aff_comb_elt * |
650 | aff_combination_find_elt (aff_tree *comb, tree val, unsigned *idx) | |
651 | { | |
652 | unsigned i; | |
653 | ||
654 | for (i = 0; i < comb->n; i++) | |
655 | if (operand_equal_p (comb->elts[i].val, val, 0)) | |
656 | { | |
657 | if (idx) | |
658 | *idx = i; | |
659 | ||
660 | return &comb->elts[i]; | |
661 | } | |
662 | ||
663 | return NULL; | |
664 | } | |
665 | ||
/* Element of the cache that maps ssa name NAME to its expanded form
   as an affine expression EXPANSION.  */

struct name_expansion
{
  aff_tree expansion;

  /* True if the expansion for the name is just being generated; used by
     aff_combination_expand to detect cycles while following SSA
     definitions.  */
  unsigned in_progress : 1;
};
676 | ||
/* Expands SSA names in COMB recursively.  CACHE is used to cache the
   results.  */

void
aff_combination_expand (aff_tree *comb ATTRIBUTE_UNUSED,
			hash_map<tree, name_expansion *> **cache)
{
  unsigned i;
  aff_tree to_add, current, curre;
  tree e, rhs;
  gimple *def;
  widest_int scale;
  struct name_expansion *exp;

  aff_combination_zero (&to_add, comb->type);
  for (i = 0; i < comb->n; i++)
    {
      tree type, name;
      enum tree_code code;

      e = comb->elts[i].val;
      type = TREE_TYPE (e);
      name = e;
      /* Look through some conversions.  */
      if (CONVERT_EXPR_P (e)
	  && (TYPE_PRECISION (type)
	      >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (e, 0)))))
	name = TREE_OPERAND (e, 0);
      if (TREE_CODE (name) != SSA_NAME)
	continue;
      def = SSA_NAME_DEF_STMT (name);
      /* Only single-assignment definitions of NAME can be expanded.  */
      if (!is_gimple_assign (def) || gimple_assign_lhs (def) != name)
	continue;

      code = gimple_assign_rhs_code (def);
      if (code != SSA_NAME
	  && !IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
	  && (get_gimple_rhs_class (code) != GIMPLE_SINGLE_RHS
	      || !is_gimple_min_invariant (gimple_assign_rhs1 (def))))
	continue;

      /* We do not know whether the reference retains its value at the
	 place where the expansion is used.  */
      if (TREE_CODE_CLASS (code) == tcc_reference)
	continue;

      if (!*cache)
	*cache = new hash_map<tree, name_expansion *>;
      name_expansion **slot = &(*cache)->get_or_insert (e);
      exp = *slot;

      if (!exp)
	{
	  /* Not yet expanded: mark the entry in-progress before recursing
	     so a cycle through this name would be caught below.  */
	  exp = XNEW (struct name_expansion);
	  exp->in_progress = 1;
	  *slot = exp;
	  rhs = gimple_assign_rhs_to_tree (def);
	  if (e != name)
	    rhs = fold_convert (type, rhs);

	  tree_to_aff_combination_expand (rhs, comb->type, &current, cache);
	  exp->expansion = current;
	  exp->in_progress = 0;
	}
      else
	{
	  /* Since we follow the definitions in the SSA form, we should not
	     enter a cycle unless we pass through a phi node.  */
	  gcc_assert (!exp->in_progress);
	  current = exp->expansion;
	}

      /* Accumulate the new terms to TO_ADD, so that we do not modify
	 COMB while traversing it; include the term -coef * E, to remove
	 it from COMB.  */
      scale = comb->elts[i].coef;
      aff_combination_zero (&curre, comb->type);
      aff_combination_add_elt (&curre, e, -scale);
      aff_combination_scale (&current, scale);
      aff_combination_add (&to_add, &current);
      aff_combination_add (&to_add, &curre);
    }
  aff_combination_add (comb, &to_add);
}
761 | ||
72425608 ZD |
762 | /* Similar to tree_to_aff_combination, but follows SSA name definitions |
763 | and expands them recursively. CACHE is used to cache the expansions | |
764 | of the ssa names, to avoid exponential time complexity for cases | |
765 | like | |
766 | ||
767 | a1 = a0 + a0; | |
768 | a2 = a1 + a1; | |
769 | a3 = a2 + a2; | |
770 | ... */ | |
771 | ||
772 | void | |
773 | tree_to_aff_combination_expand (tree expr, tree type, aff_tree *comb, | |
39c8aaa4 | 774 | hash_map<tree, name_expansion *> **cache) |
72425608 ZD |
775 | { |
776 | tree_to_aff_combination (expr, type, comb); | |
777 | aff_combination_expand (comb, cache); | |
778 | } | |
779 | ||
/* Frees memory occupied by struct name_expansion in *VALUE.  Callback for
   hash_map::traverse.  Always returns true so the traversal continues
   over the remaining entries.  */

bool
free_name_expansion (tree const &, name_expansion **value, void *)
{
  free (*value);
  return true;
}
789 | ||
790 | /* Frees memory allocated for the CACHE used by | |
791 | tree_to_aff_combination_expand. */ | |
792 | ||
793 | void | |
39c8aaa4 | 794 | free_affine_expand_cache (hash_map<tree, name_expansion *> **cache) |
bbc8a8dc ZD |
795 | { |
796 | if (!*cache) | |
797 | return; | |
798 | ||
39c8aaa4 TS |
799 | (*cache)->traverse<void *, free_name_expansion> (NULL); |
800 | delete (*cache); | |
bbc8a8dc ZD |
801 | *cache = NULL; |
802 | } | |
803 | ||
804 | /* If VAL != CST * DIV for any constant CST, returns false. | |
b03be25f RB |
805 | Otherwise, if *MULT_SET is true, additionally compares CST and MULT, |
806 | and if they are different, returns false. Finally, if neither of these | |
807 | two cases occur, true is returned, and CST is stored to MULT and MULT_SET | |
808 | is set to true. */ | |
bbc8a8dc ZD |
809 | |
810 | static bool | |
cc8bea09 RS |
811 | wide_int_constant_multiple_p (const poly_widest_int &val, |
812 | const poly_widest_int &div, | |
813 | bool *mult_set, poly_widest_int *mult) | |
bbc8a8dc | 814 | { |
cc8bea09 | 815 | poly_widest_int rem, cst; |
bbc8a8dc | 816 | |
cc8bea09 | 817 | if (known_eq (val, 0)) |
b03be25f | 818 | { |
cc8bea09 | 819 | if (*mult_set && maybe_ne (*mult, 0)) |
b03be25f RB |
820 | return false; |
821 | *mult_set = true; | |
807e902e | 822 | *mult = 0; |
b03be25f RB |
823 | return true; |
824 | } | |
bbc8a8dc | 825 | |
cc8bea09 | 826 | if (maybe_eq (div, 0)) |
bbc8a8dc ZD |
827 | return false; |
828 | ||
cc8bea09 | 829 | if (!multiple_p (val, div, &cst)) |
bbc8a8dc ZD |
830 | return false; |
831 | ||
cc8bea09 | 832 | if (*mult_set && maybe_ne (*mult, cst)) |
bbc8a8dc ZD |
833 | return false; |
834 | ||
835 | *mult_set = true; | |
836 | *mult = cst; | |
837 | return true; | |
838 | } | |
839 | ||
/* Returns true if VAL = X * DIV for some constant X.  If this is the case,
   X is stored to MULT.  */

bool
aff_combination_constant_multiple_p (aff_tree *val, aff_tree *div,
				     poly_widest_int *mult)
{
  bool mult_set = false;
  unsigned i;

  /* The zero combination is a multiple of anything, with factor zero.  */
  if (val->n == 0 && known_eq (val->offset, 0))
    {
      *mult = 0;
      return true;
    }
  if (val->n != div->n)
    return false;

  /* Opaque REST trees cannot be compared for divisibility.  */
  if (val->rest || div->rest)
    return false;

  if (!wide_int_constant_multiple_p (val->offset, div->offset,
				     &mult_set, mult))
    return false;

  /* Every element of DIV must appear in VAL with a coefficient that is
     the same constant multiple.  */
  for (i = 0; i < div->n; i++)
    {
      struct aff_comb_elt *elt
	= aff_combination_find_elt (val, div->elts[i].val, NULL);
      if (!elt)
	return false;
      if (!wide_int_constant_multiple_p (elt->coef, div->elts[i].coef,
					 &mult_set, mult))
	return false;
    }

  gcc_assert (mult_set);
  return true;
}
ea336dd5 AP |
/* Prints the affine VAL to the FILE.  */

static void
print_aff (FILE *file, aff_tree *val)
{
  unsigned i;
  /* Print offsets/coefficients of pointer combinations as signed:
     conceptually they are byte distances.  */
  signop sgn = TYPE_SIGN (val->type);
  if (POINTER_TYPE_P (val->type))
    sgn = SIGNED;
  fprintf (file, "{\n  type = ");
  print_generic_expr (file, val->type, TDF_VOPS|TDF_MEMSYMS);
  fprintf (file, "\n  offset = ");
  print_dec (val->offset, file, sgn);
  if (val->n > 0)
    {
      fprintf (file, "\n  elements = {\n");
      for (i = 0; i < val->n; i++)
	{
	  fprintf (file, "    [%d] = ", i);
	  print_generic_expr (file, val->elts[i].val, TDF_VOPS|TDF_MEMSYMS);

	  fprintf (file, " * ");
	  print_dec (val->elts[i].coef, file, sgn);
	  if (i != val->n - 1)
	    fprintf (file, ", \n");
	}
      fprintf (file, "\n  }");
    }
  if (val->rest)
    {
      fprintf (file, "\n  rest = ");
      print_generic_expr (file, val->rest, TDF_VOPS|TDF_MEMSYMS);
    }
  fprintf (file, "\n}");
}
915 | ||
916 | /* Prints the affine VAL to the standard error, used for debugging. */ | |
917 | ||
24e47c76 | 918 | DEBUG_FUNCTION void |
ea336dd5 AP |
919 | debug_aff (aff_tree *val) |
920 | { | |
921 | print_aff (stderr, val); | |
922 | fprintf (stderr, "\n"); | |
923 | } | |
72425608 | 924 | |
/* Computes address of the reference REF in ADDR.  The size of the accessed
   location is stored to SIZE.  Returns the ultimate containing object to
   which REF refers.  */

tree
get_inner_reference_aff (tree ref, aff_tree *addr, widest_int *size)
{
  HOST_WIDE_INT bitsize, bitpos;
  tree toff;
  machine_mode mode;
  int uns, rev, vol;
  aff_tree tmp;
  tree base = get_inner_reference (ref, &bitsize, &bitpos, &toff, &mode,
				   &uns, &rev, &vol);
  tree base_addr = build_fold_addr_expr (base);

  /* ADDR = &BASE + TOFF + BITPOS / BITS_PER_UNIT.  */

  tree_to_aff_combination (base_addr, sizetype, addr);

  if (toff)
    {
      tree_to_aff_combination (toff, sizetype, &tmp);
      aff_combination_add (addr, &tmp);
    }

  aff_combination_const (&tmp, sizetype, bitpos / BITS_PER_UNIT);
  aff_combination_add (addr, &tmp);

  /* Round the access size up to whole bytes.  */
  *size = (bitsize + BITS_PER_UNIT - 1) / BITS_PER_UNIT;

  return base;
}
958 | ||
02f5d6c5 RG |
959 | /* Returns true if a region of size SIZE1 at position 0 and a region of |
960 | size SIZE2 at position DIFF cannot overlap. */ | |
961 | ||
962 | bool | |
cc8bea09 RS |
963 | aff_comb_cannot_overlap_p (aff_tree *diff, const poly_widest_int &size1, |
964 | const poly_widest_int &size2) | |
02f5d6c5 | 965 | { |
02f5d6c5 RG |
966 | /* Unless the difference is a constant, we fail. */ |
967 | if (diff->n != 0) | |
968 | return false; | |
969 | ||
cc8bea09 RS |
970 | if (!ordered_p (diff->offset, 0)) |
971 | return false; | |
972 | ||
973 | if (maybe_lt (diff->offset, 0)) | |
02f5d6c5 RG |
974 | { |
975 | /* The second object is before the first one, we succeed if the last | |
976 | element of the second object is before the start of the first one. */ | |
cc8bea09 | 977 | return known_le (diff->offset + size2, 0); |
02f5d6c5 RG |
978 | } |
979 | else | |
980 | { | |
981 | /* We succeed if the second object starts after the first one ends. */ | |
cc8bea09 | 982 | return known_le (size1, diff->offset); |
02f5d6c5 RG |
983 | } |
984 | } | |
985 |