]>
Commit | Line | Data |
---|---|---|
73f30c63 | 1 | /* Operations with affine combinations of trees. |
a5544970 | 2 | Copyright (C) 2005-2019 Free Software Foundation, Inc. |
b8698a0f | 3 | |
73f30c63 | 4 | This file is part of GCC. |
b8698a0f | 5 | |
73f30c63 ZD |
6 | GCC is free software; you can redistribute it and/or modify it |
7 | under the terms of the GNU General Public License as published by the | |
9dcd6f09 | 8 | Free Software Foundation; either version 3, or (at your option) any |
73f30c63 | 9 | later version. |
b8698a0f | 10 | |
73f30c63 ZD |
11 | GCC is distributed in the hope that it will be useful, but WITHOUT |
12 | ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
13 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
14 | for more details. | |
b8698a0f | 15 | |
73f30c63 | 16 | You should have received a copy of the GNU General Public License |
9dcd6f09 NC |
17 | along with GCC; see the file COPYING3. If not see |
18 | <http://www.gnu.org/licenses/>. */ | |
73f30c63 ZD |
19 | |
20 | #include "config.h" | |
21 | #include "system.h" | |
22 | #include "coretypes.h" | |
c7131fb2 | 23 | #include "backend.h" |
957060b5 | 24 | #include "rtl.h" |
73f30c63 | 25 | #include "tree.h" |
c7131fb2 | 26 | #include "gimple.h" |
ba00284c | 27 | #include "ssa.h" |
957060b5 | 28 | #include "tree-pretty-print.h" |
c7131fb2 | 29 | #include "fold-const.h" |
73f30c63 | 30 | #include "tree-affine.h" |
45b0be94 | 31 | #include "gimplify.h" |
7ee2468b | 32 | #include "dumpfile.h" |
1fe37220 | 33 | #include "cfgexpand.h" |
73f30c63 ZD |
34 | |
/* Extends CST as appropriate for the affine combinations COMB.  */

static widest_int
wide_int_ext_for_comb (const widest_int &cst, tree type)
{
  /* Sign-extend CST from the precision of TYPE so that values are kept
     in a canonical form inside the wider widest_int representation.  */
  return wi::sext (cst, TYPE_PRECISION (type));
}
42 | ||
/* Likewise for polynomial offsets.  */

static poly_widest_int
wide_int_ext_for_comb (const poly_widest_int &cst, tree type)
{
  /* Same canonicalization as the widest_int overload, applied to each
     coefficient of the polynomial value.  */
  return wi::sext (cst, TYPE_PRECISION (type));
}
50 | ||
73f30c63 ZD |
51 | /* Initializes affine combination COMB so that its value is zero in TYPE. */ |
52 | ||
53 | static void | |
54 | aff_combination_zero (aff_tree *comb, tree type) | |
55 | { | |
807e902e | 56 | int i; |
73f30c63 | 57 | comb->type = type; |
807e902e | 58 | comb->offset = 0; |
73f30c63 | 59 | comb->n = 0; |
807e902e KZ |
60 | for (i = 0; i < MAX_AFF_ELTS; i++) |
61 | comb->elts[i].coef = 0; | |
73f30c63 ZD |
62 | comb->rest = NULL_TREE; |
63 | } | |
64 | ||
65 | /* Sets COMB to CST. */ | |
66 | ||
67 | void | |
cc8bea09 | 68 | aff_combination_const (aff_tree *comb, tree type, const poly_widest_int &cst) |
73f30c63 ZD |
69 | { |
70 | aff_combination_zero (comb, type); | |
5291ab73 | 71 | comb->offset = wide_int_ext_for_comb (cst, comb->type);; |
73f30c63 ZD |
72 | } |
73 | ||
74 | /* Sets COMB to single element ELT. */ | |
75 | ||
76 | void | |
77 | aff_combination_elt (aff_tree *comb, tree type, tree elt) | |
78 | { | |
79 | aff_combination_zero (comb, type); | |
80 | ||
81 | comb->n = 1; | |
82 | comb->elts[0].val = elt; | |
807e902e | 83 | comb->elts[0].coef = 1; |
73f30c63 ZD |
84 | } |
85 | ||
/* Scales COMB by SCALE.  */

void
aff_combination_scale (aff_tree *comb, const widest_int &scale_in)
{
  unsigned i, j;

  /* Canonicalize the scale to COMB's precision first; scaling by 1 is a
     no-op and scaling by 0 zeroes the whole combination.  */
  widest_int scale = wide_int_ext_for_comb (scale_in, comb->type);
  if (scale == 1)
    return;

  if (scale == 0)
    {
      aff_combination_zero (comb, comb->type);
      return;
    }

  comb->offset = wide_int_ext_for_comb (scale * comb->offset, comb->type);
  /* Scale each element in place, compacting the array with J as the
     write index.  */
  for (i = 0, j = 0; i < comb->n; i++)
    {
      widest_int new_coef
	= wide_int_ext_for_comb (scale * comb->elts[i].coef, comb->type);
      /* A coefficient may become zero due to overflow.  Remove the zero
	 elements.  */
      if (new_coef == 0)
	continue;
      comb->elts[j].coef = new_coef;
      comb->elts[j].val = comb->elts[i].val;
      j++;
    }
  comb->n = j;

  if (comb->rest)
    {
      tree type = comb->type;
      /* Arithmetic on the REST tree is done in sizetype for pointer
	 combinations.  */
      if (POINTER_TYPE_P (type))
	type = sizetype;
      /* If compaction freed a slot, move REST into the element array with
	 the scale as its coefficient; otherwise fold the multiplication
	 into the REST tree itself.  */
      if (comb->n < MAX_AFF_ELTS)
	{
	  comb->elts[comb->n].coef = scale;
	  comb->elts[comb->n].val = comb->rest;
	  comb->rest = NULL_TREE;
	  comb->n++;
	}
      else
	comb->rest = fold_build2 (MULT_EXPR, type, comb->rest,
				  wide_int_to_tree (type, scale));
    }
}
135 | ||
/* Adds ELT * SCALE to COMB.  */

void
aff_combination_add_elt (aff_tree *comb, tree elt, const widest_int &scale_in)
{
  unsigned i;
  tree type;

  widest_int scale = wide_int_ext_for_comb (scale_in, comb->type);
  if (scale == 0)
    return;

  /* If ELT is already present, just adjust its coefficient.  */
  for (i = 0; i < comb->n; i++)
    if (operand_equal_p (comb->elts[i].val, elt, 0))
      {
	widest_int new_coef
	  = wide_int_ext_for_comb (comb->elts[i].coef + scale, comb->type);
	if (new_coef != 0)
	  {
	    comb->elts[i].coef = new_coef;
	    return;
	  }

	/* The coefficients cancelled; remove the element by moving the
	   last one into its slot.  */
	comb->n--;
	comb->elts[i] = comb->elts[comb->n];

	/* A slot is now free, so REST (if any) can be promoted back into
	   the element array with coefficient one.  */
	if (comb->rest)
	  {
	    gcc_assert (comb->n == MAX_AFF_ELTS - 1);
	    comb->elts[comb->n].coef = 1;
	    comb->elts[comb->n].val = comb->rest;
	    comb->rest = NULL_TREE;
	    comb->n++;
	  }
	return;
      }
  /* ELT is new; append it if there is room.  */
  if (comb->n < MAX_AFF_ELTS)
    {
      comb->elts[comb->n].coef = scale;
      comb->elts[comb->n].val = elt;
      comb->n++;
      return;
    }

  /* No room left: fold ELT * SCALE into the REST tree.  Pointer
     combinations do the arithmetic in sizetype.  */
  type = comb->type;
  if (POINTER_TYPE_P (type))
    type = sizetype;

  if (scale == 1)
    elt = fold_convert (type, elt);
  else
    elt = fold_build2 (MULT_EXPR, type,
		       fold_convert (type, elt),
		       wide_int_to_tree (type, scale));

  if (comb->rest)
    comb->rest = fold_build2 (PLUS_EXPR, type, comb->rest,
			      elt);
  else
    comb->rest = elt;
}
197 | ||
7e2ac86c ZD |
/* Adds CST to C.  */

static void
aff_combination_add_cst (aff_tree *c, const poly_widest_int &cst)
{
  /* The sum is re-canonicalized to C's precision so the offset never
     holds an out-of-range value.  */
  c->offset = wide_int_ext_for_comb (c->offset + cst, c->type);
}
205 | ||
73f30c63 ZD |
206 | /* Adds COMB2 to COMB1. */ |
207 | ||
208 | void | |
209 | aff_combination_add (aff_tree *comb1, aff_tree *comb2) | |
210 | { | |
211 | unsigned i; | |
212 | ||
7e2ac86c | 213 | aff_combination_add_cst (comb1, comb2->offset); |
73f30c63 ZD |
214 | for (i = 0; i < comb2->n; i++) |
215 | aff_combination_add_elt (comb1, comb2->elts[i].val, comb2->elts[i].coef); | |
216 | if (comb2->rest) | |
807e902e | 217 | aff_combination_add_elt (comb1, comb2->rest, 1); |
73f30c63 ZD |
218 | } |
219 | ||
/* Converts affine combination COMB to TYPE.  */

void
aff_combination_convert (aff_tree *comb, tree type)
{
  unsigned i, j;
  tree comb_type = comb->type;

  /* Widening cannot be done element-wise (the coefficients were reduced
     modulo the narrower precision), so rebuild a tree and re-parse it in
     the wider type.  */
  if (TYPE_PRECISION (type) > TYPE_PRECISION (comb_type))
    {
      tree val = fold_convert (type, aff_combination_to_tree (comb));
      tree_to_aff_combination (val, type, comb);
      return;
    }

  comb->type = type;
  if (comb->rest && !POINTER_TYPE_P (type))
    comb->rest = fold_convert (type, comb->rest);

  /* Same precision: nothing else changes.  */
  if (TYPE_PRECISION (type) == TYPE_PRECISION (comb_type))
    return;

  /* Narrowing: re-canonicalize offset and coefficients, dropping elements
     whose coefficient becomes zero (J is the compaction write index).  */
  comb->offset = wide_int_ext_for_comb (comb->offset, comb->type);
  for (i = j = 0; i < comb->n; i++)
    {
      if (comb->elts[i].coef == 0)
	continue;
      comb->elts[j].coef = comb->elts[i].coef;
      comb->elts[j].val = fold_convert (type, comb->elts[i].val);
      j++;
    }

  comb->n = j;
  /* If compaction freed a slot, promote REST back into the array.  */
  if (comb->n < MAX_AFF_ELTS && comb->rest)
    {
      comb->elts[comb->n].coef = 1;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}
261 | ||
/* Splits EXPR into an affine combination of parts, storing the result
   in COMB.  The combination is computed in TYPE.  Expressions that are
   not recognized end up as a single element with coefficient one.  */

void
tree_to_aff_combination (tree expr, tree type, aff_tree *comb)
{
  aff_tree tmp;
  enum tree_code code;
  tree cst, core, toffset;
  poly_int64 bitpos, bitsize, bytepos;
  machine_mode mode;
  int unsignedp, reversep, volatilep;

  STRIP_NOPS (expr);

  code = TREE_CODE (expr);
  switch (code)
    {
    case POINTER_PLUS_EXPR:
      /* The pointer offset operand is always in sizetype.  */
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
      aff_combination_add (comb, &tmp);
      return;

    case PLUS_EXPR:
    case MINUS_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      tree_to_aff_combination (TREE_OPERAND (expr, 1), type, &tmp);
      /* A - B is handled as A + (-1) * B.  */
      if (code == MINUS_EXPR)
	aff_combination_scale (&tmp, -1);
      aff_combination_add (comb, &tmp);
      return;

    case MULT_EXPR:
      /* Only multiplication by an integer constant can be represented;
	 anything else falls through to the generic element case.  */
      cst = TREE_OPERAND (expr, 1);
      if (TREE_CODE (cst) != INTEGER_CST)
	break;
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, wi::to_widest (cst));
      return;

    case NEGATE_EXPR:
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, -1);
      return;

    case BIT_NOT_EXPR:
      /* ~x = -x - 1 */
      tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
      aff_combination_scale (comb, -1);
      aff_combination_add_cst (comb, -1);
      return;

    case ADDR_EXPR:
      /* Handle &MEM[ptr + CST] which is equivalent to POINTER_PLUS_EXPR.  */
      if (TREE_CODE (TREE_OPERAND (expr, 0)) == MEM_REF)
	{
	  expr = TREE_OPERAND (expr, 0);
	  tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
	  tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
	  aff_combination_add (comb, &tmp);
	  return;
	}
      /* Decompose the reference into base + offset + bit position.  */
      core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
				  &toffset, &mode, &unsignedp, &reversep,
				  &volatilep);
      /* Bit positions that are not a multiple of a byte cannot be
	 expressed as a byte offset.  */
      if (!multiple_p (bitpos, BITS_PER_UNIT, &bytepos))
	break;
      aff_combination_const (comb, type, bytepos);
      if (TREE_CODE (core) == MEM_REF)
	{
	  tree mem_offset = TREE_OPERAND (core, 1);
	  aff_combination_add_cst (comb, wi::to_poly_widest (mem_offset));
	  core = TREE_OPERAND (core, 0);
	}
      else
	core = build_fold_addr_expr (core);

      if (TREE_CODE (core) == ADDR_EXPR)
	aff_combination_add_elt (comb, core, 1);
      else
	{
	  tree_to_aff_combination (core, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      if (toffset)
	{
	  tree_to_aff_combination (toffset, type, &tmp);
	  aff_combination_add (comb, &tmp);
	}
      return;

    CASE_CONVERT:
      {
	tree otype = TREE_TYPE (expr);
	tree inner = TREE_OPERAND (expr, 0);
	tree itype = TREE_TYPE (inner);
	enum tree_code icode = TREE_CODE (inner);

	/* In principle this is a valid folding, but it isn't necessarily
	   an optimization, so do it here and not in fold_unary.  */
	if ((icode == PLUS_EXPR || icode == MINUS_EXPR || icode == MULT_EXPR)
	    && TREE_CODE (itype) == INTEGER_TYPE
	    && TREE_CODE (otype) == INTEGER_TYPE
	    && TYPE_PRECISION (otype) > TYPE_PRECISION (itype))
	  {
	    tree op0 = TREE_OPERAND (inner, 0), op1 = TREE_OPERAND (inner, 1);

	    /* If inner type has undefined overflow behavior, fold conversion
	       for below two cases:
		 (T1)(X *+- CST) -> (T1)X *+- (T1)CST
		 (T1)(X + X)     -> (T1)X + (T1)X.  */
	    if (TYPE_OVERFLOW_UNDEFINED (itype)
		&& (TREE_CODE (op1) == INTEGER_CST
		    || (icode == PLUS_EXPR && operand_equal_p (op0, op1, 0))))
	      {
		op0 = fold_convert (otype, op0);
		op1 = fold_convert (otype, op1);
		expr = fold_build2 (icode, otype, op0, op1);
		tree_to_aff_combination (expr, type, comb);
		return;
	      }
	    wide_int minv, maxv;
	    /* If inner type has wrapping overflow behavior, fold conversion
	       for below case:
		 (T1)(X - CST) -> (T1)X - (T1)CST
	       if X - CST doesn't overflow by range information.  Also handle
	       (T1)(X + CST) as (T1)(X - (-CST)).  */
	    if (TYPE_UNSIGNED (itype)
		&& TYPE_OVERFLOW_WRAPS (itype)
		&& TREE_CODE (op0) == SSA_NAME
		&& TREE_CODE (op1) == INTEGER_CST
		&& icode != MULT_EXPR
		&& get_range_info (op0, &minv, &maxv) == VR_RANGE)
	      {
		if (icode == PLUS_EXPR)
		  op1 = wide_int_to_tree (itype, -wi::to_wide (op1));
		/* X - CST does not wrap iff MIN (X) >= CST.  */
		if (wi::geu_p (minv, wi::to_wide (op1)))
		  {
		    op0 = fold_convert (otype, op0);
		    op1 = fold_convert (otype, op1);
		    expr = fold_build2 (MINUS_EXPR, otype, op0, op1);
		    tree_to_aff_combination (expr, type, comb);
		    return;
		  }
	      }
	  }
      }
      break;

    default:
      {
	/* Integer (possibly polynomial) constants become the offset.  */
	if (poly_int_tree_p (expr))
	  {
	    aff_combination_const (comb, type, wi::to_poly_widest (expr));
	    return;
	  }
	break;
      }
    }

  /* Fallback: treat the whole expression as one opaque element.  */
  aff_combination_elt (comb, type, expr);
}
424 | ||
/* Creates EXPR + ELT * SCALE in TYPE.  EXPR is taken from affine
   combination COMB and may be NULL, in which case only ELT * SCALE is
   returned.  */

static tree
add_elt_to_tree (tree expr, tree type, tree elt, const widest_int &scale_in)
{
  enum tree_code code;

  widest_int scale = wide_int_ext_for_comb (scale_in, type);

  elt = fold_convert (type, elt);
  /* Special-case scale 1: plain addition (or just ELT).  */
  if (scale == 1)
    {
      if (!expr)
	return elt;

      return fold_build2 (PLUS_EXPR, type, expr, elt);
    }

  /* Special-case scale -1: subtraction (or negation).  */
  if (scale == -1)
    {
      if (!expr)
	return fold_build1 (NEGATE_EXPR, type, elt);

      return fold_build2 (MINUS_EXPR, type, expr, elt);
    }

  if (!expr)
    return fold_build2 (MULT_EXPR, type, elt, wide_int_to_tree (type, scale));

  /* Prefer EXPR - ELT * |SCALE| over EXPR + ELT * SCALE for negative
     scales, producing nicer trees.  */
  if (wi::neg_p (scale))
    {
      code = MINUS_EXPR;
      scale = -scale;
    }
  else
    code = PLUS_EXPR;

  elt = fold_build2 (MULT_EXPR, type, elt, wide_int_to_tree (type, scale));
  return fold_build2 (code, type, expr, elt);
}
466 | ||
/* Makes tree from the affine combination COMB.  */

tree
aff_combination_to_tree (aff_tree *comb)
{
  tree type = comb->type, base = NULL_TREE, expr = NULL_TREE;
  unsigned i;
  poly_widest_int off;
  int sgn;

  /* REST may only be set when all element slots are occupied.  */
  gcc_assert (comb->n == MAX_AFF_ELTS || comb->rest == NULL_TREE);

  i = 0;
  if (POINTER_TYPE_P (type))
    {
      /* Build the sum in sizetype; if the first element is a pointer
	 with coefficient one, keep it aside as the base of a final
	 POINTER_PLUS_EXPR.  */
      type = sizetype;
      if (comb->n > 0 && comb->elts[0].coef == 1
	  && POINTER_TYPE_P (TREE_TYPE (comb->elts[0].val)))
	{
	  base = comb->elts[0].val;
	  ++i;
	}
    }

  for (; i < comb->n; i++)
    expr = add_elt_to_tree (expr, type, comb->elts[i].val, comb->elts[i].coef);

  if (comb->rest)
    expr = add_elt_to_tree (expr, type, comb->rest, 1);

  /* Ensure that we get x - 1, not x + (-1) or x + 0xff..f if x is
     unsigned.  */
  if (known_lt (comb->offset, 0))
    {
      off = -comb->offset;
      sgn = -1;
    }
  else
    {
      off = comb->offset;
      sgn = 1;
    }
  expr = add_elt_to_tree (expr, type, wide_int_to_tree (type, off), sgn);

  if (base)
    return fold_build_pointer_plus (base, expr);
  else
    return fold_convert (comb->type, expr);
}
516 | ||
517 | /* Copies the tree elements of COMB to ensure that they are not shared. */ | |
518 | ||
519 | void | |
520 | unshare_aff_combination (aff_tree *comb) | |
521 | { | |
522 | unsigned i; | |
523 | ||
524 | for (i = 0; i < comb->n; i++) | |
525 | comb->elts[i].val = unshare_expr (comb->elts[i].val); | |
526 | if (comb->rest) | |
527 | comb->rest = unshare_expr (comb->rest); | |
528 | } | |
529 | ||
/* Remove M-th element from COMB.  */

void
aff_combination_remove_elt (aff_tree *comb, unsigned m)
{
  comb->n--;
  /* Fill the vacated slot with the former last element (no-op when M was
     itself the last element; note COMB->n has already been decremented
     here, so M <= COMB->n means "M was not the last slot").  */
  if (m <= comb->n)
    comb->elts[m] = comb->elts[comb->n];
  /* A slot is now free, so promote REST back into the element array.  */
  if (comb->rest)
    {
      comb->elts[comb->n].coef = 1;
      comb->elts[comb->n].val = comb->rest;
      comb->rest = NULL_TREE;
      comb->n++;
    }
}
7e2ac86c ZD |
546 | |
/* Adds C * COEF * VAL to R.  VAL may be NULL, in that case only
   C * COEF is added to R.  */

static void
aff_combination_add_product (aff_tree *c, const widest_int &coef, tree val,
			     aff_tree *r)
{
  unsigned i;
  tree aval, type;

  for (i = 0; i < c->n; i++)
    {
      aval = c->elts[i].val;
      /* Multiply the element's value by VAL at tree level; the constant
	 coefficients are multiplied in widest_int arithmetic below.  */
      if (val)
	{
	  type = TREE_TYPE (aval);
	  aval = fold_build2 (MULT_EXPR, type, aval,
			      fold_convert (type, val));
	}

      aff_combination_add_elt (r, aval, coef * c->elts[i].coef);
    }

  if (c->rest)
    {
      aval = c->rest;
      if (val)
	{
	  type = TREE_TYPE (aval);
	  aval = fold_build2 (MULT_EXPR, type, aval,
			      fold_convert (type, val));
	}

      /* REST carries an implicit coefficient of one.  */
      aff_combination_add_elt (r, aval, coef);
    }

  if (val)
    {
      if (c->offset.is_constant ())
	/* Access coeffs[0] directly, for efficiency.  */
	aff_combination_add_elt (r, val, coef * c->offset.coeffs[0]);
      else
	{
	  /* c->offset is polynomial, so multiply VAL rather than COEF
	     by it.  */
	  tree offset = wide_int_to_tree (TREE_TYPE (val), c->offset);
	  val = fold_build2 (MULT_EXPR, TREE_TYPE (val), val, offset);
	  aff_combination_add_elt (r, val, coef);
	}
    }
  else
    aff_combination_add_cst (r, coef * c->offset);
}
601 | ||
/* Multiplies C1 by C2, storing the result to R.  Implemented as the sum
   of C1 * term over all terms of C2.  */

void
aff_combination_mult (aff_tree *c1, aff_tree *c2, aff_tree *r)
{
  unsigned i;
  gcc_assert (TYPE_PRECISION (c1->type) == TYPE_PRECISION (c2->type));

  aff_combination_zero (r, c1->type);

  for (i = 0; i < c2->n; i++)
    aff_combination_add_product (c1, c2->elts[i].coef, c2->elts[i].val, r);
  if (c2->rest)
    aff_combination_add_product (c1, 1, c2->rest, r);
  if (c2->offset.is_constant ())
    /* Access coeffs[0] directly, for efficiency.  */
    aff_combination_add_product (c1, c2->offset.coeffs[0], NULL, r);
  else
    {
      /* c2->offset is polynomial, so do the multiplication in tree form.  */
      tree offset = wide_int_to_tree (c2->type, c2->offset);
      aff_combination_add_product (c1, 1, offset, r);
    }
}
bbc8a8dc ZD |
626 | |
627 | /* Returns the element of COMB whose value is VAL, or NULL if no such | |
628 | element exists. If IDX is not NULL, it is set to the index of VAL in | |
629 | COMB. */ | |
b8698a0f | 630 | |
bbc8a8dc ZD |
631 | static struct aff_comb_elt * |
632 | aff_combination_find_elt (aff_tree *comb, tree val, unsigned *idx) | |
633 | { | |
634 | unsigned i; | |
635 | ||
636 | for (i = 0; i < comb->n; i++) | |
637 | if (operand_equal_p (comb->elts[i].val, val, 0)) | |
638 | { | |
639 | if (idx) | |
640 | *idx = i; | |
641 | ||
642 | return &comb->elts[i]; | |
643 | } | |
644 | ||
645 | return NULL; | |
646 | } | |
647 | ||
/* Element of the cache that maps ssa name NAME to its expanded form
   as an affine expression EXPANSION.  */

struct name_expansion
{
  aff_tree expansion;

  /* True if the expansion for the name is just being generated; used by
     aff_combination_expand to assert that no expansion cycle occurs.  */
  unsigned in_progress : 1;
};
658 | ||
72425608 ZD |
/* Expands SSA names in COMB recursively.  CACHE is used to cache the
   results.  *CACHE may be NULL on entry and is allocated lazily; free it
   with free_affine_expand_cache.  */

void
aff_combination_expand (aff_tree *comb ATTRIBUTE_UNUSED,
			hash_map<tree, name_expansion *> **cache)
{
  unsigned i;
  aff_tree to_add, current, curre;
  tree e, rhs;
  gimple *def;
  widest_int scale;
  struct name_expansion *exp;

  aff_combination_zero (&to_add, comb->type);
  for (i = 0; i < comb->n; i++)
    {
      tree type, name;
      enum tree_code code;

      e = comb->elts[i].val;
      type = TREE_TYPE (e);
      name = e;
      /* Look through some conversions.  */
      if (CONVERT_EXPR_P (e)
	  && (TYPE_PRECISION (type)
	      >= TYPE_PRECISION (TREE_TYPE (TREE_OPERAND (e, 0)))))
	name = TREE_OPERAND (e, 0);
      if (TREE_CODE (name) != SSA_NAME)
	continue;
      def = SSA_NAME_DEF_STMT (name);
      if (!is_gimple_assign (def) || gimple_assign_lhs (def) != name)
	continue;

      /* Only expand definitions whose right-hand side is an expression
	 or an invariant.  */
      code = gimple_assign_rhs_code (def);
      if (code != SSA_NAME
	  && !IS_EXPR_CODE_CLASS (TREE_CODE_CLASS (code))
	  && (get_gimple_rhs_class (code) != GIMPLE_SINGLE_RHS
	      || !is_gimple_min_invariant (gimple_assign_rhs1 (def))))
	continue;

      /* We do not know whether the reference retains its value at the
	 place where the expansion is used.  */
      if (TREE_CODE_CLASS (code) == tcc_reference)
	continue;

      name_expansion **slot = NULL;
      if (*cache)
	slot = (*cache)->get (name);
      exp = slot ? *slot : NULL;
      if (!exp)
	{
	  /* Only bother to handle cases tree_to_aff_combination will.  */
	  switch (code)
	    {
	    case POINTER_PLUS_EXPR:
	    case PLUS_EXPR:
	    case MINUS_EXPR:
	    case MULT_EXPR:
	    case NEGATE_EXPR:
	    case BIT_NOT_EXPR:
	    CASE_CONVERT:
	      rhs = gimple_assign_rhs_to_tree (def);
	      break;
	    case ADDR_EXPR:
	    case INTEGER_CST:
	    case POLY_INT_CST:
	      rhs = gimple_assign_rhs1 (def);
	      break;
	    default:
	      continue;
	    }
	  tree_to_aff_combination (rhs, TREE_TYPE (name), &current);
	  exp = XNEW (struct name_expansion);
	  /* Mark the entry in-progress and insert it into the cache
	     BEFORE recursing, so a cycle would be detected below.  */
	  exp->in_progress = 1;
	  if (!*cache)
	    *cache = new hash_map<tree, name_expansion *>;
	  (*cache)->put (name, exp);
	  aff_combination_expand (&current, cache);
	  exp->expansion = current;
	  exp->in_progress = 0;
	}
      else
	{
	  /* Since we follow the definitions in the SSA form, we should not
	     enter a cycle unless we pass through a phi node.  */
	  gcc_assert (!exp->in_progress);
	  current = exp->expansion;
	}
      if (!useless_type_conversion_p (comb->type, current.type))
	aff_combination_convert (&current, comb->type);

      /* Accumulate the new terms to TO_ADD, so that we do not modify
	 COMB while traversing it; include the term -coef * E, to remove
	 it from COMB.  */
      scale = comb->elts[i].coef;
      aff_combination_zero (&curre, comb->type);
      aff_combination_add_elt (&curre, e, -scale);
      aff_combination_scale (&current, scale);
      aff_combination_add (&to_add, &current);
      aff_combination_add (&to_add, &curre);
    }
  aff_combination_add (comb, &to_add);
}
763 | ||
72425608 ZD |
/* Similar to tree_to_aff_combination, but follows SSA name definitions
   and expands them recursively.  CACHE is used to cache the expansions
   of the ssa names, to avoid exponential time complexity for cases
   like

   a1 = a0 + a0;
   a2 = a1 + a1;
   a3 = a2 + a2;
   ...  */

void
tree_to_aff_combination_expand (tree expr, tree type, aff_tree *comb,
				hash_map<tree, name_expansion *> **cache)
{
  tree_to_aff_combination (expr, type, comb);
  aff_combination_expand (comb, cache);
}
781 | ||
bbc8a8dc | 782 | /* Frees memory occupied by struct name_expansion in *VALUE. Callback for |
39c8aaa4 | 783 | hash_map::traverse. */ |
bbc8a8dc | 784 | |
39c8aaa4 TS |
785 | bool |
786 | free_name_expansion (tree const &, name_expansion **value, void *) | |
bbc8a8dc | 787 | { |
39c8aaa4 | 788 | free (*value); |
bbc8a8dc ZD |
789 | return true; |
790 | } | |
791 | ||
792 | /* Frees memory allocated for the CACHE used by | |
793 | tree_to_aff_combination_expand. */ | |
794 | ||
795 | void | |
39c8aaa4 | 796 | free_affine_expand_cache (hash_map<tree, name_expansion *> **cache) |
bbc8a8dc ZD |
797 | { |
798 | if (!*cache) | |
799 | return; | |
800 | ||
39c8aaa4 TS |
801 | (*cache)->traverse<void *, free_name_expansion> (NULL); |
802 | delete (*cache); | |
bbc8a8dc ZD |
803 | *cache = NULL; |
804 | } | |
805 | ||
806 | /* If VAL != CST * DIV for any constant CST, returns false. | |
b03be25f RB |
807 | Otherwise, if *MULT_SET is true, additionally compares CST and MULT, |
808 | and if they are different, returns false. Finally, if neither of these | |
809 | two cases occur, true is returned, and CST is stored to MULT and MULT_SET | |
810 | is set to true. */ | |
bbc8a8dc ZD |
811 | |
812 | static bool | |
cc8bea09 RS |
813 | wide_int_constant_multiple_p (const poly_widest_int &val, |
814 | const poly_widest_int &div, | |
815 | bool *mult_set, poly_widest_int *mult) | |
bbc8a8dc | 816 | { |
cc8bea09 | 817 | poly_widest_int rem, cst; |
bbc8a8dc | 818 | |
cc8bea09 | 819 | if (known_eq (val, 0)) |
b03be25f | 820 | { |
cc8bea09 | 821 | if (*mult_set && maybe_ne (*mult, 0)) |
b03be25f RB |
822 | return false; |
823 | *mult_set = true; | |
807e902e | 824 | *mult = 0; |
b03be25f RB |
825 | return true; |
826 | } | |
bbc8a8dc | 827 | |
cc8bea09 | 828 | if (maybe_eq (div, 0)) |
bbc8a8dc ZD |
829 | return false; |
830 | ||
cc8bea09 | 831 | if (!multiple_p (val, div, &cst)) |
bbc8a8dc ZD |
832 | return false; |
833 | ||
cc8bea09 | 834 | if (*mult_set && maybe_ne (*mult, cst)) |
bbc8a8dc ZD |
835 | return false; |
836 | ||
837 | *mult_set = true; | |
838 | *mult = cst; | |
839 | return true; | |
840 | } | |
841 | ||
/* Returns true if VAL = X * DIV for some constant X.  If this is the case,
   X is stored to MULT.  */

bool
aff_combination_constant_multiple_p (aff_tree *val, aff_tree *div,
				     poly_widest_int *mult)
{
  bool mult_set = false;
  unsigned i;

  /* A zero combination is 0 * DIV for any DIV.  */
  if (val->n == 0 && known_eq (val->offset, 0))
    {
      *mult = 0;
      return true;
    }
  /* Both combinations must have exactly the same elements and no opaque
     REST part for a common multiplier to be determined.  */
  if (val->n != div->n)
    return false;

  if (val->rest || div->rest)
    return false;

  /* The offsets and every pair of matching coefficients must all agree on
     the same multiplier; MULT_SET tracks whether one has been fixed.  */
  if (!wide_int_constant_multiple_p (val->offset, div->offset,
				     &mult_set, mult))
    return false;

  for (i = 0; i < div->n; i++)
    {
      struct aff_comb_elt *elt
	= aff_combination_find_elt (val, div->elts[i].val, NULL);
      if (!elt)
	return false;
      if (!wide_int_constant_multiple_p (elt->coef, div->elts[i].coef,
					 &mult_set, mult))
	return false;
    }

  gcc_assert (mult_set);
  return true;
}
ea336dd5 AP |
881 | |
/* Prints the affine VAL to the FILE.  */

static void
print_aff (FILE *file, aff_tree *val)
{
  unsigned i;
  signop sgn = TYPE_SIGN (val->type);
  /* Offsets of pointer combinations are printed as signed values.  */
  if (POINTER_TYPE_P (val->type))
    sgn = SIGNED;
  fprintf (file, "{\n  type = ");
  print_generic_expr (file, val->type, TDF_VOPS|TDF_MEMSYMS);
  fprintf (file, "\n  offset = ");
  print_dec (val->offset, file, sgn);
  if (val->n > 0)
    {
      fprintf (file, "\n  elements = {\n");
      for (i = 0; i < val->n; i++)
	{
	  fprintf (file, "    [%d] = ", i);
	  print_generic_expr (file, val->elts[i].val, TDF_VOPS|TDF_MEMSYMS);

	  fprintf (file, " * ");
	  print_dec (val->elts[i].coef, file, sgn);
	  if (i != val->n - 1)
	    fprintf (file, ", \n");
	}
      fprintf (file, "\n  }");
    }
  if (val->rest)
    {
      fprintf (file, "\n  rest = ");
      print_generic_expr (file, val->rest, TDF_VOPS|TDF_MEMSYMS);
    }
  fprintf (file, "\n}");
}
917 | ||
/* Prints the affine VAL to the standard error, used for debugging.  */

DEBUG_FUNCTION void
debug_aff (aff_tree *val)
{
  print_aff (stderr, val);
  fprintf (stderr, "\n");
}
72425608 | 926 | |
be8c1c8c BC |
/* Computes address of the reference REF in ADDR.  The size of the accessed
   location is stored to SIZE.  Returns the ultimate containing object to
   which REF refers.  */

tree
get_inner_reference_aff (tree ref, aff_tree *addr, poly_widest_int *size)
{
  poly_int64 bitsize, bitpos;
  tree toff;
  machine_mode mode;
  int uns, rev, vol;
  aff_tree tmp;
  tree base = get_inner_reference (ref, &bitsize, &bitpos, &toff, &mode,
				   &uns, &rev, &vol);
  tree base_addr = build_fold_addr_expr (base);

  /* ADDR = &BASE + TOFF + BITPOS / BITS_PER_UNIT.  */

  tree_to_aff_combination (base_addr, sizetype, addr);

  if (toff)
    {
      tree_to_aff_combination (toff, sizetype, &tmp);
      aff_combination_add (addr, &tmp);
    }

  /* The byte offset is rounded down; the size below is rounded up so the
     [offset, offset + size) range still covers the whole access.  */
  aff_combination_const (&tmp, sizetype, bits_to_bytes_round_down (bitpos));
  aff_combination_add (addr, &tmp);

  *size = bits_to_bytes_round_up (bitsize);

  return base;
}
960 | ||
02f5d6c5 RG |
/* Returns true if a region of size SIZE1 at position 0 and a region of
   size SIZE2 at position DIFF cannot overlap.  */

bool
aff_comb_cannot_overlap_p (aff_tree *diff, const poly_widest_int &size1,
			   const poly_widest_int &size2)
{
  /* Unless the difference is a constant, we fail.  */
  if (diff->n != 0)
    return false;

  /* Polynomial offsets that cannot be ordered against zero give no
     useful answer either.  */
  if (!ordered_p (diff->offset, 0))
    return false;

  if (maybe_lt (diff->offset, 0))
    {
      /* The second object is before the first one, we succeed if the last
	 element of the second object is before the start of the first one.  */
      return known_le (diff->offset + size2, 0);
    }
  else
    {
      /* We succeed if the second object starts after the first one ends.  */
      return known_le (size1, diff->offset);
    }
}
987 |