]>
Commit | Line | Data |
---|---|---|
aed164c3 | 1 | /* Memory address lowering and addressing mode selection. |
fbd26352 | 2 | Copyright (C) 2004-2019 Free Software Foundation, Inc. |
48e1416a | 3 | |
aed164c3 | 4 | This file is part of GCC. |
48e1416a | 5 | |
aed164c3 | 6 | GCC is free software; you can redistribute it and/or modify it |
7 | under the terms of the GNU General Public License as published by the | |
8c4c00c1 | 8 | Free Software Foundation; either version 3, or (at your option) any |
aed164c3 | 9 | later version. |
48e1416a | 10 | |
aed164c3 | 11 | GCC is distributed in the hope that it will be useful, but WITHOUT |
12 | ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or | |
13 | FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License | |
14 | for more details. | |
48e1416a | 15 | |
aed164c3 | 16 | You should have received a copy of the GNU General Public License |
8c4c00c1 | 17 | along with GCC; see the file COPYING3. If not see |
18 | <http://www.gnu.org/licenses/>. */ | |
aed164c3 | 19 | |
20 | /* Utility functions for manipulation with TARGET_MEM_REFs -- tree expressions | |
21 | that directly map to addressing modes of the target. */ | |
22 | ||
23 | #include "config.h" | |
24 | #include "system.h" | |
25 | #include "coretypes.h" | |
9ef16211 | 26 | #include "backend.h" |
7c29e30e | 27 | #include "target.h" |
28 | #include "rtl.h" | |
aed164c3 | 29 | #include "tree.h" |
9ef16211 | 30 | #include "gimple.h" |
c2f87792 | 31 | #include "memmodel.h" |
7c29e30e | 32 | #include "stringpool.h" |
c296f633 | 33 | #include "tree-vrp.h" |
7c29e30e | 34 | #include "tree-ssanames.h" |
35 | #include "expmed.h" | |
36 | #include "insn-config.h" | |
c2f87792 | 37 | #include "emit-rtl.h" |
7c29e30e | 38 | #include "recog.h" |
39 | #include "tree-pretty-print.h" | |
b20a8bb4 | 40 | #include "fold-const.h" |
9ed99284 | 41 | #include "stor-layout.h" |
e795d6e1 | 42 | #include "gimple-iterator.h" |
43 | #include "gimplify-me.h" | |
05d9c18a | 44 | #include "tree-ssa-loop-ivopts.h" |
9ed99284 | 45 | #include "expr.h" |
073c1fd5 | 46 | #include "tree-dfa.h" |
b9ed1410 | 47 | #include "dumpfile.h" |
a7a46268 | 48 | #include "tree-affine.h" |
9da9c22f | 49 | #include "gimplify.h" |
a7a46268 | 50 | |
51 | /* FIXME: We compute address costs using RTL. */ | |
64641360 | 52 | #include "tree-ssa-address.h" |
aed164c3 | 53 | |
54 | /* TODO -- handling of symbols (according to Richard Hendersons | |
55 | comments, http://gcc.gnu.org/ml/gcc-patches/2005-04/msg00949.html): | |
48e1416a | 56 | |
aed164c3 | 57 | There are at least 5 different kinds of symbols that we can run up against: |
58 | ||
59 | (1) binds_local_p, small data area. | |
60 | (2) binds_local_p, eg local statics | |
61 | (3) !binds_local_p, eg global variables | |
62 | (4) thread local, local_exec | |
63 | (5) thread local, !local_exec | |
64 | ||
65 | Now, (1) won't appear often in an array context, but it certainly can. | |
66 | All you have to do is set -GN high enough, or explicitly mark any | |
67 | random object __attribute__((section (".sdata"))). | |
68 | ||
69 | All of these affect whether or not a symbol is in fact a valid address. | |
70 | The only one tested here is (3). And that result may very well | |
71 | be incorrect for (4) or (5). | |
72 | ||
73 | An incorrect result here does not cause incorrect results out the | |
74 | back end, because the expander in expr.c validizes the address. However | |
75 | it would be nice to improve the handling here in order to produce more | |
76 | precise results. */ | |
77 | ||
78 | /* A "template" for memory address, used to determine whether the address is | |
79 | valid for mode. */ | |
80 | ||
struct GTY (()) mem_addr_template {
  rtx ref;			/* The template.  */
  /* The two pointers below point INTO REF, so they must not be traced by
     the garbage collector independently — hence GTY ((skip)).  */
  rtx * GTY ((skip)) step_p;	/* The point in template where the step should be
				   filled in.  */
  rtx * GTY ((skip)) off_p;	/* The point in template where the offset should
				   be filled in.  */
};
aed164c3 | 88 | |
aed164c3 | 89 | |
/* The templates.  Each of the low five bits of the index corresponds to one
   component of TARGET_MEM_REF being present, while the high bits identify
   the address space.  See TEMPL_IDX.  */

static GTY(()) vec<mem_addr_template, va_gc> *mem_addr_template_list;

/* Encode address space AS plus the presence (non-NULL-ness) of each
   TARGET_MEM_REF component into an index into MEM_ADDR_TEMPLATE_LIST.  */
#define TEMPL_IDX(AS, SYMBOL, BASE, INDEX, STEP, OFFSET) \
  (((int) (AS) << 5) \
   | ((SYMBOL != 0) << 4) \
   | ((BASE != 0) << 3) \
   | ((INDEX != 0) << 2) \
   | ((STEP != 0) << 1) \
   | (OFFSET != 0))
103 | ||
/* Stores address for memory reference with parameters SYMBOL, BASE, INDEX,
   STEP and OFFSET to *ADDR using address mode ADDRESS_MODE.  Stores pointers
   to where step is placed to *STEP_P and offset to *OFFSET_P, so that the
   caller can later patch those sub-rtxes in place (used by the template
   mechanism in addr_for_mem_ref).  Any of SYMBOL/BASE/INDEX/STEP/OFFSET
   may be NULL_RTX, meaning that component is absent.  */

static void
gen_addr_rtx (machine_mode address_mode,
	      rtx symbol, rtx base, rtx index, rtx step, rtx offset,
	      rtx *addr, rtx **step_p, rtx **offset_p)
{
  rtx act_elem;

  *addr = NULL_RTX;
  if (step_p)
    *step_p = NULL;
  if (offset_p)
    *offset_p = NULL;

  /* Start with INDEX * STEP, if an index is present.  */
  if (index && index != const0_rtx)
    {
      act_elem = index;
      if (step)
	{
	  act_elem = gen_rtx_MULT (address_mode, act_elem, step);

	  /* Remember where the step lives inside the MULT.  */
	  if (step_p)
	    *step_p = &XEXP (act_elem, 1);
	}

      *addr = act_elem;
    }

  /* Add in the base register.  */
  if (base && base != const0_rtx)
    {
      if (*addr)
	*addr = simplify_gen_binary (PLUS, address_mode, base, *addr);
      else
	*addr = base;
    }

  if (symbol)
    {
      act_elem = symbol;
      if (offset)
	{
	  act_elem = gen_rtx_PLUS (address_mode, act_elem, offset);

	  if (offset_p)
	    *offset_p = &XEXP (act_elem, 1);

	  /* SYMBOL + OFFSET is a link-time constant; wrap it in CONST so
	     the rest of the compiler treats it as one.  */
	  if (GET_CODE (symbol) == SYMBOL_REF
	      || GET_CODE (symbol) == LABEL_REF
	      || GET_CODE (symbol) == CONST)
	    act_elem = gen_rtx_CONST (address_mode, act_elem);
	}

      if (*addr)
	*addr = gen_rtx_PLUS (address_mode, *addr, act_elem);
      else
	*addr = act_elem;
    }
  else if (offset)
    {
      if (*addr)
	{
	  *addr = gen_rtx_PLUS (address_mode, *addr, offset);
	  if (offset_p)
	    *offset_p = &XEXP (*addr, 1);
	}
      else
	{
	  /* The offset is the whole address; the patch point is *ADDR
	     itself.  */
	  *addr = offset;
	  if (offset_p)
	    *offset_p = addr;
	}
    }

  /* An entirely empty address degenerates to zero.  */
  if (!*addr)
    *addr = const0_rtx;
}
183 | ||
/* Returns address for TARGET_MEM_REF with parameters given by ADDR
   in address space AS.
   If REALLY_EXPAND is false, just make fake registers instead
   of really expanding the operands, and perform the expansion in-place
   by using one of the "templates".  NOTE(review): in that case the
   returned rtx is shared cached storage — callers must not keep it
   across another call with the same template index.  */

rtx
addr_for_mem_ref (struct mem_address *addr, addr_space_t as,
		  bool really_expand)
{
  scalar_int_mode address_mode = targetm.addr_space.address_mode (as);
  scalar_int_mode pointer_mode = targetm.addr_space.pointer_mode (as);
  rtx address, sym, bse, idx, st, off;
  struct mem_addr_template *templ;

  /* A step of one multiplies by nothing; canonicalize it away.  */
  if (addr->step && !integer_onep (addr->step))
    st = immed_wide_int_const (wi::to_wide (addr->step), pointer_mode);
  else
    st = NULL_RTX;

  /* Likewise a zero offset adds nothing.  */
  if (addr->offset && !integer_zerop (addr->offset))
    {
      poly_offset_int dc
	= poly_offset_int::from (wi::to_poly_wide (addr->offset), SIGNED);
      off = immed_wide_int_const (dc, pointer_mode);
    }
  else
    off = NULL_RTX;

  if (!really_expand)
    {
      unsigned int templ_index
	= TEMPL_IDX (as, addr->symbol, addr->base, addr->index, st, off);

      if (templ_index >= vec_safe_length (mem_addr_template_list))
	vec_safe_grow_cleared (mem_addr_template_list, templ_index + 1);

      /* Reuse the templates for addresses, so that we do not waste memory.  */
      templ = &(*mem_addr_template_list)[templ_index];
      if (!templ->ref)
	{
	  /* Build the template lazily, using a dummy symbol and raw
	     pseudo-registers as placeholders.  */
	  sym = (addr->symbol ?
		 gen_rtx_SYMBOL_REF (pointer_mode, ggc_strdup ("test_symbol"))
		 : NULL_RTX);
	  bse = (addr->base ?
		 gen_raw_REG (pointer_mode, LAST_VIRTUAL_REGISTER + 1)
		 : NULL_RTX);
	  idx = (addr->index ?
		 gen_raw_REG (pointer_mode, LAST_VIRTUAL_REGISTER + 2)
		 : NULL_RTX);

	  gen_addr_rtx (pointer_mode, sym, bse, idx,
			st? const0_rtx : NULL_RTX,
			off? const0_rtx : NULL_RTX,
			&templ->ref,
			&templ->step_p,
			&templ->off_p);
	}

      /* Patch the actual step/offset constants into the cached rtx.  */
      if (st)
	*templ->step_p = st;
      if (off)
	*templ->off_p = off;

      return templ->ref;
    }

  /* Otherwise really expand the expressions.  */
  sym = (addr->symbol
	 ? expand_expr (addr->symbol, NULL_RTX, pointer_mode, EXPAND_NORMAL)
	 : NULL_RTX);
  bse = (addr->base
	 ? expand_expr (addr->base, NULL_RTX, pointer_mode, EXPAND_NORMAL)
	 : NULL_RTX);
  idx = (addr->index
	 ? expand_expr (addr->index, NULL_RTX, pointer_mode, EXPAND_NORMAL)
	 : NULL_RTX);

  /* The expansion is done in pointer mode; widen/narrow to the address
     mode afterwards if the target distinguishes them.  */
  gen_addr_rtx (pointer_mode, sym, bse, idx, st, off, &address, NULL, NULL);
  if (pointer_mode != address_mode)
    address = convert_memory_address (address_mode, address);
  return address;
}
267 | ||
64641360 | 268 | /* implement addr_for_mem_ref() directly from a tree, which avoids exporting |
269 | the mem_address structure. */ | |
270 | ||
271 | rtx | |
272 | addr_for_mem_ref (tree exp, addr_space_t as, bool really_expand) | |
273 | { | |
274 | struct mem_address addr; | |
275 | get_address_description (exp, &addr); | |
276 | return addr_for_mem_ref (&addr, as, really_expand); | |
277 | } | |
278 | ||
/* Returns address of MEM_REF in TYPE.  Rebuilds the explicit address
   expression BASE + INDEX * STEP + INDEX2 + OFFSET from the operands of
   the TARGET_MEM_REF, dropping components that are absent or zero.  */

tree
tree_mem_ref_addr (tree type, tree mem_ref)
{
  tree addr;
  tree act_elem;
  tree step = TMR_STEP (mem_ref), offset = TMR_OFFSET (mem_ref);
  tree addr_base = NULL_TREE, addr_off = NULL_TREE;

  addr_base = fold_convert (type, TMR_BASE (mem_ref));

  /* Accumulate the non-pointer part of the address in ADDR_OFF,
     starting with INDEX * STEP.  */
  act_elem = TMR_INDEX (mem_ref);
  if (act_elem)
    {
      if (step)
	act_elem = fold_build2 (MULT_EXPR, TREE_TYPE (act_elem),
				act_elem, step);
      addr_off = act_elem;
    }

  act_elem = TMR_INDEX2 (mem_ref);
  if (act_elem)
    {
      if (addr_off)
	addr_off = fold_build2 (PLUS_EXPR, TREE_TYPE (addr_off),
				addr_off, act_elem);
      else
	addr_off = act_elem;
    }

  if (offset && !integer_zerop (offset))
    {
      if (addr_off)
	addr_off = fold_build2 (PLUS_EXPR, TREE_TYPE (addr_off), addr_off,
				fold_convert (TREE_TYPE (addr_off), offset));
      else
	addr_off = offset;
    }

  /* Attach the accumulated offset to the base with POINTER_PLUS.  */
  if (addr_off)
    addr = fold_build_pointer_plus (addr_base, addr_off);
  else
    addr = addr_base;

  return addr;
}
326 | ||
327 | /* Returns true if a memory reference in MODE and with parameters given by | |
328 | ADDR is valid on the current target. */ | |
329 | ||
4639f543 | 330 | bool |
3754d046 | 331 | valid_mem_ref_p (machine_mode mode, addr_space_t as, |
bd1a81f7 | 332 | struct mem_address *addr) |
aed164c3 | 333 | { |
334 | rtx address; | |
335 | ||
98155838 | 336 | address = addr_for_mem_ref (addr, as, false); |
aed164c3 | 337 | if (!address) |
338 | return false; | |
339 | ||
bd1a81f7 | 340 | return memory_address_addr_space_p (mode, address, as); |
aed164c3 | 341 | } |
342 | ||
/* Checks whether a TARGET_MEM_REF with type TYPE and parameters given by ADDR
   is valid on the current target and if so, creates and returns the
   TARGET_MEM_REF.  If VERIFY is false omit the verification step.
   NOTE: canonicalizes ADDR in place (drops a unit step, converts the
   offset to ALIAS_PTR_TYPE).  */

static tree
create_mem_ref_raw (tree type, tree alias_ptr_type, struct mem_address *addr,
		    bool verify)
{
  tree base, index2;

  if (verify
      && !valid_mem_ref_p (TYPE_MODE (type), TYPE_ADDR_SPACE (type), addr))
    return NULL_TREE;

  /* A step of one is implicit.  */
  if (addr->step && integer_onep (addr->step))
    addr->step = NULL_TREE;

  /* TARGET_MEM_REF always carries an offset, in the alias pointer type.  */
  if (addr->offset)
    addr->offset = fold_convert (alias_ptr_type, addr->offset);
  else
    addr->offset = build_int_cst (alias_ptr_type, 0);

  /* Choose which component becomes the base operand; whatever is left of
     ADDR->base goes into the second index slot.  */
  if (addr->symbol)
    {
      base = addr->symbol;
      index2 = addr->base;
    }
  else if (addr->base
	   && POINTER_TYPE_P (TREE_TYPE (addr->base)))
    {
      base = addr->base;
      index2 = NULL_TREE;
    }
  else
    {
      base = build_int_cst (build_pointer_type (type), 0);
      index2 = addr->base;
    }

  /* If possible use a plain MEM_REF instead of a TARGET_MEM_REF.
     ??? As IVOPTs does not follow restrictions to where the base
     pointer may point to create a MEM_REF only if we know that
     base is valid.  */
  if ((TREE_CODE (base) == ADDR_EXPR || TREE_CODE (base) == INTEGER_CST)
      && (!index2 || integer_zerop (index2))
      && (!addr->index || integer_zerop (addr->index)))
    return fold_build2 (MEM_REF, type, base, addr->offset);

  return build5 (TARGET_MEM_REF, type,
		 base, addr->offset, addr->index, addr->step, index2);
}
394 | ||
395 | /* Returns true if OBJ is an object whose address is a link time constant. */ | |
396 | ||
397 | static bool | |
398 | fixed_address_object_p (tree obj) | |
399 | { | |
53e9c5c4 | 400 | return (VAR_P (obj) |
401 | && (TREE_STATIC (obj) || DECL_EXTERNAL (obj)) | |
d8b498d3 | 402 | && ! DECL_DLLIMPORT_P (obj)); |
aed164c3 | 403 | } |
404 | ||
33c25cf0 | 405 | /* If ADDR contains an address of object that is a link time constant, |
406 | move it to PARTS->symbol. */ | |
aed164c3 | 407 | |
4639f543 | 408 | void |
33c25cf0 | 409 | move_fixed_address_to_symbol (struct mem_address *parts, aff_tree *addr) |
aed164c3 | 410 | { |
33c25cf0 | 411 | unsigned i; |
412 | tree val = NULL_TREE; | |
d3610bea | 413 | |
33c25cf0 | 414 | for (i = 0; i < addr->n; i++) |
aed164c3 | 415 | { |
796b6678 | 416 | if (addr->elts[i].coef != 1) |
33c25cf0 | 417 | continue; |
418 | ||
419 | val = addr->elts[i].val; | |
420 | if (TREE_CODE (val) == ADDR_EXPR | |
421 | && fixed_address_object_p (TREE_OPERAND (val, 0))) | |
422 | break; | |
aed164c3 | 423 | } |
424 | ||
33c25cf0 | 425 | if (i == addr->n) |
426 | return; | |
427 | ||
e077c79b | 428 | parts->symbol = val; |
33c25cf0 | 429 | aff_combination_remove_elt (addr, i); |
430 | } | |
431 | ||
/* Return true if ADDR contains an instance of BASE_HINT and it's moved to
   PARTS->base.  Only elements with coefficient one are considered.  */

static bool
move_hint_to_base (tree type, struct mem_address *parts, tree base_hint,
		   aff_tree *addr)
{
  unsigned i;
  tree val = NULL_TREE;
  int qual;

  for (i = 0; i < addr->n; i++)
    {
      if (addr->elts[i].coef != 1)
	continue;

      val = addr->elts[i].val;
      if (operand_equal_p (val, base_hint, 0))
	break;
    }

  /* BASE_HINT not found in ADDR.  */
  if (i == addr->n)
    return false;

  /* Cast value to appropriate pointer type.  We cannot use a pointer
     to TYPE directly, as the back-end will assume registers of pointer
     type are aligned, and just the base itself may not actually be.
     We use void pointer to the type's address space instead.  */
  qual = ENCODE_QUAL_ADDR_SPACE (TYPE_ADDR_SPACE (type));
  type = build_qualified_type (void_type_node, qual);
  parts->base = fold_convert (build_pointer_type (type), val);
  aff_combination_remove_elt (addr, i);
  return true;
}
466 | ||
33c25cf0 | 467 | /* If ADDR contains an address of a dereferenced pointer, move it to |
468 | PARTS->base. */ | |
469 | ||
470 | static void | |
471 | move_pointer_to_base (struct mem_address *parts, aff_tree *addr) | |
472 | { | |
473 | unsigned i; | |
474 | tree val = NULL_TREE; | |
475 | ||
476 | for (i = 0; i < addr->n; i++) | |
aed164c3 | 477 | { |
796b6678 | 478 | if (addr->elts[i].coef != 1) |
33c25cf0 | 479 | continue; |
480 | ||
481 | val = addr->elts[i].val; | |
482 | if (POINTER_TYPE_P (TREE_TYPE (val))) | |
483 | break; | |
aed164c3 | 484 | } |
485 | ||
33c25cf0 | 486 | if (i == addr->n) |
487 | return; | |
488 | ||
489 | parts->base = val; | |
490 | aff_combination_remove_elt (addr, i); | |
491 | } | |
492 | ||
a7ae69c9 | 493 | /* Moves the loop variant part V in linear address ADDR to be the index |
494 | of PARTS. */ | |
495 | ||
496 | static void | |
497 | move_variant_to_index (struct mem_address *parts, aff_tree *addr, tree v) | |
498 | { | |
499 | unsigned i; | |
500 | tree val = NULL_TREE; | |
501 | ||
502 | gcc_assert (!parts->index); | |
503 | for (i = 0; i < addr->n; i++) | |
504 | { | |
505 | val = addr->elts[i].val; | |
506 | if (operand_equal_p (val, v, 0)) | |
507 | break; | |
508 | } | |
509 | ||
510 | if (i == addr->n) | |
511 | return; | |
512 | ||
513 | parts->index = fold_convert (sizetype, val); | |
e913b5cd | 514 | parts->step = wide_int_to_tree (sizetype, addr->elts[i].coef); |
a7ae69c9 | 515 | aff_combination_remove_elt (addr, i); |
516 | } | |
517 | ||
33c25cf0 | 518 | /* Adds ELT to PARTS. */ |
519 | ||
520 | static void | |
521 | add_to_parts (struct mem_address *parts, tree elt) | |
522 | { | |
523 | tree type; | |
524 | ||
aed164c3 | 525 | if (!parts->index) |
526 | { | |
0de36bdb | 527 | parts->index = fold_convert (sizetype, elt); |
aed164c3 | 528 | return; |
529 | } | |
530 | ||
33c25cf0 | 531 | if (!parts->base) |
532 | { | |
533 | parts->base = elt; | |
534 | return; | |
535 | } | |
536 | ||
aed164c3 | 537 | /* Add ELT to base. */ |
33c25cf0 | 538 | type = TREE_TYPE (parts->base); |
36f0904b | 539 | if (POINTER_TYPE_P (type)) |
2cc66f2a | 540 | parts->base = fold_build_pointer_plus (parts->base, elt); |
36f0904b | 541 | else |
9da9c22f | 542 | parts->base = fold_build2 (PLUS_EXPR, type, parts->base, elt); |
aed164c3 | 543 | } |
544 | ||
/* Returns true if multiplying by RATIO is allowed in an address.  Test the
   validity for a memory reference accessing memory of mode MODE in address
   space AS.  Results are cached per (AS, MODE) pair in a lazily grown
   static table of bitmaps.  */

static bool
multiplier_allowed_in_address_p (HOST_WIDE_INT ratio, machine_mode mode,
				 addr_space_t as)
{
#define MAX_RATIO 128
  unsigned int data_index = (int) as * MAX_MACHINE_MODE + (int) mode;
  static vec<sbitmap> valid_mult_list;
  sbitmap valid_mult;

  if (data_index >= valid_mult_list.length ())
    valid_mult_list.safe_grow_cleared (data_index + 1);

  valid_mult = valid_mult_list[data_index];
  if (!valid_mult)
    {
      /* First query for this (AS, MODE): probe the target by asking it
	 about reg1 * i [+ reg2] for every ratio in [-MAX_RATIO, MAX_RATIO],
	 patching the constant into a single reused rtx.  */
      machine_mode address_mode = targetm.addr_space.address_mode (as);
      rtx reg1 = gen_raw_REG (address_mode, LAST_VIRTUAL_REGISTER + 1);
      rtx reg2 = gen_raw_REG (address_mode, LAST_VIRTUAL_REGISTER + 2);
      rtx addr, scaled;
      HOST_WIDE_INT i;

      valid_mult = sbitmap_alloc (2 * MAX_RATIO + 1);
      bitmap_clear (valid_mult);
      scaled = gen_rtx_fmt_ee (MULT, address_mode, reg1, NULL_RTX);
      addr = gen_rtx_fmt_ee (PLUS, address_mode, scaled, reg2);
      for (i = -MAX_RATIO; i <= MAX_RATIO; i++)
	{
	  XEXP (scaled, 1) = gen_int_mode (i, address_mode);
	  if (memory_address_addr_space_p (mode, addr, as)
	      || memory_address_addr_space_p (mode, scaled, as))
	    bitmap_set_bit (valid_mult, i + MAX_RATIO);
	}

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "  allowed multipliers:");
	  for (i = -MAX_RATIO; i <= MAX_RATIO; i++)
	    if (bitmap_bit_p (valid_mult, i + MAX_RATIO))
	      fprintf (dump_file, " %d", (int) i);
	  fprintf (dump_file, "\n");
	  fprintf (dump_file, "\n");
	}

      valid_mult_list[data_index] = valid_mult;
    }

  /* Ratios outside the probed range are conservatively rejected.  */
  if (ratio > MAX_RATIO || ratio < -MAX_RATIO)
    return false;

  return bitmap_bit_p (valid_mult, ratio + MAX_RATIO);
}
600 | ||
/* Finds the most expensive multiplication in ADDR that can be
   expressed in an addressing mode and move the corresponding
   element(s) to PARTS.  Elements multiplied by the negated best
   coefficient are subtracted rather than added.  */

static void
most_expensive_mult_to_index (tree type, struct mem_address *parts,
			      aff_tree *addr, bool speed)
{
  addr_space_t as = TYPE_ADDR_SPACE (type);
  machine_mode address_mode = targetm.addr_space.address_mode (as);
  HOST_WIDE_INT coef;
  unsigned best_mult_cost = 0, acost;
  tree mult_elt = NULL_TREE, elt;
  unsigned i, j;
  enum tree_code op_code;

  /* Pass 1: find the coefficient whose multiplication is the most
     expensive among those the target can encode in an address.  */
  offset_int best_mult = 0;
  for (i = 0; i < addr->n; i++)
    {
      if (!wi::fits_shwi_p (addr->elts[i].coef))
	continue;

      coef = addr->elts[i].coef.to_shwi ();
      if (coef == 1
	  || !multiplier_allowed_in_address_p (coef, TYPE_MODE (type), as))
	continue;

      acost = mult_by_coeff_cost (coef, address_mode, speed);

      if (acost > best_mult_cost)
	{
	  best_mult_cost = acost;
	  best_mult = offset_int::from (addr->elts[i].coef, SIGNED);
	}
    }

  if (!best_mult_cost)
    return;

  /* Collect elements multiplied by best_mult.  Pass 2 compacts the
     remaining elements in place (index J).  */
  for (i = j = 0; i < addr->n; i++)
    {
      offset_int amult = offset_int::from (addr->elts[i].coef, SIGNED);
      offset_int amult_neg = -wi::sext (amult, TYPE_PRECISION (addr->type));

      if (amult == best_mult)
	op_code = PLUS_EXPR;
      else if (amult_neg == best_mult)
	op_code = MINUS_EXPR;
      else
	{
	  /* Keep this element in ADDR.  */
	  addr->elts[j] = addr->elts[i];
	  j++;
	  continue;
	}

      elt = fold_convert (sizetype, addr->elts[i].val);
      if (mult_elt)
	mult_elt = fold_build2 (op_code, sizetype, mult_elt, elt);
      else if (op_code == PLUS_EXPR)
	mult_elt = elt;
      else
	mult_elt = fold_build1 (NEGATE_EXPR, sizetype, elt);
    }
  addr->n = j;

  parts->index = mult_elt;
  parts->step = wide_int_to_tree (sizetype, best_mult);
}
670 | ||
/* Splits address ADDR for a memory access of type TYPE into PARTS.
   If BASE_HINT is non-NULL, it specifies an SSA name to be used
   preferentially as base of the reference, and IV_CAND is the selected
   iv candidate used in ADDR.  Store true to VAR_IN_BASE if variant
   part of address is split to PARTS.base.

   TODO -- be more clever about the distribution of the elements of ADDR
   to PARTS.  Some architectures do not support anything but single
   register in address, possibly with a small integer offset; while
   create_mem_ref will simplify the address to an acceptable shape
   later, it would be more efficient to know that asking for complicated
   addressing modes is useless.  */

static void
addr_to_parts (tree type, aff_tree *addr, tree iv_cand, tree base_hint,
	       struct mem_address *parts, bool *var_in_base, bool speed)
{
  tree part;
  unsigned i;

  parts->symbol = NULL_TREE;
  parts->base = NULL_TREE;
  parts->index = NULL_TREE;
  parts->step = NULL_TREE;

  /* The constant offset of the affine combination becomes the offset
     part.  */
  if (maybe_ne (addr->offset, 0))
    parts->offset = wide_int_to_tree (sizetype, addr->offset);
  else
    parts->offset = NULL_TREE;

  /* Try to find a symbol.  */
  move_fixed_address_to_symbol (parts, addr);

  /* Since at the moment there is no reliable way to know how to
     distinguish between pointer and its offset, we decide if var
     part is the pointer based on guess.  */
  *var_in_base = (base_hint != NULL && parts->symbol == NULL);
  if (*var_in_base)
    *var_in_base = move_hint_to_base (type, parts, base_hint, addr);
  else
    move_variant_to_index (parts, addr, iv_cand);

  /* First move the most expensive feasible multiplication to index.  */
  if (!parts->index)
    most_expensive_mult_to_index (type, parts, addr, speed);

  /* Move pointer into base.  */
  if (!parts->symbol && !parts->base)
    move_pointer_to_base (parts, addr);

  /* Then try to process the remaining elements.  */
  for (i = 0; i < addr->n; i++)
    {
      part = fold_convert (sizetype, addr->elts[i].val);
      if (addr->elts[i].coef != 1)
	part = fold_build2 (MULT_EXPR, sizetype, part,
			    wide_int_to_tree (sizetype, addr->elts[i].coef));
      add_to_parts (parts, part);
    }
  if (addr->rest)
    add_to_parts (parts, fold_convert (sizetype, addr->rest));
}
733 | ||
/* Force the PARTS to register.  Base must satisfy is_gimple_mem_ref_addr;
   index only needs to be a gimple value.  Any statements needed are
   emitted before GSI.  */

static void
gimplify_mem_ref_parts (gimple_stmt_iterator *gsi, struct mem_address *parts)
{
  if (parts->base)
    parts->base = force_gimple_operand_gsi_1 (gsi, parts->base,
					      is_gimple_mem_ref_addr, NULL_TREE,
					      true, GSI_SAME_STMT);
  if (parts->index)
    parts->index = force_gimple_operand_gsi (gsi, parts->index,
					     true, NULL_TREE,
					     true, GSI_SAME_STMT);
}
748 | ||
18283ee2 | 749 | /* Return true if the OFFSET in PARTS is the only thing that is making |
750 | it an invalid address for type TYPE. */ | |
751 | ||
752 | static bool | |
753 | mem_ref_valid_without_offset_p (tree type, mem_address parts) | |
754 | { | |
755 | if (!parts.base) | |
756 | parts.base = parts.offset; | |
757 | parts.offset = NULL_TREE; | |
758 | return valid_mem_ref_p (TYPE_MODE (type), TYPE_ADDR_SPACE (type), &parts); | |
759 | } | |
760 | ||
761 | /* Fold PARTS->offset into PARTS->base, so that there is no longer | |
762 | a separate offset. Emit any new instructions before GSI. */ | |
763 | ||
764 | static void | |
765 | add_offset_to_base (gimple_stmt_iterator *gsi, mem_address *parts) | |
766 | { | |
767 | tree tmp = parts->offset; | |
768 | if (parts->base) | |
769 | { | |
770 | tmp = fold_build_pointer_plus (parts->base, tmp); | |
771 | tmp = force_gimple_operand_gsi_1 (gsi, tmp, is_gimple_mem_ref_addr, | |
772 | NULL_TREE, true, GSI_SAME_STMT); | |
773 | } | |
774 | parts->base = tmp; | |
775 | parts->offset = NULL_TREE; | |
776 | } | |
777 | ||
/* Creates and returns a TARGET_MEM_REF for address ADDR.  If necessary
   computations are emitted in front of GSI.  TYPE is the mode
   of created memory reference.  IV_CAND is the selected iv candidate in ADDR,
   and BASE_HINT is non NULL if IV_CAND comes from a base address
   object.  */

tree
create_mem_ref (gimple_stmt_iterator *gsi, tree type, aff_tree *addr,
		tree alias_ptr_type, tree iv_cand, tree base_hint, bool speed)
{
  bool var_in_base;
  tree mem_ref, tmp;
  struct mem_address parts;

  /* Split ADDR into symbol/base/index/step/offset parts and gimplify
     each to a valid operand; then try the address as-is.  */
  addr_to_parts (type, addr, iv_cand, base_hint, &parts, &var_in_base, speed);
  gimplify_mem_ref_parts (gsi, &parts);
  mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
  if (mem_ref)
    return mem_ref;

  /* The expression is too complicated.  Try making it simpler.  Each of
     the steps below folds one part into another and retries; the order
     matters, as later steps assume earlier parts are already gone.  */

  /* Merge symbol into other parts.  */
  if (parts.symbol)
    {
      tmp = parts.symbol;
      parts.symbol = NULL_TREE;
      gcc_assert (is_gimple_val (tmp));

      if (parts.base)
	{
	  gcc_assert (useless_type_conversion_p (sizetype,
						 TREE_TYPE (parts.base)));

	  if (parts.index)
	    {
	      /* Add the symbol to base, eventually forcing it to register.  */
	      tmp = fold_build_pointer_plus (tmp, parts.base);
	      tmp = force_gimple_operand_gsi_1 (gsi, tmp,
						is_gimple_mem_ref_addr,
						NULL_TREE, true,
						GSI_SAME_STMT);
	    }
	  else
	    {
	      /* Move base to index, then move the symbol to base.  */
	      parts.index = parts.base;
	    }
	  parts.base = tmp;
	}
      else
	parts.base = tmp;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  /* Move multiplication to index by transforming address expression:
       [... + index << step + ...]
     into:
       index' = index << step;
       [... + index' + ...].  */
  if (parts.step && !integer_onep (parts.step))
    {
      gcc_assert (parts.index);
      if (parts.offset && mem_ref_valid_without_offset_p (type, parts))
	{
	  /* The offset is the only problem: folding it into the base
	     yields a valid ref, so take that shortcut.  */
	  add_offset_to_base (gsi, &parts);
	  mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
	  gcc_assert (mem_ref);
	  return mem_ref;
	}

      parts.index = force_gimple_operand_gsi (gsi,
				fold_build2 (MULT_EXPR, sizetype,
					     parts.index, parts.step),
				true, NULL_TREE, true, GSI_SAME_STMT);
      parts.step = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  /* Add offset to invariant part by transforming address expression:
       [base + index + offset]
     into:
       base' = base + offset;
       [base' + index]
     or:
       index' = index + offset;
       [base + index']
     depending on which one is invariant.  */
  if (parts.offset && !integer_zerop (parts.offset))
    {
      /* Keep unshared copies so the parts can be restored if this
	 transformation does not yield a valid ref either.  */
      tree old_base = unshare_expr (parts.base);
      tree old_index = unshare_expr (parts.index);
      tree old_offset = unshare_expr (parts.offset);

      tmp = parts.offset;
      parts.offset = NULL_TREE;
      /* Add offset to invariant part.  */
      if (!var_in_base)
	{
	  if (parts.base)
	    {
	      tmp = fold_build_pointer_plus (parts.base, tmp);
	      tmp = force_gimple_operand_gsi_1 (gsi, tmp,
						is_gimple_mem_ref_addr,
						NULL_TREE, true,
						GSI_SAME_STMT);
	    }
	  parts.base = tmp;
	}
      else
	{
	  if (parts.index)
	    {
	      tmp = fold_build_pointer_plus (parts.index, tmp);
	      tmp = force_gimple_operand_gsi_1 (gsi, tmp,
						is_gimple_mem_ref_addr,
						NULL_TREE, true,
						GSI_SAME_STMT);
	    }
	  parts.index = tmp;
	}

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;

      /* Restore parts.base, index and offset so that we can check if
	 [base + offset] addressing mode is supported in next step.
	 This is necessary for targets only support [base + offset],
	 but not [base + index] addressing mode.  */
      parts.base = old_base;
      parts.index = old_index;
      parts.offset = old_offset;
    }

  /* Transform [base + index + ...] into:
       base' = base + index;
       [base' + ...].  */
  if (parts.index)
    {
      tmp = parts.index;
      parts.index = NULL_TREE;
      /* Add index to base.  */
      if (parts.base)
	{
	  tmp = fold_build_pointer_plus (parts.base, tmp);
	  tmp = force_gimple_operand_gsi_1 (gsi, tmp,
					    is_gimple_mem_ref_addr,
					    NULL_TREE, true, GSI_SAME_STMT);
	}
      parts.base = tmp;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  /* Transform [base + offset] into:
       base' = base + offset;
       [base'].  */
  if (parts.offset && !integer_zerop (parts.offset))
    {
      add_offset_to_base (gsi, &parts);
      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  /* Verify that the address is in the simplest possible shape
     (only a register).  If we cannot create such a memory reference,
     something is really wrong.  */
  gcc_assert (parts.symbol == NULL_TREE);
  gcc_assert (parts.index == NULL_TREE);
  gcc_assert (!parts.step || integer_onep (parts.step));
  gcc_assert (!parts.offset || integer_zerop (parts.offset));
  gcc_unreachable ();
}
961 | ||
962 | /* Copies components of the address from OP to ADDR. */ | |
963 | ||
964 | void | |
965 | get_address_description (tree op, struct mem_address *addr) | |
966 | { | |
28daba6f | 967 | if (TREE_CODE (TMR_BASE (op)) == ADDR_EXPR) |
968 | { | |
969 | addr->symbol = TMR_BASE (op); | |
970 | addr->base = TMR_INDEX2 (op); | |
971 | } | |
972 | else | |
973 | { | |
974 | addr->symbol = NULL_TREE; | |
975 | if (TMR_INDEX2 (op)) | |
976 | { | |
977 | gcc_assert (integer_zerop (TMR_BASE (op))); | |
978 | addr->base = TMR_INDEX2 (op); | |
979 | } | |
980 | else | |
981 | addr->base = TMR_BASE (op); | |
982 | } | |
aed164c3 | 983 | addr->index = TMR_INDEX (op); |
984 | addr->step = TMR_STEP (op); | |
985 | addr->offset = TMR_OFFSET (op); | |
986 | } | |
987 | ||
/* Copies the reference information from OLD_REF to NEW_REF, where
   NEW_REF should be either a MEM_REF or a TARGET_MEM_REF.  */

void
copy_ref_info (tree new_ref, tree old_ref)
{
  tree new_ptr_base = NULL_TREE;

  gcc_assert (TREE_CODE (new_ref) == MEM_REF
	      || TREE_CODE (new_ref) == TARGET_MEM_REF);

  TREE_SIDE_EFFECTS (new_ref) = TREE_SIDE_EFFECTS (old_ref);
  TREE_THIS_VOLATILE (new_ref) = TREE_THIS_VOLATILE (old_ref);

  new_ptr_base = TREE_OPERAND (new_ref, 0);

  /* We can transfer points-to information from an old pointer
     or decl base to the new one.  Only do so if the new pointer
     has no ptr_info yet, so existing information is never clobbered.  */
  if (new_ptr_base
      && TREE_CODE (new_ptr_base) == SSA_NAME
      && !SSA_NAME_PTR_INFO (new_ptr_base))
    {
      tree base = get_base_address (old_ref);
      if (!base)
	;
      else if ((TREE_CODE (base) == MEM_REF
		|| TREE_CODE (base) == TARGET_MEM_REF)
	       && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
	       && SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0)))
	{
	  struct ptr_info_def *new_pi;
	  unsigned int align, misalign;

	  duplicate_ssa_name_ptr_info
	    (new_ptr_base, SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0)));
	  new_pi = SSA_NAME_PTR_INFO (new_ptr_base);
	  /* We have to be careful about transferring alignment information:
	     it only remains valid when old and new refs differ by a
	     compile-time constant offset, which is not the case when the
	     new TARGET_MEM_REF has a variable index part.  */
	  if (get_ptr_info_alignment (new_pi, &align, &misalign)
	      && TREE_CODE (old_ref) == MEM_REF
	      && !(TREE_CODE (new_ref) == TARGET_MEM_REF
		   && (TMR_INDEX2 (new_ref)
		       /* TODO: Below conditions can be relaxed if TMR_INDEX
			  is an induction variable and its initial value and
			  step are aligned.  */
		       || (TMR_INDEX (new_ref) && !TMR_STEP (new_ref))
		       || (TMR_STEP (new_ref)
			   && (TREE_INT_CST_LOW (TMR_STEP (new_ref))
			       < align)))))
	    {
	      /* Shift the recorded misalignment by the constant distance
		 between the two references.  */
	      poly_uint64 inc = (mem_ref_offset (old_ref)
				 - mem_ref_offset (new_ref)).force_uhwi ();
	      adjust_ptr_info_misalignment (new_pi, inc);
	    }
	  else
	    mark_ptr_info_alignment_unknown (new_pi);
	}
      else if (VAR_P (base)
	       || TREE_CODE (base) == PARM_DECL
	       || TREE_CODE (base) == RESULT_DECL)
	{
	  /* Decl base: the new pointer provably points to that decl.  */
	  struct ptr_info_def *pi = get_ptr_info (new_ptr_base);
	  pt_solution_set_var (&pi->pt, base);
	}
    }
}
1053 | ||
/* Move constants in target_mem_ref REF to offset.  Returns the new target
   mem ref if anything changes, NULL_TREE otherwise.  */

tree
maybe_fold_tmr (tree ref)
{
  struct mem_address addr;
  bool changed = false;
  tree new_ref, off;

  get_address_description (ref, &addr);

  /* A constant non-zero base folds directly into the offset.  */
  if (addr.base
      && TREE_CODE (addr.base) == INTEGER_CST
      && !integer_zerop (addr.base))
    {
      addr.offset = fold_binary_to_constant (PLUS_EXPR,
					     TREE_TYPE (addr.offset),
					     addr.offset, addr.base);
      addr.base = NULL_TREE;
      changed = true;
    }

  /* A symbol of the form &MEM[ptr + CST]: fold CST into the offset and
     keep the address of the inner reference as the symbol.  */
  if (addr.symbol
      && TREE_CODE (TREE_OPERAND (addr.symbol, 0)) == MEM_REF)
    {
      addr.offset = fold_binary_to_constant
			(PLUS_EXPR, TREE_TYPE (addr.offset),
			 addr.offset,
			 TREE_OPERAND (TREE_OPERAND (addr.symbol, 0), 1));
      addr.symbol = TREE_OPERAND (TREE_OPERAND (addr.symbol, 0), 0);
      changed = true;
    }
  /* A symbol like &a.b[2].c: strip the component refs and accumulate
     their constant byte offset into the offset part.  */
  else if (addr.symbol
	   && handled_component_p (TREE_OPERAND (addr.symbol, 0)))
    {
      poly_int64 offset;
      addr.symbol = build_fold_addr_expr
		      (get_addr_base_and_unit_offset
			 (TREE_OPERAND (addr.symbol, 0), &offset));
      addr.offset = int_const_binop (PLUS_EXPR,
				     addr.offset, size_int (offset));
      changed = true;
    }

  /* A constant index: pre-multiply by the step (if any) and fold the
     product into the offset.  */
  if (addr.index && TREE_CODE (addr.index) == INTEGER_CST)
    {
      off = addr.index;
      if (addr.step)
	{
	  off = fold_binary_to_constant (MULT_EXPR, sizetype,
					 off, addr.step);
	  addr.step = NULL_TREE;
	}

      addr.offset = fold_binary_to_constant (PLUS_EXPR,
					     TREE_TYPE (addr.offset),
					     addr.offset, off);
      addr.index = NULL_TREE;
      changed = true;
    }

  if (!changed)
    return NULL_TREE;

  /* If we have propagated something into this TARGET_MEM_REF and thus
     ended up folding it, always create a new TARGET_MEM_REF regardless
     if it is valid in this form on the target - the propagation result
     wouldn't be anyway.  */
  new_ref = create_mem_ref_raw (TREE_TYPE (ref),
				TREE_TYPE (addr.offset), &addr, false);
  TREE_SIDE_EFFECTS (new_ref) = TREE_SIDE_EFFECTS (ref);
  TREE_THIS_VOLATILE (new_ref) = TREE_THIS_VOLATILE (ref);
  return new_ref;
}
1129 | ||
217ad6d6 | 1130 | /* Return the preferred index scale factor for accessing memory of mode |
1131 | MEM_MODE in the address space of pointer BASE. Assume that we're | |
1132 | optimizing for speed if SPEED is true and for size otherwise. */ | |
1133 | unsigned int | |
1134 | preferred_mem_scale_factor (tree base, machine_mode mem_mode, | |
1135 | bool speed) | |
1136 | { | |
c296b868 | 1137 | /* For BLKmode, we can't do anything so return 1. */ |
1138 | if (mem_mode == BLKmode) | |
1139 | return 1; | |
1140 | ||
217ad6d6 | 1141 | struct mem_address parts = {}; |
1142 | addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (base)); | |
1143 | unsigned int fact = GET_MODE_UNIT_SIZE (mem_mode); | |
1144 | ||
1145 | /* Addressing mode "base + index". */ | |
1146 | parts.index = integer_one_node; | |
1147 | parts.base = integer_one_node; | |
1148 | rtx addr = addr_for_mem_ref (&parts, as, false); | |
1149 | unsigned cost = address_cost (addr, mem_mode, as, speed); | |
1150 | ||
1151 | /* Addressing mode "base + index << scale". */ | |
1152 | parts.step = wide_int_to_tree (sizetype, fact); | |
1153 | addr = addr_for_mem_ref (&parts, as, false); | |
1154 | unsigned new_cost = address_cost (addr, mem_mode, as, speed); | |
1155 | ||
1156 | /* Compare the cost of an address with an unscaled index with | |
1157 | a scaled index and return factor if useful. */ | |
1158 | if (new_cost < cost) | |
1159 | return GET_MODE_UNIT_SIZE (mem_mode); | |
1160 | return 1; | |
1161 | } | |
1162 | ||
aed164c3 | 1163 | /* Dump PARTS to FILE. */ |
1164 | ||
1165 | extern void dump_mem_address (FILE *, struct mem_address *); | |
1166 | void | |
1167 | dump_mem_address (FILE *file, struct mem_address *parts) | |
1168 | { | |
1169 | if (parts->symbol) | |
1170 | { | |
1171 | fprintf (file, "symbol: "); | |
e077c79b | 1172 | print_generic_expr (file, TREE_OPERAND (parts->symbol, 0), TDF_SLIM); |
aed164c3 | 1173 | fprintf (file, "\n"); |
1174 | } | |
1175 | if (parts->base) | |
1176 | { | |
1177 | fprintf (file, "base: "); | |
1178 | print_generic_expr (file, parts->base, TDF_SLIM); | |
1179 | fprintf (file, "\n"); | |
1180 | } | |
1181 | if (parts->index) | |
1182 | { | |
1183 | fprintf (file, "index: "); | |
1184 | print_generic_expr (file, parts->index, TDF_SLIM); | |
1185 | fprintf (file, "\n"); | |
1186 | } | |
1187 | if (parts->step) | |
1188 | { | |
1189 | fprintf (file, "step: "); | |
1190 | print_generic_expr (file, parts->step, TDF_SLIM); | |
1191 | fprintf (file, "\n"); | |
1192 | } | |
1193 | if (parts->offset) | |
1194 | { | |
1195 | fprintf (file, "offset: "); | |
1196 | print_generic_expr (file, parts->offset, TDF_SLIM); | |
1197 | fprintf (file, "\n"); | |
1198 | } | |
1199 | } | |
1200 | ||
1201 | #include "gt-tree-ssa-address.h" |