]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/tree-ssa-address.c
[Ada] Revert change for gnatprove that is no longer needed
[thirdparty/gcc.git] / gcc / tree-ssa-address.c
CommitLineData
aed164c3 1/* Memory address lowering and addressing mode selection.
fbd26352 2 Copyright (C) 2004-2019 Free Software Foundation, Inc.
48e1416a 3
aed164c3 4This file is part of GCC.
48e1416a 5
aed164c3 6GCC is free software; you can redistribute it and/or modify it
7under the terms of the GNU General Public License as published by the
8c4c00c1 8Free Software Foundation; either version 3, or (at your option) any
aed164c3 9later version.
48e1416a 10
aed164c3 11GCC is distributed in the hope that it will be useful, but WITHOUT
12ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
48e1416a 15
aed164c3 16You should have received a copy of the GNU General Public License
8c4c00c1 17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
aed164c3 19
20/* Utility functions for manipulation with TARGET_MEM_REFs -- tree expressions
21 that directly map to addressing modes of the target. */
22
23#include "config.h"
24#include "system.h"
25#include "coretypes.h"
9ef16211 26#include "backend.h"
7c29e30e 27#include "target.h"
28#include "rtl.h"
aed164c3 29#include "tree.h"
9ef16211 30#include "gimple.h"
c2f87792 31#include "memmodel.h"
7c29e30e 32#include "stringpool.h"
c296f633 33#include "tree-vrp.h"
7c29e30e 34#include "tree-ssanames.h"
35#include "expmed.h"
36#include "insn-config.h"
c2f87792 37#include "emit-rtl.h"
7c29e30e 38#include "recog.h"
39#include "tree-pretty-print.h"
b20a8bb4 40#include "fold-const.h"
9ed99284 41#include "stor-layout.h"
e795d6e1 42#include "gimple-iterator.h"
43#include "gimplify-me.h"
05d9c18a 44#include "tree-ssa-loop-ivopts.h"
9ed99284 45#include "expr.h"
073c1fd5 46#include "tree-dfa.h"
b9ed1410 47#include "dumpfile.h"
a7a46268 48#include "tree-affine.h"
9da9c22f 49#include "gimplify.h"
a7a46268 50
51/* FIXME: We compute address costs using RTL. */
64641360 52#include "tree-ssa-address.h"
aed164c3 53
54/* TODO -- handling of symbols (according to Richard Hendersons
55 comments, http://gcc.gnu.org/ml/gcc-patches/2005-04/msg00949.html):
48e1416a 56
aed164c3 57 There are at least 5 different kinds of symbols that we can run up against:
58
59 (1) binds_local_p, small data area.
60 (2) binds_local_p, eg local statics
61 (3) !binds_local_p, eg global variables
62 (4) thread local, local_exec
63 (5) thread local, !local_exec
64
65 Now, (1) won't appear often in an array context, but it certainly can.
66 All you have to do is set -GN high enough, or explicitly mark any
67 random object __attribute__((section (".sdata"))).
68
69 All of these affect whether or not a symbol is in fact a valid address.
70 The only one tested here is (3). And that result may very well
71 be incorrect for (4) or (5).
72
73 An incorrect result here does not cause incorrect results out the
74 back end, because the expander in expr.c validizes the address. However
75 it would be nice to improve the handling here in order to produce more
76 precise results. */
77
78/* A "template" for memory address, used to determine whether the address is
79 valid for mode. */
80
6dc50383 81struct GTY (()) mem_addr_template {
aed164c3 82 rtx ref; /* The template. */
83 rtx * GTY ((skip)) step_p; /* The point in template where the step should be
84 filled in. */
85 rtx * GTY ((skip)) off_p; /* The point in template where the offset should
86 be filled in. */
6dc50383 87};
aed164c3 88
aed164c3 89
98155838 90/* The templates. Each of the low five bits of the index corresponds to one
91 component of TARGET_MEM_REF being present, while the high bits identify
92 the address space. See TEMPL_IDX. */
aed164c3 93
f1f41a6c 94static GTY(()) vec<mem_addr_template, va_gc> *mem_addr_template_list;
98155838 95
96#define TEMPL_IDX(AS, SYMBOL, BASE, INDEX, STEP, OFFSET) \
97 (((int) (AS) << 5) \
98 | ((SYMBOL != 0) << 4) \
aed164c3 99 | ((BASE != 0) << 3) \
100 | ((INDEX != 0) << 2) \
101 | ((STEP != 0) << 1) \
102 | (OFFSET != 0))
103
104/* Stores address for memory reference with parameters SYMBOL, BASE, INDEX,
98155838 105 STEP and OFFSET to *ADDR using address mode ADDRESS_MODE. Stores pointers
106 to where step is placed to *STEP_P and offset to *OFFSET_P. */
aed164c3 107
108static void
3754d046 109gen_addr_rtx (machine_mode address_mode,
98155838 110 rtx symbol, rtx base, rtx index, rtx step, rtx offset,
aed164c3 111 rtx *addr, rtx **step_p, rtx **offset_p)
112{
113 rtx act_elem;
114
115 *addr = NULL_RTX;
116 if (step_p)
117 *step_p = NULL;
118 if (offset_p)
119 *offset_p = NULL;
120
945a3e67 121 if (index && index != const0_rtx)
aed164c3 122 {
123 act_elem = index;
124 if (step)
125 {
98155838 126 act_elem = gen_rtx_MULT (address_mode, act_elem, step);
aed164c3 127
128 if (step_p)
129 *step_p = &XEXP (act_elem, 1);
130 }
131
132 *addr = act_elem;
133 }
134
8525edb5 135 if (base && base != const0_rtx)
aed164c3 136 {
137 if (*addr)
98155838 138 *addr = simplify_gen_binary (PLUS, address_mode, base, *addr);
aed164c3 139 else
140 *addr = base;
141 }
142
143 if (symbol)
144 {
145 act_elem = symbol;
146 if (offset)
147 {
98155838 148 act_elem = gen_rtx_PLUS (address_mode, act_elem, offset);
9dda1f80 149
aed164c3 150 if (offset_p)
9dda1f80 151 *offset_p = &XEXP (act_elem, 1);
152
153 if (GET_CODE (symbol) == SYMBOL_REF
154 || GET_CODE (symbol) == LABEL_REF
155 || GET_CODE (symbol) == CONST)
98155838 156 act_elem = gen_rtx_CONST (address_mode, act_elem);
aed164c3 157 }
158
159 if (*addr)
98155838 160 *addr = gen_rtx_PLUS (address_mode, *addr, act_elem);
aed164c3 161 else
162 *addr = act_elem;
163 }
164 else if (offset)
165 {
166 if (*addr)
167 {
98155838 168 *addr = gen_rtx_PLUS (address_mode, *addr, offset);
aed164c3 169 if (offset_p)
170 *offset_p = &XEXP (*addr, 1);
171 }
172 else
173 {
174 *addr = offset;
175 if (offset_p)
176 *offset_p = addr;
177 }
178 }
179
180 if (!*addr)
181 *addr = const0_rtx;
182}
183
98155838 184/* Returns address for TARGET_MEM_REF with parameters given by ADDR
185 in address space AS.
48e1416a 186 If REALLY_EXPAND is false, just make fake registers instead
aed164c3 187 of really expanding the operands, and perform the expansion in-place
188 by using one of the "templates". */
189
190rtx
98155838 191addr_for_mem_ref (struct mem_address *addr, addr_space_t as,
192 bool really_expand)
aed164c3 193{
f77c4496 194 scalar_int_mode address_mode = targetm.addr_space.address_mode (as);
195 scalar_int_mode pointer_mode = targetm.addr_space.pointer_mode (as);
aed164c3 196 rtx address, sym, bse, idx, st, off;
aed164c3 197 struct mem_addr_template *templ;
198
199 if (addr->step && !integer_onep (addr->step))
e3d0f65c 200 st = immed_wide_int_const (wi::to_wide (addr->step), pointer_mode);
aed164c3 201 else
202 st = NULL_RTX;
203
204 if (addr->offset && !integer_zerop (addr->offset))
e913b5cd 205 {
8672ee56 206 poly_offset_int dc
207 = poly_offset_int::from (wi::to_poly_wide (addr->offset), SIGNED);
e913b5cd 208 off = immed_wide_int_const (dc, pointer_mode);
209 }
aed164c3 210 else
211 off = NULL_RTX;
212
213 if (!really_expand)
214 {
98155838 215 unsigned int templ_index
216 = TEMPL_IDX (as, addr->symbol, addr->base, addr->index, st, off);
217
f1f41a6c 218 if (templ_index >= vec_safe_length (mem_addr_template_list))
219 vec_safe_grow_cleared (mem_addr_template_list, templ_index + 1);
98155838 220
aed164c3 221 /* Reuse the templates for addresses, so that we do not waste memory. */
f1f41a6c 222 templ = &(*mem_addr_template_list)[templ_index];
98155838 223 if (!templ->ref)
aed164c3 224 {
98155838 225 sym = (addr->symbol ?
321583cb 226 gen_rtx_SYMBOL_REF (pointer_mode, ggc_strdup ("test_symbol"))
98155838 227 : NULL_RTX);
228 bse = (addr->base ?
321583cb 229 gen_raw_REG (pointer_mode, LAST_VIRTUAL_REGISTER + 1)
98155838 230 : NULL_RTX);
231 idx = (addr->index ?
321583cb 232 gen_raw_REG (pointer_mode, LAST_VIRTUAL_REGISTER + 2)
98155838 233 : NULL_RTX);
234
321583cb 235 gen_addr_rtx (pointer_mode, sym, bse, idx,
98155838 236 st? const0_rtx : NULL_RTX,
237 off? const0_rtx : NULL_RTX,
238 &templ->ref,
239 &templ->step_p,
240 &templ->off_p);
aed164c3 241 }
242
aed164c3 243 if (st)
244 *templ->step_p = st;
245 if (off)
246 *templ->off_p = off;
247
248 return templ->ref;
249 }
250
251 /* Otherwise really expand the expressions. */
252 sym = (addr->symbol
321583cb 253 ? expand_expr (addr->symbol, NULL_RTX, pointer_mode, EXPAND_NORMAL)
aed164c3 254 : NULL_RTX);
255 bse = (addr->base
321583cb 256 ? expand_expr (addr->base, NULL_RTX, pointer_mode, EXPAND_NORMAL)
aed164c3 257 : NULL_RTX);
258 idx = (addr->index
321583cb 259 ? expand_expr (addr->index, NULL_RTX, pointer_mode, EXPAND_NORMAL)
aed164c3 260 : NULL_RTX);
261
a0efaa5c 262 /* addr->base could be an SSA_NAME that was set to a constant value. The
263 call to expand_expr may expose that constant. If so, fold the value
264 into OFF and clear BSE. Otherwise we may later try to pull a mode from
265 BSE to generate a REG, which won't work with constants because they
266 are modeless. */
267 if (bse && GET_CODE (bse) == CONST_INT)
268 {
269 if (off)
270 off = simplify_gen_binary (PLUS, pointer_mode, bse, off);
271 else
272 off = bse;
273 gcc_assert (GET_CODE (off) == CONST_INT);
274 bse = NULL_RTX;
275 }
321583cb 276 gen_addr_rtx (pointer_mode, sym, bse, idx, st, off, &address, NULL, NULL);
277 if (pointer_mode != address_mode)
278 address = convert_memory_address (address_mode, address);
aed164c3 279 return address;
280}
281
64641360 282/* implement addr_for_mem_ref() directly from a tree, which avoids exporting
283 the mem_address structure. */
284
285rtx
286addr_for_mem_ref (tree exp, addr_space_t as, bool really_expand)
287{
288 struct mem_address addr;
289 get_address_description (exp, &addr);
290 return addr_for_mem_ref (&addr, as, really_expand);
291}
292
aed164c3 293/* Returns address of MEM_REF in TYPE. */
294
295tree
296tree_mem_ref_addr (tree type, tree mem_ref)
297{
33c25cf0 298 tree addr;
aed164c3 299 tree act_elem;
300 tree step = TMR_STEP (mem_ref), offset = TMR_OFFSET (mem_ref);
33c25cf0 301 tree addr_base = NULL_TREE, addr_off = NULL_TREE;
aed164c3 302
28daba6f 303 addr_base = fold_convert (type, TMR_BASE (mem_ref));
aed164c3 304
33c25cf0 305 act_elem = TMR_INDEX (mem_ref);
aed164c3 306 if (act_elem)
307 {
33c25cf0 308 if (step)
a0553bff 309 act_elem = fold_build2 (MULT_EXPR, TREE_TYPE (act_elem),
310 act_elem, step);
33c25cf0 311 addr_off = act_elem;
aed164c3 312 }
313
28daba6f 314 act_elem = TMR_INDEX2 (mem_ref);
aed164c3 315 if (act_elem)
316 {
33c25cf0 317 if (addr_off)
a0553bff 318 addr_off = fold_build2 (PLUS_EXPR, TREE_TYPE (addr_off),
319 addr_off, act_elem);
aed164c3 320 else
33c25cf0 321 addr_off = act_elem;
aed164c3 322 }
323
cd743a11 324 if (offset && !integer_zerop (offset))
aed164c3 325 {
33c25cf0 326 if (addr_off)
a0553bff 327 addr_off = fold_build2 (PLUS_EXPR, TREE_TYPE (addr_off), addr_off,
328 fold_convert (TREE_TYPE (addr_off), offset));
aed164c3 329 else
33c25cf0 330 addr_off = offset;
aed164c3 331 }
332
33c25cf0 333 if (addr_off)
2cc66f2a 334 addr = fold_build_pointer_plus (addr_base, addr_off);
33c25cf0 335 else
28daba6f 336 addr = addr_base;
aed164c3 337
338 return addr;
339}
340
341/* Returns true if a memory reference in MODE and with parameters given by
342 ADDR is valid on the current target. */
343
4639f543 344bool
3754d046 345valid_mem_ref_p (machine_mode mode, addr_space_t as,
bd1a81f7 346 struct mem_address *addr)
aed164c3 347{
348 rtx address;
349
98155838 350 address = addr_for_mem_ref (addr, as, false);
aed164c3 351 if (!address)
352 return false;
353
bd1a81f7 354 return memory_address_addr_space_p (mode, address, as);
aed164c3 355}
356
357/* Checks whether a TARGET_MEM_REF with type TYPE and parameters given by ADDR
358 is valid on the current target and if so, creates and returns the
be4eb429 359 TARGET_MEM_REF. If VERIFY is false omit the verification step. */
aed164c3 360
361static tree
be4eb429 362create_mem_ref_raw (tree type, tree alias_ptr_type, struct mem_address *addr,
363 bool verify)
aed164c3 364{
28daba6f 365 tree base, index2;
366
be4eb429 367 if (verify
368 && !valid_mem_ref_p (TYPE_MODE (type), TYPE_ADDR_SPACE (type), addr))
aed164c3 369 return NULL_TREE;
370
371 if (addr->step && integer_onep (addr->step))
372 addr->step = NULL_TREE;
373
9a14ba4f 374 if (addr->offset)
375 addr->offset = fold_convert (alias_ptr_type, addr->offset);
376 else
377 addr->offset = build_int_cst (alias_ptr_type, 0);
aed164c3 378
28daba6f 379 if (addr->symbol)
9614eeff 380 {
28daba6f 381 base = addr->symbol;
382 index2 = addr->base;
383 }
384 else if (addr->base
385 && POINTER_TYPE_P (TREE_TYPE (addr->base)))
386 {
387 base = addr->base;
388 index2 = NULL_TREE;
9614eeff 389 }
28daba6f 390 else
391 {
22cd4e5e 392 base = build_int_cst (build_pointer_type (type), 0);
28daba6f 393 index2 = addr->base;
394 }
395
a07f6629 396 /* If possible use a plain MEM_REF instead of a TARGET_MEM_REF.
397 ??? As IVOPTs does not follow restrictions to where the base
398 pointer may point to create a MEM_REF only if we know that
399 base is valid. */
8525edb5 400 if ((TREE_CODE (base) == ADDR_EXPR || TREE_CODE (base) == INTEGER_CST)
28daba6f 401 && (!index2 || integer_zerop (index2))
402 && (!addr->index || integer_zerop (addr->index)))
403 return fold_build2 (MEM_REF, type, base, addr->offset);
9614eeff 404
9a14ba4f 405 return build5 (TARGET_MEM_REF, type,
28daba6f 406 base, addr->offset, addr->index, addr->step, index2);
aed164c3 407}
408
409/* Returns true if OBJ is an object whose address is a link time constant. */
410
411static bool
412fixed_address_object_p (tree obj)
413{
53e9c5c4 414 return (VAR_P (obj)
415 && (TREE_STATIC (obj) || DECL_EXTERNAL (obj))
d8b498d3 416 && ! DECL_DLLIMPORT_P (obj));
aed164c3 417}
418
33c25cf0 419/* If ADDR contains an address of object that is a link time constant,
420 move it to PARTS->symbol. */
aed164c3 421
4639f543 422void
33c25cf0 423move_fixed_address_to_symbol (struct mem_address *parts, aff_tree *addr)
aed164c3 424{
33c25cf0 425 unsigned i;
426 tree val = NULL_TREE;
d3610bea 427
33c25cf0 428 for (i = 0; i < addr->n; i++)
aed164c3 429 {
796b6678 430 if (addr->elts[i].coef != 1)
33c25cf0 431 continue;
432
433 val = addr->elts[i].val;
434 if (TREE_CODE (val) == ADDR_EXPR
435 && fixed_address_object_p (TREE_OPERAND (val, 0)))
436 break;
aed164c3 437 }
438
33c25cf0 439 if (i == addr->n)
440 return;
441
e077c79b 442 parts->symbol = val;
33c25cf0 443 aff_combination_remove_elt (addr, i);
444}
445
9da9c22f 446/* Return true if ADDR contains an instance of BASE_HINT and it's moved to
447 PARTS->base. */
2451b8b8 448
9da9c22f 449static bool
2451b8b8 450move_hint_to_base (tree type, struct mem_address *parts, tree base_hint,
451 aff_tree *addr)
452{
453 unsigned i;
454 tree val = NULL_TREE;
29f67cfe 455 int qual;
2451b8b8 456
457 for (i = 0; i < addr->n; i++)
458 {
796b6678 459 if (addr->elts[i].coef != 1)
2451b8b8 460 continue;
461
462 val = addr->elts[i].val;
463 if (operand_equal_p (val, base_hint, 0))
464 break;
465 }
466
467 if (i == addr->n)
9da9c22f 468 return false;
2451b8b8 469
29f67cfe 470 /* Cast value to appropriate pointer type. We cannot use a pointer
471 to TYPE directly, as the back-end will assume registers of pointer
472 type are aligned, and just the base itself may not actually be.
473 We use void pointer to the type's address space instead. */
474 qual = ENCODE_QUAL_ADDR_SPACE (TYPE_ADDR_SPACE (type));
475 type = build_qualified_type (void_type_node, qual);
2451b8b8 476 parts->base = fold_convert (build_pointer_type (type), val);
477 aff_combination_remove_elt (addr, i);
9da9c22f 478 return true;
2451b8b8 479}
480
33c25cf0 481/* If ADDR contains an address of a dereferenced pointer, move it to
482 PARTS->base. */
483
484static void
485move_pointer_to_base (struct mem_address *parts, aff_tree *addr)
486{
487 unsigned i;
488 tree val = NULL_TREE;
489
490 for (i = 0; i < addr->n; i++)
aed164c3 491 {
796b6678 492 if (addr->elts[i].coef != 1)
33c25cf0 493 continue;
494
495 val = addr->elts[i].val;
496 if (POINTER_TYPE_P (TREE_TYPE (val)))
497 break;
aed164c3 498 }
499
33c25cf0 500 if (i == addr->n)
501 return;
502
503 parts->base = val;
504 aff_combination_remove_elt (addr, i);
505}
506
a7ae69c9 507/* Moves the loop variant part V in linear address ADDR to be the index
508 of PARTS. */
509
510static void
511move_variant_to_index (struct mem_address *parts, aff_tree *addr, tree v)
512{
513 unsigned i;
514 tree val = NULL_TREE;
515
516 gcc_assert (!parts->index);
517 for (i = 0; i < addr->n; i++)
518 {
519 val = addr->elts[i].val;
520 if (operand_equal_p (val, v, 0))
521 break;
522 }
523
524 if (i == addr->n)
525 return;
526
527 parts->index = fold_convert (sizetype, val);
e913b5cd 528 parts->step = wide_int_to_tree (sizetype, addr->elts[i].coef);
a7ae69c9 529 aff_combination_remove_elt (addr, i);
530}
531
33c25cf0 532/* Adds ELT to PARTS. */
533
534static void
535add_to_parts (struct mem_address *parts, tree elt)
536{
537 tree type;
538
aed164c3 539 if (!parts->index)
540 {
0de36bdb 541 parts->index = fold_convert (sizetype, elt);
aed164c3 542 return;
543 }
544
33c25cf0 545 if (!parts->base)
546 {
547 parts->base = elt;
548 return;
549 }
550
aed164c3 551 /* Add ELT to base. */
33c25cf0 552 type = TREE_TYPE (parts->base);
36f0904b 553 if (POINTER_TYPE_P (type))
2cc66f2a 554 parts->base = fold_build_pointer_plus (parts->base, elt);
36f0904b 555 else
9da9c22f 556 parts->base = fold_build2 (PLUS_EXPR, type, parts->base, elt);
aed164c3 557}
558
c2f87792 559/* Returns true if multiplying by RATIO is allowed in an address. Test the
560 validity for a memory reference accessing memory of mode MODE in address
561 space AS. */
562
563static bool
564multiplier_allowed_in_address_p (HOST_WIDE_INT ratio, machine_mode mode,
565 addr_space_t as)
566{
567#define MAX_RATIO 128
568 unsigned int data_index = (int) as * MAX_MACHINE_MODE + (int) mode;
569 static vec<sbitmap> valid_mult_list;
570 sbitmap valid_mult;
571
572 if (data_index >= valid_mult_list.length ())
573 valid_mult_list.safe_grow_cleared (data_index + 1);
574
575 valid_mult = valid_mult_list[data_index];
576 if (!valid_mult)
577 {
578 machine_mode address_mode = targetm.addr_space.address_mode (as);
579 rtx reg1 = gen_raw_REG (address_mode, LAST_VIRTUAL_REGISTER + 1);
580 rtx reg2 = gen_raw_REG (address_mode, LAST_VIRTUAL_REGISTER + 2);
581 rtx addr, scaled;
582 HOST_WIDE_INT i;
583
584 valid_mult = sbitmap_alloc (2 * MAX_RATIO + 1);
585 bitmap_clear (valid_mult);
586 scaled = gen_rtx_fmt_ee (MULT, address_mode, reg1, NULL_RTX);
587 addr = gen_rtx_fmt_ee (PLUS, address_mode, scaled, reg2);
588 for (i = -MAX_RATIO; i <= MAX_RATIO; i++)
589 {
590 XEXP (scaled, 1) = gen_int_mode (i, address_mode);
591 if (memory_address_addr_space_p (mode, addr, as)
592 || memory_address_addr_space_p (mode, scaled, as))
593 bitmap_set_bit (valid_mult, i + MAX_RATIO);
594 }
595
596 if (dump_file && (dump_flags & TDF_DETAILS))
597 {
598 fprintf (dump_file, " allowed multipliers:");
599 for (i = -MAX_RATIO; i <= MAX_RATIO; i++)
600 if (bitmap_bit_p (valid_mult, i + MAX_RATIO))
601 fprintf (dump_file, " %d", (int) i);
602 fprintf (dump_file, "\n");
603 fprintf (dump_file, "\n");
604 }
605
606 valid_mult_list[data_index] = valid_mult;
607 }
608
609 if (ratio > MAX_RATIO || ratio < -MAX_RATIO)
610 return false;
611
612 return bitmap_bit_p (valid_mult, ratio + MAX_RATIO);
613}
614
aed164c3 615/* Finds the most expensive multiplication in ADDR that can be
616 expressed in an addressing mode and move the corresponding
33c25cf0 617 element(s) to PARTS. */
aed164c3 618
619static void
2451b8b8 620most_expensive_mult_to_index (tree type, struct mem_address *parts,
621 aff_tree *addr, bool speed)
aed164c3 622{
2451b8b8 623 addr_space_t as = TYPE_ADDR_SPACE (type);
3754d046 624 machine_mode address_mode = targetm.addr_space.address_mode (as);
d3610bea 625 HOST_WIDE_INT coef;
aed164c3 626 unsigned best_mult_cost = 0, acost;
627 tree mult_elt = NULL_TREE, elt;
628 unsigned i, j;
d3610bea 629 enum tree_code op_code;
aed164c3 630
ab2c1de8 631 offset_int best_mult = 0;
aed164c3 632 for (i = 0; i < addr->n; i++)
633 {
796b6678 634 if (!wi::fits_shwi_p (addr->elts[i].coef))
d3610bea 635 continue;
636
cf8f0e63 637 coef = addr->elts[i].coef.to_shwi ();
d3610bea 638 if (coef == 1
2451b8b8 639 || !multiplier_allowed_in_address_p (coef, TYPE_MODE (type), as))
aed164c3 640 continue;
d3610bea 641
72655676 642 acost = mult_by_coeff_cost (coef, address_mode, speed);
aed164c3 643
644 if (acost > best_mult_cost)
645 {
646 best_mult_cost = acost;
5de9d3ed 647 best_mult = offset_int::from (addr->elts[i].coef, SIGNED);
aed164c3 648 }
649 }
650
d3610bea 651 if (!best_mult_cost)
aed164c3 652 return;
653
d3610bea 654 /* Collect elements multiplied by best_mult. */
aed164c3 655 for (i = j = 0; i < addr->n; i++)
656 {
ab2c1de8 657 offset_int amult = offset_int::from (addr->elts[i].coef, SIGNED);
658 offset_int amult_neg = -wi::sext (amult, TYPE_PRECISION (addr->type));
48e1416a 659
cf8f0e63 660 if (amult == best_mult)
d3610bea 661 op_code = PLUS_EXPR;
cf8f0e63 662 else if (amult_neg == best_mult)
d3610bea 663 op_code = MINUS_EXPR;
664 else
aed164c3 665 {
aed164c3 666 addr->elts[j] = addr->elts[i];
667 j++;
668 continue;
669 }
0de36bdb 670
33c25cf0 671 elt = fold_convert (sizetype, addr->elts[i].val);
d3610bea 672 if (mult_elt)
33c25cf0 673 mult_elt = fold_build2 (op_code, sizetype, mult_elt, elt);
d3610bea 674 else if (op_code == PLUS_EXPR)
aed164c3 675 mult_elt = elt;
676 else
33c25cf0 677 mult_elt = fold_build1 (NEGATE_EXPR, sizetype, elt);
aed164c3 678 }
679 addr->n = j;
48e1416a 680
aed164c3 681 parts->index = mult_elt;
e913b5cd 682 parts->step = wide_int_to_tree (sizetype, best_mult);
aed164c3 683}
684
2451b8b8 685/* Splits address ADDR for a memory access of type TYPE into PARTS.
686 If BASE_HINT is non-NULL, it specifies an SSA name to be used
a7ae69c9 687 preferentially as base of the reference, and IV_CAND is the selected
9da9c22f 688 iv candidate used in ADDR. Store true to VAR_IN_BASE if variant
689 part of address is split to PARTS.base.
2451b8b8 690
aed164c3 691 TODO -- be more clever about the distribution of the elements of ADDR
692 to PARTS. Some architectures do not support anything but single
693 register in address, possibly with a small integer offset; while
694 create_mem_ref will simplify the address to an acceptable shape
d3610bea 695 later, it would be more efficient to know that asking for complicated
696 addressing modes is useless. */
aed164c3 697
698static void
9da9c22f 699addr_to_parts (tree type, aff_tree *addr, tree iv_cand, tree base_hint,
700 struct mem_address *parts, bool *var_in_base, bool speed)
aed164c3 701{
d3610bea 702 tree part;
aed164c3 703 unsigned i;
704
705 parts->symbol = NULL_TREE;
706 parts->base = NULL_TREE;
707 parts->index = NULL_TREE;
708 parts->step = NULL_TREE;
709
1aeea61f 710 if (maybe_ne (addr->offset, 0))
e913b5cd 711 parts->offset = wide_int_to_tree (sizetype, addr->offset);
aed164c3 712 else
713 parts->offset = NULL_TREE;
714
33c25cf0 715 /* Try to find a symbol. */
716 move_fixed_address_to_symbol (parts, addr);
717
9da9c22f 718 /* Since at the moment there is no reliable way to know how to
719 distinguish between pointer and its offset, we decide if var
720 part is the pointer based on guess. */
721 *var_in_base = (base_hint != NULL && parts->symbol == NULL);
722 if (*var_in_base)
723 *var_in_base = move_hint_to_base (type, parts, base_hint, addr);
724 else
a7ae69c9 725 move_variant_to_index (parts, addr, iv_cand);
726
9da9c22f 727 /* First move the most expensive feasible multiplication to index. */
a7ae69c9 728 if (!parts->index)
729 most_expensive_mult_to_index (type, parts, addr, speed);
33c25cf0 730
9da9c22f 731 /* Move pointer into base. */
2451b8b8 732 if (!parts->symbol && !parts->base)
33c25cf0 733 move_pointer_to_base (parts, addr);
aed164c3 734
735 /* Then try to process the remaining elements. */
736 for (i = 0; i < addr->n; i++)
d3610bea 737 {
33c25cf0 738 part = fold_convert (sizetype, addr->elts[i].val);
796b6678 739 if (addr->elts[i].coef != 1)
33c25cf0 740 part = fold_build2 (MULT_EXPR, sizetype, part,
e913b5cd 741 wide_int_to_tree (sizetype, addr->elts[i].coef));
33c25cf0 742 add_to_parts (parts, part);
d3610bea 743 }
aed164c3 744 if (addr->rest)
33c25cf0 745 add_to_parts (parts, fold_convert (sizetype, addr->rest));
aed164c3 746}
747
748/* Force the PARTS to register. */
749
750static void
75a70cf9 751gimplify_mem_ref_parts (gimple_stmt_iterator *gsi, struct mem_address *parts)
aed164c3 752{
753 if (parts->base)
c2acfe90 754 parts->base = force_gimple_operand_gsi_1 (gsi, parts->base,
755 is_gimple_mem_ref_addr, NULL_TREE,
75a70cf9 756 true, GSI_SAME_STMT);
aed164c3 757 if (parts->index)
75a70cf9 758 parts->index = force_gimple_operand_gsi (gsi, parts->index,
0d734975 759 true, NULL_TREE,
75a70cf9 760 true, GSI_SAME_STMT);
aed164c3 761}
762
18283ee2 763/* Return true if the OFFSET in PARTS is the only thing that is making
764 it an invalid address for type TYPE. */
765
766static bool
767mem_ref_valid_without_offset_p (tree type, mem_address parts)
768{
769 if (!parts.base)
770 parts.base = parts.offset;
771 parts.offset = NULL_TREE;
772 return valid_mem_ref_p (TYPE_MODE (type), TYPE_ADDR_SPACE (type), &parts);
773}
774
775/* Fold PARTS->offset into PARTS->base, so that there is no longer
776 a separate offset. Emit any new instructions before GSI. */
777
778static void
779add_offset_to_base (gimple_stmt_iterator *gsi, mem_address *parts)
780{
781 tree tmp = parts->offset;
782 if (parts->base)
783 {
784 tmp = fold_build_pointer_plus (parts->base, tmp);
785 tmp = force_gimple_operand_gsi_1 (gsi, tmp, is_gimple_mem_ref_addr,
786 NULL_TREE, true, GSI_SAME_STMT);
787 }
788 parts->base = tmp;
789 parts->offset = NULL_TREE;
790}
791
aed164c3 792/* Creates and returns a TARGET_MEM_REF for address ADDR. If necessary
75a70cf9 793 computations are emitted in front of GSI. TYPE is the mode
a7ae69c9 794 of created memory reference. IV_CAND is the selected iv candidate in ADDR,
795 and BASE_HINT is non NULL if IV_CAND comes from a base address
796 object. */
aed164c3 797
798tree
a7ae69c9 799create_mem_ref (gimple_stmt_iterator *gsi, tree type, aff_tree *addr,
800 tree alias_ptr_type, tree iv_cand, tree base_hint, bool speed)
aed164c3 801{
9da9c22f 802 bool var_in_base;
aed164c3 803 tree mem_ref, tmp;
aed164c3 804 struct mem_address parts;
805
9da9c22f 806 addr_to_parts (type, addr, iv_cand, base_hint, &parts, &var_in_base, speed);
75a70cf9 807 gimplify_mem_ref_parts (gsi, &parts);
be4eb429 808 mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
aed164c3 809 if (mem_ref)
810 return mem_ref;
811
812 /* The expression is too complicated. Try making it simpler. */
813
9da9c22f 814 /* Merge symbol into other parts. */
815 if (parts.symbol)
816 {
817 tmp = parts.symbol;
818 parts.symbol = NULL_TREE;
819 gcc_assert (is_gimple_val (tmp));
820
821 if (parts.base)
822 {
823 gcc_assert (useless_type_conversion_p (sizetype,
824 TREE_TYPE (parts.base)));
825
826 if (parts.index)
827 {
828 /* Add the symbol to base, eventually forcing it to register. */
829 tmp = fold_build_pointer_plus (tmp, parts.base);
830 tmp = force_gimple_operand_gsi_1 (gsi, tmp,
831 is_gimple_mem_ref_addr,
832 NULL_TREE, true,
833 GSI_SAME_STMT);
834 }
835 else
836 {
837 /* Move base to index, then move the symbol to base. */
838 parts.index = parts.base;
839 }
840 parts.base = tmp;
841 }
842 else
843 parts.base = tmp;
844
845 mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
846 if (mem_ref)
847 return mem_ref;
848 }
849
850 /* Move multiplication to index by transforming address expression:
851 [... + index << step + ...]
852 into:
853 index' = index << step;
854 [... + index' + ,,,]. */
aed164c3 855 if (parts.step && !integer_onep (parts.step))
856 {
aed164c3 857 gcc_assert (parts.index);
18283ee2 858 if (parts.offset && mem_ref_valid_without_offset_p (type, parts))
859 {
860 add_offset_to_base (gsi, &parts);
861 mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
862 gcc_assert (mem_ref);
863 return mem_ref;
864 }
865
75a70cf9 866 parts.index = force_gimple_operand_gsi (gsi,
33c25cf0 867 fold_build2 (MULT_EXPR, sizetype,
868 parts.index, parts.step),
75a70cf9 869 true, NULL_TREE, true, GSI_SAME_STMT);
aed164c3 870 parts.step = NULL_TREE;
48e1416a 871
be4eb429 872 mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
aed164c3 873 if (mem_ref)
874 return mem_ref;
875 }
876
9da9c22f 877 /* Add offset to invariant part by transforming address expression:
878 [base + index + offset]
879 into:
880 base' = base + offset;
881 [base' + index]
882 or:
883 index' = index + offset;
884 [base + index']
885 depending on which one is invariant. */
886 if (parts.offset && !integer_zerop (parts.offset))
aed164c3 887 {
9da9c22f 888 tree old_base = unshare_expr (parts.base);
889 tree old_index = unshare_expr (parts.index);
890 tree old_offset = unshare_expr (parts.offset);
48e1416a 891
9da9c22f 892 tmp = parts.offset;
893 parts.offset = NULL_TREE;
894 /* Add offset to invariant part. */
895 if (!var_in_base)
7fce04b0 896 {
9da9c22f 897 if (parts.base)
5e22b9e0 898 {
9da9c22f 899 tmp = fold_build_pointer_plus (parts.base, tmp);
900 tmp = force_gimple_operand_gsi_1 (gsi, tmp,
901 is_gimple_mem_ref_addr,
902 NULL_TREE, true,
903 GSI_SAME_STMT);
5e22b9e0 904 }
9da9c22f 905 parts.base = tmp;
906 }
907 else
908 {
909 if (parts.index)
7fce04b0 910 {
9da9c22f 911 tmp = fold_build_pointer_plus (parts.index, tmp);
912 tmp = force_gimple_operand_gsi_1 (gsi, tmp,
913 is_gimple_mem_ref_addr,
914 NULL_TREE, true,
915 GSI_SAME_STMT);
7fce04b0 916 }
9da9c22f 917 parts.index = tmp;
7fce04b0 918 }
aed164c3 919
be4eb429 920 mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
aed164c3 921 if (mem_ref)
922 return mem_ref;
9da9c22f 923
924 /* Restore parts.base, index and offset so that we can check if
925 [base + offset] addressing mode is supported in next step.
926 This is necessary for targets only support [base + offset],
927 but not [base + index] addressing mode. */
928 parts.base = old_base;
929 parts.index = old_index;
930 parts.offset = old_offset;
aed164c3 931 }
932
9da9c22f 933 /* Transform [base + index + ...] into:
934 base' = base + index;
935 [base' + ...]. */
33c25cf0 936 if (parts.index)
aed164c3 937 {
9da9c22f 938 tmp = parts.index;
939 parts.index = NULL_TREE;
33c25cf0 940 /* Add index to base. */
941 if (parts.base)
942 {
9da9c22f 943 tmp = fold_build_pointer_plus (parts.base, tmp);
944 tmp = force_gimple_operand_gsi_1 (gsi, tmp,
945 is_gimple_mem_ref_addr,
946 NULL_TREE, true, GSI_SAME_STMT);
33c25cf0 947 }
9da9c22f 948 parts.base = tmp;
aed164c3 949
be4eb429 950 mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
aed164c3 951 if (mem_ref)
952 return mem_ref;
953 }
954
9da9c22f 955 /* Transform [base + offset] into:
956 base' = base + offset;
957 [base']. */
aed164c3 958 if (parts.offset && !integer_zerop (parts.offset))
959 {
18283ee2 960 add_offset_to_base (gsi, &parts);
be4eb429 961 mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
aed164c3 962 if (mem_ref)
963 return mem_ref;
964 }
965
966 /* Verify that the address is in the simplest possible shape
967 (only a register). If we cannot create such a memory reference,
968 something is really wrong. */
969 gcc_assert (parts.symbol == NULL_TREE);
33c25cf0 970 gcc_assert (parts.index == NULL_TREE);
aed164c3 971 gcc_assert (!parts.step || integer_onep (parts.step));
972 gcc_assert (!parts.offset || integer_zerop (parts.offset));
973 gcc_unreachable ();
974}
975
976/* Copies components of the address from OP to ADDR. */
977
978void
979get_address_description (tree op, struct mem_address *addr)
980{
28daba6f 981 if (TREE_CODE (TMR_BASE (op)) == ADDR_EXPR)
982 {
983 addr->symbol = TMR_BASE (op);
984 addr->base = TMR_INDEX2 (op);
985 }
986 else
987 {
988 addr->symbol = NULL_TREE;
989 if (TMR_INDEX2 (op))
990 {
991 gcc_assert (integer_zerop (TMR_BASE (op)));
992 addr->base = TMR_INDEX2 (op);
993 }
994 else
995 addr->base = TMR_BASE (op);
996 }
aed164c3 997 addr->index = TMR_INDEX (op);
998 addr->step = TMR_STEP (op);
999 addr->offset = TMR_OFFSET (op);
1000}
1001
/* Copies the reference information from OLD_REF to NEW_REF, where
   NEW_REF should be either a MEM_REF or a TARGET_MEM_REF.  The copied
   information is the side-effect/volatility flags and, when possible,
   the points-to and alignment info of the base pointer.  */

void
copy_ref_info (tree new_ref, tree old_ref)
{
  tree new_ptr_base = NULL_TREE;

  gcc_assert (TREE_CODE (new_ref) == MEM_REF
	      || TREE_CODE (new_ref) == TARGET_MEM_REF);

  TREE_SIDE_EFFECTS (new_ref) = TREE_SIDE_EFFECTS (old_ref);
  TREE_THIS_VOLATILE (new_ref) = TREE_THIS_VOLATILE (old_ref);

  new_ptr_base = TREE_OPERAND (new_ref, 0);

  /* We can transfer points-to information from an old pointer
     or decl base to the new one.  Only do so if the new base SSA
     name doesn't have pointer info yet, so we never overwrite
     existing (possibly more precise) information.  */
  if (new_ptr_base
      && TREE_CODE (new_ptr_base) == SSA_NAME
      && !SSA_NAME_PTR_INFO (new_ptr_base))
    {
      tree base = get_base_address (old_ref);
      if (!base)
	;
      else if ((TREE_CODE (base) == MEM_REF
		|| TREE_CODE (base) == TARGET_MEM_REF)
	       && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
	       && SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0)))
	{
	  /* The old reference was based on a pointer SSA name that
	     carries pointer info: duplicate it onto the new base.  */
	  struct ptr_info_def *new_pi;
	  unsigned int align, misalign;

	  duplicate_ssa_name_ptr_info
	    (new_ptr_base, SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0)));
	  new_pi = SSA_NAME_PTR_INFO (new_ptr_base);
	  /* We have to be careful about transferring alignment information.
	     The alignment can only be kept if the offset difference between
	     the two references is a compile-time constant, which rules out
	     a TARGET_MEM_REF with a variable part (INDEX2, an unscaled
	     index, or a step smaller than the known alignment).  */
	  if (get_ptr_info_alignment (new_pi, &align, &misalign)
	      && TREE_CODE (old_ref) == MEM_REF
	      && !(TREE_CODE (new_ref) == TARGET_MEM_REF
		   && (TMR_INDEX2 (new_ref)
		       /* TODO: Below conditions can be relaxed if TMR_INDEX
			  is an induction variable and its initial value and
			  step are aligned.  */
		       || (TMR_INDEX (new_ref) && !TMR_STEP (new_ref))
		       || (TMR_STEP (new_ref)
			   && (TREE_INT_CST_LOW (TMR_STEP (new_ref))
			       < align)))))
	    {
	      /* Constant offset difference: keep the alignment and only
		 adjust the misalignment by the delta.  */
	      poly_uint64 inc = (mem_ref_offset (old_ref)
				 - mem_ref_offset (new_ref)).force_uhwi ();
	      adjust_ptr_info_misalignment (new_pi, inc);
	    }
	  else
	    mark_ptr_info_alignment_unknown (new_pi);
	}
      else if (VAR_P (base)
	       || TREE_CODE (base) == PARM_DECL
	       || TREE_CODE (base) == RESULT_DECL)
	{
	  /* The old reference was directly based on a decl: the new
	     pointer provably points to exactly that decl.  */
	  struct ptr_info_def *pi = get_ptr_info (new_ptr_base);
	  pt_solution_set_var (&pi->pt, base);
	}
    }
}
1067
/* Move constants in target_mem_ref REF to offset.  Returns the new target
   mem ref if anything changes, NULL_TREE otherwise.  */

tree
maybe_fold_tmr (tree ref)
{
  struct mem_address addr;
  bool changed = false;
  tree new_ref, off;

  get_address_description (ref, &addr);

  /* A constant non-zero base: fold it into the offset.  */
  if (addr.base
      && TREE_CODE (addr.base) == INTEGER_CST
      && !integer_zerop (addr.base))
    {
      addr.offset = fold_binary_to_constant (PLUS_EXPR,
					     TREE_TYPE (addr.offset),
					     addr.offset, addr.base);
      addr.base = NULL_TREE;
      changed = true;
    }

  /* A symbol of the form &MEM[ptr + cst] (e.g. created by propagation):
     strip the MEM_REF, folding its constant offset into ours and making
     its pointer operand the new symbol.  */
  if (addr.symbol
      && TREE_CODE (TREE_OPERAND (addr.symbol, 0)) == MEM_REF)
    {
      addr.offset = fold_binary_to_constant
			(PLUS_EXPR, TREE_TYPE (addr.offset),
			 addr.offset,
			 TREE_OPERAND (TREE_OPERAND (addr.symbol, 0), 1));
      addr.symbol = TREE_OPERAND (TREE_OPERAND (addr.symbol, 0), 0);
      changed = true;
    }
  /* A symbol of the form &a.b.c: rebase the symbol on the underlying
     object and add the component's constant byte offset to ours.  */
  else if (addr.symbol
	   && handled_component_p (TREE_OPERAND (addr.symbol, 0)))
    {
      poly_int64 offset;
      addr.symbol = build_fold_addr_expr
		      (get_addr_base_and_unit_offset
			 (TREE_OPERAND (addr.symbol, 0), &offset));
      addr.offset = int_const_binop (PLUS_EXPR,
				     addr.offset, size_int (offset));
      changed = true;
    }

  /* A constant index: scale it by the step (if any) and fold the
     result into the offset.  */
  if (addr.index && TREE_CODE (addr.index) == INTEGER_CST)
    {
      off = addr.index;
      if (addr.step)
	{
	  off = fold_binary_to_constant (MULT_EXPR, sizetype,
					 off, addr.step);
	  addr.step = NULL_TREE;
	}

      addr.offset = fold_binary_to_constant (PLUS_EXPR,
					     TREE_TYPE (addr.offset),
					     addr.offset, off);
      addr.index = NULL_TREE;
      changed = true;
    }

  if (!changed)
    return NULL_TREE;

  /* If we have propagated something into this TARGET_MEM_REF and thus
     ended up folding it, always create a new TARGET_MEM_REF regardless
     of whether it is valid in this form on the target - the propagation
     result wouldn't be valid anyway.  */
  new_ref = create_mem_ref_raw (TREE_TYPE (ref),
				TREE_TYPE (addr.offset), &addr, false);
  TREE_SIDE_EFFECTS (new_ref) = TREE_SIDE_EFFECTS (ref);
  TREE_THIS_VOLATILE (new_ref) = TREE_THIS_VOLATILE (ref);
  return new_ref;
}
1143
217ad6d6 1144/* Return the preferred index scale factor for accessing memory of mode
1145 MEM_MODE in the address space of pointer BASE. Assume that we're
1146 optimizing for speed if SPEED is true and for size otherwise. */
1147unsigned int
1148preferred_mem_scale_factor (tree base, machine_mode mem_mode,
1149 bool speed)
1150{
c296b868 1151 /* For BLKmode, we can't do anything so return 1. */
1152 if (mem_mode == BLKmode)
1153 return 1;
1154
217ad6d6 1155 struct mem_address parts = {};
1156 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (base));
1157 unsigned int fact = GET_MODE_UNIT_SIZE (mem_mode);
1158
1159 /* Addressing mode "base + index". */
1160 parts.index = integer_one_node;
1161 parts.base = integer_one_node;
1162 rtx addr = addr_for_mem_ref (&parts, as, false);
1163 unsigned cost = address_cost (addr, mem_mode, as, speed);
1164
1165 /* Addressing mode "base + index << scale". */
1166 parts.step = wide_int_to_tree (sizetype, fact);
1167 addr = addr_for_mem_ref (&parts, as, false);
1168 unsigned new_cost = address_cost (addr, mem_mode, as, speed);
1169
1170 /* Compare the cost of an address with an unscaled index with
1171 a scaled index and return factor if useful. */
1172 if (new_cost < cost)
1173 return GET_MODE_UNIT_SIZE (mem_mode);
1174 return 1;
1175}
1176
aed164c3 1177/* Dump PARTS to FILE. */
1178
1179extern void dump_mem_address (FILE *, struct mem_address *);
1180void
1181dump_mem_address (FILE *file, struct mem_address *parts)
1182{
1183 if (parts->symbol)
1184 {
1185 fprintf (file, "symbol: ");
e077c79b 1186 print_generic_expr (file, TREE_OPERAND (parts->symbol, 0), TDF_SLIM);
aed164c3 1187 fprintf (file, "\n");
1188 }
1189 if (parts->base)
1190 {
1191 fprintf (file, "base: ");
1192 print_generic_expr (file, parts->base, TDF_SLIM);
1193 fprintf (file, "\n");
1194 }
1195 if (parts->index)
1196 {
1197 fprintf (file, "index: ");
1198 print_generic_expr (file, parts->index, TDF_SLIM);
1199 fprintf (file, "\n");
1200 }
1201 if (parts->step)
1202 {
1203 fprintf (file, "step: ");
1204 print_generic_expr (file, parts->step, TDF_SLIM);
1205 fprintf (file, "\n");
1206 }
1207 if (parts->offset)
1208 {
1209 fprintf (file, "offset: ");
1210 print_generic_expr (file, parts->offset, TDF_SLIM);
1211 fprintf (file, "\n");
1212 }
1213}
1214
1215#include "gt-tree-ssa-address.h"