]> git.ipfire.org Git - thirdparty/gcc.git/blame - gcc/tree-ssa-address.c
PR fortran/95090 - ICE: identifier overflow
[thirdparty/gcc.git] / gcc / tree-ssa-address.c
CommitLineData
ac182688 1/* Memory address lowering and addressing mode selection.
8d9254fc 2 Copyright (C) 2004-2020 Free Software Foundation, Inc.
b8698a0f 3
ac182688 4This file is part of GCC.
b8698a0f 5
ac182688
ZD
6GCC is free software; you can redistribute it and/or modify it
7under the terms of the GNU General Public License as published by the
9dcd6f09 8Free Software Foundation; either version 3, or (at your option) any
ac182688 9later version.
b8698a0f 10
ac182688
ZD
11GCC is distributed in the hope that it will be useful, but WITHOUT
12ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14for more details.
b8698a0f 15
ac182688 16You should have received a copy of the GNU General Public License
9dcd6f09
NC
17along with GCC; see the file COPYING3. If not see
18<http://www.gnu.org/licenses/>. */
ac182688
ZD
19
20/* Utility functions for manipulation with TARGET_MEM_REFs -- tree expressions
21 that directly map to addressing modes of the target. */
22
23#include "config.h"
24#include "system.h"
25#include "coretypes.h"
c7131fb2 26#include "backend.h"
957060b5
AM
27#include "target.h"
28#include "rtl.h"
ac182688 29#include "tree.h"
c7131fb2 30#include "gimple.h"
3df50a94 31#include "memmodel.h"
957060b5 32#include "stringpool.h"
f90aa46c 33#include "tree-vrp.h"
957060b5
AM
34#include "tree-ssanames.h"
35#include "expmed.h"
36#include "insn-config.h"
3df50a94 37#include "emit-rtl.h"
957060b5
AM
38#include "recog.h"
39#include "tree-pretty-print.h"
40e23961 40#include "fold-const.h"
d8a2d370 41#include "stor-layout.h"
18f429e2
AM
42#include "gimple-iterator.h"
43#include "gimplify-me.h"
e28030cf 44#include "tree-ssa-loop-ivopts.h"
d8a2d370 45#include "expr.h"
442b4905 46#include "tree-dfa.h"
7ee2468b 47#include "dumpfile.h"
40013784 48#include "tree-affine.h"
7a89b97a 49#include "gimplify.h"
40013784
SB
50
51/* FIXME: We compute address costs using RTL. */
c1bf2a39 52#include "tree-ssa-address.h"
ac182688
ZD
53
54/* TODO -- handling of symbols (according to Richard Hendersons
55 comments, http://gcc.gnu.org/ml/gcc-patches/2005-04/msg00949.html):
b8698a0f 56
ac182688
ZD
57 There are at least 5 different kinds of symbols that we can run up against:
58
59 (1) binds_local_p, small data area.
60 (2) binds_local_p, eg local statics
61 (3) !binds_local_p, eg global variables
62 (4) thread local, local_exec
63 (5) thread local, !local_exec
64
65 Now, (1) won't appear often in an array context, but it certainly can.
66 All you have to do is set -GN high enough, or explicitly mark any
67 random object __attribute__((section (".sdata"))).
68
69 All of these affect whether or not a symbol is in fact a valid address.
70 The only one tested here is (3). And that result may very well
71 be incorrect for (4) or (5).
72
73 An incorrect result here does not cause incorrect results out the
74 back end, because the expander in expr.c validizes the address. However
75 it would be nice to improve the handling here in order to produce more
76 precise results. */
77
/* A "template" for memory address, used to determine whether the address is
   valid for mode.  A template is a skeleton address RTX whose step and
   offset slots can be patched in place, so one GC-allocated RTX serves
   every query with the same component shape.  */

struct GTY (()) mem_addr_template {
  rtx ref;			/* The template.  */
  rtx * GTY ((skip)) step_p;	/* The point in template where the step should be
				   filled in.  */
  rtx * GTY ((skip)) off_p;	/* The point in template where the offset should
				   be filled in.  */
};
ac182688 88
ac182688 89
d4ebfa65
BE
/* The templates.  Each of the low five bits of the index corresponds to one
   component of TARGET_MEM_REF being present, while the high bits identify
   the address space.  See TEMPL_IDX.  Grown lazily by addr_for_mem_ref;
   GC-rooted so cached template RTXen survive collections.  */

static GTY(()) vec<mem_addr_template, va_gc> *mem_addr_template_list;
d4ebfa65
BE
95
/* Index into mem_addr_template_list for an address in address space AS
   whose SYMBOL, BASE, INDEX, STEP and OFFSET components are given; a bit
   is set for each component that is present (non-null).  Every macro
   parameter is fully parenthesized so that an expression argument
   (e.g. "a & b") cannot reassociate with the "!=" and "<<" operators of
   the expansion.  */
#define TEMPL_IDX(AS, SYMBOL, BASE, INDEX, STEP, OFFSET) \
  (((int) (AS) << 5) \
   | (((SYMBOL) != 0) << 4) \
   | (((BASE) != 0) << 3) \
   | (((INDEX) != 0) << 2) \
   | (((STEP) != 0) << 1) \
   | ((OFFSET) != 0))
103
/* Stores address for memory reference with parameters SYMBOL, BASE, INDEX,
   STEP and OFFSET to *ADDR using address mode ADDRESS_MODE.  Stores pointers
   to where step is placed to *STEP_P and offset to *OFFSET_P.
   The result has the shape ((symbol + offset) + (base + index * step)),
   with absent components omitted; a NULL *ADDR is replaced by const0_rtx.  */

static void
gen_addr_rtx (machine_mode address_mode,
	      rtx symbol, rtx base, rtx index, rtx step, rtx offset,
	      rtx *addr, rtx **step_p, rtx **offset_p)
{
  rtx act_elem;

  *addr = NULL_RTX;
  if (step_p)
    *step_p = NULL;
  if (offset_p)
    *offset_p = NULL;

  if (index && index != const0_rtx)
    {
      act_elem = index;
      if (step)
	{
	  act_elem = gen_rtx_MULT (address_mode, act_elem, step);

	  /* Record where the step lives so template users can patch it.  */
	  if (step_p)
	    *step_p = &XEXP (act_elem, 1);
	}

      *addr = act_elem;
    }

  if (base && base != const0_rtx)
    {
      if (*addr)
	*addr = simplify_gen_binary (PLUS, address_mode, base, *addr);
      else
	*addr = base;
    }

  if (symbol)
    {
      act_elem = symbol;
      if (offset)
	{
	  act_elem = gen_rtx_PLUS (address_mode, act_elem, offset);

	  if (offset_p)
	    *offset_p = &XEXP (act_elem, 1);

	  /* Wrap symbol + offset in CONST so it remains a legitimate
	     link-time constant expression.  */
	  if (GET_CODE (symbol) == SYMBOL_REF
	      || GET_CODE (symbol) == LABEL_REF
	      || GET_CODE (symbol) == CONST)
	    act_elem = gen_rtx_CONST (address_mode, act_elem);
	}

      if (*addr)
	*addr = gen_rtx_PLUS (address_mode, *addr, act_elem);
      else
	*addr = act_elem;
    }
  else if (offset)
    {
      if (*addr)
	{
	  *addr = gen_rtx_PLUS (address_mode, *addr, offset);
	  if (offset_p)
	    *offset_p = &XEXP (*addr, 1);
	}
      else
	{
	  /* The offset is the whole address; its patch point is *ADDR
	     itself.  */
	  *addr = offset;
	  if (offset_p)
	    *offset_p = addr;
	}
    }

  if (!*addr)
    *addr = const0_rtx;
}
183
d4ebfa65
BE
/* Returns address for TARGET_MEM_REF with parameters given by ADDR
   in address space AS.
   If REALLY_EXPAND is false, just make fake registers instead
   of really expanding the operands, and perform the expansion in-place
   by using one of the "templates".  */

rtx
addr_for_mem_ref (struct mem_address *addr, addr_space_t as,
		  bool really_expand)
{
  scalar_int_mode address_mode = targetm.addr_space.address_mode (as);
  scalar_int_mode pointer_mode = targetm.addr_space.pointer_mode (as);
  rtx address, sym, bse, idx, st, off;
  struct mem_addr_template *templ;

  /* A step of 1 is implicit; only materialize a constant otherwise.  */
  if (addr->step && !integer_onep (addr->step))
    st = immed_wide_int_const (wi::to_wide (addr->step), pointer_mode);
  else
    st = NULL_RTX;

  if (addr->offset && !integer_zerop (addr->offset))
    {
      poly_offset_int dc
	= poly_offset_int::from (wi::to_poly_wide (addr->offset), SIGNED);
      off = immed_wide_int_const (dc, pointer_mode);
    }
  else
    off = NULL_RTX;

  if (!really_expand)
    {
      unsigned int templ_index
	= TEMPL_IDX (as, addr->symbol, addr->base, addr->index, st, off);

      if (templ_index >= vec_safe_length (mem_addr_template_list))
	vec_safe_grow_cleared (mem_addr_template_list, templ_index + 1);

      /* Reuse the templates for addresses, so that we do not waste memory.  */
      templ = &(*mem_addr_template_list)[templ_index];
      if (!templ->ref)
	{
	  /* Build a fake address with placeholder symbol/registers; only
	     its shape matters for validity queries.  */
	  sym = (addr->symbol ?
		 gen_rtx_SYMBOL_REF (pointer_mode, ggc_strdup ("test_symbol"))
		 : NULL_RTX);
	  bse = (addr->base ?
		 gen_raw_REG (pointer_mode, LAST_VIRTUAL_REGISTER + 1)
		 : NULL_RTX);
	  idx = (addr->index ?
		 gen_raw_REG (pointer_mode, LAST_VIRTUAL_REGISTER + 2)
		 : NULL_RTX);

	  gen_addr_rtx (pointer_mode, sym, bse, idx,
			st? const0_rtx : NULL_RTX,
			off? const0_rtx : NULL_RTX,
			&templ->ref,
			&templ->step_p,
			&templ->off_p);
	}

      /* Patch the actual step/offset constants into the cached template.  */
      if (st)
	*templ->step_p = st;
      if (off)
	*templ->off_p = off;

      return templ->ref;
    }

  /* Otherwise really expand the expressions.  */
  sym = (addr->symbol
	 ? expand_expr (addr->symbol, NULL_RTX, pointer_mode, EXPAND_NORMAL)
	 : NULL_RTX);
  bse = (addr->base
	 ? expand_expr (addr->base, NULL_RTX, pointer_mode, EXPAND_NORMAL)
	 : NULL_RTX);
  idx = (addr->index
	 ? expand_expr (addr->index, NULL_RTX, pointer_mode, EXPAND_NORMAL)
	 : NULL_RTX);

  /* addr->base could be an SSA_NAME that was set to a constant value.  The
     call to expand_expr may expose that constant.  If so, fold the value
     into OFF and clear BSE.  Otherwise we may later try to pull a mode from
     BSE to generate a REG, which won't work with constants because they
     are modeless.  */
  if (bse && GET_CODE (bse) == CONST_INT)
    {
      if (off)
	off = simplify_gen_binary (PLUS, pointer_mode, bse, off);
      else
	off = bse;
      gcc_assert (GET_CODE (off) == CONST_INT);
      bse = NULL_RTX;
    }
  gen_addr_rtx (pointer_mode, sym, bse, idx, st, off, &address, NULL, NULL);
  if (pointer_mode != address_mode)
    address = convert_memory_address (address_mode, address);
  return address;
}
281
c1bf2a39
AM
282/* implement addr_for_mem_ref() directly from a tree, which avoids exporting
283 the mem_address structure. */
284
285rtx
286addr_for_mem_ref (tree exp, addr_space_t as, bool really_expand)
287{
288 struct mem_address addr;
289 get_address_description (exp, &addr);
290 return addr_for_mem_ref (&addr, as, really_expand);
291}
292
ac182688
ZD
/* Returns address of MEM_REF in TYPE, i.e. the tree expression
   base + index * step + index2 + offset with zero/absent parts elided.  */

tree
tree_mem_ref_addr (tree type, tree mem_ref)
{
  tree addr;
  tree act_elem;
  tree step = TMR_STEP (mem_ref), offset = TMR_OFFSET (mem_ref);
  tree addr_base = NULL_TREE, addr_off = NULL_TREE;

  addr_base = fold_convert (type, TMR_BASE (mem_ref));

  /* Accumulate the variable part (index * step + index2 + offset) in
     ADDR_OFF.  */
  act_elem = TMR_INDEX (mem_ref);
  if (act_elem)
    {
      if (step)
	act_elem = fold_build2 (MULT_EXPR, TREE_TYPE (act_elem),
				act_elem, step);
      addr_off = act_elem;
    }

  act_elem = TMR_INDEX2 (mem_ref);
  if (act_elem)
    {
      if (addr_off)
	addr_off = fold_build2 (PLUS_EXPR, TREE_TYPE (addr_off),
				addr_off, act_elem);
      else
	addr_off = act_elem;
    }

  if (offset && !integer_zerop (offset))
    {
      if (addr_off)
	addr_off = fold_build2 (PLUS_EXPR, TREE_TYPE (addr_off), addr_off,
				fold_convert (TREE_TYPE (addr_off), offset));
      else
	addr_off = offset;
    }

  if (addr_off)
    addr = fold_build_pointer_plus (addr_base, addr_off);
  else
    addr = addr_base;

  return addr;
}
340
341/* Returns true if a memory reference in MODE and with parameters given by
342 ADDR is valid on the current target. */
343
c2b64cea 344bool
ef4bddc2 345valid_mem_ref_p (machine_mode mode, addr_space_t as,
09e881c9 346 struct mem_address *addr)
ac182688
ZD
347{
348 rtx address;
349
d4ebfa65 350 address = addr_for_mem_ref (addr, as, false);
ac182688
ZD
351 if (!address)
352 return false;
353
09e881c9 354 return memory_address_addr_space_p (mode, address, as);
ac182688
ZD
355}
356
/* Checks whether a TARGET_MEM_REF with type TYPE and parameters given by ADDR
   is valid on the current target and if so, creates and returns the
   TARGET_MEM_REF.  If VERIFY is false omit the verification step.
   Note: canonicalizes ADDR in place (drops unit step, forces an offset
   of ALIAS_PTR_TYPE).  Returns NULL_TREE when the address is not valid.  */

static tree
create_mem_ref_raw (tree type, tree alias_ptr_type, struct mem_address *addr,
		    bool verify)
{
  tree base, index2;

  if (verify
      && !valid_mem_ref_p (TYPE_MODE (type), TYPE_ADDR_SPACE (type), addr))
    return NULL_TREE;

  /* A step of 1 is implicit in TARGET_MEM_REF.  */
  if (addr->step && integer_onep (addr->step))
    addr->step = NULL_TREE;

  /* The offset operand doubles as the alias-set carrier, so it must
     always be present and of ALIAS_PTR_TYPE.  */
  if (addr->offset)
    addr->offset = fold_convert (alias_ptr_type, addr->offset);
  else
    addr->offset = build_int_cst (alias_ptr_type, 0);

  /* Decide which component becomes operand 0 (base) and which the
     auxiliary index2 operand.  */
  if (addr->symbol)
    {
      base = addr->symbol;
      index2 = addr->base;
    }
  else if (addr->base
	   && POINTER_TYPE_P (TREE_TYPE (addr->base)))
    {
      base = addr->base;
      index2 = NULL_TREE;
    }
  else
    {
      base = build_int_cst (build_pointer_type (type), 0);
      index2 = addr->base;
    }

  /* If possible use a plain MEM_REF instead of a TARGET_MEM_REF.
     ??? As IVOPTs does not follow restrictions to where the base
     pointer may point to create a MEM_REF only if we know that
     base is valid.  */
  if ((TREE_CODE (base) == ADDR_EXPR || TREE_CODE (base) == INTEGER_CST)
      && (!index2 || integer_zerop (index2))
      && (!addr->index || integer_zerop (addr->index)))
    return fold_build2 (MEM_REF, type, base, addr->offset);

  return build5 (TARGET_MEM_REF, type,
		 base, addr->offset, addr->index, addr->step, index2);
}
408
409/* Returns true if OBJ is an object whose address is a link time constant. */
410
411static bool
412fixed_address_object_p (tree obj)
413{
8813a647
JJ
414 return (VAR_P (obj)
415 && (TREE_STATIC (obj) || DECL_EXTERNAL (obj))
8c51effa 416 && ! DECL_DLLIMPORT_P (obj));
ac182688
ZD
417}
418
820410e0
ZD
/* If ADDR contains an address of object that is a link time constant,
   move it to PARTS->symbol.  Only elements with coefficient 1 qualify,
   since the symbol component of an address is never scaled.  */

void
move_fixed_address_to_symbol (struct mem_address *parts, aff_tree *addr)
{
  unsigned i;
  tree val = NULL_TREE;

  for (i = 0; i < addr->n; i++)
    {
      if (addr->elts[i].coef != 1)
	continue;

      val = addr->elts[i].val;
      if (TREE_CODE (val) == ADDR_EXPR
	  && fixed_address_object_p (TREE_OPERAND (val, 0)))
	break;
    }

  /* No suitable element found.  */
  if (i == addr->n)
    return;

  parts->symbol = val;
  aff_combination_remove_elt (addr, i);
}
445
7a89b97a
BC
/* Return true if ADDR contains an instance of BASE_HINT and it's moved to
   PARTS->base.  Only elements with coefficient 1 qualify.  */

static bool
move_hint_to_base (tree type, struct mem_address *parts, tree base_hint,
		   aff_tree *addr)
{
  unsigned i;
  tree val = NULL_TREE;
  int qual;

  for (i = 0; i < addr->n; i++)
    {
      if (addr->elts[i].coef != 1)
	continue;

      val = addr->elts[i].val;
      if (operand_equal_p (val, base_hint, 0))
	break;
    }

  /* BASE_HINT does not appear in ADDR.  */
  if (i == addr->n)
    return false;

  /* Cast value to appropriate pointer type.  We cannot use a pointer
     to TYPE directly, as the back-end will assume registers of pointer
     type are aligned, and just the base itself may not actually be.
     We use void pointer to the type's address space instead.  */
  qual = ENCODE_QUAL_ADDR_SPACE (TYPE_ADDR_SPACE (type));
  type = build_qualified_type (void_type_node, qual);
  parts->base = fold_convert (build_pointer_type (type), val);
  aff_combination_remove_elt (addr, i);
  return true;
}
480
820410e0
ZD
/* If ADDR contains an address of a dereferenced pointer, move it to
   PARTS->base.  Only elements with coefficient 1 qualify, since the base
   component of an address is never scaled.  */

static void
move_pointer_to_base (struct mem_address *parts, aff_tree *addr)
{
  unsigned i;
  tree val = NULL_TREE;

  for (i = 0; i < addr->n; i++)
    {
      if (addr->elts[i].coef != 1)
	continue;

      val = addr->elts[i].val;
      if (POINTER_TYPE_P (TREE_TYPE (val)))
	break;
    }

  /* No pointer-typed element found.  */
  if (i == addr->n)
    return;

  parts->base = val;
  aff_combination_remove_elt (addr, i);
}
506
880a1451
XDL
/* Moves the loop variant part V in linear address ADDR to be the index
   of PARTS.  Its coefficient in ADDR becomes PARTS->step.  */

static void
move_variant_to_index (struct mem_address *parts, aff_tree *addr, tree v)
{
  unsigned i;
  tree val = NULL_TREE;

  gcc_assert (!parts->index);
  for (i = 0; i < addr->n; i++)
    {
      val = addr->elts[i].val;
      if (operand_equal_p (val, v, 0))
	break;
    }

  /* V does not appear in ADDR.  */
  if (i == addr->n)
    return;

  parts->index = fold_convert (sizetype, val);
  parts->step = wide_int_to_tree (sizetype, addr->elts[i].coef);
  aff_combination_remove_elt (addr, i);
}
531
820410e0
ZD
/* Adds ELT to PARTS: into the index slot if free, else into the base slot
   if free, else folded into the existing base.  */

static void
add_to_parts (struct mem_address *parts, tree elt)
{
  tree type;

  if (!parts->index)
    {
      parts->index = fold_convert (sizetype, elt);
      return;
    }

  if (!parts->base)
    {
      parts->base = elt;
      return;
    }

  /* Add ELT to base.  Use pointer arithmetic when base is a pointer.  */
  type = TREE_TYPE (parts->base);
  if (POINTER_TYPE_P (type))
    parts->base = fold_build_pointer_plus (parts->base, elt);
  else
    parts->base = fold_build2 (PLUS_EXPR, type, parts->base, elt);
}
558
3df50a94
BC
/* Returns true if multiplying by RATIO is allowed in an address.  Test the
   validity for a memory reference accessing memory of mode MODE in address
   space AS.  Results for each (AS, MODE) pair are probed once against the
   target and memoized in a function-static table for the whole run.  */

static bool
multiplier_allowed_in_address_p (HOST_WIDE_INT ratio, machine_mode mode,
				 addr_space_t as)
{
#define MAX_RATIO 128
  unsigned int data_index = (int) as * MAX_MACHINE_MODE + (int) mode;
  static vec<sbitmap> valid_mult_list;
  sbitmap valid_mult;

  if (data_index >= valid_mult_list.length ())
    valid_mult_list.safe_grow_cleared (data_index + 1);

  valid_mult = valid_mult_list[data_index];
  if (!valid_mult)
    {
      machine_mode address_mode = targetm.addr_space.address_mode (as);
      rtx reg1 = gen_raw_REG (address_mode, LAST_VIRTUAL_REGISTER + 1);
      rtx reg2 = gen_raw_REG (address_mode, LAST_VIRTUAL_REGISTER + 2);
      rtx addr, scaled;
      HOST_WIDE_INT i;

      /* Probe every ratio in [-MAX_RATIO, MAX_RATIO] by patching the
	 multiplier into a fake (reg1 * i) + reg2 address and asking the
	 target whether it is legitimate.  */
      valid_mult = sbitmap_alloc (2 * MAX_RATIO + 1);
      bitmap_clear (valid_mult);
      scaled = gen_rtx_fmt_ee (MULT, address_mode, reg1, NULL_RTX);
      addr = gen_rtx_fmt_ee (PLUS, address_mode, scaled, reg2);
      for (i = -MAX_RATIO; i <= MAX_RATIO; i++)
	{
	  XEXP (scaled, 1) = gen_int_mode (i, address_mode);
	  if (memory_address_addr_space_p (mode, addr, as)
	      || memory_address_addr_space_p (mode, scaled, as))
	    bitmap_set_bit (valid_mult, i + MAX_RATIO);
	}

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "  allowed multipliers:");
	  for (i = -MAX_RATIO; i <= MAX_RATIO; i++)
	    if (bitmap_bit_p (valid_mult, i + MAX_RATIO))
	      fprintf (dump_file, " %d", (int) i);
	  fprintf (dump_file, "\n");
	  fprintf (dump_file, "\n");
	}

      valid_mult_list[data_index] = valid_mult;
    }

  if (ratio > MAX_RATIO || ratio < -MAX_RATIO)
    return false;

  return bitmap_bit_p (valid_mult, ratio + MAX_RATIO);
}
614
ac182688
ZD
/* Finds the most expensive multiplication in ADDR that can be
   expressed in an addressing mode and move the corresponding
   element(s) to PARTS.  Folding the costliest multiplication into the
   addressing mode saves the most explicit multiply instructions.  */

static void
most_expensive_mult_to_index (tree type, struct mem_address *parts,
			      aff_tree *addr, bool speed)
{
  addr_space_t as = TYPE_ADDR_SPACE (type);
  machine_mode address_mode = targetm.addr_space.address_mode (as);
  HOST_WIDE_INT coef;
  unsigned best_mult_cost = 0, acost;
  tree mult_elt = NULL_TREE, elt;
  unsigned i, j;
  enum tree_code op_code;

  /* Find the coefficient whose multiplication would be most expensive to
     perform explicitly, among those the target can fold into an address.  */
  offset_int best_mult = 0;
  for (i = 0; i < addr->n; i++)
    {
      if (!wi::fits_shwi_p (addr->elts[i].coef))
	continue;

      coef = addr->elts[i].coef.to_shwi ();
      if (coef == 1
	  || !multiplier_allowed_in_address_p (coef, TYPE_MODE (type), as))
	continue;

      acost = mult_by_coeff_cost (coef, address_mode, speed);

      if (acost > best_mult_cost)
	{
	  best_mult_cost = acost;
	  best_mult = offset_int::from (addr->elts[i].coef, SIGNED);
	}
    }

  if (!best_mult_cost)
    return;

  /* Collect elements multiplied by best_mult.  Elements with the negated
     coefficient are subtracted instead.  */
  for (i = j = 0; i < addr->n; i++)
    {
      offset_int amult = offset_int::from (addr->elts[i].coef, SIGNED);
      offset_int amult_neg = -wi::sext (amult, TYPE_PRECISION (addr->type));

      if (amult == best_mult)
	op_code = PLUS_EXPR;
      else if (amult_neg == best_mult)
	op_code = MINUS_EXPR;
      else
	{
	  /* Keep unrelated elements, compacting ADDR in place.  */
	  addr->elts[j] = addr->elts[i];
	  j++;
	  continue;
	}

      elt = fold_convert (sizetype, addr->elts[i].val);
      if (mult_elt)
	mult_elt = fold_build2 (op_code, sizetype, mult_elt, elt);
      else if (op_code == PLUS_EXPR)
	mult_elt = elt;
      else
	mult_elt = fold_build1 (NEGATE_EXPR, sizetype, elt);
    }
  addr->n = j;

  parts->index = mult_elt;
  parts->step = wide_int_to_tree (sizetype, best_mult);
}
684
d7c0c068
UW
/* Splits address ADDR for a memory access of type TYPE into PARTS.
   If BASE_HINT is non-NULL, it specifies an SSA name to be used
   preferentially as base of the reference, and IV_CAND is the selected
   iv candidate used in ADDR.  Store true to VAR_IN_BASE if variant
   part of address is split to PARTS.base.

   TODO -- be more clever about the distribution of the elements of ADDR
   to PARTS.  Some architectures do not support anything but single
   register in address, possibly with a small integer offset; while
   create_mem_ref will simplify the address to an acceptable shape
   later, it would be more efficient to know that asking for complicated
   addressing modes is useless.  */

static void
addr_to_parts (tree type, aff_tree *addr, tree iv_cand, tree base_hint,
	       struct mem_address *parts, bool *var_in_base, bool speed)
{
  tree part;
  unsigned i;

  parts->symbol = NULL_TREE;
  parts->base = NULL_TREE;
  parts->index = NULL_TREE;
  parts->step = NULL_TREE;

  if (maybe_ne (addr->offset, 0))
    parts->offset = wide_int_to_tree (sizetype, addr->offset);
  else
    parts->offset = NULL_TREE;

  /* Try to find a symbol.  */
  move_fixed_address_to_symbol (parts, addr);

  /* Since at the moment there is no reliable way to know how to
     distinguish between pointer and its offset, we decide if var
     part is the pointer based on guess.  */
  *var_in_base = (base_hint != NULL && parts->symbol == NULL);
  if (*var_in_base)
    *var_in_base = move_hint_to_base (type, parts, base_hint, addr);
  else
    move_variant_to_index (parts, addr, iv_cand);

  /* First move the most expensive feasible multiplication to index.  */
  if (!parts->index)
    most_expensive_mult_to_index (type, parts, addr, speed);

  /* Move pointer into base.  */
  if (!parts->symbol && !parts->base)
    move_pointer_to_base (parts, addr);

  /* Then try to process the remaining elements.  */
  for (i = 0; i < addr->n; i++)
    {
      part = fold_convert (sizetype, addr->elts[i].val);
      if (addr->elts[i].coef != 1)
	part = fold_build2 (MULT_EXPR, sizetype, part,
			    wide_int_to_tree (sizetype, addr->elts[i].coef));
      add_to_parts (parts, part);
    }
  if (addr->rest)
    add_to_parts (parts, fold_convert (sizetype, addr->rest));
}
747
/* Force the PARTS to register.  The base must satisfy
   is_gimple_mem_ref_addr; the index only needs to be a gimple value, hence
   the two different force_gimple_operand entry points.  */

static void
gimplify_mem_ref_parts (gimple_stmt_iterator *gsi, struct mem_address *parts)
{
  if (parts->base)
    parts->base = force_gimple_operand_gsi_1 (gsi, parts->base,
					      is_gimple_mem_ref_addr, NULL_TREE,
					      true, GSI_SAME_STMT);
  if (parts->index)
    parts->index = force_gimple_operand_gsi (gsi, parts->index,
					     true, NULL_TREE,
					     true, GSI_SAME_STMT);
}
762
729f495a
RS
/* Return true if the OFFSET in PARTS is the only thing that is making
   it an invalid address for type TYPE.  PARTS is passed by value, so the
   adjustments below do not affect the caller's copy.  */

static bool
mem_ref_valid_without_offset_p (tree type, mem_address parts)
{
  if (!parts.base)
    parts.base = parts.offset;
  parts.offset = NULL_TREE;
  return valid_mem_ref_p (TYPE_MODE (type), TYPE_ADDR_SPACE (type), &parts);
}
774
/* Fold PARTS->offset into PARTS->base, so that there is no longer
   a separate offset.  Emit any new instructions before GSI.  */

static void
add_offset_to_base (gimple_stmt_iterator *gsi, mem_address *parts)
{
  tree tmp = parts->offset;
  if (parts->base)
    {
      tmp = fold_build_pointer_plus (parts->base, tmp);
      tmp = force_gimple_operand_gsi_1 (gsi, tmp, is_gimple_mem_ref_addr,
					NULL_TREE, true, GSI_SAME_STMT);
    }
  parts->base = tmp;
  parts->offset = NULL_TREE;
}
791
/* Creates and returns a TARGET_MEM_REF for address ADDR.  If necessary
   computations are emitted in front of GSI.  TYPE is the mode
   of created memory reference.  IV_CAND is the selected iv candidate in ADDR,
   and BASE_HINT is non NULL if IV_CAND comes from a base address
   object.
   The function first tries the full decomposition, then progressively
   simplifies the address (merging symbol, step, offset, index into the
   base) until the target accepts it; a plain register address must always
   be acceptable, so reaching the end is a bug.  */

tree
create_mem_ref (gimple_stmt_iterator *gsi, tree type, aff_tree *addr,
		tree alias_ptr_type, tree iv_cand, tree base_hint, bool speed)
{
  bool var_in_base;
  tree mem_ref, tmp;
  struct mem_address parts;

  addr_to_parts (type, addr, iv_cand, base_hint, &parts, &var_in_base, speed);
  gimplify_mem_ref_parts (gsi, &parts);
  mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
  if (mem_ref)
    return mem_ref;

  /* The expression is too complicated.  Try making it simpler.  */

  /* Merge symbol into other parts.  */
  if (parts.symbol)
    {
      tmp = parts.symbol;
      parts.symbol = NULL_TREE;
      gcc_assert (is_gimple_val (tmp));

      if (parts.base)
	{
	  gcc_assert (useless_type_conversion_p (sizetype,
						 TREE_TYPE (parts.base)));

	  if (parts.index)
	    {
	      /* Add the symbol to base, eventually forcing it to register.  */
	      tmp = fold_build_pointer_plus (tmp, parts.base);
	      tmp = force_gimple_operand_gsi_1 (gsi, tmp,
						is_gimple_mem_ref_addr,
						NULL_TREE, true,
						GSI_SAME_STMT);
	    }
	  else
	    {
	      /* Move base to index, then move the symbol to base.  */
	      parts.index = parts.base;
	    }
	  parts.base = tmp;
	}
      else
	parts.base = tmp;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  /* Move multiplication to index by transforming address expression:
       [... + index << step + ...]
     into:
       index' = index << step;
       [... + index' + ...].  */
  if (parts.step && !integer_onep (parts.step))
    {
      gcc_assert (parts.index);
      /* If only the offset makes the address invalid, fold it into the
	 base instead of dropping the scaled index.  */
      if (parts.offset && mem_ref_valid_without_offset_p (type, parts))
	{
	  add_offset_to_base (gsi, &parts);
	  mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
	  gcc_assert (mem_ref);
	  return mem_ref;
	}

      parts.index = force_gimple_operand_gsi (gsi,
					      fold_build2 (MULT_EXPR, sizetype,
							   parts.index,
							   parts.step),
					      true, NULL_TREE, true,
					      GSI_SAME_STMT);
      parts.step = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  /* Add offset to invariant part by transforming address expression:
       [base + index + offset]
     into:
       base' = base + offset;
       [base' + index]
     or:
       index' = index + offset;
       [base + index']
     depending on which one is invariant.  */
  if (parts.offset && !integer_zerop (parts.offset))
    {
      tree old_base = unshare_expr (parts.base);
      tree old_index = unshare_expr (parts.index);
      tree old_offset = unshare_expr (parts.offset);

      tmp = parts.offset;
      parts.offset = NULL_TREE;
      /* Add offset to invariant part.  */
      if (!var_in_base)
	{
	  if (parts.base)
	    {
	      tmp = fold_build_pointer_plus (parts.base, tmp);
	      tmp = force_gimple_operand_gsi_1 (gsi, tmp,
						is_gimple_mem_ref_addr,
						NULL_TREE, true,
						GSI_SAME_STMT);
	    }
	  parts.base = tmp;
	}
      else
	{
	  if (parts.index)
	    {
	      tmp = fold_build_pointer_plus (parts.index, tmp);
	      tmp = force_gimple_operand_gsi_1 (gsi, tmp,
						is_gimple_mem_ref_addr,
						NULL_TREE, true,
						GSI_SAME_STMT);
	    }
	  parts.index = tmp;
	}

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;

      /* Restore parts.base, index and offset so that we can check if
	 [base + offset] addressing mode is supported in next step.
	 This is necessary for targets only support [base + offset],
	 but not [base + index] addressing mode.  */
      parts.base = old_base;
      parts.index = old_index;
      parts.offset = old_offset;
    }

  /* Transform [base + index + ...] into:
       base' = base + index;
       [base' + ...].  */
  if (parts.index)
    {
      tmp = parts.index;
      parts.index = NULL_TREE;
      /* Add index to base.  */
      if (parts.base)
	{
	  tmp = fold_build_pointer_plus (parts.base, tmp);
	  tmp = force_gimple_operand_gsi_1 (gsi, tmp,
					    is_gimple_mem_ref_addr,
					    NULL_TREE, true, GSI_SAME_STMT);
	}
      parts.base = tmp;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  /* Transform [base + offset] into:
       base' = base + offset;
       [base'].  */
  if (parts.offset && !integer_zerop (parts.offset))
    {
      add_offset_to_base (gsi, &parts);
      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  /* Verify that the address is in the simplest possible shape
     (only a register).  If we cannot create such a memory reference,
     something is really wrong.  */
  gcc_assert (parts.symbol == NULL_TREE);
  gcc_assert (parts.index == NULL_TREE);
  gcc_assert (!parts.step || integer_onep (parts.step));
  gcc_assert (!parts.offset || integer_zerop (parts.offset));
  gcc_unreachable ();
}
975
976/* Copies components of the address from OP to ADDR. */
977
978void
979get_address_description (tree op, struct mem_address *addr)
980{
4d948885
RG
981 if (TREE_CODE (TMR_BASE (op)) == ADDR_EXPR)
982 {
983 addr->symbol = TMR_BASE (op);
984 addr->base = TMR_INDEX2 (op);
985 }
986 else
987 {
988 addr->symbol = NULL_TREE;
989 if (TMR_INDEX2 (op))
990 {
991 gcc_assert (integer_zerop (TMR_BASE (op)));
992 addr->base = TMR_INDEX2 (op);
993 }
994 else
995 addr->base = TMR_BASE (op);
996 }
ac182688
ZD
997 addr->index = TMR_INDEX (op);
998 addr->step = TMR_STEP (op);
999 addr->offset = TMR_OFFSET (op);
1000}
1001
f0286f95
BS
/* Copies the reference information from OLD_REF to NEW_REF, where
   NEW_REF should be either a MEM_REF or a TARGET_MEM_REF.

   Besides the side-effect and volatility flags, this transfers
   points-to and (where provably still valid) alignment information
   from OLD_REF's base pointer to NEW_REF's base SSA name.  */

void
copy_ref_info (tree new_ref, tree old_ref)
{
  tree new_ptr_base = NULL_TREE;

  gcc_assert (TREE_CODE (new_ref) == MEM_REF
	      || TREE_CODE (new_ref) == TARGET_MEM_REF);

  TREE_SIDE_EFFECTS (new_ref) = TREE_SIDE_EFFECTS (old_ref);
  TREE_THIS_VOLATILE (new_ref) = TREE_THIS_VOLATILE (old_ref);

  new_ptr_base = TREE_OPERAND (new_ref, 0);

  /* We can transfer points-to information from an old pointer
     or decl base to the new one.  Only do so if the new base is an
     SSA name that does not already carry pointer info.  */
  if (new_ptr_base
      && TREE_CODE (new_ptr_base) == SSA_NAME
      && !SSA_NAME_PTR_INFO (new_ptr_base))
    {
      tree base = get_base_address (old_ref);
      if (!base)
	;
      else if ((TREE_CODE (base) == MEM_REF
		|| TREE_CODE (base) == TARGET_MEM_REF)
	       && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
	       && SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0)))
	{
	  struct ptr_info_def *new_pi;
	  unsigned int align, misalign;

	  /* Old base is itself a pointer dereference with pointer info:
	     duplicate that info onto the new base.  */
	  duplicate_ssa_name_ptr_info
	    (new_ptr_base, SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0)));
	  new_pi = SSA_NAME_PTR_INFO (new_ptr_base);
	  /* We have to be careful about transferring alignment information.
	     Keep it only when the offset adjustment between the two refs is
	     a known constant and no TMR index term could perturb the
	     alignment below the recorded value.  */
	  if (get_ptr_info_alignment (new_pi, &align, &misalign)
	      && TREE_CODE (old_ref) == MEM_REF
	      && !(TREE_CODE (new_ref) == TARGET_MEM_REF
		   && (TMR_INDEX2 (new_ref)
		       /* TODO: Below conditions can be relaxed if TMR_INDEX
			  is an induction variable and its initial value and
			  step are aligned.  */
		       || (TMR_INDEX (new_ref) && !TMR_STEP (new_ref))
		       || (TMR_STEP (new_ref)
			   && (TREE_INT_CST_LOW (TMR_STEP (new_ref))
			       < align)))))
	    {
	      /* Adjust the recorded misalignment by the constant offset
		 difference between the old and new reference.  */
	      poly_uint64 inc = (mem_ref_offset (old_ref)
				 - mem_ref_offset (new_ref)).force_uhwi ();
	      adjust_ptr_info_misalignment (new_pi, inc);
	    }
	  else
	    mark_ptr_info_alignment_unknown (new_pi);
	}
      else if (VAR_P (base)
	       || TREE_CODE (base) == PARM_DECL
	       || TREE_CODE (base) == RESULT_DECL)
	{
	  /* Old base is a decl: the new pointer provably points to it.  */
	  struct ptr_info_def *pi = get_ptr_info (new_ptr_base);
	  pt_solution_set_var (&pi->pt, base);
	}
    }
}
1067
ac182688
ZD
/* Move constants in target_mem_ref REF to offset.  Returns the new target
   mem ref if anything changes, NULL_TREE otherwise.  */

tree
maybe_fold_tmr (tree ref)
{
  struct mem_address addr;
  bool changed = false;
  tree new_ref, off;

  get_address_description (ref, &addr);

  /* A constant non-zero base folds directly into the offset.  */
  if (addr.base
      && TREE_CODE (addr.base) == INTEGER_CST
      && !integer_zerop (addr.base))
    {
      addr.offset = fold_binary_to_constant (PLUS_EXPR,
					     TREE_TYPE (addr.offset),
					     addr.offset, addr.base);
      addr.base = NULL_TREE;
      changed = true;
    }

  /* A symbol of the form &MEM[ptr + cst] becomes symbol ptr with the
     constant added to the offset.  */
  if (addr.symbol
      && TREE_CODE (TREE_OPERAND (addr.symbol, 0)) == MEM_REF)
    {
      addr.offset = fold_binary_to_constant
			(PLUS_EXPR, TREE_TYPE (addr.offset),
			 addr.offset,
			 TREE_OPERAND (TREE_OPERAND (addr.symbol, 0), 1));
      addr.symbol = TREE_OPERAND (TREE_OPERAND (addr.symbol, 0), 0);
      changed = true;
    }
  /* A symbol wrapping a handled component (e.g. &a.b.c) is rebased to
     the underlying object, with the component's constant byte offset
     moved into the offset part.  */
  else if (addr.symbol
	   && handled_component_p (TREE_OPERAND (addr.symbol, 0)))
    {
      poly_int64 offset;
      addr.symbol = build_fold_addr_expr
		      (get_addr_base_and_unit_offset
		         (TREE_OPERAND (addr.symbol, 0), &offset));
      addr.offset = int_const_binop (PLUS_EXPR,
				     addr.offset, size_int (offset));
      changed = true;
    }

  /* A constant index contributes index * step to the offset.  */
  if (addr.index && TREE_CODE (addr.index) == INTEGER_CST)
    {
      off = addr.index;
      if (addr.step)
	{
	  off = fold_binary_to_constant (MULT_EXPR, sizetype,
					 off, addr.step);
	  addr.step = NULL_TREE;
	}

      addr.offset = fold_binary_to_constant (PLUS_EXPR,
					     TREE_TYPE (addr.offset),
					     addr.offset, off);
      addr.index = NULL_TREE;
      changed = true;
    }

  if (!changed)
    return NULL_TREE;

  /* If we have propagated something into this TARGET_MEM_REF and thus
     ended up folding it, always create a new TARGET_MEM_REF regardless
     if it is valid in this form on the target - the propagation result
     wouldn't be anyway.  */
  new_ref = create_mem_ref_raw (TREE_TYPE (ref),
				TREE_TYPE (addr.offset), &addr, false);
  TREE_SIDE_EFFECTS (new_ref) = TREE_SIDE_EFFECTS (ref);
  TREE_THIS_VOLATILE (new_ref) = TREE_THIS_VOLATILE (ref);
  return new_ref;
}
1143
fa9863e7
KV
1144/* Return the preferred index scale factor for accessing memory of mode
1145 MEM_MODE in the address space of pointer BASE. Assume that we're
1146 optimizing for speed if SPEED is true and for size otherwise. */
1147unsigned int
1148preferred_mem_scale_factor (tree base, machine_mode mem_mode,
1149 bool speed)
1150{
89649081
KV
1151 /* For BLKmode, we can't do anything so return 1. */
1152 if (mem_mode == BLKmode)
1153 return 1;
1154
fa9863e7
KV
1155 struct mem_address parts = {};
1156 addr_space_t as = TYPE_ADDR_SPACE (TREE_TYPE (base));
1157 unsigned int fact = GET_MODE_UNIT_SIZE (mem_mode);
1158
1159 /* Addressing mode "base + index". */
1160 parts.index = integer_one_node;
1161 parts.base = integer_one_node;
1162 rtx addr = addr_for_mem_ref (&parts, as, false);
1163 unsigned cost = address_cost (addr, mem_mode, as, speed);
1164
1165 /* Addressing mode "base + index << scale". */
1166 parts.step = wide_int_to_tree (sizetype, fact);
1167 addr = addr_for_mem_ref (&parts, as, false);
1168 unsigned new_cost = address_cost (addr, mem_mode, as, speed);
1169
1170 /* Compare the cost of an address with an unscaled index with
1171 a scaled index and return factor if useful. */
1172 if (new_cost < cost)
1173 return GET_MODE_UNIT_SIZE (mem_mode);
1174 return 1;
1175}
1176
ac182688
ZD
1177/* Dump PARTS to FILE. */
1178
1179extern void dump_mem_address (FILE *, struct mem_address *);
1180void
1181dump_mem_address (FILE *file, struct mem_address *parts)
1182{
1183 if (parts->symbol)
1184 {
1185 fprintf (file, "symbol: ");
23a534a1 1186 print_generic_expr (file, TREE_OPERAND (parts->symbol, 0), TDF_SLIM);
ac182688
ZD
1187 fprintf (file, "\n");
1188 }
1189 if (parts->base)
1190 {
1191 fprintf (file, "base: ");
1192 print_generic_expr (file, parts->base, TDF_SLIM);
1193 fprintf (file, "\n");
1194 }
1195 if (parts->index)
1196 {
1197 fprintf (file, "index: ");
1198 print_generic_expr (file, parts->index, TDF_SLIM);
1199 fprintf (file, "\n");
1200 }
1201 if (parts->step)
1202 {
1203 fprintf (file, "step: ");
1204 print_generic_expr (file, parts->step, TDF_SLIM);
1205 fprintf (file, "\n");
1206 }
1207 if (parts->offset)
1208 {
1209 fprintf (file, "offset: ");
1210 print_generic_expr (file, parts->offset, TDF_SLIM);
1211 fprintf (file, "\n");
1212 }
1213}
1214
1215#include "gt-tree-ssa-address.h"