/* Memory address lowering and addressing mode selection.
   Copyright (C) 2004-2017 Free Software Foundation, Inc.

This file is part of GCC.

GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */

/* Utility functions for manipulating TARGET_MEM_REFs -- tree expressions
   that directly map to addressing modes of the target.  */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "memmodel.h"
32 #include "stringpool.h"
33 #include "tree-vrp.h"
34 #include "tree-ssanames.h"
35 #include "expmed.h"
36 #include "insn-config.h"
37 #include "emit-rtl.h"
38 #include "recog.h"
39 #include "tree-pretty-print.h"
40 #include "fold-const.h"
41 #include "stor-layout.h"
42 #include "gimple-iterator.h"
43 #include "gimplify-me.h"
44 #include "tree-ssa-loop-ivopts.h"
45 #include "expr.h"
46 #include "tree-dfa.h"
47 #include "dumpfile.h"
48 #include "tree-affine.h"
49 #include "gimplify.h"
50
51 /* FIXME: We compute address costs using RTL. */
52 #include "tree-ssa-address.h"
53
/* TODO -- handling of symbols (according to Richard Henderson's
   comments, http://gcc.gnu.org/ml/gcc-patches/2005-04/msg00949.html):

   There are at least 5 different kinds of symbols that we can run up against:

     (1) binds_local_p, small data area.
     (2) binds_local_p, e.g. local statics
     (3) !binds_local_p, e.g. global variables
     (4) thread local, local_exec
     (5) thread local, !local_exec

   Now, (1) won't appear often in an array context, but it certainly can.
   All you have to do is set -GN high enough, or explicitly mark any
   random object __attribute__((section (".sdata"))).

   All of these affect whether or not a symbol is in fact a valid address.
   The only one tested here is (3).  And that result may very well
   be incorrect for (4) or (5).

   An incorrect result here does not cause incorrect results from the
   back end, because the expander in expr.c validates the address.  However,
   it would be nice to improve the handling here in order to produce more
   precise results.  */

78 /* A "template" for memory address, used to determine whether the address is
79 valid for mode. */
80
struct GTY (()) mem_addr_template {
  rtx ref;                      /* The template.  */
  rtx * GTY ((skip)) step_p;    /* The point in template where the step should be
                                   filled in.  */
  rtx * GTY ((skip)) off_p;     /* The point in template where the offset should
                                   be filled in.  */
};


/* The templates.  Each of the low five bits of the index corresponds to one
   component of TARGET_MEM_REF being present, while the high bits identify
   the address space.  See TEMPL_IDX.  */

static GTY(()) vec<mem_addr_template, va_gc> *mem_addr_template_list;

#define TEMPL_IDX(AS, SYMBOL, BASE, INDEX, STEP, OFFSET) \
  (((int) (AS) << 5) \
   | ((SYMBOL != 0) << 4) \
   | ((BASE != 0) << 3) \
   | ((INDEX != 0) << 2) \
   | ((STEP != 0) << 1) \
   | (OFFSET != 0))

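/* A worked example (illustration only, not from the original sources):
   an address in address space 0 that has a base, an index and a step but
   no symbol or offset selects template index
     TEMPL_IDX (0, NULL, base, index, step, NULL)
       == (0 << 5) | (0 << 4) | (1 << 3) | (1 << 2) | (1 << 1) | 0 == 14.  */
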
/* Stores the address for a memory reference with parameters SYMBOL, BASE,
   INDEX, STEP and OFFSET to *ADDR, using address mode ADDRESS_MODE.  Stores
   pointers to where the step is placed to *STEP_P and the offset to
   *OFFSET_P.  */

static void
gen_addr_rtx (machine_mode address_mode,
	      rtx symbol, rtx base, rtx index, rtx step, rtx offset,
	      rtx *addr, rtx **step_p, rtx **offset_p)
{
  rtx act_elem;

  *addr = NULL_RTX;
  if (step_p)
    *step_p = NULL;
  if (offset_p)
    *offset_p = NULL;

  if (index && index != const0_rtx)
    {
      act_elem = index;
      if (step)
	{
	  act_elem = gen_rtx_MULT (address_mode, act_elem, step);

	  if (step_p)
	    *step_p = &XEXP (act_elem, 1);
	}

      *addr = act_elem;
    }

  if (base && base != const0_rtx)
    {
      if (*addr)
	*addr = simplify_gen_binary (PLUS, address_mode, base, *addr);
      else
	*addr = base;
    }

  if (symbol)
    {
      act_elem = symbol;
      if (offset)
	{
	  act_elem = gen_rtx_PLUS (address_mode, act_elem, offset);

	  if (offset_p)
	    *offset_p = &XEXP (act_elem, 1);

	  if (GET_CODE (symbol) == SYMBOL_REF
	      || GET_CODE (symbol) == LABEL_REF
	      || GET_CODE (symbol) == CONST)
	    act_elem = gen_rtx_CONST (address_mode, act_elem);
	}

      if (*addr)
	*addr = gen_rtx_PLUS (address_mode, *addr, act_elem);
      else
	*addr = act_elem;
    }
  else if (offset)
    {
      if (*addr)
	{
	  *addr = gen_rtx_PLUS (address_mode, *addr, offset);
	  if (offset_p)
	    *offset_p = &XEXP (*addr, 1);
	}
      else
	{
	  *addr = offset;
	  if (offset_p)
	    *offset_p = addr;
	}
    }

  if (!*addr)
    *addr = const0_rtx;
}

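/* To illustrate the routine above (a sketch, not normative): with all of
   SYMBOL, BASE, INDEX, STEP and OFFSET present it builds RTL of roughly
   the shape

     (plus (plus (mult index step) base) (const (plus symbol offset)))

   with *STEP_P and *OFFSET_P pointing at the step and offset operands,
   which is what lets the template cache below patch new values in
   place.  */
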
/* Returns the address for a TARGET_MEM_REF with parameters given by ADDR
   in address space AS.
   If REALLY_EXPAND is false, just make fake registers instead
   of really expanding the operands, and perform the expansion in-place
   by using one of the "templates".  */

rtx
addr_for_mem_ref (struct mem_address *addr, addr_space_t as,
		  bool really_expand)
{
  scalar_int_mode address_mode = targetm.addr_space.address_mode (as);
  scalar_int_mode pointer_mode = targetm.addr_space.pointer_mode (as);
  rtx address, sym, bse, idx, st, off;
  struct mem_addr_template *templ;

  if (addr->step && !integer_onep (addr->step))
    st = immed_wide_int_const (wi::to_wide (addr->step), pointer_mode);
  else
    st = NULL_RTX;

  if (addr->offset && !integer_zerop (addr->offset))
    {
      poly_offset_int dc
	= poly_offset_int::from (wi::to_poly_wide (addr->offset), SIGNED);
      off = immed_wide_int_const (dc, pointer_mode);
    }
  else
    off = NULL_RTX;

  if (!really_expand)
    {
      unsigned int templ_index
	= TEMPL_IDX (as, addr->symbol, addr->base, addr->index, st, off);

      if (templ_index >= vec_safe_length (mem_addr_template_list))
	vec_safe_grow_cleared (mem_addr_template_list, templ_index + 1);

      /* Reuse the templates for addresses, so that we do not waste memory.  */
      templ = &(*mem_addr_template_list)[templ_index];
      if (!templ->ref)
	{
	  sym = (addr->symbol ?
		 gen_rtx_SYMBOL_REF (pointer_mode, ggc_strdup ("test_symbol"))
		 : NULL_RTX);
	  bse = (addr->base ?
		 gen_raw_REG (pointer_mode, LAST_VIRTUAL_REGISTER + 1)
		 : NULL_RTX);
	  idx = (addr->index ?
		 gen_raw_REG (pointer_mode, LAST_VIRTUAL_REGISTER + 2)
		 : NULL_RTX);

	  gen_addr_rtx (pointer_mode, sym, bse, idx,
			st ? const0_rtx : NULL_RTX,
			off ? const0_rtx : NULL_RTX,
			&templ->ref,
			&templ->step_p,
			&templ->off_p);
	}

      if (st)
	*templ->step_p = st;
      if (off)
	*templ->off_p = off;

      return templ->ref;
    }

  /* Otherwise really expand the expressions.  */
  sym = (addr->symbol
	 ? expand_expr (addr->symbol, NULL_RTX, pointer_mode, EXPAND_NORMAL)
	 : NULL_RTX);
  bse = (addr->base
	 ? expand_expr (addr->base, NULL_RTX, pointer_mode, EXPAND_NORMAL)
	 : NULL_RTX);
  idx = (addr->index
	 ? expand_expr (addr->index, NULL_RTX, pointer_mode, EXPAND_NORMAL)
	 : NULL_RTX);

  gen_addr_rtx (pointer_mode, sym, bse, idx, st, off, &address, NULL, NULL);
  if (pointer_mode != address_mode)
    address = convert_memory_address (address_mode, address);
  return address;
}

/* Implements addr_for_mem_ref () directly from a tree, which avoids
   exporting the mem_address structure.  */

rtx
addr_for_mem_ref (tree exp, addr_space_t as, bool really_expand)
{
  struct mem_address addr;
  get_address_description (exp, &addr);
  return addr_for_mem_ref (&addr, as, really_expand);
}

/* Returns the address of MEM_REF in TYPE.  */

tree
tree_mem_ref_addr (tree type, tree mem_ref)
{
  tree addr;
  tree act_elem;
  tree step = TMR_STEP (mem_ref), offset = TMR_OFFSET (mem_ref);
  tree addr_base = NULL_TREE, addr_off = NULL_TREE;

  addr_base = fold_convert (type, TMR_BASE (mem_ref));

  act_elem = TMR_INDEX (mem_ref);
  if (act_elem)
    {
      if (step)
	act_elem = fold_build2 (MULT_EXPR, TREE_TYPE (act_elem),
				act_elem, step);
      addr_off = act_elem;
    }

  act_elem = TMR_INDEX2 (mem_ref);
  if (act_elem)
    {
      if (addr_off)
	addr_off = fold_build2 (PLUS_EXPR, TREE_TYPE (addr_off),
				addr_off, act_elem);
      else
	addr_off = act_elem;
    }

  if (offset && !integer_zerop (offset))
    {
      if (addr_off)
	addr_off = fold_build2 (PLUS_EXPR, TREE_TYPE (addr_off), addr_off,
				fold_convert (TREE_TYPE (addr_off), offset));
      else
	addr_off = offset;
    }

  if (addr_off)
    addr = fold_build_pointer_plus (addr_base, addr_off);
  else
    addr = addr_base;

  return addr;
}

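/* For instance (a hypothetical reference, for illustration): a
   TARGET_MEM_REF with base P, index I, step 4, no second index and
   offset 8 yields, via the function above, an address equivalent to
     P + (sizetype) I * 4 + 8.  */
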
/* Returns true if a memory reference in MODE and with parameters given by
   ADDR is valid on the current target.  */

bool
valid_mem_ref_p (machine_mode mode, addr_space_t as,
		 struct mem_address *addr)
{
  rtx address;

  address = addr_for_mem_ref (addr, as, false);
  if (!address)
    return false;

  return memory_address_addr_space_p (mode, address, as);
}

/* Checks whether a TARGET_MEM_REF with type TYPE and parameters given by
   ADDR is valid on the current target and if so, creates and returns the
   TARGET_MEM_REF.  If VERIFY is false, omit the verification step.  */

static tree
create_mem_ref_raw (tree type, tree alias_ptr_type, struct mem_address *addr,
		    bool verify)
{
  tree base, index2;

  if (verify
      && !valid_mem_ref_p (TYPE_MODE (type), TYPE_ADDR_SPACE (type), addr))
    return NULL_TREE;

  if (addr->step && integer_onep (addr->step))
    addr->step = NULL_TREE;

  if (addr->offset)
    addr->offset = fold_convert (alias_ptr_type, addr->offset);
  else
    addr->offset = build_int_cst (alias_ptr_type, 0);

  if (addr->symbol)
    {
      base = addr->symbol;
      index2 = addr->base;
    }
  else if (addr->base
	   && POINTER_TYPE_P (TREE_TYPE (addr->base)))
    {
      base = addr->base;
      index2 = NULL_TREE;
    }
  else
    {
      base = build_int_cst (build_pointer_type (type), 0);
      index2 = addr->base;
    }
  /* If possible use a plain MEM_REF instead of a TARGET_MEM_REF.
     ??? As IVOPTs does not follow the restrictions on where the base
     pointer may point to, create a MEM_REF only if we know that the
     base is valid.  */
  if ((TREE_CODE (base) == ADDR_EXPR || TREE_CODE (base) == INTEGER_CST)
      && (!index2 || integer_zerop (index2))
      && (!addr->index || integer_zerop (addr->index)))
    return fold_build2 (MEM_REF, type, base, addr->offset);

  return build5 (TARGET_MEM_REF, type,
		 base, addr->offset, addr->index, addr->step, index2);
}

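/* As an example of the MEM_REF shortcut above (names hypothetical): if
   ADDR consists only of the address of a global A plus a constant offset
   4, the result is the plain MEM_REF [&A + 4] rather than a
   TARGET_MEM_REF, because the base is an ADDR_EXPR and both indices are
   absent.  */
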
/* Returns true if OBJ is an object whose address is a link-time
   constant.  */

static bool
fixed_address_object_p (tree obj)
{
  return (VAR_P (obj)
	  && (TREE_STATIC (obj) || DECL_EXTERNAL (obj))
	  && ! DECL_DLLIMPORT_P (obj));
}

/* If ADDR contains an address of an object that is a link-time constant,
   move it to PARTS->symbol.  */

void
move_fixed_address_to_symbol (struct mem_address *parts, aff_tree *addr)
{
  unsigned i;
  tree val = NULL_TREE;

  for (i = 0; i < addr->n; i++)
    {
      if (addr->elts[i].coef != 1)
	continue;

      val = addr->elts[i].val;
      if (TREE_CODE (val) == ADDR_EXPR
	  && fixed_address_object_p (TREE_OPERAND (val, 0)))
	break;
    }

  if (i == addr->n)
    return;

  parts->symbol = val;
  aff_combination_remove_elt (addr, i);
}

/* If ADDR contains an instance of BASE_HINT, move it to PARTS->base and
   return true.  */

static bool
move_hint_to_base (tree type, struct mem_address *parts, tree base_hint,
		   aff_tree *addr)
{
  unsigned i;
  tree val = NULL_TREE;
  int qual;

  for (i = 0; i < addr->n; i++)
    {
      if (addr->elts[i].coef != 1)
	continue;

      val = addr->elts[i].val;
      if (operand_equal_p (val, base_hint, 0))
	break;
    }

  if (i == addr->n)
    return false;

  /* Cast the value to an appropriate pointer type.  We cannot use a
     pointer to TYPE directly, as the back-end will assume registers of
     pointer type are aligned, and just the base itself may not actually
     be.  We use a void pointer to the type's address space instead.  */
  qual = ENCODE_QUAL_ADDR_SPACE (TYPE_ADDR_SPACE (type));
  type = build_qualified_type (void_type_node, qual);
  parts->base = fold_convert (build_pointer_type (type), val);
  aff_combination_remove_elt (addr, i);
  return true;
}

/* If ADDR contains an address of a dereferenced pointer, move it to
   PARTS->base.  */

static void
move_pointer_to_base (struct mem_address *parts, aff_tree *addr)
{
  unsigned i;
  tree val = NULL_TREE;

  for (i = 0; i < addr->n; i++)
    {
      if (addr->elts[i].coef != 1)
	continue;

      val = addr->elts[i].val;
      if (POINTER_TYPE_P (TREE_TYPE (val)))
	break;
    }

  if (i == addr->n)
    return;

  parts->base = val;
  aff_combination_remove_elt (addr, i);
}

/* Moves the loop-variant part V in the linear address ADDR to be the
   index of PARTS.  */

static void
move_variant_to_index (struct mem_address *parts, aff_tree *addr, tree v)
{
  unsigned i;
  tree val = NULL_TREE;

  gcc_assert (!parts->index);
  for (i = 0; i < addr->n; i++)
    {
      val = addr->elts[i].val;
      if (operand_equal_p (val, v, 0))
	break;
    }

  if (i == addr->n)
    return;

  parts->index = fold_convert (sizetype, val);
  parts->step = wide_int_to_tree (sizetype, addr->elts[i].coef);
  aff_combination_remove_elt (addr, i);
}

/* Adds ELT to PARTS.  */

static void
add_to_parts (struct mem_address *parts, tree elt)
{
  tree type;

  if (!parts->index)
    {
      parts->index = fold_convert (sizetype, elt);
      return;
    }

  if (!parts->base)
    {
      parts->base = elt;
      return;
    }

  /* Add ELT to base.  */
  type = TREE_TYPE (parts->base);
  if (POINTER_TYPE_P (type))
    parts->base = fold_build_pointer_plus (parts->base, elt);
  else
    parts->base = fold_build2 (PLUS_EXPR, type, parts->base, elt);
}

/* Returns true if multiplying by RATIO is allowed in an address.  Test the
   validity for a memory reference accessing memory of mode MODE in address
   space AS.  */

static bool
multiplier_allowed_in_address_p (HOST_WIDE_INT ratio, machine_mode mode,
				 addr_space_t as)
{
#define MAX_RATIO 128
  unsigned int data_index = (int) as * MAX_MACHINE_MODE + (int) mode;
  static vec<sbitmap> valid_mult_list;
  sbitmap valid_mult;

  if (data_index >= valid_mult_list.length ())
    valid_mult_list.safe_grow_cleared (data_index + 1);

  valid_mult = valid_mult_list[data_index];
  if (!valid_mult)
    {
      machine_mode address_mode = targetm.addr_space.address_mode (as);
      rtx reg1 = gen_raw_REG (address_mode, LAST_VIRTUAL_REGISTER + 1);
      rtx reg2 = gen_raw_REG (address_mode, LAST_VIRTUAL_REGISTER + 2);
      rtx addr, scaled;
      HOST_WIDE_INT i;

      valid_mult = sbitmap_alloc (2 * MAX_RATIO + 1);
      bitmap_clear (valid_mult);
      scaled = gen_rtx_fmt_ee (MULT, address_mode, reg1, NULL_RTX);
      addr = gen_rtx_fmt_ee (PLUS, address_mode, scaled, reg2);
      for (i = -MAX_RATIO; i <= MAX_RATIO; i++)
	{
	  XEXP (scaled, 1) = gen_int_mode (i, address_mode);
	  if (memory_address_addr_space_p (mode, addr, as)
	      || memory_address_addr_space_p (mode, scaled, as))
	    bitmap_set_bit (valid_mult, i + MAX_RATIO);
	}

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, "  allowed multipliers:");
	  for (i = -MAX_RATIO; i <= MAX_RATIO; i++)
	    if (bitmap_bit_p (valid_mult, i + MAX_RATIO))
	      fprintf (dump_file, " %d", (int) i);
	  fprintf (dump_file, "\n");
	  fprintf (dump_file, "\n");
	}

      valid_mult_list[data_index] = valid_mult;
    }

  if (ratio > MAX_RATIO || ratio < -MAX_RATIO)
    return false;

  return bitmap_bit_p (valid_mult, ratio + MAX_RATIO);
}

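/* On i386, for instance, the probing loop above would record roughly the
   ratios 1, 2, 4 and 8 -- the scale factors the hardware addressing
   modes accept -- so multiplier_allowed_in_address_p would return false
   for, say, ratio 3 in that address space.  */
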
/* Finds the most expensive multiplication in ADDR that can be
   expressed in an addressing mode and moves the corresponding
   element(s) to PARTS.  */

static void
most_expensive_mult_to_index (tree type, struct mem_address *parts,
			      aff_tree *addr, bool speed)
{
  addr_space_t as = TYPE_ADDR_SPACE (type);
  machine_mode address_mode = targetm.addr_space.address_mode (as);
  HOST_WIDE_INT coef;
  unsigned best_mult_cost = 0, acost;
  tree mult_elt = NULL_TREE, elt;
  unsigned i, j;
  enum tree_code op_code;

  offset_int best_mult = 0;
  for (i = 0; i < addr->n; i++)
    {
      if (!wi::fits_shwi_p (addr->elts[i].coef))
	continue;

      coef = addr->elts[i].coef.to_shwi ();
      if (coef == 1
	  || !multiplier_allowed_in_address_p (coef, TYPE_MODE (type), as))
	continue;

      acost = mult_by_coeff_cost (coef, address_mode, speed);

      if (acost > best_mult_cost)
	{
	  best_mult_cost = acost;
	  best_mult = offset_int::from (addr->elts[i].coef, SIGNED);
	}
    }

  if (!best_mult_cost)
    return;

  /* Collect elements multiplied by best_mult.  */
  for (i = j = 0; i < addr->n; i++)
    {
      offset_int amult = offset_int::from (addr->elts[i].coef, SIGNED);
      offset_int amult_neg = -wi::sext (amult, TYPE_PRECISION (addr->type));

      if (amult == best_mult)
	op_code = PLUS_EXPR;
      else if (amult_neg == best_mult)
	op_code = MINUS_EXPR;
      else
	{
	  addr->elts[j] = addr->elts[i];
	  j++;
	  continue;
	}

      elt = fold_convert (sizetype, addr->elts[i].val);
      if (mult_elt)
	mult_elt = fold_build2 (op_code, sizetype, mult_elt, elt);
      else if (op_code == PLUS_EXPR)
	mult_elt = elt;
      else
	mult_elt = fold_build1 (NEGATE_EXPR, sizetype, elt);
    }
  addr->n = j;

  parts->index = mult_elt;
  parts->step = wide_int_to_tree (sizetype, best_mult);
}

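/* A small worked example of the collection loop (hypothetical names):
   for the affine combination 4*i + 4*j + k, if multiplying by 4 is
   allowed and is the most expensive multiplication present, i and j are
   combined into index = (sizetype) i + (sizetype) j with step 4, while k
   stays in ADDR for add_to_parts to distribute.  */
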
/* Splits the address ADDR for a memory access of type TYPE into PARTS.
   If BASE_HINT is non-NULL, it specifies an SSA name to be used
   preferentially as the base of the reference, and IV_CAND is the
   selected iv candidate used in ADDR.  Store true in *VAR_IN_BASE if the
   variant part of the address is split into PARTS.base.

   TODO -- be more clever about the distribution of the elements of ADDR
   to PARTS.  Some architectures do not support anything but a single
   register in an address, possibly with a small integer offset; while
   create_mem_ref will simplify the address to an acceptable shape
   later, it would be more efficient to know that asking for complicated
   addressing modes is useless.  */

static void
addr_to_parts (tree type, aff_tree *addr, tree iv_cand, tree base_hint,
	       struct mem_address *parts, bool *var_in_base, bool speed)
{
  tree part;
  unsigned i;

  parts->symbol = NULL_TREE;
  parts->base = NULL_TREE;
  parts->index = NULL_TREE;
  parts->step = NULL_TREE;

  if (maybe_ne (addr->offset, 0))
    parts->offset = wide_int_to_tree (sizetype, addr->offset);
  else
    parts->offset = NULL_TREE;

  /* Try to find a symbol.  */
  move_fixed_address_to_symbol (parts, addr);

  /* Since at the moment there is no reliable way to distinguish between
     a pointer and its offset, we guess whether the variant part is the
     pointer.  */
  *var_in_base = (base_hint != NULL && parts->symbol == NULL);
  if (*var_in_base)
    *var_in_base = move_hint_to_base (type, parts, base_hint, addr);
  else
    move_variant_to_index (parts, addr, iv_cand);

  /* First move the most expensive feasible multiplication to index.  */
  if (!parts->index)
    most_expensive_mult_to_index (type, parts, addr, speed);

  /* Move pointer into base.  */
  if (!parts->symbol && !parts->base)
    move_pointer_to_base (parts, addr);

  /* Then try to process the remaining elements.  */
  for (i = 0; i < addr->n; i++)
    {
      part = fold_convert (sizetype, addr->elts[i].val);
      if (addr->elts[i].coef != 1)
	part = fold_build2 (MULT_EXPR, sizetype, part,
			    wide_int_to_tree (sizetype, addr->elts[i].coef));
      add_to_parts (parts, part);
    }
  if (addr->rest)
    add_to_parts (parts, fold_convert (sizetype, addr->rest));
}

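/* A hypothetical decomposition, for illustration: for an access whose
   affine form is &arr + 4 * i_1 + 16, with no base hint and IV_CAND
   i_1, the routine above produces
     symbol = &arr, index = (sizetype) i_1, step = 4, offset = 16
   with nothing left over in ADDR.  */
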
/* Force the components of PARTS into registers as needed.  */

static void
gimplify_mem_ref_parts (gimple_stmt_iterator *gsi, struct mem_address *parts)
{
  if (parts->base)
    parts->base = force_gimple_operand_gsi_1 (gsi, parts->base,
					      is_gimple_mem_ref_addr, NULL_TREE,
					      true, GSI_SAME_STMT);
  if (parts->index)
    parts->index = force_gimple_operand_gsi (gsi, parts->index,
					     true, NULL_TREE,
					     true, GSI_SAME_STMT);
}

/* Creates and returns a TARGET_MEM_REF for address ADDR.  If necessary,
   computations are emitted in front of GSI.  TYPE is the type of the
   created memory reference.  IV_CAND is the selected iv candidate in ADDR,
   and BASE_HINT is non-NULL if IV_CAND comes from a base address
   object.  */

tree
create_mem_ref (gimple_stmt_iterator *gsi, tree type, aff_tree *addr,
		tree alias_ptr_type, tree iv_cand, tree base_hint, bool speed)
{
  bool var_in_base;
  tree mem_ref, tmp;
  struct mem_address parts;

  addr_to_parts (type, addr, iv_cand, base_hint, &parts, &var_in_base, speed);
  gimplify_mem_ref_parts (gsi, &parts);
  mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
  if (mem_ref)
    return mem_ref;

  /* The expression is too complicated.  Try making it simpler.  */

  /* Merge symbol into other parts.  */
  if (parts.symbol)
    {
      tmp = parts.symbol;
      parts.symbol = NULL_TREE;
      gcc_assert (is_gimple_val (tmp));

      if (parts.base)
	{
	  gcc_assert (useless_type_conversion_p (sizetype,
						 TREE_TYPE (parts.base)));

	  if (parts.index)
	    {
	      /* Add the symbol to base, possibly forcing it into a
		 register.  */
	      tmp = fold_build_pointer_plus (tmp, parts.base);
	      tmp = force_gimple_operand_gsi_1 (gsi, tmp,
						is_gimple_mem_ref_addr,
						NULL_TREE, true,
						GSI_SAME_STMT);
	    }
	  else
	    {
	      /* Move base to index, then move the symbol to base.  */
	      parts.index = parts.base;
	    }
	  parts.base = tmp;
	}
      else
	parts.base = tmp;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  /* Move multiplication to index by transforming address expression:
       [... + index << step + ...]
     into:
       index' = index << step;
       [... + index' + ...].  */
  if (parts.step && !integer_onep (parts.step))
    {
      gcc_assert (parts.index);
      parts.index = force_gimple_operand_gsi (gsi,
					      fold_build2 (MULT_EXPR, sizetype,
							   parts.index,
							   parts.step),
					      true, NULL_TREE, true,
					      GSI_SAME_STMT);
      parts.step = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  /* Add offset to invariant part by transforming address expression:
       [base + index + offset]
     into:
       base' = base + offset;
       [base' + index]
     or:
       index' = index + offset;
       [base + index']
     depending on which one is invariant.  */
  if (parts.offset && !integer_zerop (parts.offset))
    {
      tree old_base = unshare_expr (parts.base);
      tree old_index = unshare_expr (parts.index);
      tree old_offset = unshare_expr (parts.offset);

      tmp = parts.offset;
      parts.offset = NULL_TREE;
      /* Add offset to invariant part.  */
      if (!var_in_base)
	{
	  if (parts.base)
	    {
	      tmp = fold_build_pointer_plus (parts.base, tmp);
	      tmp = force_gimple_operand_gsi_1 (gsi, tmp,
						is_gimple_mem_ref_addr,
						NULL_TREE, true,
						GSI_SAME_STMT);
	    }
	  parts.base = tmp;
	}
      else
	{
	  if (parts.index)
	    {
	      tmp = fold_build_pointer_plus (parts.index, tmp);
	      tmp = force_gimple_operand_gsi_1 (gsi, tmp,
						is_gimple_mem_ref_addr,
						NULL_TREE, true,
						GSI_SAME_STMT);
	    }
	  parts.index = tmp;
	}

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;

      /* Restore parts.base, index and offset so that we can check whether
	 the [base + offset] addressing mode is supported in the next step.
	 This is necessary for targets that only support [base + offset],
	 but not [base + index], addressing modes.  */
      parts.base = old_base;
      parts.index = old_index;
      parts.offset = old_offset;
    }

  /* Transform [base + index + ...] into:
       base' = base + index;
       [base' + ...].  */
  if (parts.index)
    {
      tmp = parts.index;
      parts.index = NULL_TREE;
      /* Add index to base.  */
      if (parts.base)
	{
	  tmp = fold_build_pointer_plus (parts.base, tmp);
	  tmp = force_gimple_operand_gsi_1 (gsi, tmp,
					    is_gimple_mem_ref_addr,
					    NULL_TREE, true, GSI_SAME_STMT);
	}
      parts.base = tmp;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  /* Transform [base + offset] into:
       base' = base + offset;
       [base'].  */
  if (parts.offset && !integer_zerop (parts.offset))
    {
      tmp = parts.offset;
      parts.offset = NULL_TREE;
      /* Add offset to base.  */
      if (parts.base)
	{
	  tmp = fold_build_pointer_plus (parts.base, tmp);
	  tmp = force_gimple_operand_gsi_1 (gsi, tmp,
					    is_gimple_mem_ref_addr,
					    NULL_TREE, true, GSI_SAME_STMT);
	}
      parts.base = tmp;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  /* Verify that the address is in the simplest possible shape
     (only a register).  If we cannot create such a memory reference,
     something is really wrong.  */
  gcc_assert (parts.symbol == NULL_TREE);
  gcc_assert (parts.index == NULL_TREE);
  gcc_assert (!parts.step || integer_onep (parts.step));
  gcc_assert (!parts.offset || integer_zerop (parts.offset));
  gcc_unreachable ();
}

/* Copies components of the address from OP to ADDR.  */

void
get_address_description (tree op, struct mem_address *addr)
{
  if (TREE_CODE (TMR_BASE (op)) == ADDR_EXPR)
    {
      addr->symbol = TMR_BASE (op);
      addr->base = TMR_INDEX2 (op);
    }
  else
    {
      addr->symbol = NULL_TREE;
      if (TMR_INDEX2 (op))
	{
	  gcc_assert (integer_zerop (TMR_BASE (op)));
	  addr->base = TMR_INDEX2 (op);
	}
      else
	addr->base = TMR_BASE (op);
    }
  addr->index = TMR_INDEX (op);
  addr->step = TMR_STEP (op);
  addr->offset = TMR_OFFSET (op);
}

/* Copies the reference information from OLD_REF to NEW_REF, where
   NEW_REF should be either a MEM_REF or a TARGET_MEM_REF.  */

void
copy_ref_info (tree new_ref, tree old_ref)
{
  tree new_ptr_base = NULL_TREE;

  gcc_assert (TREE_CODE (new_ref) == MEM_REF
	      || TREE_CODE (new_ref) == TARGET_MEM_REF);

  TREE_SIDE_EFFECTS (new_ref) = TREE_SIDE_EFFECTS (old_ref);
  TREE_THIS_VOLATILE (new_ref) = TREE_THIS_VOLATILE (old_ref);

  new_ptr_base = TREE_OPERAND (new_ref, 0);

  /* We can transfer points-to information from an old pointer
     or decl base to the new one.  */
  if (new_ptr_base
      && TREE_CODE (new_ptr_base) == SSA_NAME
      && !SSA_NAME_PTR_INFO (new_ptr_base))
    {
      tree base = get_base_address (old_ref);
      if (!base)
	;
      else if ((TREE_CODE (base) == MEM_REF
		|| TREE_CODE (base) == TARGET_MEM_REF)
	       && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
	       && SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0)))
	{
	  struct ptr_info_def *new_pi;
	  unsigned int align, misalign;

	  duplicate_ssa_name_ptr_info
	    (new_ptr_base, SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0)));
	  new_pi = SSA_NAME_PTR_INFO (new_ptr_base);
	  /* We have to be careful about transferring alignment
	     information.  */
	  if (get_ptr_info_alignment (new_pi, &align, &misalign)
	      && TREE_CODE (old_ref) == MEM_REF
	      && !(TREE_CODE (new_ref) == TARGET_MEM_REF
		   && (TMR_INDEX2 (new_ref)
		       /* TODO: The conditions below can be relaxed if
			  TMR_INDEX is an induction variable and its
			  initial value and step are aligned.  */
		       || (TMR_INDEX (new_ref) && !TMR_STEP (new_ref))
		       || (TMR_STEP (new_ref)
			   && (TREE_INT_CST_LOW (TMR_STEP (new_ref))
			       < align)))))
	    {
	      poly_uint64 inc = (mem_ref_offset (old_ref)
				 - mem_ref_offset (new_ref)).force_uhwi ();
	      adjust_ptr_info_misalignment (new_pi, inc);
	    }
	  else
	    mark_ptr_info_alignment_unknown (new_pi);
	}
      else if (VAR_P (base)
	       || TREE_CODE (base) == PARM_DECL
	       || TREE_CODE (base) == RESULT_DECL)
	{
	  struct ptr_info_def *pi = get_ptr_info (new_ptr_base);
	  pt_solution_set_var (&pi->pt, base);
	}
    }
}

/* Move constants in TARGET_MEM_REF REF to the offset.  Returns the new
   TARGET_MEM_REF if anything changes, NULL_TREE otherwise.  */

tree
maybe_fold_tmr (tree ref)
{
  struct mem_address addr;
  bool changed = false;
  tree new_ref, off;

  get_address_description (ref, &addr);

  if (addr.base
      && TREE_CODE (addr.base) == INTEGER_CST
      && !integer_zerop (addr.base))
    {
      addr.offset = fold_binary_to_constant (PLUS_EXPR,
					     TREE_TYPE (addr.offset),
					     addr.offset, addr.base);
      addr.base = NULL_TREE;
      changed = true;
    }

  if (addr.symbol
      && TREE_CODE (TREE_OPERAND (addr.symbol, 0)) == MEM_REF)
    {
      addr.offset = fold_binary_to_constant
		      (PLUS_EXPR, TREE_TYPE (addr.offset),
		       addr.offset,
		       TREE_OPERAND (TREE_OPERAND (addr.symbol, 0), 1));
      addr.symbol = TREE_OPERAND (TREE_OPERAND (addr.symbol, 0), 0);
      changed = true;
    }
  else if (addr.symbol
	   && handled_component_p (TREE_OPERAND (addr.symbol, 0)))
    {
      poly_int64 offset;
      addr.symbol = build_fold_addr_expr
		      (get_addr_base_and_unit_offset
			 (TREE_OPERAND (addr.symbol, 0), &offset));
      addr.offset = int_const_binop (PLUS_EXPR,
				     addr.offset, size_int (offset));
      changed = true;
    }

  if (addr.index && TREE_CODE (addr.index) == INTEGER_CST)
    {
      off = addr.index;
      if (addr.step)
	{
	  off = fold_binary_to_constant (MULT_EXPR, sizetype,
					 off, addr.step);
	  addr.step = NULL_TREE;
	}

      addr.offset = fold_binary_to_constant (PLUS_EXPR,
					     TREE_TYPE (addr.offset),
					     addr.offset, off);
      addr.index = NULL_TREE;
      changed = true;
    }

  if (!changed)
    return NULL_TREE;

  /* If we have propagated something into this TARGET_MEM_REF and thus
     ended up folding it, always create a new TARGET_MEM_REF regardless
     of whether it is valid in this form on the target - the propagation
     result wouldn't be valid anyway.  */
  new_ref = create_mem_ref_raw (TREE_TYPE (ref),
				TREE_TYPE (addr.offset), &addr, false);
  TREE_SIDE_EFFECTS (new_ref) = TREE_SIDE_EFFECTS (ref);
  TREE_THIS_VOLATILE (new_ref) = TREE_THIS_VOLATILE (ref);
  return new_ref;
}

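/* For example, if propagation turned TMR_INDEX into the constant 3 while
   TMR_STEP is 4, the code above folds the pair into the offset (adding
   12 to it, dropping index and step) and rebuilds the reference without
   re-validating it against the target.  */
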
/* Dump PARTS to FILE.  */

extern void dump_mem_address (FILE *, struct mem_address *);
void
dump_mem_address (FILE *file, struct mem_address *parts)
{
  if (parts->symbol)
    {
      fprintf (file, "symbol: ");
      print_generic_expr (file, TREE_OPERAND (parts->symbol, 0), TDF_SLIM);
      fprintf (file, "\n");
    }
  if (parts->base)
    {
      fprintf (file, "base: ");
      print_generic_expr (file, parts->base, TDF_SLIM);
      fprintf (file, "\n");
    }
  if (parts->index)
    {
      fprintf (file, "index: ");
      print_generic_expr (file, parts->index, TDF_SLIM);
      fprintf (file, "\n");
    }
  if (parts->step)
    {
      fprintf (file, "step: ");
      print_generic_expr (file, parts->step, TDF_SLIM);
      fprintf (file, "\n");
    }
  if (parts->offset)
    {
      fprintf (file, "offset: ");
      print_generic_expr (file, parts->offset, TDF_SLIM);
      fprintf (file, "\n");
    }
}

#include "gt-tree-ssa-address.h"