]> git.ipfire.org Git - thirdparty/gcc.git/blob - gcc/tree-ssa-address.c
tree-ssa-loop-ivopts.c (multiplier_allowed_in_address_p): Move from ...
[thirdparty/gcc.git] / gcc / tree-ssa-address.c
1 /* Memory address lowering and addressing mode selection.
2 Copyright (C) 2004-2017 Free Software Foundation, Inc.
3
4 This file is part of GCC.
5
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by the
8 Free Software Foundation; either version 3, or (at your option) any
9 later version.
10
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
14 for more details.
15
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
19
20 /* Utility functions for manipulation with TARGET_MEM_REFs -- tree expressions
21 that directly map to addressing modes of the target. */
22
23 #include "config.h"
24 #include "system.h"
25 #include "coretypes.h"
26 #include "backend.h"
27 #include "target.h"
28 #include "rtl.h"
29 #include "tree.h"
30 #include "gimple.h"
31 #include "memmodel.h"
32 #include "stringpool.h"
33 #include "tree-vrp.h"
34 #include "tree-ssanames.h"
35 #include "expmed.h"
36 #include "insn-config.h"
37 #include "emit-rtl.h"
38 #include "recog.h"
39 #include "tree-pretty-print.h"
40 #include "fold-const.h"
41 #include "stor-layout.h"
42 #include "gimple-iterator.h"
43 #include "gimplify-me.h"
44 #include "tree-ssa-loop-ivopts.h"
45 #include "expr.h"
46 #include "tree-dfa.h"
47 #include "dumpfile.h"
48 #include "tree-affine.h"
49
50 /* FIXME: We compute address costs using RTL. */
51 #include "tree-ssa-address.h"
52
/* TODO -- handling of symbols (according to Richard Henderson's
   comments, http://gcc.gnu.org/ml/gcc-patches/2005-04/msg00949.html):
55
56 There are at least 5 different kinds of symbols that we can run up against:
57
58 (1) binds_local_p, small data area.
59 (2) binds_local_p, eg local statics
60 (3) !binds_local_p, eg global variables
61 (4) thread local, local_exec
62 (5) thread local, !local_exec
63
64 Now, (1) won't appear often in an array context, but it certainly can.
65 All you have to do is set -GN high enough, or explicitly mark any
66 random object __attribute__((section (".sdata"))).
67
68 All of these affect whether or not a symbol is in fact a valid address.
69 The only one tested here is (3). And that result may very well
70 be incorrect for (4) or (5).
71
72 An incorrect result here does not cause incorrect results out the
73 back end, because the expander in expr.c validizes the address. However
74 it would be nice to improve the handling here in order to produce more
75 precise results. */
76
/* A "template" for memory address, used to determine whether the address is
   valid for mode.  A template is a skeleton address RTX built once per
   address shape; the step and offset slots are patched in place on reuse
   (see addr_for_mem_ref).  */

struct GTY (()) mem_addr_template {
  rtx ref;			/* The template.  */
  rtx * GTY ((skip)) step_p;	/* The point in template where the step should be
				   filled in.  */
  rtx * GTY ((skip)) off_p;	/* The point in template where the offset should
				   be filled in.  */
};
87
88
/* The templates.  Each of the low five bits of the index corresponds to one
   component of TARGET_MEM_REF being present, while the high bits identify
   the address space.  See TEMPL_IDX.  */

static GTY(()) vec<mem_addr_template, va_gc> *mem_addr_template_list;

/* Compute the index into mem_addr_template_list for address space AS and
   the presence (non-nullness) of the SYMBOL, BASE, INDEX, STEP and OFFSET
   components.  */

#define TEMPL_IDX(AS, SYMBOL, BASE, INDEX, STEP, OFFSET) \
  (((int) (AS) << 5) \
   | ((SYMBOL != 0) << 4) \
   | ((BASE != 0) << 3) \
   | ((INDEX != 0) << 2) \
   | ((STEP != 0) << 1) \
   | (OFFSET != 0))
102
/* Stores address for memory reference with parameters SYMBOL, BASE, INDEX,
   STEP and OFFSET to *ADDR using address mode ADDRESS_MODE.  Stores pointers
   to where step is placed to *STEP_P and offset to *OFFSET_P, so that a
   caller building a reusable template can later patch new step/offset
   values in place.  Any of SYMBOL, BASE, INDEX, STEP, OFFSET may be null,
   in which case the corresponding component is omitted.  */

static void
gen_addr_rtx (machine_mode address_mode,
	      rtx symbol, rtx base, rtx index, rtx step, rtx offset,
	      rtx *addr, rtx **step_p, rtx **offset_p)
{
  rtx act_elem;

  *addr = NULL_RTX;
  if (step_p)
    *step_p = NULL;
  if (offset_p)
    *offset_p = NULL;

  /* Start with INDEX * STEP, remembering where STEP sits inside the MULT
     so it can be replaced later.  */
  if (index && index != const0_rtx)
    {
      act_elem = index;
      if (step)
	{
	  act_elem = gen_rtx_MULT (address_mode, act_elem, step);

	  if (step_p)
	    *step_p = &XEXP (act_elem, 1);
	}

      *addr = act_elem;
    }

  /* Add BASE; simplify_gen_binary applies obvious simplifications.  */
  if (base && base != const0_rtx)
    {
      if (*addr)
	*addr = simplify_gen_binary (PLUS, address_mode, base, *addr);
      else
	*addr = base;
    }

  if (symbol)
    {
      act_elem = symbol;
      if (offset)
	{
	  act_elem = gen_rtx_PLUS (address_mode, act_elem, offset);

	  if (offset_p)
	    *offset_p = &XEXP (act_elem, 1);

	  /* Wrap symbol + offset in CONST so it is recognized as a
	     link-time constant address.  */
	  if (GET_CODE (symbol) == SYMBOL_REF
	      || GET_CODE (symbol) == LABEL_REF
	      || GET_CODE (symbol) == CONST)
	    act_elem = gen_rtx_CONST (address_mode, act_elem);
	}

      if (*addr)
	*addr = gen_rtx_PLUS (address_mode, *addr, act_elem);
      else
	*addr = act_elem;
    }
  else if (offset)
    {
      if (*addr)
	{
	  *addr = gen_rtx_PLUS (address_mode, *addr, offset);
	  if (offset_p)
	    *offset_p = &XEXP (*addr, 1);
	}
      else
	{
	  /* The whole address is just the offset; *OFFSET_P then points
	     at the address slot itself.  */
	  *addr = offset;
	  if (offset_p)
	    *offset_p = addr;
	}
    }

  /* An entirely empty address is represented as (const_int 0).  */
  if (!*addr)
    *addr = const0_rtx;
}
182
/* Returns address for TARGET_MEM_REF with parameters given by ADDR
   in address space AS.
   If REALLY_EXPAND is false, just make fake registers instead
   of really expanding the operands, and perform the expansion in-place
   by using one of the GC-cached "templates" (used by validity checks
   that only need the address's shape, not its real operands).  */

rtx
addr_for_mem_ref (struct mem_address *addr, addr_space_t as,
		  bool really_expand)
{
  machine_mode address_mode = targetm.addr_space.address_mode (as);
  machine_mode pointer_mode = targetm.addr_space.pointer_mode (as);
  rtx address, sym, bse, idx, st, off;
  struct mem_addr_template *templ;

  /* A step of one multiplies by nothing; treat it as absent.  */
  if (addr->step && !integer_onep (addr->step))
    st = immed_wide_int_const (addr->step, pointer_mode);
  else
    st = NULL_RTX;

  if (addr->offset && !integer_zerop (addr->offset))
    {
      /* Interpret the offset as signed for the RTL constant.  */
      offset_int dc = offset_int::from (addr->offset, SIGNED);
      off = immed_wide_int_const (dc, pointer_mode);
    }
  else
    off = NULL_RTX;

  if (!really_expand)
    {
      unsigned int templ_index
	= TEMPL_IDX (as, addr->symbol, addr->base, addr->index, st, off);

      if (templ_index >= vec_safe_length (mem_addr_template_list))
	vec_safe_grow_cleared (mem_addr_template_list, templ_index + 1);

      /* Reuse the templates for addresses, so that we do not waste memory.  */
      templ = &(*mem_addr_template_list)[templ_index];
      if (!templ->ref)
	{
	  /* Build the template lazily, with a placeholder symbol and fake
	     (virtual) registers standing in for base and index.  */
	  sym = (addr->symbol ?
		 gen_rtx_SYMBOL_REF (pointer_mode, ggc_strdup ("test_symbol"))
		 : NULL_RTX);
	  bse = (addr->base ?
		 gen_raw_REG (pointer_mode, LAST_VIRTUAL_REGISTER + 1)
		 : NULL_RTX);
	  idx = (addr->index ?
		 gen_raw_REG (pointer_mode, LAST_VIRTUAL_REGISTER + 2)
		 : NULL_RTX);

	  gen_addr_rtx (pointer_mode, sym, bse, idx,
			st? const0_rtx : NULL_RTX,
			off? const0_rtx : NULL_RTX,
			&templ->ref,
			&templ->step_p,
			&templ->off_p);
	}

      /* Patch the actual step and offset values into the cached template.  */
      if (st)
	*templ->step_p = st;
      if (off)
	*templ->off_p = off;

      return templ->ref;
    }

  /* Otherwise really expand the expressions.  */
  sym = (addr->symbol
	 ? expand_expr (addr->symbol, NULL_RTX, pointer_mode, EXPAND_NORMAL)
	 : NULL_RTX);
  bse = (addr->base
	 ? expand_expr (addr->base, NULL_RTX, pointer_mode, EXPAND_NORMAL)
	 : NULL_RTX);
  idx = (addr->index
	 ? expand_expr (addr->index, NULL_RTX, pointer_mode, EXPAND_NORMAL)
	 : NULL_RTX);

  /* Pointer mode and address mode may differ (e.g. segmented targets);
     convert if necessary.  */
  gen_addr_rtx (pointer_mode, sym, bse, idx, st, off, &address, NULL, NULL);
  if (pointer_mode != address_mode)
    address = convert_memory_address (address_mode, address);
  return address;
}
265
266 /* implement addr_for_mem_ref() directly from a tree, which avoids exporting
267 the mem_address structure. */
268
269 rtx
270 addr_for_mem_ref (tree exp, addr_space_t as, bool really_expand)
271 {
272 struct mem_address addr;
273 get_address_description (exp, &addr);
274 return addr_for_mem_ref (&addr, as, really_expand);
275 }
276
277 /* Returns address of MEM_REF in TYPE. */
278
279 tree
280 tree_mem_ref_addr (tree type, tree mem_ref)
281 {
282 tree addr;
283 tree act_elem;
284 tree step = TMR_STEP (mem_ref), offset = TMR_OFFSET (mem_ref);
285 tree addr_base = NULL_TREE, addr_off = NULL_TREE;
286
287 addr_base = fold_convert (type, TMR_BASE (mem_ref));
288
289 act_elem = TMR_INDEX (mem_ref);
290 if (act_elem)
291 {
292 if (step)
293 act_elem = fold_build2 (MULT_EXPR, TREE_TYPE (act_elem),
294 act_elem, step);
295 addr_off = act_elem;
296 }
297
298 act_elem = TMR_INDEX2 (mem_ref);
299 if (act_elem)
300 {
301 if (addr_off)
302 addr_off = fold_build2 (PLUS_EXPR, TREE_TYPE (addr_off),
303 addr_off, act_elem);
304 else
305 addr_off = act_elem;
306 }
307
308 if (offset && !integer_zerop (offset))
309 {
310 if (addr_off)
311 addr_off = fold_build2 (PLUS_EXPR, TREE_TYPE (addr_off), addr_off,
312 fold_convert (TREE_TYPE (addr_off), offset));
313 else
314 addr_off = offset;
315 }
316
317 if (addr_off)
318 addr = fold_build_pointer_plus (addr_base, addr_off);
319 else
320 addr = addr_base;
321
322 return addr;
323 }
324
325 /* Returns true if a memory reference in MODE and with parameters given by
326 ADDR is valid on the current target. */
327
328 bool
329 valid_mem_ref_p (machine_mode mode, addr_space_t as,
330 struct mem_address *addr)
331 {
332 rtx address;
333
334 address = addr_for_mem_ref (addr, as, false);
335 if (!address)
336 return false;
337
338 return memory_address_addr_space_p (mode, address, as);
339 }
340
/* Checks whether a TARGET_MEM_REF with type TYPE and parameters given by ADDR
   is valid on the current target and if so, creates and returns the
   TARGET_MEM_REF.  If VERIFY is false omit the verification step.  Returns
   NULL_TREE when verification fails.  As a side effect canonicalizes
   ADDR->step (dropping a unit step) and ADDR->offset (forcing it to
   ALIAS_PTR_TYPE).  */

static tree
create_mem_ref_raw (tree type, tree alias_ptr_type, struct mem_address *addr,
		    bool verify)
{
  tree base, index2;

  if (verify
      && !valid_mem_ref_p (TYPE_MODE (type), TYPE_ADDR_SPACE (type), addr))
    return NULL_TREE;

  /* A step of one is implicit in TARGET_MEM_REF; drop it.  */
  if (addr->step && integer_onep (addr->step))
    addr->step = NULL_TREE;

  /* TMR_OFFSET is always present and carries the alias pointer type.  */
  if (addr->offset)
    addr->offset = fold_convert (alias_ptr_type, addr->offset);
  else
    addr->offset = build_int_cst (alias_ptr_type, 0);

  /* Choose TMR_BASE and TMR_INDEX2: a symbol must be the base; otherwise
     prefer a pointer-typed base; otherwise use a zero pointer constant
     as base and demote the non-pointer base to the second index.  */
  if (addr->symbol)
    {
      base = addr->symbol;
      index2 = addr->base;
    }
  else if (addr->base
	   && POINTER_TYPE_P (TREE_TYPE (addr->base)))
    {
      base = addr->base;
      index2 = NULL_TREE;
    }
  else
    {
      base = build_int_cst (build_pointer_type (type), 0);
      index2 = addr->base;
    }

  /* If possible use a plain MEM_REF instead of a TARGET_MEM_REF.
     ??? As IVOPTs does not follow restrictions to where the base
     pointer may point to create a MEM_REF only if we know that
     base is valid.  */
  if ((TREE_CODE (base) == ADDR_EXPR || TREE_CODE (base) == INTEGER_CST)
      && (!index2 || integer_zerop (index2))
      && (!addr->index || integer_zerop (addr->index)))
    return fold_build2 (MEM_REF, type, base, addr->offset);

  return build5 (TARGET_MEM_REF, type,
		 base, addr->offset, addr->index, addr->step, index2);
}
392
393 /* Returns true if OBJ is an object whose address is a link time constant. */
394
395 static bool
396 fixed_address_object_p (tree obj)
397 {
398 return (VAR_P (obj)
399 && (TREE_STATIC (obj) || DECL_EXTERNAL (obj))
400 && ! DECL_DLLIMPORT_P (obj));
401 }
402
403 /* If ADDR contains an address of object that is a link time constant,
404 move it to PARTS->symbol. */
405
406 void
407 move_fixed_address_to_symbol (struct mem_address *parts, aff_tree *addr)
408 {
409 unsigned i;
410 tree val = NULL_TREE;
411
412 for (i = 0; i < addr->n; i++)
413 {
414 if (addr->elts[i].coef != 1)
415 continue;
416
417 val = addr->elts[i].val;
418 if (TREE_CODE (val) == ADDR_EXPR
419 && fixed_address_object_p (TREE_OPERAND (val, 0)))
420 break;
421 }
422
423 if (i == addr->n)
424 return;
425
426 parts->symbol = val;
427 aff_combination_remove_elt (addr, i);
428 }
429
430 /* If ADDR contains an instance of BASE_HINT, move it to PARTS->base. */
431
432 static void
433 move_hint_to_base (tree type, struct mem_address *parts, tree base_hint,
434 aff_tree *addr)
435 {
436 unsigned i;
437 tree val = NULL_TREE;
438 int qual;
439
440 for (i = 0; i < addr->n; i++)
441 {
442 if (addr->elts[i].coef != 1)
443 continue;
444
445 val = addr->elts[i].val;
446 if (operand_equal_p (val, base_hint, 0))
447 break;
448 }
449
450 if (i == addr->n)
451 return;
452
453 /* Cast value to appropriate pointer type. We cannot use a pointer
454 to TYPE directly, as the back-end will assume registers of pointer
455 type are aligned, and just the base itself may not actually be.
456 We use void pointer to the type's address space instead. */
457 qual = ENCODE_QUAL_ADDR_SPACE (TYPE_ADDR_SPACE (type));
458 type = build_qualified_type (void_type_node, qual);
459 parts->base = fold_convert (build_pointer_type (type), val);
460 aff_combination_remove_elt (addr, i);
461 }
462
463 /* If ADDR contains an address of a dereferenced pointer, move it to
464 PARTS->base. */
465
466 static void
467 move_pointer_to_base (struct mem_address *parts, aff_tree *addr)
468 {
469 unsigned i;
470 tree val = NULL_TREE;
471
472 for (i = 0; i < addr->n; i++)
473 {
474 if (addr->elts[i].coef != 1)
475 continue;
476
477 val = addr->elts[i].val;
478 if (POINTER_TYPE_P (TREE_TYPE (val)))
479 break;
480 }
481
482 if (i == addr->n)
483 return;
484
485 parts->base = val;
486 aff_combination_remove_elt (addr, i);
487 }
488
489 /* Moves the loop variant part V in linear address ADDR to be the index
490 of PARTS. */
491
492 static void
493 move_variant_to_index (struct mem_address *parts, aff_tree *addr, tree v)
494 {
495 unsigned i;
496 tree val = NULL_TREE;
497
498 gcc_assert (!parts->index);
499 for (i = 0; i < addr->n; i++)
500 {
501 val = addr->elts[i].val;
502 if (operand_equal_p (val, v, 0))
503 break;
504 }
505
506 if (i == addr->n)
507 return;
508
509 parts->index = fold_convert (sizetype, val);
510 parts->step = wide_int_to_tree (sizetype, addr->elts[i].coef);
511 aff_combination_remove_elt (addr, i);
512 }
513
514 /* Adds ELT to PARTS. */
515
516 static void
517 add_to_parts (struct mem_address *parts, tree elt)
518 {
519 tree type;
520
521 if (!parts->index)
522 {
523 parts->index = fold_convert (sizetype, elt);
524 return;
525 }
526
527 if (!parts->base)
528 {
529 parts->base = elt;
530 return;
531 }
532
533 /* Add ELT to base. */
534 type = TREE_TYPE (parts->base);
535 if (POINTER_TYPE_P (type))
536 parts->base = fold_build_pointer_plus (parts->base, elt);
537 else
538 parts->base = fold_build2 (PLUS_EXPR, type,
539 parts->base, elt);
540 }
541
/* Returns true if multiplying by RATIO is allowed in an address.  Test the
   validity for a memory reference accessing memory of mode MODE in address
   space AS.  Results are cached per (address space, mode) pair, computed
   lazily by probing the target's legitimate-address predicate for every
   ratio in [-MAX_RATIO, MAX_RATIO].  */

static bool
multiplier_allowed_in_address_p (HOST_WIDE_INT ratio, machine_mode mode,
				 addr_space_t as)
{
#define MAX_RATIO 128
  unsigned int data_index = (int) as * MAX_MACHINE_MODE + (int) mode;
  static vec<sbitmap> valid_mult_list;
  sbitmap valid_mult;

  if (data_index >= valid_mult_list.length ())
    valid_mult_list.safe_grow_cleared (data_index + 1);

  valid_mult = valid_mult_list[data_index];
  if (!valid_mult)
    {
      machine_mode address_mode = targetm.addr_space.address_mode (as);
      rtx reg1 = gen_raw_REG (address_mode, LAST_VIRTUAL_REGISTER + 1);
      rtx reg2 = gen_raw_REG (address_mode, LAST_VIRTUAL_REGISTER + 2);
      rtx addr, scaled;
      HOST_WIDE_INT i;

      /* Bit I + MAX_RATIO records whether multiplier I is valid.  */
      valid_mult = sbitmap_alloc (2 * MAX_RATIO + 1);
      bitmap_clear (valid_mult);
      /* Second operand of the MULT is patched in the loop below.  */
      scaled = gen_rtx_fmt_ee (MULT, address_mode, reg1, NULL_RTX);
      addr = gen_rtx_fmt_ee (PLUS, address_mode, scaled, reg2);
      for (i = -MAX_RATIO; i <= MAX_RATIO; i++)
	{
	  XEXP (scaled, 1) = gen_int_mode (i, address_mode);
	  /* Accept I if either reg1 * I + reg2 or plain reg1 * I is a
	     legitimate address.  */
	  if (memory_address_addr_space_p (mode, addr, as)
	      || memory_address_addr_space_p (mode, scaled, as))
	    bitmap_set_bit (valid_mult, i + MAX_RATIO);
	}

      if (dump_file && (dump_flags & TDF_DETAILS))
	{
	  fprintf (dump_file, " allowed multipliers:");
	  for (i = -MAX_RATIO; i <= MAX_RATIO; i++)
	    if (bitmap_bit_p (valid_mult, i + MAX_RATIO))
	      fprintf (dump_file, " %d", (int) i);
	  fprintf (dump_file, "\n");
	  fprintf (dump_file, "\n");
	}

      valid_mult_list[data_index] = valid_mult;
    }

  /* Ratios outside the probed range are conservatively rejected.  */
  if (ratio > MAX_RATIO || ratio < -MAX_RATIO)
    return false;

  return bitmap_bit_p (valid_mult, ratio + MAX_RATIO);
}
597
/* Finds the most expensive multiplication in ADDR that can be
   expressed in an addressing mode and move the corresponding
   element(s) to PARTS.  Elements whose coefficient is the negation of
   the chosen multiplier are folded in by subtraction.  */

static void
most_expensive_mult_to_index (tree type, struct mem_address *parts,
			      aff_tree *addr, bool speed)
{
  addr_space_t as = TYPE_ADDR_SPACE (type);
  machine_mode address_mode = targetm.addr_space.address_mode (as);
  HOST_WIDE_INT coef;
  unsigned best_mult_cost = 0, acost;
  tree mult_elt = NULL_TREE, elt;
  unsigned i, j;
  enum tree_code op_code;

  offset_int best_mult = 0;
  /* Pick the multiplier whose synthesis (outside an address) would be
     most expensive, among those the addressing mode can express --
     moving it into the address saves the most work.  */
  for (i = 0; i < addr->n; i++)
    {
      if (!wi::fits_shwi_p (addr->elts[i].coef))
	continue;

      coef = addr->elts[i].coef.to_shwi ();
      if (coef == 1
	  || !multiplier_allowed_in_address_p (coef, TYPE_MODE (type), as))
	continue;

      acost = mult_by_coeff_cost (coef, address_mode, speed);

      if (acost > best_mult_cost)
	{
	  best_mult_cost = acost;
	  best_mult = offset_int::from (addr->elts[i].coef, SIGNED);
	}
    }

  if (!best_mult_cost)
    return;

  /* Collect elements multiplied by best_mult.  Unrelated elements are
     compacted in place; matching elements are combined into MULT_ELT,
     subtracting those whose negated coefficient matches.  */
  for (i = j = 0; i < addr->n; i++)
    {
      offset_int amult = offset_int::from (addr->elts[i].coef, SIGNED);
      offset_int amult_neg = -wi::sext (amult, TYPE_PRECISION (addr->type));

      if (amult == best_mult)
	op_code = PLUS_EXPR;
      else if (amult_neg == best_mult)
	op_code = MINUS_EXPR;
      else
	{
	  addr->elts[j] = addr->elts[i];
	  j++;
	  continue;
	}

      elt = fold_convert (sizetype, addr->elts[i].val);
      if (mult_elt)
	mult_elt = fold_build2 (op_code, sizetype, mult_elt, elt);
      else if (op_code == PLUS_EXPR)
	mult_elt = elt;
      else
	mult_elt = fold_build1 (NEGATE_EXPR, sizetype, elt);
    }
  addr->n = j;

  /* The combined elements become index * step.  */
  parts->index = mult_elt;
  parts->step = wide_int_to_tree (sizetype, best_mult);
}
667
/* Splits address ADDR for a memory access of type TYPE into PARTS.
   If BASE_HINT is non-NULL, it specifies an SSA name to be used
   preferentially as base of the reference, and IV_CAND is the selected
   iv candidate used in ADDR.

   TODO -- be more clever about the distribution of the elements of ADDR
   to PARTS.  Some architectures do not support anything but single
   register in address, possibly with a small integer offset; while
   create_mem_ref will simplify the address to an acceptable shape
   later, it would be more efficient to know that asking for complicated
   addressing modes is useless.  */

static void
addr_to_parts (tree type, aff_tree *addr, tree iv_cand,
	       tree base_hint, struct mem_address *parts,
	       bool speed)
{
  tree part;
  unsigned i;

  parts->symbol = NULL_TREE;
  parts->base = NULL_TREE;
  parts->index = NULL_TREE;
  parts->step = NULL_TREE;

  /* The constant part of the affine combination becomes the offset.  */
  if (addr->offset != 0)
    parts->offset = wide_int_to_tree (sizetype, addr->offset);
  else
    parts->offset = NULL_TREE;

  /* Try to find a symbol.  */
  move_fixed_address_to_symbol (parts, addr);

  /* No need to do address parts reassociation if the number of parts
     is <= 2 -- in that case, no loop invariant code motion can be
     exposed.  */

  if (!base_hint && (addr->n > 2))
    move_variant_to_index (parts, addr, iv_cand);

  /* First move the most expensive feasible multiplication
     to index.  */
  if (!parts->index)
    most_expensive_mult_to_index (type, parts, addr, speed);

  /* Try to find a base of the reference.  Since at the moment
     there is no reliable way how to distinguish between pointer and its
     offset, this is just a guess.  */
  if (!parts->symbol && base_hint)
    move_hint_to_base (type, parts, base_hint, addr);
  if (!parts->symbol && !parts->base)
    move_pointer_to_base (parts, addr);

  /* Then try to process the remaining elements.  */
  for (i = 0; i < addr->n; i++)
    {
      part = fold_convert (sizetype, addr->elts[i].val);
      if (addr->elts[i].coef != 1)
	part = fold_build2 (MULT_EXPR, sizetype, part,
			    wide_int_to_tree (sizetype, addr->elts[i].coef));
      add_to_parts (parts, part);
    }
  /* Fold in the non-affine remainder, if any.  */
  if (addr->rest)
    add_to_parts (parts, fold_convert (sizetype, addr->rest));
}
733
/* Force the PARTS to register.  Gimplifies PARTS->base and PARTS->index
   so they are valid TARGET_MEM_REF operands, emitting any needed
   statements before GSI.  Note the base uses the stricter
   is_gimple_mem_ref_addr predicate while the index only needs to be a
   simple gimple value.  */

static void
gimplify_mem_ref_parts (gimple_stmt_iterator *gsi, struct mem_address *parts)
{
  if (parts->base)
    parts->base = force_gimple_operand_gsi_1 (gsi, parts->base,
					      is_gimple_mem_ref_addr, NULL_TREE,
					      true, GSI_SAME_STMT);
  if (parts->index)
    parts->index = force_gimple_operand_gsi (gsi, parts->index,
					     true, NULL_TREE,
					     true, GSI_SAME_STMT);
}
748
/* Creates and returns a TARGET_MEM_REF for address ADDR.  If necessary
   computations are emitted in front of GSI.  TYPE is the type of the
   created memory reference.  IV_CAND is the selected iv candidate in ADDR,
   and BASE_HINT is non NULL if IV_CAND comes from a base address
   object.

   If the initial address split is not valid on the target, the parts are
   progressively merged (step into index, symbol into base, index into
   base, offset into base) until a valid reference is found.  */

tree
create_mem_ref (gimple_stmt_iterator *gsi, tree type, aff_tree *addr,
		tree alias_ptr_type, tree iv_cand, tree base_hint, bool speed)
{
  tree mem_ref, tmp;
  struct mem_address parts;

  addr_to_parts (type, addr, iv_cand, base_hint, &parts, speed);
  gimplify_mem_ref_parts (gsi, &parts);
  mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
  if (mem_ref)
    return mem_ref;

  /* The expression is too complicated.  Try making it simpler.  */

  if (parts.step && !integer_onep (parts.step))
    {
      /* Move the multiplication to index.  */
      gcc_assert (parts.index);
      parts.index = force_gimple_operand_gsi (gsi,
					      fold_build2 (MULT_EXPR, sizetype,
							   parts.index, parts.step),
					      true, NULL_TREE, true, GSI_SAME_STMT);
      parts.step = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  if (parts.symbol)
    {
      tmp = parts.symbol;
      gcc_assert (is_gimple_val (tmp));

      /* Add the symbol to base, eventually forcing it to register.  */
      if (parts.base)
	{
	  gcc_assert (useless_type_conversion_p
		      (sizetype, TREE_TYPE (parts.base)));

	  if (parts.index)
	    {
	      /* Both slots taken: fold symbol + base into the base.  */
	      parts.base = force_gimple_operand_gsi_1 (gsi,
			fold_build_pointer_plus (tmp, parts.base),
			is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);
	    }
	  else
	    {
	      /* Index slot is free: demote base to index, symbol
		 becomes base.  */
	      parts.index = parts.base;
	      parts.base = tmp;
	    }
	}
      else
	parts.base = tmp;
      parts.symbol = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  if (parts.index)
    {
      /* Add index to base.  */
      if (parts.base)
	{
	  parts.base = force_gimple_operand_gsi_1 (gsi,
			fold_build_pointer_plus (parts.base, parts.index),
			is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);
	}
      else
	parts.base = parts.index;
      parts.index = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  if (parts.offset && !integer_zerop (parts.offset))
    {
      /* Try adding offset to base.  */
      if (parts.base)
	{
	  parts.base = force_gimple_operand_gsi_1 (gsi,
			fold_build_pointer_plus (parts.base, parts.offset),
			is_gimple_mem_ref_addr, NULL_TREE, true, GSI_SAME_STMT);
	}
      else
	parts.base = parts.offset;

      parts.offset = NULL_TREE;

      mem_ref = create_mem_ref_raw (type, alias_ptr_type, &parts, true);
      if (mem_ref)
	return mem_ref;
    }

  /* Verify that the address is in the simplest possible shape
     (only a register).  If we cannot create such a memory reference,
     something is really wrong.  */
  gcc_assert (parts.symbol == NULL_TREE);
  gcc_assert (parts.index == NULL_TREE);
  gcc_assert (!parts.step || integer_onep (parts.step));
  gcc_assert (!parts.offset || integer_zerop (parts.offset));
  gcc_unreachable ();
}
863
864 /* Copies components of the address from OP to ADDR. */
865
866 void
867 get_address_description (tree op, struct mem_address *addr)
868 {
869 if (TREE_CODE (TMR_BASE (op)) == ADDR_EXPR)
870 {
871 addr->symbol = TMR_BASE (op);
872 addr->base = TMR_INDEX2 (op);
873 }
874 else
875 {
876 addr->symbol = NULL_TREE;
877 if (TMR_INDEX2 (op))
878 {
879 gcc_assert (integer_zerop (TMR_BASE (op)));
880 addr->base = TMR_INDEX2 (op);
881 }
882 else
883 addr->base = TMR_BASE (op);
884 }
885 addr->index = TMR_INDEX (op);
886 addr->step = TMR_STEP (op);
887 addr->offset = TMR_OFFSET (op);
888 }
889
/* Copies the reference information (side effects, volatility and, where
   possible, points-to and alignment info on the base pointer) from OLD_REF
   to NEW_REF, where NEW_REF should be either a MEM_REF or a
   TARGET_MEM_REF.  */

void
copy_ref_info (tree new_ref, tree old_ref)
{
  tree new_ptr_base = NULL_TREE;

  gcc_assert (TREE_CODE (new_ref) == MEM_REF
	      || TREE_CODE (new_ref) == TARGET_MEM_REF);

  TREE_SIDE_EFFECTS (new_ref) = TREE_SIDE_EFFECTS (old_ref);
  TREE_THIS_VOLATILE (new_ref) = TREE_THIS_VOLATILE (old_ref);

  new_ptr_base = TREE_OPERAND (new_ref, 0);

  /* We can transfer points-to information from an old pointer
     or decl base to the new one.  */
  if (new_ptr_base
      && TREE_CODE (new_ptr_base) == SSA_NAME
      && !SSA_NAME_PTR_INFO (new_ptr_base))
    {
      tree base = get_base_address (old_ref);
      if (!base)
	;
      else if ((TREE_CODE (base) == MEM_REF
		|| TREE_CODE (base) == TARGET_MEM_REF)
	       && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
	       && SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0)))
	{
	  struct ptr_info_def *new_pi;
	  unsigned int align, misalign;

	  duplicate_ssa_name_ptr_info
	    (new_ptr_base, SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0)));
	  new_pi = SSA_NAME_PTR_INFO (new_ptr_base);
	  /* We have to be careful about transferring alignment information.
	     Alignment can only be kept when only a constant offset differs
	     between the old and new reference; any index component (or a
	     step smaller than the alignment) could perturb it.  */
	  if (get_ptr_info_alignment (new_pi, &align, &misalign)
	      && TREE_CODE (old_ref) == MEM_REF
	      && !(TREE_CODE (new_ref) == TARGET_MEM_REF
		   && (TMR_INDEX2 (new_ref)
		       /* TODO: Below conditions can be relaxed if TMR_INDEX
			  is an induction variable and its initial value and
			  step are aligned.  */
		       || (TMR_INDEX (new_ref) && !TMR_STEP (new_ref))
		       || (TMR_STEP (new_ref)
			   && (TREE_INT_CST_LOW (TMR_STEP (new_ref))
			       < align)))))
	    {
	      /* Adjust the recorded misalignment by the offset delta.  */
	      unsigned int inc = (mem_ref_offset (old_ref).to_short_addr ()
				  - mem_ref_offset (new_ref).to_short_addr ());
	      adjust_ptr_info_misalignment (new_pi, inc);
	    }
	  else
	    mark_ptr_info_alignment_unknown (new_pi);
	}
      else if (VAR_P (base)
	       || TREE_CODE (base) == PARM_DECL
	       || TREE_CODE (base) == RESULT_DECL)
	{
	  /* The old reference was directly to a decl; the new pointer
	     therefore points exactly to that decl.  */
	  struct ptr_info_def *pi = get_ptr_info (new_ptr_base);
	  pt_solution_set_var (&pi->pt, base);
	}
    }
}
955
/* Move constants in target_mem_ref REF to offset.  Returns the new target
   mem ref if anything changes, NULL_TREE otherwise.  */

tree
maybe_fold_tmr (tree ref)
{
  struct mem_address addr;
  bool changed = false;
  tree new_ref, off;

  get_address_description (ref, &addr);

  /* A constant base folds directly into the offset.  */
  if (addr.base
      && TREE_CODE (addr.base) == INTEGER_CST
      && !integer_zerop (addr.base))
    {
      addr.offset = fold_binary_to_constant (PLUS_EXPR,
					     TREE_TYPE (addr.offset),
					     addr.offset, addr.base);
      addr.base = NULL_TREE;
      changed = true;
    }

  /* A symbol of the form &MEM[p, c]: fold C into the offset and use P
     as the new symbol operand.  */
  if (addr.symbol
      && TREE_CODE (TREE_OPERAND (addr.symbol, 0)) == MEM_REF)
    {
      addr.offset = fold_binary_to_constant
			(PLUS_EXPR, TREE_TYPE (addr.offset),
			 addr.offset,
			 TREE_OPERAND (TREE_OPERAND (addr.symbol, 0), 1));
      addr.symbol = TREE_OPERAND (TREE_OPERAND (addr.symbol, 0), 0);
      changed = true;
    }
  /* A symbol that is a handled component (e.g. &a.b.c): strip the
     component reference and fold its constant byte offset in.  */
  else if (addr.symbol
	   && handled_component_p (TREE_OPERAND (addr.symbol, 0)))
    {
      HOST_WIDE_INT offset;
      addr.symbol = build_fold_addr_expr
		      (get_addr_base_and_unit_offset
			 (TREE_OPERAND (addr.symbol, 0), &offset));
      addr.offset = int_const_binop (PLUS_EXPR,
				     addr.offset, size_int (offset));
      changed = true;
    }

  /* A constant index folds in as index * step.  */
  if (addr.index && TREE_CODE (addr.index) == INTEGER_CST)
    {
      off = addr.index;
      if (addr.step)
	{
	  off = fold_binary_to_constant (MULT_EXPR, sizetype,
					 off, addr.step);
	  addr.step = NULL_TREE;
	}

      addr.offset = fold_binary_to_constant (PLUS_EXPR,
					     TREE_TYPE (addr.offset),
					     addr.offset, off);
      addr.index = NULL_TREE;
      changed = true;
    }

  if (!changed)
    return NULL_TREE;

  /* If we have propagated something into this TARGET_MEM_REF and thus
     ended up folding it, always create a new TARGET_MEM_REF regardless
     if it is valid in this form on the target - the propagation result
     wouldn't be anyway.  */
  new_ref = create_mem_ref_raw (TREE_TYPE (ref),
			        TREE_TYPE (addr.offset), &addr, false);
  TREE_SIDE_EFFECTS (new_ref) = TREE_SIDE_EFFECTS (ref);
  TREE_THIS_VOLATILE (new_ref) = TREE_THIS_VOLATILE (ref);
  return new_ref;
}
1031
1032 /* Dump PARTS to FILE. */
1033
1034 extern void dump_mem_address (FILE *, struct mem_address *);
1035 void
1036 dump_mem_address (FILE *file, struct mem_address *parts)
1037 {
1038 if (parts->symbol)
1039 {
1040 fprintf (file, "symbol: ");
1041 print_generic_expr (file, TREE_OPERAND (parts->symbol, 0), TDF_SLIM);
1042 fprintf (file, "\n");
1043 }
1044 if (parts->base)
1045 {
1046 fprintf (file, "base: ");
1047 print_generic_expr (file, parts->base, TDF_SLIM);
1048 fprintf (file, "\n");
1049 }
1050 if (parts->index)
1051 {
1052 fprintf (file, "index: ");
1053 print_generic_expr (file, parts->index, TDF_SLIM);
1054 fprintf (file, "\n");
1055 }
1056 if (parts->step)
1057 {
1058 fprintf (file, "step: ");
1059 print_generic_expr (file, parts->step, TDF_SLIM);
1060 fprintf (file, "\n");
1061 }
1062 if (parts->offset)
1063 {
1064 fprintf (file, "offset: ");
1065 print_generic_expr (file, parts->offset, TDF_SLIM);
1066 fprintf (file, "\n");
1067 }
1068 }
1069
1070 #include "gt-tree-ssa-address.h"