1 /* Memory address lowering and addressing mode selection.
2 Copyright (C) 2004-2015 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by the
8 Free Software Foundation; either version 3, or (at your option) any
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* Utility functions for manipulation with TARGET_MEM_REFs -- tree expressions
21 that directly map to addressing modes of the target. */
25 #include "coretypes.h"
31 #include "fold-const.h"
32 #include "stor-layout.h"
35 #include "hard-reg-set.h"
37 #include "basic-block.h"
38 #include "tree-pretty-print.h"
39 #include "tree-ssa-alias.h"
40 #include "internal-fn.h"
41 #include "gimple-expr.h"
44 #include "gimple-iterator.h"
45 #include "gimplify-me.h"
46 #include "stringpool.h"
47 #include "tree-ssanames.h"
48 #include "tree-ssa-loop-ivopts.h"
51 #include "insn-config.h"
62 #include "tree-inline.h"
63 #include "tree-affine.h"
65 /* FIXME: We compute address costs using RTL. */
68 #include "tree-ssa-address.h"
70 /* TODO -- handling of symbols (according to Richard Hendersons
71 comments, http://gcc.gnu.org/ml/gcc-patches/2005-04/msg00949.html):
73 There are at least 5 different kinds of symbols that we can run up against:
75 (1) binds_local_p, small data area.
76 (2) binds_local_p, eg local statics
77 (3) !binds_local_p, eg global variables
78 (4) thread local, local_exec
79 (5) thread local, !local_exec
81 Now, (1) won't appear often in an array context, but it certainly can.
82 All you have to do is set -GN high enough, or explicitly mark any
83 random object __attribute__((section (".sdata"))).
85 All of these affect whether or not a symbol is in fact a valid address.
86 The only one tested here is (3). And that result may very well
87 be incorrect for (4) or (5).
   An incorrect result here does not cause incorrect results out the
   back end, because the expander in expr.c validizes the address.  However
   it would be nice to improve the handling here in order to produce more
   precise results.  */
94 /* A "template" for memory address, used to determine whether the address is
97 typedef struct GTY (()) mem_addr_template
{
98 rtx ref
; /* The template. */
99 rtx
* GTY ((skip
)) step_p
; /* The point in template where the step should be
101 rtx
* GTY ((skip
)) off_p
; /* The point in template where the offset should
106 /* The templates. Each of the low five bits of the index corresponds to one
107 component of TARGET_MEM_REF being present, while the high bits identify
108 the address space. See TEMPL_IDX. */
110 static GTY(()) vec
<mem_addr_template
, va_gc
> *mem_addr_template_list
;
112 #define TEMPL_IDX(AS, SYMBOL, BASE, INDEX, STEP, OFFSET) \
114 | ((SYMBOL != 0) << 4) \
115 | ((BASE != 0) << 3) \
116 | ((INDEX != 0) << 2) \
117 | ((STEP != 0) << 1) \
120 /* Stores address for memory reference with parameters SYMBOL, BASE, INDEX,
121 STEP and OFFSET to *ADDR using address mode ADDRESS_MODE. Stores pointers
122 to where step is placed to *STEP_P and offset to *OFFSET_P. */
125 gen_addr_rtx (machine_mode address_mode
,
126 rtx symbol
, rtx base
, rtx index
, rtx step
, rtx offset
,
127 rtx
*addr
, rtx
**step_p
, rtx
**offset_p
)
142 act_elem
= gen_rtx_MULT (address_mode
, act_elem
, step
);
145 *step_p
= &XEXP (act_elem
, 1);
151 if (base
&& base
!= const0_rtx
)
154 *addr
= simplify_gen_binary (PLUS
, address_mode
, base
, *addr
);
164 act_elem
= gen_rtx_PLUS (address_mode
, act_elem
, offset
);
167 *offset_p
= &XEXP (act_elem
, 1);
169 if (GET_CODE (symbol
) == SYMBOL_REF
170 || GET_CODE (symbol
) == LABEL_REF
171 || GET_CODE (symbol
) == CONST
)
172 act_elem
= gen_rtx_CONST (address_mode
, act_elem
);
176 *addr
= gen_rtx_PLUS (address_mode
, *addr
, act_elem
);
184 *addr
= gen_rtx_PLUS (address_mode
, *addr
, offset
);
186 *offset_p
= &XEXP (*addr
, 1);
200 /* Description of a memory address. */
204 tree symbol
, base
, index
, step
, offset
;
207 /* Returns address for TARGET_MEM_REF with parameters given by ADDR
209 If REALLY_EXPAND is false, just make fake registers instead
210 of really expanding the operands, and perform the expansion in-place
211 by using one of the "templates". */
214 addr_for_mem_ref (struct mem_address
*addr
, addr_space_t as
,
217 machine_mode address_mode
= targetm
.addr_space
.address_mode (as
);
218 machine_mode pointer_mode
= targetm
.addr_space
.pointer_mode (as
);
219 rtx address
, sym
, bse
, idx
, st
, off
;
220 struct mem_addr_template
*templ
;
222 if (addr
->step
&& !integer_onep (addr
->step
))
223 st
= immed_wide_int_const (addr
->step
, pointer_mode
);
227 if (addr
->offset
&& !integer_zerop (addr
->offset
))
229 offset_int dc
= offset_int::from (addr
->offset
, SIGNED
);
230 off
= immed_wide_int_const (dc
, pointer_mode
);
237 unsigned int templ_index
238 = TEMPL_IDX (as
, addr
->symbol
, addr
->base
, addr
->index
, st
, off
);
240 if (templ_index
>= vec_safe_length (mem_addr_template_list
))
241 vec_safe_grow_cleared (mem_addr_template_list
, templ_index
+ 1);
243 /* Reuse the templates for addresses, so that we do not waste memory. */
244 templ
= &(*mem_addr_template_list
)[templ_index
];
247 sym
= (addr
->symbol
?
248 gen_rtx_SYMBOL_REF (pointer_mode
, ggc_strdup ("test_symbol"))
251 gen_raw_REG (pointer_mode
, LAST_VIRTUAL_REGISTER
+ 1)
254 gen_raw_REG (pointer_mode
, LAST_VIRTUAL_REGISTER
+ 2)
257 gen_addr_rtx (pointer_mode
, sym
, bse
, idx
,
258 st
? const0_rtx
: NULL_RTX
,
259 off
? const0_rtx
: NULL_RTX
,
273 /* Otherwise really expand the expressions. */
275 ? expand_expr (addr
->symbol
, NULL_RTX
, pointer_mode
, EXPAND_NORMAL
)
278 ? expand_expr (addr
->base
, NULL_RTX
, pointer_mode
, EXPAND_NORMAL
)
281 ? expand_expr (addr
->index
, NULL_RTX
, pointer_mode
, EXPAND_NORMAL
)
284 gen_addr_rtx (pointer_mode
, sym
, bse
, idx
, st
, off
, &address
, NULL
, NULL
);
285 if (pointer_mode
!= address_mode
)
286 address
= convert_memory_address (address_mode
, address
);
290 /* implement addr_for_mem_ref() directly from a tree, which avoids exporting
291 the mem_address structure. */
294 addr_for_mem_ref (tree exp
, addr_space_t as
, bool really_expand
)
296 struct mem_address addr
;
297 get_address_description (exp
, &addr
);
298 return addr_for_mem_ref (&addr
, as
, really_expand
);
301 /* Returns address of MEM_REF in TYPE. */
304 tree_mem_ref_addr (tree type
, tree mem_ref
)
308 tree step
= TMR_STEP (mem_ref
), offset
= TMR_OFFSET (mem_ref
);
309 tree addr_base
= NULL_TREE
, addr_off
= NULL_TREE
;
311 addr_base
= fold_convert (type
, TMR_BASE (mem_ref
));
313 act_elem
= TMR_INDEX (mem_ref
);
317 act_elem
= fold_build2 (MULT_EXPR
, TREE_TYPE (act_elem
),
322 act_elem
= TMR_INDEX2 (mem_ref
);
326 addr_off
= fold_build2 (PLUS_EXPR
, TREE_TYPE (addr_off
),
332 if (offset
&& !integer_zerop (offset
))
335 addr_off
= fold_build2 (PLUS_EXPR
, TREE_TYPE (addr_off
), addr_off
,
336 fold_convert (TREE_TYPE (addr_off
), offset
));
342 addr
= fold_build_pointer_plus (addr_base
, addr_off
);
349 /* Returns true if a memory reference in MODE and with parameters given by
350 ADDR is valid on the current target. */
353 valid_mem_ref_p (machine_mode mode
, addr_space_t as
,
354 struct mem_address
*addr
)
358 address
= addr_for_mem_ref (addr
, as
, false);
362 return memory_address_addr_space_p (mode
, address
, as
);
365 /* Checks whether a TARGET_MEM_REF with type TYPE and parameters given by ADDR
366 is valid on the current target and if so, creates and returns the
367 TARGET_MEM_REF. If VERIFY is false omit the verification step. */
370 create_mem_ref_raw (tree type
, tree alias_ptr_type
, struct mem_address
*addr
,
376 && !valid_mem_ref_p (TYPE_MODE (type
), TYPE_ADDR_SPACE (type
), addr
))
379 if (addr
->step
&& integer_onep (addr
->step
))
380 addr
->step
= NULL_TREE
;
383 addr
->offset
= fold_convert (alias_ptr_type
, addr
->offset
);
385 addr
->offset
= build_int_cst (alias_ptr_type
, 0);
393 && POINTER_TYPE_P (TREE_TYPE (addr
->base
)))
400 base
= build_int_cst (ptr_type_node
, 0);
404 /* If possible use a plain MEM_REF instead of a TARGET_MEM_REF.
405 ??? As IVOPTs does not follow restrictions to where the base
406 pointer may point to create a MEM_REF only if we know that
408 if ((TREE_CODE (base
) == ADDR_EXPR
|| TREE_CODE (base
) == INTEGER_CST
)
409 && (!index2
|| integer_zerop (index2
))
410 && (!addr
->index
|| integer_zerop (addr
->index
)))
411 return fold_build2 (MEM_REF
, type
, base
, addr
->offset
);
413 return build5 (TARGET_MEM_REF
, type
,
414 base
, addr
->offset
, addr
->index
, addr
->step
, index2
);
417 /* Returns true if OBJ is an object whose address is a link time constant. */
420 fixed_address_object_p (tree obj
)
422 return (TREE_CODE (obj
) == VAR_DECL
423 && (TREE_STATIC (obj
)
424 || DECL_EXTERNAL (obj
))
425 && ! DECL_DLLIMPORT_P (obj
));
428 /* If ADDR contains an address of object that is a link time constant,
429 move it to PARTS->symbol. */
432 move_fixed_address_to_symbol (struct mem_address
*parts
, aff_tree
*addr
)
435 tree val
= NULL_TREE
;
437 for (i
= 0; i
< addr
->n
; i
++)
439 if (addr
->elts
[i
].coef
!= 1)
442 val
= addr
->elts
[i
].val
;
443 if (TREE_CODE (val
) == ADDR_EXPR
444 && fixed_address_object_p (TREE_OPERAND (val
, 0)))
452 aff_combination_remove_elt (addr
, i
);
455 /* If ADDR contains an instance of BASE_HINT, move it to PARTS->base. */
458 move_hint_to_base (tree type
, struct mem_address
*parts
, tree base_hint
,
462 tree val
= NULL_TREE
;
465 for (i
= 0; i
< addr
->n
; i
++)
467 if (addr
->elts
[i
].coef
!= 1)
470 val
= addr
->elts
[i
].val
;
471 if (operand_equal_p (val
, base_hint
, 0))
478 /* Cast value to appropriate pointer type. We cannot use a pointer
479 to TYPE directly, as the back-end will assume registers of pointer
480 type are aligned, and just the base itself may not actually be.
481 We use void pointer to the type's address space instead. */
482 qual
= ENCODE_QUAL_ADDR_SPACE (TYPE_ADDR_SPACE (type
));
483 type
= build_qualified_type (void_type_node
, qual
);
484 parts
->base
= fold_convert (build_pointer_type (type
), val
);
485 aff_combination_remove_elt (addr
, i
);
488 /* If ADDR contains an address of a dereferenced pointer, move it to
492 move_pointer_to_base (struct mem_address
*parts
, aff_tree
*addr
)
495 tree val
= NULL_TREE
;
497 for (i
= 0; i
< addr
->n
; i
++)
499 if (addr
->elts
[i
].coef
!= 1)
502 val
= addr
->elts
[i
].val
;
503 if (POINTER_TYPE_P (TREE_TYPE (val
)))
511 aff_combination_remove_elt (addr
, i
);
514 /* Moves the loop variant part V in linear address ADDR to be the index
518 move_variant_to_index (struct mem_address
*parts
, aff_tree
*addr
, tree v
)
521 tree val
= NULL_TREE
;
523 gcc_assert (!parts
->index
);
524 for (i
= 0; i
< addr
->n
; i
++)
526 val
= addr
->elts
[i
].val
;
527 if (operand_equal_p (val
, v
, 0))
534 parts
->index
= fold_convert (sizetype
, val
);
535 parts
->step
= wide_int_to_tree (sizetype
, addr
->elts
[i
].coef
);
536 aff_combination_remove_elt (addr
, i
);
539 /* Adds ELT to PARTS. */
542 add_to_parts (struct mem_address
*parts
, tree elt
)
548 parts
->index
= fold_convert (sizetype
, elt
);
558 /* Add ELT to base. */
559 type
= TREE_TYPE (parts
->base
);
560 if (POINTER_TYPE_P (type
))
561 parts
->base
= fold_build_pointer_plus (parts
->base
, elt
);
563 parts
->base
= fold_build2 (PLUS_EXPR
, type
,
567 /* Finds the most expensive multiplication in ADDR that can be
568 expressed in an addressing mode and move the corresponding
569 element(s) to PARTS. */
572 most_expensive_mult_to_index (tree type
, struct mem_address
*parts
,
573 aff_tree
*addr
, bool speed
)
575 addr_space_t as
= TYPE_ADDR_SPACE (type
);
576 machine_mode address_mode
= targetm
.addr_space
.address_mode (as
);
578 unsigned best_mult_cost
= 0, acost
;
579 tree mult_elt
= NULL_TREE
, elt
;
581 enum tree_code op_code
;
583 offset_int best_mult
= 0;
584 for (i
= 0; i
< addr
->n
; i
++)
586 if (!wi::fits_shwi_p (addr
->elts
[i
].coef
))
589 coef
= addr
->elts
[i
].coef
.to_shwi ();
591 || !multiplier_allowed_in_address_p (coef
, TYPE_MODE (type
), as
))
594 acost
= mult_by_coeff_cost (coef
, address_mode
, speed
);
596 if (acost
> best_mult_cost
)
598 best_mult_cost
= acost
;
599 best_mult
= offset_int::from (addr
->elts
[i
].coef
, SIGNED
);
606 /* Collect elements multiplied by best_mult. */
607 for (i
= j
= 0; i
< addr
->n
; i
++)
609 offset_int amult
= offset_int::from (addr
->elts
[i
].coef
, SIGNED
);
610 offset_int amult_neg
= -wi::sext (amult
, TYPE_PRECISION (addr
->type
));
612 if (amult
== best_mult
)
614 else if (amult_neg
== best_mult
)
615 op_code
= MINUS_EXPR
;
618 addr
->elts
[j
] = addr
->elts
[i
];
623 elt
= fold_convert (sizetype
, addr
->elts
[i
].val
);
625 mult_elt
= fold_build2 (op_code
, sizetype
, mult_elt
, elt
);
626 else if (op_code
== PLUS_EXPR
)
629 mult_elt
= fold_build1 (NEGATE_EXPR
, sizetype
, elt
);
633 parts
->index
= mult_elt
;
634 parts
->step
= wide_int_to_tree (sizetype
, best_mult
);
637 /* Splits address ADDR for a memory access of type TYPE into PARTS.
638 If BASE_HINT is non-NULL, it specifies an SSA name to be used
639 preferentially as base of the reference, and IV_CAND is the selected
640 iv candidate used in ADDR.
642 TODO -- be more clever about the distribution of the elements of ADDR
643 to PARTS. Some architectures do not support anything but single
644 register in address, possibly with a small integer offset; while
645 create_mem_ref will simplify the address to an acceptable shape
646 later, it would be more efficient to know that asking for complicated
647 addressing modes is useless. */
650 addr_to_parts (tree type
, aff_tree
*addr
, tree iv_cand
,
651 tree base_hint
, struct mem_address
*parts
,
657 parts
->symbol
= NULL_TREE
;
658 parts
->base
= NULL_TREE
;
659 parts
->index
= NULL_TREE
;
660 parts
->step
= NULL_TREE
;
662 if (addr
->offset
!= 0)
663 parts
->offset
= wide_int_to_tree (sizetype
, addr
->offset
);
665 parts
->offset
= NULL_TREE
;
667 /* Try to find a symbol. */
668 move_fixed_address_to_symbol (parts
, addr
);
670 /* No need to do address parts reassociation if the number of parts
671 is <= 2 -- in that case, no loop invariant code motion can be
674 if (!base_hint
&& (addr
->n
> 2))
675 move_variant_to_index (parts
, addr
, iv_cand
);
677 /* First move the most expensive feasible multiplication
680 most_expensive_mult_to_index (type
, parts
, addr
, speed
);
682 /* Try to find a base of the reference. Since at the moment
683 there is no reliable way how to distinguish between pointer and its
684 offset, this is just a guess. */
685 if (!parts
->symbol
&& base_hint
)
686 move_hint_to_base (type
, parts
, base_hint
, addr
);
687 if (!parts
->symbol
&& !parts
->base
)
688 move_pointer_to_base (parts
, addr
);
690 /* Then try to process the remaining elements. */
691 for (i
= 0; i
< addr
->n
; i
++)
693 part
= fold_convert (sizetype
, addr
->elts
[i
].val
);
694 if (addr
->elts
[i
].coef
!= 1)
695 part
= fold_build2 (MULT_EXPR
, sizetype
, part
,
696 wide_int_to_tree (sizetype
, addr
->elts
[i
].coef
));
697 add_to_parts (parts
, part
);
700 add_to_parts (parts
, fold_convert (sizetype
, addr
->rest
));
703 /* Force the PARTS to register. */
706 gimplify_mem_ref_parts (gimple_stmt_iterator
*gsi
, struct mem_address
*parts
)
709 parts
->base
= force_gimple_operand_gsi_1 (gsi
, parts
->base
,
710 is_gimple_mem_ref_addr
, NULL_TREE
,
711 true, GSI_SAME_STMT
);
713 parts
->index
= force_gimple_operand_gsi (gsi
, parts
->index
,
715 true, GSI_SAME_STMT
);
718 /* Creates and returns a TARGET_MEM_REF for address ADDR. If necessary
719 computations are emitted in front of GSI. TYPE is the mode
720 of created memory reference. IV_CAND is the selected iv candidate in ADDR,
721 and BASE_HINT is non NULL if IV_CAND comes from a base address
725 create_mem_ref (gimple_stmt_iterator
*gsi
, tree type
, aff_tree
*addr
,
726 tree alias_ptr_type
, tree iv_cand
, tree base_hint
, bool speed
)
729 struct mem_address parts
;
731 addr_to_parts (type
, addr
, iv_cand
, base_hint
, &parts
, speed
);
732 gimplify_mem_ref_parts (gsi
, &parts
);
733 mem_ref
= create_mem_ref_raw (type
, alias_ptr_type
, &parts
, true);
737 /* The expression is too complicated. Try making it simpler. */
739 if (parts
.step
&& !integer_onep (parts
.step
))
741 /* Move the multiplication to index. */
742 gcc_assert (parts
.index
);
743 parts
.index
= force_gimple_operand_gsi (gsi
,
744 fold_build2 (MULT_EXPR
, sizetype
,
745 parts
.index
, parts
.step
),
746 true, NULL_TREE
, true, GSI_SAME_STMT
);
747 parts
.step
= NULL_TREE
;
749 mem_ref
= create_mem_ref_raw (type
, alias_ptr_type
, &parts
, true);
757 gcc_assert (is_gimple_val (tmp
));
759 /* Add the symbol to base, eventually forcing it to register. */
762 gcc_assert (useless_type_conversion_p
763 (sizetype
, TREE_TYPE (parts
.base
)));
767 parts
.base
= force_gimple_operand_gsi_1 (gsi
,
768 fold_build_pointer_plus (tmp
, parts
.base
),
769 is_gimple_mem_ref_addr
, NULL_TREE
, true, GSI_SAME_STMT
);
773 parts
.index
= parts
.base
;
779 parts
.symbol
= NULL_TREE
;
781 mem_ref
= create_mem_ref_raw (type
, alias_ptr_type
, &parts
, true);
788 /* Add index to base. */
791 parts
.base
= force_gimple_operand_gsi_1 (gsi
,
792 fold_build_pointer_plus (parts
.base
, parts
.index
),
793 is_gimple_mem_ref_addr
, NULL_TREE
, true, GSI_SAME_STMT
);
796 parts
.base
= parts
.index
;
797 parts
.index
= NULL_TREE
;
799 mem_ref
= create_mem_ref_raw (type
, alias_ptr_type
, &parts
, true);
804 if (parts
.offset
&& !integer_zerop (parts
.offset
))
806 /* Try adding offset to base. */
809 parts
.base
= force_gimple_operand_gsi_1 (gsi
,
810 fold_build_pointer_plus (parts
.base
, parts
.offset
),
811 is_gimple_mem_ref_addr
, NULL_TREE
, true, GSI_SAME_STMT
);
814 parts
.base
= parts
.offset
;
816 parts
.offset
= NULL_TREE
;
818 mem_ref
= create_mem_ref_raw (type
, alias_ptr_type
, &parts
, true);
823 /* Verify that the address is in the simplest possible shape
824 (only a register). If we cannot create such a memory reference,
825 something is really wrong. */
826 gcc_assert (parts
.symbol
== NULL_TREE
);
827 gcc_assert (parts
.index
== NULL_TREE
);
828 gcc_assert (!parts
.step
|| integer_onep (parts
.step
));
829 gcc_assert (!parts
.offset
|| integer_zerop (parts
.offset
));
833 /* Copies components of the address from OP to ADDR. */
836 get_address_description (tree op
, struct mem_address
*addr
)
838 if (TREE_CODE (TMR_BASE (op
)) == ADDR_EXPR
)
840 addr
->symbol
= TMR_BASE (op
);
841 addr
->base
= TMR_INDEX2 (op
);
845 addr
->symbol
= NULL_TREE
;
848 gcc_assert (integer_zerop (TMR_BASE (op
)));
849 addr
->base
= TMR_INDEX2 (op
);
852 addr
->base
= TMR_BASE (op
);
854 addr
->index
= TMR_INDEX (op
);
855 addr
->step
= TMR_STEP (op
);
856 addr
->offset
= TMR_OFFSET (op
);
859 /* Copies the reference information from OLD_REF to NEW_REF, where
860 NEW_REF should be either a MEM_REF or a TARGET_MEM_REF. */
863 copy_ref_info (tree new_ref
, tree old_ref
)
865 tree new_ptr_base
= NULL_TREE
;
867 gcc_assert (TREE_CODE (new_ref
) == MEM_REF
868 || TREE_CODE (new_ref
) == TARGET_MEM_REF
);
870 TREE_SIDE_EFFECTS (new_ref
) = TREE_SIDE_EFFECTS (old_ref
);
871 TREE_THIS_VOLATILE (new_ref
) = TREE_THIS_VOLATILE (old_ref
);
873 new_ptr_base
= TREE_OPERAND (new_ref
, 0);
875 /* We can transfer points-to information from an old pointer
876 or decl base to the new one. */
878 && TREE_CODE (new_ptr_base
) == SSA_NAME
879 && !SSA_NAME_PTR_INFO (new_ptr_base
))
881 tree base
= get_base_address (old_ref
);
884 else if ((TREE_CODE (base
) == MEM_REF
885 || TREE_CODE (base
) == TARGET_MEM_REF
)
886 && TREE_CODE (TREE_OPERAND (base
, 0)) == SSA_NAME
887 && SSA_NAME_PTR_INFO (TREE_OPERAND (base
, 0)))
889 struct ptr_info_def
*new_pi
;
890 unsigned int align
, misalign
;
892 duplicate_ssa_name_ptr_info
893 (new_ptr_base
, SSA_NAME_PTR_INFO (TREE_OPERAND (base
, 0)));
894 new_pi
= SSA_NAME_PTR_INFO (new_ptr_base
);
895 /* We have to be careful about transferring alignment information. */
896 if (get_ptr_info_alignment (new_pi
, &align
, &misalign
)
897 && TREE_CODE (old_ref
) == MEM_REF
898 && !(TREE_CODE (new_ref
) == TARGET_MEM_REF
899 && (TMR_INDEX2 (new_ref
)
900 || (TMR_STEP (new_ref
)
901 && (TREE_INT_CST_LOW (TMR_STEP (new_ref
))
904 unsigned int inc
= (mem_ref_offset (old_ref
).to_short_addr ()
905 - mem_ref_offset (new_ref
).to_short_addr ());
906 adjust_ptr_info_misalignment (new_pi
, inc
);
909 mark_ptr_info_alignment_unknown (new_pi
);
911 else if (TREE_CODE (base
) == VAR_DECL
912 || TREE_CODE (base
) == PARM_DECL
913 || TREE_CODE (base
) == RESULT_DECL
)
915 struct ptr_info_def
*pi
= get_ptr_info (new_ptr_base
);
916 pt_solution_set_var (&pi
->pt
, base
);
921 /* Move constants in target_mem_ref REF to offset. Returns the new target
922 mem ref if anything changes, NULL_TREE otherwise. */
925 maybe_fold_tmr (tree ref
)
927 struct mem_address addr
;
928 bool changed
= false;
931 get_address_description (ref
, &addr
);
934 && TREE_CODE (addr
.base
) == INTEGER_CST
935 && !integer_zerop (addr
.base
))
937 addr
.offset
= fold_binary_to_constant (PLUS_EXPR
,
938 TREE_TYPE (addr
.offset
),
939 addr
.offset
, addr
.base
);
940 addr
.base
= NULL_TREE
;
945 && TREE_CODE (TREE_OPERAND (addr
.symbol
, 0)) == MEM_REF
)
947 addr
.offset
= fold_binary_to_constant
948 (PLUS_EXPR
, TREE_TYPE (addr
.offset
),
950 TREE_OPERAND (TREE_OPERAND (addr
.symbol
, 0), 1));
951 addr
.symbol
= TREE_OPERAND (TREE_OPERAND (addr
.symbol
, 0), 0);
955 && handled_component_p (TREE_OPERAND (addr
.symbol
, 0)))
957 HOST_WIDE_INT offset
;
958 addr
.symbol
= build_fold_addr_expr
959 (get_addr_base_and_unit_offset
960 (TREE_OPERAND (addr
.symbol
, 0), &offset
));
961 addr
.offset
= int_const_binop (PLUS_EXPR
,
962 addr
.offset
, size_int (offset
));
966 if (addr
.index
&& TREE_CODE (addr
.index
) == INTEGER_CST
)
971 off
= fold_binary_to_constant (MULT_EXPR
, sizetype
,
973 addr
.step
= NULL_TREE
;
976 addr
.offset
= fold_binary_to_constant (PLUS_EXPR
,
977 TREE_TYPE (addr
.offset
),
979 addr
.index
= NULL_TREE
;
986 /* If we have propagated something into this TARGET_MEM_REF and thus
987 ended up folding it, always create a new TARGET_MEM_REF regardless
988 if it is valid in this for on the target - the propagation result
989 wouldn't be anyway. */
990 new_ref
= create_mem_ref_raw (TREE_TYPE (ref
),
991 TREE_TYPE (addr
.offset
), &addr
, false);
992 TREE_SIDE_EFFECTS (new_ref
) = TREE_SIDE_EFFECTS (ref
);
993 TREE_THIS_VOLATILE (new_ref
) = TREE_THIS_VOLATILE (ref
);
997 /* Dump PARTS to FILE. */
999 extern void dump_mem_address (FILE *, struct mem_address
*);
1001 dump_mem_address (FILE *file
, struct mem_address
*parts
)
1005 fprintf (file
, "symbol: ");
1006 print_generic_expr (file
, TREE_OPERAND (parts
->symbol
, 0), TDF_SLIM
);
1007 fprintf (file
, "\n");
1011 fprintf (file
, "base: ");
1012 print_generic_expr (file
, parts
->base
, TDF_SLIM
);
1013 fprintf (file
, "\n");
1017 fprintf (file
, "index: ");
1018 print_generic_expr (file
, parts
->index
, TDF_SLIM
);
1019 fprintf (file
, "\n");
1023 fprintf (file
, "step: ");
1024 print_generic_expr (file
, parts
->step
, TDF_SLIM
);
1025 fprintf (file
, "\n");
1029 fprintf (file
, "offset: ");
1030 print_generic_expr (file
, parts
->offset
, TDF_SLIM
);
1031 fprintf (file
, "\n");
1035 #include "gt-tree-ssa-address.h"