1 /* Memory address lowering and addressing mode selection.
2 Copyright (C) 2004, 2006, 2007, 2008, 2009, 2010, 2011
3 Free Software Foundation, Inc.
5 This file is part of GCC.
7 GCC is free software; you can redistribute it and/or modify it
8 under the terms of the GNU General Public License as published by the
9 Free Software Foundation; either version 3, or (at your option) any
12 GCC is distributed in the hope that it will be useful, but WITHOUT
13 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
14 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
17 You should have received a copy of the GNU General Public License
18 along with GCC; see the file COPYING3. If not see
19 <http://www.gnu.org/licenses/>. */
21 /* Utility functions for manipulation with TARGET_MEM_REFs -- tree expressions
22 that directly map to addressing modes of the target. */
26 #include "coretypes.h"
30 #include "basic-block.h"
31 #include "tree-pretty-print.h"
32 #include "tree-flow.h"
35 #include "tree-inline.h"
36 #include "tree-affine.h"
38 /* FIXME: We compute address costs using RTL. */
39 #include "insn-config.h"
46 /* TODO -- handling of symbols (according to Richard Hendersons
47 comments, http://gcc.gnu.org/ml/gcc-patches/2005-04/msg00949.html):
49 There are at least 5 different kinds of symbols that we can run up against:
51 (1) binds_local_p, small data area.
52 (2) binds_local_p, eg local statics
53 (3) !binds_local_p, eg global variables
54 (4) thread local, local_exec
55 (5) thread local, !local_exec
57 Now, (1) won't appear often in an array context, but it certainly can.
58 All you have to do is set -GN high enough, or explicitly mark any
59 random object __attribute__((section (".sdata"))).
61 All of these affect whether or not a symbol is in fact a valid address.
62 The only one tested here is (3). And that result may very well
63 be incorrect for (4) or (5).
65 An incorrect result here does not cause incorrect results out the
66 back end, because the expander in expr.c validizes the address. However
67 it would be nice to improve the handling here in order to produce more
70 /* A "template" for memory address, used to determine whether the address is
/* NOTE(review): this file is a mangled extraction -- statements are split
   across lines, each fragment is prefixed with a stray original-line-number
   token, and the embedded numbering skips, so original lines are missing.
   Comments are added here without touching any code token.  */

/* A memory-address "template": the address RTX itself plus pointers to the
   spots inside it where the step and offset constants are patched in when
   the template is reused.  NOTE(review): the closing "};" of this struct
   and the tails of the step_p/off_p comments appear to be missing.  */
73 typedef struct GTY (()) mem_addr_template
{
74 rtx ref
; /* The template. */
75 rtx
* GTY ((skip
)) step_p
; /* The point in template where the step should be
77 rtx
* GTY ((skip
)) off_p
; /* The point in template where the offset should
81 DEF_VEC_O (mem_addr_template
);
82 DEF_VEC_ALLOC_O (mem_addr_template
, gc
);
84 /* The templates. Each of the low five bits of the index corresponds to one
85 component of TARGET_MEM_REF being present, while the high bits identify
86 the address space. See TEMPL_IDX. */
/* GC-rooted cache of reusable address templates, indexed by TEMPL_IDX.  */
88 static GTY(()) VEC (mem_addr_template
, gc
) *mem_addr_template_list
;

/* NOTE(review): mangled extraction -- the first line of this macro body
   (presumably the address-space term shifted left by 5, per the comment
   above) and the final OFFSET bit appear to be missing.  */
90 #define TEMPL_IDX(AS, SYMBOL, BASE, INDEX, STEP, OFFSET) \
92 | ((SYMBOL != 0) << 4) \
93 | ((BASE != 0) << 3) \
94 | ((INDEX != 0) << 2) \
95 | ((STEP != 0) << 1) \
98 /* Stores address for memory reference with parameters SYMBOL, BASE, INDEX,
99 STEP and OFFSET to *ADDR using address mode ADDRESS_MODE. Stores pointers
100 to where step is placed to *STEP_P and offset to *OFFSET_P. */
/* NOTE(review): mangled extraction -- the return type, braces, local
   declarations and several guarding conditions are missing from this
   function; compare against the upstream file before relying on it.  */
103 gen_addr_rtx (enum machine_mode address_mode
,
104 rtx symbol
, rtx base
, rtx index
, rtx step
, rtx offset
,
105 rtx
*addr
, rtx
**step_p
, rtx
**offset_p
)
/* Scale the index element by STEP and record where the step constant
   sits so a template user can patch it in later.  */
120 act_elem
= gen_rtx_MULT (address_mode
, act_elem
, step
);
123 *step_p
= &XEXP (act_elem
, 1);
/* Fold a non-null, non-zero BASE into the accumulated address.  */
129 if (base
&& base
!= const0_rtx
)
132 *addr
= simplify_gen_binary (PLUS
, address_mode
, base
, *addr
);
/* Attach OFFSET next to the symbolic element and record its position.  */
142 act_elem
= gen_rtx_PLUS (address_mode
, act_elem
, offset
);
145 *offset_p
= &XEXP (act_elem
, 1);
/* Wrap SYMBOL_REF/LABEL_REF/CONST symbols in a CONST wrapper before
   adding them into the address.  */
147 if (GET_CODE (symbol
) == SYMBOL_REF
148 || GET_CODE (symbol
) == LABEL_REF
149 || GET_CODE (symbol
) == CONST
)
150 act_elem
= gen_rtx_CONST (address_mode
, act_elem
);
154 *addr
= gen_rtx_PLUS (address_mode
, *addr
, act_elem
);
/* No symbolic part: add OFFSET directly and record where it lives.  */
162 *addr
= gen_rtx_PLUS (address_mode
, *addr
, offset
);
164 *offset_p
= &XEXP (*addr
, 1);
178 /* Returns address for TARGET_MEM_REF with parameters given by ADDR
180 If REALLY_EXPAND is false, just make fake registers instead
181 of really expanding the operands, and perform the expansion in-place
182 by using one of the "templates". */
/* NOTE(review): mangled extraction -- the return type, braces, the
   REALLY_EXPAND parameter, several initializers and statement tails are
   missing from this function.  */
185 addr_for_mem_ref (struct mem_address
*addr
, addr_space_t as
,
/* Query the target for the address/pointer modes of address space AS.  */
188 enum machine_mode address_mode
= targetm
.addr_space
.address_mode (as
);
189 enum machine_mode pointer_mode
= targetm
.addr_space
.pointer_mode (as
);
190 rtx address
, sym
, bse
, idx
, st
, off
;
191 struct mem_addr_template
*templ
;
/* Materialize a step constant only when it is a non-trivial multiplier.  */
193 if (addr
->step
&& !integer_onep (addr
->step
))
194 st
= immed_double_int_const (tree_to_double_int (addr
->step
), pointer_mode
);
/* Materialize a (sign-extended) offset constant when non-zero.  */
198 if (addr
->offset
&& !integer_zerop (addr
->offset
))
199 off
= immed_double_int_const
200 (double_int_sext (tree_to_double_int (addr
->offset
),
201 TYPE_PRECISION (TREE_TYPE (addr
->offset
))),
/* Pick the template slot for this combination of present components
   and address space, growing the cache on demand.  */
208 unsigned int templ_index
209 = TEMPL_IDX (as
, addr
->symbol
, addr
->base
, addr
->index
, st
, off
);
212 >= VEC_length (mem_addr_template
, mem_addr_template_list
))
213 VEC_safe_grow_cleared (mem_addr_template
, gc
, mem_addr_template_list
,
216 /* Reuse the templates for addresses, so that we do not waste memory. */
217 templ
= VEC_index (mem_addr_template
, mem_addr_template_list
, templ_index
)
;
/* Fake-expansion path: stand-in symbol and raw virtual registers are
   used so the template's validity can be checked without expansion.  */
220 sym
= (addr
->symbol
?
221 gen_rtx_SYMBOL_REF (pointer_mode
, ggc_strdup ("test_symbol"))
224 gen_raw_REG (pointer_mode
, LAST_VIRTUAL_REGISTER
+ 1)
227 gen_raw_REG (pointer_mode
, LAST_VIRTUAL_REGISTER
+ 2)
230 gen_addr_rtx (pointer_mode
, sym
, bse
, idx
,
231 st
? const0_rtx
: NULL_RTX
,
232 off
? const0_rtx
: NULL_RTX
,
246 /* Otherwise really expand the expressions. */
248 ? expand_expr (addr
->symbol
, NULL_RTX
, pointer_mode
, EXPAND_NORMAL
)
251 ? expand_expr (addr
->base
, NULL_RTX
, pointer_mode
, EXPAND_NORMAL
)
254 ? expand_expr (addr
->index
, NULL_RTX
, pointer_mode
, EXPAND_NORMAL
)
257 gen_addr_rtx (pointer_mode
, sym
, bse
, idx
, st
, off
, &address
, NULL
, NULL
);
/* Addresses are built in pointer mode; convert if the target's address
   mode differs.  */
258 if (pointer_mode
!= address_mode
)
259 address
= convert_memory_address (address_mode
, address
);
263 /* Returns address of MEM_REF in TYPE. */
/* NOTE(review): mangled extraction -- return type, braces, several
   declarations (act_elem, addr) and statement tails are missing.
   Builds a GENERIC address expression equivalent to the TARGET_MEM_REF:
   base + index * step + index2 + offset, folded as it goes.  */
266 tree_mem_ref_addr (tree type
, tree mem_ref
)
270 tree step
= TMR_STEP (mem_ref
), offset
= TMR_OFFSET (mem_ref
);
271 tree addr_base
= NULL_TREE
, addr_off
= NULL_TREE
;
273 addr_base
= fold_convert (type
, TMR_BASE (mem_ref
));
/* Fold the scaled index into the offset part.  */
275 act_elem
= TMR_INDEX (mem_ref
);
279 act_elem
= fold_build2 (MULT_EXPR
, TREE_TYPE (act_elem
),
/* Add the unscaled second index, if present.  */
284 act_elem
= TMR_INDEX2 (mem_ref
);
288 addr_off
= fold_build2 (PLUS_EXPR
, TREE_TYPE (addr_off
),
/* Finally add a non-zero constant offset.  */
294 if (offset
&& !integer_zerop (offset
))
297 addr_off
= fold_build2 (PLUS_EXPR
, TREE_TYPE (addr_off
), addr_off
,
298 fold_convert (TREE_TYPE (addr_off
), offset
));
/* Combine the base with the accumulated offset.  */
304 addr
= fold_build_pointer_plus (addr_base
, addr_off
);
311 /* Returns true if a memory reference in MODE and with parameters given by
312 ADDR is valid on the current target. */
315 valid_mem_ref_p (enum machine_mode mode
, addr_space_t as
,
316 struct mem_address
*addr
)
320 address
= addr_for_mem_ref (addr
, as
, false);
324 return memory_address_addr_space_p (mode
, address
, as
);
327 /* Checks whether a TARGET_MEM_REF with type TYPE and parameters given by ADDR
328 is valid on the current target and if so, creates and returns the
329 TARGET_MEM_REF. If VERIFY is false omit the verification step. */
/* NOTE(review): mangled extraction -- return type, braces, the VERIFY
   parameter, the base/index2 setup and several conditions are missing.  */
332 create_mem_ref_raw (tree type
, tree alias_ptr_type
, struct mem_address
*addr
,
/* Bail out when verification is requested and the target rejects it.  */
338 && !valid_mem_ref_p (TYPE_MODE (type
), TYPE_ADDR_SPACE (type
), addr
))
/* A step of 1 carries no information; canonicalize it away.  */
341 if (addr
->step
&& integer_onep (addr
->step
))
342 addr
->step
= NULL_TREE
;
/* The offset operand doubles as the alias pointer type carrier, so it
   must always be present, defaulting to zero.  */
345 addr
->offset
= fold_convert (alias_ptr_type
, addr
->offset
);
347 addr
->offset
= build_int_cst (alias_ptr_type
, 0);
355 && POINTER_TYPE_P (TREE_TYPE (addr
->base
)))
362 base
= build_int_cst (ptr_type_node
, 0);
366 /* If possible use a plain MEM_REF instead of a TARGET_MEM_REF.
367 ??? As IVOPTs does not follow restrictions to where the base
368 pointer may point to create a MEM_REF only if we know that
370 if ((TREE_CODE (base
) == ADDR_EXPR
|| TREE_CODE (base
) == INTEGER_CST
)
371 && (!index2
|| integer_zerop (index2
))
372 && (!addr
->index
|| integer_zerop (addr
->index
)))
373 return fold_build2 (MEM_REF
, type
, base
, addr
->offset
);
375 return build5 (TARGET_MEM_REF
, type
,
376 base
, addr
->offset
, addr
->index
, addr
->step
, index2
);
379 /* Returns true if OBJ is an object whose address is a link time constant. */
382 fixed_address_object_p (tree obj
)
384 return (TREE_CODE (obj
) == VAR_DECL
385 && (TREE_STATIC (obj
)
386 || DECL_EXTERNAL (obj
))
387 && ! DECL_DLLIMPORT_P (obj
));
390 /* If ADDR contains an address of object that is a link time constant,
391 move it to PARTS->symbol. */
/* NOTE(review): mangled extraction -- return type, braces, the loop-index
   declaration, the "continue"/assignment bodies and the post-loop handling
   are missing.  Scans the affine combination for a coefficient-1 element
   that is the address of a fixed object.  */
394 move_fixed_address_to_symbol (struct mem_address
*parts
, aff_tree
*addr
)
397 tree val
= NULL_TREE
;
399 for (i
= 0; i
< addr
->n
; i
++)
/* Only elements with coefficient 1 can become the symbol.  */
401 if (!double_int_one_p (addr
->elts
[i
].coef
))
404 val
= addr
->elts
[i
].val
;
405 if (TREE_CODE (val
) == ADDR_EXPR
406 && fixed_address_object_p (TREE_OPERAND (val
, 0)))
/* Remove the chosen element from the affine combination.  */
414 aff_combination_remove_elt (addr
, i
);
417 /* If ADDR contains an instance of BASE_HINT, move it to PARTS->base. */
/* NOTE(review): mangled extraction -- return type, braces, the aff_tree
   parameter line, loop-index and qual declarations, and several statement
   bodies are missing.  */
420 move_hint_to_base (tree type
, struct mem_address
*parts
, tree base_hint
,
424 tree val
= NULL_TREE
;
427 for (i
= 0; i
< addr
->n
; i
++)
/* Only coefficient-1 elements may serve as the base.  */
429 if (!double_int_one_p (addr
->elts
[i
].coef
))
432 val
= addr
->elts
[i
].val
;
433 if (operand_equal_p (val
, base_hint
, 0))
440 /* Cast value to appropriate pointer type. We cannot use a pointer
441 to TYPE directly, as the back-end will assume registers of pointer
442 type are aligned, and just the base itself may not actually be.
443 We use void pointer to the type's address space instead. */
444 qual
= ENCODE_QUAL_ADDR_SPACE (TYPE_ADDR_SPACE (type
));
445 type
= build_qualified_type (void_type_node
, qual
);
446 parts
->base
= fold_convert (build_pointer_type (type
), val
);
447 aff_combination_remove_elt (addr
, i
);
450 /* If ADDR contains an address of a dereferenced pointer, move it to
/* NOTE(review): mangled extraction -- the tail of the comment above
   (presumably "PARTS->base"), the return type, braces, loop-index
   declaration and the assignment body are missing.  Picks the first
   coefficient-1 element of pointer type as the base.  */
454 move_pointer_to_base (struct mem_address
*parts
, aff_tree
*addr
)
457 tree val
= NULL_TREE
;
459 for (i
= 0; i
< addr
->n
; i
++)
461 if (!double_int_one_p (addr
->elts
[i
].coef
))
464 val
= addr
->elts
[i
].val
;
465 if (POINTER_TYPE_P (TREE_TYPE (val
)))
473 aff_combination_remove_elt (addr
, i
);
476 /* Moves the loop variant part V in linear address ADDR to be the index
/* NOTE(review): mangled extraction -- the tail of the comment above
   (presumably "of PARTS"), return type, braces, loop-index declaration
   and the break/guard bodies are missing.  Finds V inside ADDR and turns
   it into PARTS->index with its coefficient as PARTS->step.  */
480 move_variant_to_index (struct mem_address
*parts
, aff_tree
*addr
, tree v
)
483 tree val
= NULL_TREE
;
/* The caller must not have chosen an index yet.  */
485 gcc_assert (!parts
->index
);
486 for (i
= 0; i
< addr
->n
; i
++)
488 val
= addr
->elts
[i
].val
;
489 if (operand_equal_p (val
, v
, 0))
/* Found it: the element becomes the index, its coefficient the step.  */
496 parts
->index
= fold_convert (sizetype
, val
);
497 parts
->step
= double_int_to_tree (sizetype
, addr
->elts
[i
].coef
);
498 aff_combination_remove_elt (addr
, i
);
501 /* Adds ELT to PARTS. */
/* NOTE(review): mangled extraction -- return type, braces, the "if PARTS
   has no index/base yet" guards, a type declaration and the tail of the
   final fold_build2 call are missing.  Prefers filling the empty index,
   otherwise folds ELT into the base.  */
504 add_to_parts (struct mem_address
*parts
, tree elt
)
510 parts
->index
= fold_convert (sizetype
, elt
);
520 /* Add ELT to base. */
521 type
= TREE_TYPE (parts
->base
);
/* Pointer bases need POINTER_PLUS semantics; integral ones plain PLUS.  */
522 if (POINTER_TYPE_P (type
))
523 parts
->base
= fold_build_pointer_plus (parts
->base
, elt
);
525 parts
->base
= fold_build2 (PLUS_EXPR
, type
,
529 /* Finds the most expensive multiplication in ADDR that can be
530 expressed in an addressing mode and move the corresponding
531 element(s) to PARTS. */
/* NOTE(review): mangled extraction -- return type, braces, loop-index
   declarations (i, j), the coef declaration, early-exit guards and
   several statement bodies are missing from this function.  */
534 most_expensive_mult_to_index (tree type
, struct mem_address
*parts
,
535 aff_tree
*addr
, bool speed
)
537 addr_space_t as
= TYPE_ADDR_SPACE (type
);
538 enum machine_mode address_mode
= targetm
.addr_space
.address_mode (as
);
540 double_int best_mult
, amult
, amult_neg
;
541 unsigned best_mult_cost
= 0, acost
;
542 tree mult_elt
= NULL_TREE
, elt
;
544 enum tree_code op_code
;
/* First pass: find the multiplier with the highest synthesis cost that
   the target can still express in an addressing mode.  */
546 best_mult
= double_int_zero
;
547 for (i
= 0; i
< addr
->n
; i
++)
549 if (!double_int_fits_in_shwi_p (addr
->elts
[i
].coef
))
552 coef
= double_int_to_shwi (addr
->elts
[i
].coef
);
554 || !multiplier_allowed_in_address_p (coef
, TYPE_MODE (type
), as
))
557 acost
= multiply_by_const_cost (coef
, address_mode
, speed
);
559 if (acost
> best_mult_cost
)
561 best_mult_cost
= acost
;
562 best_mult
= addr
->elts
[i
].coef
;
569 /* Collect elements multiplied by best_mult. */
/* Second pass: gather all elements whose coefficient is best_mult (added)
   or -best_mult (subtracted), compacting the remainder in place via j.  */
570 for (i
= j
= 0; i
< addr
->n
; i
++)
572 amult
= addr
->elts
[i
].coef
;
573 amult_neg
= double_int_ext_for_comb (double_int_neg (amult
), addr
);
575 if (double_int_equal_p (amult
, best_mult
))
577 else if (double_int_equal_p (amult_neg
, best_mult
))
578 op_code
= MINUS_EXPR
;
/* Keep non-matching elements, shifted down to slot j.  */
581 addr
->elts
[j
] = addr
->elts
[i
];
586 elt
= fold_convert (sizetype
, addr
->elts
[i
].val
);
588 mult_elt
= fold_build2 (op_code
, sizetype
, mult_elt
, elt
);
589 else if (op_code
== PLUS_EXPR
)
592 mult_elt
= fold_build1 (NEGATE_EXPR
, sizetype
, elt
);
/* The gathered sum becomes the index, scaled by best_mult as step.  */
596 parts
->index
= mult_elt
;
597 parts
->step
= double_int_to_tree (sizetype
, best_mult
);
600 /* Splits address ADDR for a memory access of type TYPE into PARTS.
601 If BASE_HINT is non-NULL, it specifies an SSA name to be used
602 preferentially as base of the reference, and IV_CAND is the selected
603 iv candidate used in ADDR.
605 TODO -- be more clever about the distribution of the elements of ADDR
606 to PARTS. Some architectures do not support anything but single
607 register in address, possibly with a small integer offset; while
608 create_mem_ref will simplify the address to an acceptable shape
609 later, it would be more efficient to know that asking for complicated
610 addressing modes is useless. */
/* NOTE(review): mangled extraction -- return type, braces, the speed
   parameter line, loop-index/part declarations, else-branches and some
   comment tails are missing from this function.  */
613 addr_to_parts (tree type
, aff_tree
*addr
, tree iv_cand
,
614 tree base_hint
, struct mem_address
*parts
,
/* Start from an empty decomposition.  */
620 parts
->symbol
= NULL_TREE
;
621 parts
->base
= NULL_TREE
;
622 parts
->index
= NULL_TREE
;
623 parts
->step
= NULL_TREE
;
625 if (!double_int_zero_p (addr
->offset
))
626 parts
->offset
= double_int_to_tree (sizetype
, addr
->offset
);
628 parts
->offset
= NULL_TREE
;
630 /* Try to find a symbol. */
631 move_fixed_address_to_symbol (parts
, addr
);
633 /* No need to do address parts reassociation if the number of parts
634 is <= 2 -- in that case, no loop invariant code motion can be
637 if (!base_hint
&& (addr
->n
> 2))
638 move_variant_to_index (parts
, addr
, iv_cand
);
640 /* First move the most expensive feasible multiplication
643 most_expensive_mult_to_index (type
, parts
, addr
, speed
);
645 /* Try to find a base of the reference. Since at the moment
646 there is no reliable way how to distinguish between pointer and its
647 offset, this is just a guess. */
648 if (!parts
->symbol
&& base_hint
)
649 move_hint_to_base (type
, parts
, base_hint
, addr
);
650 if (!parts
->symbol
&& !parts
->base
)
651 move_pointer_to_base (parts
, addr
);
653 /* Then try to process the remaining elements. */
654 for (i
= 0; i
< addr
->n
; i
++)
656 part
= fold_convert (sizetype
, addr
->elts
[i
].val
);
657 if (!double_int_one_p (addr
->elts
[i
].coef
))
658 part
= fold_build2 (MULT_EXPR
, sizetype
, part
,
659 double_int_to_tree (sizetype
, addr
->elts
[i
].coef
));
660 add_to_parts (parts
, part
);
/* Fold any leftover non-affine rest of ADDR into the parts too.  */
663 add_to_parts (parts
, fold_convert (sizetype
, addr
->rest
));
666 /* Force the PARTS to register. */
/* NOTE(review): mangled extraction -- return type, braces, the guards for
   each part being non-NULL, and the middle arguments of the second
   force_gimple_operand_gsi call are missing.  Gimplifies base and index
   into forms acceptable inside a memory reference address.  */
669 gimplify_mem_ref_parts (gimple_stmt_iterator
*gsi
, struct mem_address
*parts
)
672 parts
->base
= force_gimple_operand_gsi_1 (gsi
, parts
->base
,
673 is_gimple_mem_ref_addr
, NULL_TREE
,
674 true, GSI_SAME_STMT
);
676 parts
->index
= force_gimple_operand_gsi (gsi
, parts
->index
,
678 true, GSI_SAME_STMT
);
681 /* Creates and returns a TARGET_MEM_REF for address ADDR. If necessary
682 computations are emitted in front of GSI. TYPE is the mode
683 of created memory reference. IV_CAND is the selected iv candidate in ADDR,
684 and BASE_HINT is non NULL if IV_CAND comes from a base address
/* NOTE(review): mangled extraction -- return type, braces, the mem_ref/tmp
   declarations, the "if (mem_ref) return" early exits after each retry,
   and several guards are missing.  The visible structure is a cascade:
   try the full decomposition, then progressively simplify (fold step into
   index, symbol into base, index into base, offset into base) until the
   target accepts the reference.  */
688 create_mem_ref (gimple_stmt_iterator
*gsi
, tree type
, aff_tree
*addr
,
689 tree alias_ptr_type
, tree iv_cand
, tree base_hint
, bool speed
)
692 struct mem_address parts
;
694 addr_to_parts (type
, addr
, iv_cand
, base_hint
, &parts
, speed
);
695 gimplify_mem_ref_parts (gsi
, &parts
);
696 mem_ref
= create_mem_ref_raw (type
, alias_ptr_type
, &parts
, true);
700 /* The expression is too complicated. Try making it simpler. */
702 if (parts
.step
&& !integer_onep (parts
.step
))
704 /* Move the multiplication to index. */
705 gcc_assert (parts
.index
);
706 parts
.index
= force_gimple_operand_gsi (gsi
,
707 fold_build2 (MULT_EXPR
, sizetype
,
708 parts
.index
, parts
.step
),
709 true, NULL_TREE
, true, GSI_SAME_STMT
);
710 parts
.step
= NULL_TREE
;
/* Retry with the step folded away.  */
712 mem_ref
= create_mem_ref_raw (type
, alias_ptr_type
, &parts
, true);
720 gcc_assert (is_gimple_val (tmp
));
722 /* Add the symbol to base, eventually forcing it to register. */
725 gcc_assert (useless_type_conversion_p
726 (sizetype
, TREE_TYPE (parts
.base
)));
730 parts
.base
= force_gimple_operand_gsi_1 (gsi
,
731 fold_build_pointer_plus (tmp
, parts
.base
),
732 is_gimple_mem_ref_addr
, NULL_TREE
, true, GSI_SAME_STMT
);
736 parts
.index
= parts
.base
;
742 parts
.symbol
= NULL_TREE
;
/* Retry with the symbol merged into base/index.  */
744 mem_ref
= create_mem_ref_raw (type
, alias_ptr_type
, &parts
, true);
751 /* Add index to base. */
754 parts
.base
= force_gimple_operand_gsi_1 (gsi
,
755 fold_build_pointer_plus (parts
.base
, parts
.index
),
756 is_gimple_mem_ref_addr
, NULL_TREE
, true, GSI_SAME_STMT
);
759 parts
.base
= parts
.index
;
760 parts
.index
= NULL_TREE
;
/* Retry with the index folded into the base.  */
762 mem_ref
= create_mem_ref_raw (type
, alias_ptr_type
, &parts
, true);
767 if (parts
.offset
&& !integer_zerop (parts
.offset
))
769 /* Try adding offset to base. */
772 parts
.base
= force_gimple_operand_gsi_1 (gsi
,
773 fold_build_pointer_plus (parts
.base
, parts
.offset
),
774 is_gimple_mem_ref_addr
, NULL_TREE
, true, GSI_SAME_STMT
);
777 parts
.base
= parts
.offset
;
779 parts
.offset
= NULL_TREE
;
/* Last retry with the offset folded into the base.  */
781 mem_ref
= create_mem_ref_raw (type
, alias_ptr_type
, &parts
, true);
786 /* Verify that the address is in the simplest possible shape
787 (only a register). If we cannot create such a memory reference,
788 something is really wrong. */
789 gcc_assert (parts
.symbol
== NULL_TREE
);
790 gcc_assert (parts
.index
== NULL_TREE
);
791 gcc_assert (!parts
.step
|| integer_onep (parts
.step
));
792 gcc_assert (!parts
.offset
|| integer_zerop (parts
.offset
));
796 /* Copies components of the address from OP to ADDR. */
/* NOTE(review): mangled extraction -- return type, braces and the
   branch structure around the symbol/base selection are missing.
   Decodes a TARGET_MEM_REF's five operands into a struct mem_address.  */
799 get_address_description (tree op
, struct mem_address
*addr
)
/* An ADDR_EXPR base is a symbol; the second index then carries the
   register base.  */
801 if (TREE_CODE (TMR_BASE (op
)) == ADDR_EXPR
)
803 addr
->symbol
= TMR_BASE (op
);
804 addr
->base
= TMR_INDEX2 (op
);
808 addr
->symbol
= NULL_TREE
;
811 gcc_assert (integer_zerop (TMR_BASE (op
)));
812 addr
->base
= TMR_INDEX2 (op
);
815 addr
->base
= TMR_BASE (op
);
817 addr
->index
= TMR_INDEX (op
);
818 addr
->step
= TMR_STEP (op
);
819 addr
->offset
= TMR_OFFSET (op
);
822 /* Copies the additional information attached to target_mem_ref FROM to TO. */
825 copy_mem_ref_info (tree to
, tree from
)
827 /* And the info about the original reference. */
828 TREE_SIDE_EFFECTS (to
) = TREE_SIDE_EFFECTS (from
);
829 TREE_THIS_VOLATILE (to
) = TREE_THIS_VOLATILE (from
);
832 /* Copies the reference information from OLD_REF to NEW_REF, where
833 NEW_REF should be either a MEM_REF or a TARGET_MEM_REF. */
/* NOTE(review): mangled extraction -- return type, braces, the condition
   heads of several if statements (only continuation lines starting with
   "&&"/"||" survive) and some branch bodies are missing.  */
836 copy_ref_info (tree new_ref
, tree old_ref
)
838 tree new_ptr_base
= NULL_TREE
;
840 gcc_assert (TREE_CODE (new_ref
) == MEM_REF
841 || TREE_CODE (new_ref
) == TARGET_MEM_REF
);
/* Copy the side-effect and volatility flags first.  */
843 TREE_SIDE_EFFECTS (new_ref
) = TREE_SIDE_EFFECTS (old_ref
);
844 TREE_THIS_VOLATILE (new_ref
) = TREE_THIS_VOLATILE (old_ref
);
846 new_ptr_base
= TREE_OPERAND (new_ref
, 0);
848 /* We can transfer points-to information from an old pointer
849 or decl base to the new one. */
851 && TREE_CODE (new_ptr_base
) == SSA_NAME
852 && !SSA_NAME_PTR_INFO (new_ptr_base
))
854 tree base
= get_base_address (old_ref
);
857 else if ((TREE_CODE (base
) == MEM_REF
858 || TREE_CODE (base
) == TARGET_MEM_REF
)
859 && TREE_CODE (TREE_OPERAND (base
, 0)) == SSA_NAME
860 && SSA_NAME_PTR_INFO (TREE_OPERAND (base
, 0)))
862 struct ptr_info_def
*new_pi
;
863 unsigned int align
, misalign
;
/* Clone the old pointer's points-to info onto the new SSA name.  */
865 duplicate_ssa_name_ptr_info
866 (new_ptr_base
, SSA_NAME_PTR_INFO (TREE_OPERAND (base
, 0)));
867 new_pi
= SSA_NAME_PTR_INFO (new_ptr_base
);
868 /* We have to be careful about transferring alignment information. */
869 if (get_ptr_info_alignment (new_pi
, &align
, &misalign
)
870 && TREE_CODE (old_ref
) == MEM_REF
871 && !(TREE_CODE (new_ref
) == TARGET_MEM_REF
872 && (TMR_INDEX2 (new_ref
)
873 || (TMR_STEP (new_ref
)
874 && (TREE_INT_CST_LOW (TMR_STEP (new_ref
))
/* Shift the recorded misalignment by the offset delta between the
   old and new references.  */
877 unsigned int inc
= double_int_sub (mem_ref_offset (old_ref
),
878 mem_ref_offset (new_ref
)).low
;
879 adjust_ptr_info_misalignment (new_pi
, inc
);
882 mark_ptr_info_alignment_unknown (new_pi
);
/* A decl base gives an exact points-to solution for the new pointer.  */
884 else if (TREE_CODE (base
) == VAR_DECL
885 || TREE_CODE (base
) == PARM_DECL
886 || TREE_CODE (base
) == RESULT_DECL
)
888 struct ptr_info_def
*pi
= get_ptr_info (new_ptr_base
);
889 pt_solution_set_var (&pi
->pt
, base
);
894 /* Move constants in target_mem_ref REF to offset. Returns the new target
895 mem ref if anything changes, NULL_TREE otherwise. */
/* NOTE(review): mangled extraction -- return type, braces, the ret/off
   declarations, several condition heads, "changed = true;" updates and
   the "!changed" early return are missing.  Each visible branch folds one
   constant component (base, symbol, scaled index) into the offset.  */
898 maybe_fold_tmr (tree ref
)
900 struct mem_address addr
;
901 bool changed
= false;
904 get_address_description (ref
, &addr
);
/* A constant non-zero base is absorbed into the offset.  */
907 && TREE_CODE (addr
.base
) == INTEGER_CST
908 && !integer_zerop (addr
.base
))
910 addr
.offset
= fold_binary_to_constant (PLUS_EXPR
,
911 TREE_TYPE (addr
.offset
),
912 addr
.offset
, addr
.base
);
913 addr
.base
= NULL_TREE
;
/* A symbol wrapping a MEM_REF is peeled: its constant offset joins
   addr.offset and its pointer becomes the new symbol operand.  */
918 && TREE_CODE (TREE_OPERAND (addr
.symbol
, 0)) == MEM_REF
)
920 addr
.offset
= fold_binary_to_constant
921 (PLUS_EXPR
, TREE_TYPE (addr
.offset
),
923 TREE_OPERAND (TREE_OPERAND (addr
.symbol
, 0), 1));
924 addr
.symbol
= TREE_OPERAND (TREE_OPERAND (addr
.symbol
, 0), 0);
/* A handled component inside the symbol is flattened to its base
   address plus a unit offset.  */
928 && handled_component_p (TREE_OPERAND (addr
.symbol
, 0)))
930 HOST_WIDE_INT offset
;
931 addr
.symbol
= build_fold_addr_expr
932 (get_addr_base_and_unit_offset
933 (TREE_OPERAND (addr
.symbol
, 0), &offset
));
934 addr
.offset
= int_const_binop (PLUS_EXPR
,
935 addr
.offset
, size_int (offset
));
/* A constant index is multiplied by the step and folded into the
   offset as well.  */
939 if (addr
.index
&& TREE_CODE (addr
.index
) == INTEGER_CST
)
944 off
= fold_binary_to_constant (MULT_EXPR
, sizetype
,
946 addr
.step
= NULL_TREE
;
949 addr
.offset
= fold_binary_to_constant (PLUS_EXPR
,
950 TREE_TYPE (addr
.offset
),
952 addr
.index
= NULL_TREE
;
959 /* If we have propagated something into this TARGET_MEM_REF and thus
960 ended up folding it, always create a new TARGET_MEM_REF regardless
961 of whether it is valid in this form on the target - the propagation
962 result wouldn't be valid anyway. */
963 ret
= create_mem_ref_raw (TREE_TYPE (ref
),
964 TREE_TYPE (addr
.offset
), &addr
, false);
965 copy_mem_ref_info (ret
, ref
);
969 /* Dump PARTS to FILE. */
/* NOTE(review): mangled extraction -- the return type line, braces, and
   the "if (parts->...)" guards in front of each print group are missing;
   as written, each component would be printed unconditionally.  */
971 extern void dump_mem_address (FILE *, struct mem_address
*);
973 dump_mem_address (FILE *file
, struct mem_address
*parts
)
/* The symbol is stored as an ADDR_EXPR; print the underlying object.  */
977 fprintf (file
, "symbol: ");
978 print_generic_expr (file
, TREE_OPERAND (parts
->symbol
, 0), TDF_SLIM
);
979 fprintf (file
, "\n");
983 fprintf (file
, "base: ");
984 print_generic_expr (file
, parts
->base
, TDF_SLIM
);
985 fprintf (file
, "\n");
989 fprintf (file
, "index: ");
990 print_generic_expr (file
, parts
->index
, TDF_SLIM
);
991 fprintf (file
, "\n");
995 fprintf (file
, "step: ");
996 print_generic_expr (file
, parts
->step
, TDF_SLIM
);
997 fprintf (file
, "\n");
1001 fprintf (file
, "offset: ");
1002 print_generic_expr (file
, parts
->offset
, TDF_SLIM
);
1003 fprintf (file
, "\n");
1007 #include "gt-tree-ssa-address.h"