1 /* Memory address lowering and addressing mode selection.
2 Copyright (C) 2004-2015 Free Software Foundation, Inc.
4 This file is part of GCC.
GCC is free software; you can redistribute it and/or modify it
under the terms of the GNU General Public License as published by the
Free Software Foundation; either version 3, or (at your option) any
later version.

GCC is distributed in the hope that it will be useful, but WITHOUT
ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
for more details.

You should have received a copy of the GNU General Public License
along with GCC; see the file COPYING3.  If not see
<http://www.gnu.org/licenses/>.  */
20 /* Utility functions for manipulation with TARGET_MEM_REFs -- tree expressions
21 that directly map to addressing modes of the target. */
25 #include "coretypes.h"
30 #include "double-int.h"
37 #include "fold-const.h"
38 #include "stor-layout.h"
41 #include "hard-reg-set.h"
44 #include "basic-block.h"
45 #include "tree-pretty-print.h"
46 #include "tree-ssa-alias.h"
47 #include "internal-fn.h"
48 #include "gimple-expr.h"
51 #include "gimple-iterator.h"
52 #include "gimplify-me.h"
53 #include "stringpool.h"
54 #include "tree-ssanames.h"
55 #include "tree-ssa-loop-ivopts.h"
60 #include "tree-inline.h"
61 #include "tree-affine.h"
63 /* FIXME: We compute address costs using RTL. */
64 #include "insn-config.h"
70 #include "tree-ssa-address.h"
72 /* TODO -- handling of symbols (according to Richard Hendersons
73 comments, http://gcc.gnu.org/ml/gcc-patches/2005-04/msg00949.html):
75 There are at least 5 different kinds of symbols that we can run up against:
77 (1) binds_local_p, small data area.
78 (2) binds_local_p, eg local statics
79 (3) !binds_local_p, eg global variables
80 (4) thread local, local_exec
81 (5) thread local, !local_exec
83 Now, (1) won't appear often in an array context, but it certainly can.
84 All you have to do is set -GN high enough, or explicitly mark any
85 random object __attribute__((section (".sdata"))).
87 All of these affect whether or not a symbol is in fact a valid address.
88 The only one tested here is (3). And that result may very well
89 be incorrect for (4) or (5).
   An incorrect result here does not cause incorrect results out the
   back end, because the expander in expr.c validizes the address.  However
   it would be nice to improve the handling here in order to produce more
   precise symbol flags.  */
96 /* A "template" for memory address, used to determine whether the address is
99 typedef struct GTY (()) mem_addr_template
{
100 rtx ref
; /* The template. */
101 rtx
* GTY ((skip
)) step_p
; /* The point in template where the step should be
103 rtx
* GTY ((skip
)) off_p
; /* The point in template where the offset should
108 /* The templates. Each of the low five bits of the index corresponds to one
109 component of TARGET_MEM_REF being present, while the high bits identify
110 the address space. See TEMPL_IDX. */
112 static GTY(()) vec
<mem_addr_template
, va_gc
> *mem_addr_template_list
;
/* Computes the index into mem_addr_template_list for address space AS and
   the presence (non-NULL-ness) of the SYMBOL, BASE, INDEX, STEP and OFFSET
   components of a TARGET_MEM_REF.  The low five bits encode which
   components are present; the bits above them hold the address space.  */

#define TEMPL_IDX(AS, SYMBOL, BASE, INDEX, STEP, OFFSET) \
  (((int) (AS)) << 5 \
   | ((SYMBOL != 0) << 4) \
   | ((BASE != 0) << 3) \
   | ((INDEX != 0) << 2) \
   | ((STEP != 0) << 1) \
   | (OFFSET != 0))
122 /* Stores address for memory reference with parameters SYMBOL, BASE, INDEX,
123 STEP and OFFSET to *ADDR using address mode ADDRESS_MODE. Stores pointers
124 to where step is placed to *STEP_P and offset to *OFFSET_P. */
127 gen_addr_rtx (machine_mode address_mode
,
128 rtx symbol
, rtx base
, rtx index
, rtx step
, rtx offset
,
129 rtx
*addr
, rtx
**step_p
, rtx
**offset_p
)
144 act_elem
= gen_rtx_MULT (address_mode
, act_elem
, step
);
147 *step_p
= &XEXP (act_elem
, 1);
153 if (base
&& base
!= const0_rtx
)
156 *addr
= simplify_gen_binary (PLUS
, address_mode
, base
, *addr
);
166 act_elem
= gen_rtx_PLUS (address_mode
, act_elem
, offset
);
169 *offset_p
= &XEXP (act_elem
, 1);
171 if (GET_CODE (symbol
) == SYMBOL_REF
172 || GET_CODE (symbol
) == LABEL_REF
173 || GET_CODE (symbol
) == CONST
)
174 act_elem
= gen_rtx_CONST (address_mode
, act_elem
);
178 *addr
= gen_rtx_PLUS (address_mode
, *addr
, act_elem
);
186 *addr
= gen_rtx_PLUS (address_mode
, *addr
, offset
);
188 *offset_p
= &XEXP (*addr
, 1);
202 /* Description of a memory address. */
206 tree symbol
, base
, index
, step
, offset
;
209 /* Returns address for TARGET_MEM_REF with parameters given by ADDR
211 If REALLY_EXPAND is false, just make fake registers instead
212 of really expanding the operands, and perform the expansion in-place
213 by using one of the "templates". */
216 addr_for_mem_ref (struct mem_address
*addr
, addr_space_t as
,
219 machine_mode address_mode
= targetm
.addr_space
.address_mode (as
);
220 machine_mode pointer_mode
= targetm
.addr_space
.pointer_mode (as
);
221 rtx address
, sym
, bse
, idx
, st
, off
;
222 struct mem_addr_template
*templ
;
224 if (addr
->step
&& !integer_onep (addr
->step
))
225 st
= immed_wide_int_const (addr
->step
, pointer_mode
);
229 if (addr
->offset
&& !integer_zerop (addr
->offset
))
231 offset_int dc
= offset_int::from (addr
->offset
, SIGNED
);
232 off
= immed_wide_int_const (dc
, pointer_mode
);
239 unsigned int templ_index
240 = TEMPL_IDX (as
, addr
->symbol
, addr
->base
, addr
->index
, st
, off
);
242 if (templ_index
>= vec_safe_length (mem_addr_template_list
))
243 vec_safe_grow_cleared (mem_addr_template_list
, templ_index
+ 1);
245 /* Reuse the templates for addresses, so that we do not waste memory. */
246 templ
= &(*mem_addr_template_list
)[templ_index
];
249 sym
= (addr
->symbol
?
250 gen_rtx_SYMBOL_REF (pointer_mode
, ggc_strdup ("test_symbol"))
253 gen_raw_REG (pointer_mode
, LAST_VIRTUAL_REGISTER
+ 1)
256 gen_raw_REG (pointer_mode
, LAST_VIRTUAL_REGISTER
+ 2)
259 gen_addr_rtx (pointer_mode
, sym
, bse
, idx
,
260 st
? const0_rtx
: NULL_RTX
,
261 off
? const0_rtx
: NULL_RTX
,
275 /* Otherwise really expand the expressions. */
277 ? expand_expr (addr
->symbol
, NULL_RTX
, pointer_mode
, EXPAND_NORMAL
)
280 ? expand_expr (addr
->base
, NULL_RTX
, pointer_mode
, EXPAND_NORMAL
)
283 ? expand_expr (addr
->index
, NULL_RTX
, pointer_mode
, EXPAND_NORMAL
)
286 gen_addr_rtx (pointer_mode
, sym
, bse
, idx
, st
, off
, &address
, NULL
, NULL
);
287 if (pointer_mode
!= address_mode
)
288 address
= convert_memory_address (address_mode
, address
);
292 /* implement addr_for_mem_ref() directly from a tree, which avoids exporting
293 the mem_address structure. */
296 addr_for_mem_ref (tree exp
, addr_space_t as
, bool really_expand
)
298 struct mem_address addr
;
299 get_address_description (exp
, &addr
);
300 return addr_for_mem_ref (&addr
, as
, really_expand
);
303 /* Returns address of MEM_REF in TYPE. */
306 tree_mem_ref_addr (tree type
, tree mem_ref
)
310 tree step
= TMR_STEP (mem_ref
), offset
= TMR_OFFSET (mem_ref
);
311 tree addr_base
= NULL_TREE
, addr_off
= NULL_TREE
;
313 addr_base
= fold_convert (type
, TMR_BASE (mem_ref
));
315 act_elem
= TMR_INDEX (mem_ref
);
319 act_elem
= fold_build2 (MULT_EXPR
, TREE_TYPE (act_elem
),
324 act_elem
= TMR_INDEX2 (mem_ref
);
328 addr_off
= fold_build2 (PLUS_EXPR
, TREE_TYPE (addr_off
),
334 if (offset
&& !integer_zerop (offset
))
337 addr_off
= fold_build2 (PLUS_EXPR
, TREE_TYPE (addr_off
), addr_off
,
338 fold_convert (TREE_TYPE (addr_off
), offset
));
344 addr
= fold_build_pointer_plus (addr_base
, addr_off
);
351 /* Returns true if a memory reference in MODE and with parameters given by
352 ADDR is valid on the current target. */
355 valid_mem_ref_p (machine_mode mode
, addr_space_t as
,
356 struct mem_address
*addr
)
360 address
= addr_for_mem_ref (addr
, as
, false);
364 return memory_address_addr_space_p (mode
, address
, as
);
367 /* Checks whether a TARGET_MEM_REF with type TYPE and parameters given by ADDR
368 is valid on the current target and if so, creates and returns the
369 TARGET_MEM_REF. If VERIFY is false omit the verification step. */
372 create_mem_ref_raw (tree type
, tree alias_ptr_type
, struct mem_address
*addr
,
378 && !valid_mem_ref_p (TYPE_MODE (type
), TYPE_ADDR_SPACE (type
), addr
))
381 if (addr
->step
&& integer_onep (addr
->step
))
382 addr
->step
= NULL_TREE
;
385 addr
->offset
= fold_convert (alias_ptr_type
, addr
->offset
);
387 addr
->offset
= build_int_cst (alias_ptr_type
, 0);
395 && POINTER_TYPE_P (TREE_TYPE (addr
->base
)))
402 base
= build_int_cst (ptr_type_node
, 0);
406 /* If possible use a plain MEM_REF instead of a TARGET_MEM_REF.
407 ??? As IVOPTs does not follow restrictions to where the base
408 pointer may point to create a MEM_REF only if we know that
410 if ((TREE_CODE (base
) == ADDR_EXPR
|| TREE_CODE (base
) == INTEGER_CST
)
411 && (!index2
|| integer_zerop (index2
))
412 && (!addr
->index
|| integer_zerop (addr
->index
)))
413 return fold_build2 (MEM_REF
, type
, base
, addr
->offset
);
415 return build5 (TARGET_MEM_REF
, type
,
416 base
, addr
->offset
, addr
->index
, addr
->step
, index2
);
419 /* Returns true if OBJ is an object whose address is a link time constant. */
422 fixed_address_object_p (tree obj
)
424 return (TREE_CODE (obj
) == VAR_DECL
425 && (TREE_STATIC (obj
)
426 || DECL_EXTERNAL (obj
))
427 && ! DECL_DLLIMPORT_P (obj
));
430 /* If ADDR contains an address of object that is a link time constant,
431 move it to PARTS->symbol. */
434 move_fixed_address_to_symbol (struct mem_address
*parts
, aff_tree
*addr
)
437 tree val
= NULL_TREE
;
439 for (i
= 0; i
< addr
->n
; i
++)
441 if (addr
->elts
[i
].coef
!= 1)
444 val
= addr
->elts
[i
].val
;
445 if (TREE_CODE (val
) == ADDR_EXPR
446 && fixed_address_object_p (TREE_OPERAND (val
, 0)))
454 aff_combination_remove_elt (addr
, i
);
457 /* If ADDR contains an instance of BASE_HINT, move it to PARTS->base. */
460 move_hint_to_base (tree type
, struct mem_address
*parts
, tree base_hint
,
464 tree val
= NULL_TREE
;
467 for (i
= 0; i
< addr
->n
; i
++)
469 if (addr
->elts
[i
].coef
!= 1)
472 val
= addr
->elts
[i
].val
;
473 if (operand_equal_p (val
, base_hint
, 0))
480 /* Cast value to appropriate pointer type. We cannot use a pointer
481 to TYPE directly, as the back-end will assume registers of pointer
482 type are aligned, and just the base itself may not actually be.
483 We use void pointer to the type's address space instead. */
484 qual
= ENCODE_QUAL_ADDR_SPACE (TYPE_ADDR_SPACE (type
));
485 type
= build_qualified_type (void_type_node
, qual
);
486 parts
->base
= fold_convert (build_pointer_type (type
), val
);
487 aff_combination_remove_elt (addr
, i
);
490 /* If ADDR contains an address of a dereferenced pointer, move it to
494 move_pointer_to_base (struct mem_address
*parts
, aff_tree
*addr
)
497 tree val
= NULL_TREE
;
499 for (i
= 0; i
< addr
->n
; i
++)
501 if (addr
->elts
[i
].coef
!= 1)
504 val
= addr
->elts
[i
].val
;
505 if (POINTER_TYPE_P (TREE_TYPE (val
)))
513 aff_combination_remove_elt (addr
, i
);
516 /* Moves the loop variant part V in linear address ADDR to be the index
520 move_variant_to_index (struct mem_address
*parts
, aff_tree
*addr
, tree v
)
523 tree val
= NULL_TREE
;
525 gcc_assert (!parts
->index
);
526 for (i
= 0; i
< addr
->n
; i
++)
528 val
= addr
->elts
[i
].val
;
529 if (operand_equal_p (val
, v
, 0))
536 parts
->index
= fold_convert (sizetype
, val
);
537 parts
->step
= wide_int_to_tree (sizetype
, addr
->elts
[i
].coef
);
538 aff_combination_remove_elt (addr
, i
);
541 /* Adds ELT to PARTS. */
544 add_to_parts (struct mem_address
*parts
, tree elt
)
550 parts
->index
= fold_convert (sizetype
, elt
);
560 /* Add ELT to base. */
561 type
= TREE_TYPE (parts
->base
);
562 if (POINTER_TYPE_P (type
))
563 parts
->base
= fold_build_pointer_plus (parts
->base
, elt
);
565 parts
->base
= fold_build2 (PLUS_EXPR
, type
,
569 /* Finds the most expensive multiplication in ADDR that can be
570 expressed in an addressing mode and move the corresponding
571 element(s) to PARTS. */
574 most_expensive_mult_to_index (tree type
, struct mem_address
*parts
,
575 aff_tree
*addr
, bool speed
)
577 addr_space_t as
= TYPE_ADDR_SPACE (type
);
578 machine_mode address_mode
= targetm
.addr_space
.address_mode (as
);
580 unsigned best_mult_cost
= 0, acost
;
581 tree mult_elt
= NULL_TREE
, elt
;
583 enum tree_code op_code
;
585 offset_int best_mult
= 0;
586 for (i
= 0; i
< addr
->n
; i
++)
588 if (!wi::fits_shwi_p (addr
->elts
[i
].coef
))
591 coef
= addr
->elts
[i
].coef
.to_shwi ();
593 || !multiplier_allowed_in_address_p (coef
, TYPE_MODE (type
), as
))
596 acost
= mult_by_coeff_cost (coef
, address_mode
, speed
);
598 if (acost
> best_mult_cost
)
600 best_mult_cost
= acost
;
601 best_mult
= offset_int::from (addr
->elts
[i
].coef
, SIGNED
);
608 /* Collect elements multiplied by best_mult. */
609 for (i
= j
= 0; i
< addr
->n
; i
++)
611 offset_int amult
= offset_int::from (addr
->elts
[i
].coef
, SIGNED
);
612 offset_int amult_neg
= -wi::sext (amult
, TYPE_PRECISION (addr
->type
));
614 if (amult
== best_mult
)
616 else if (amult_neg
== best_mult
)
617 op_code
= MINUS_EXPR
;
620 addr
->elts
[j
] = addr
->elts
[i
];
625 elt
= fold_convert (sizetype
, addr
->elts
[i
].val
);
627 mult_elt
= fold_build2 (op_code
, sizetype
, mult_elt
, elt
);
628 else if (op_code
== PLUS_EXPR
)
631 mult_elt
= fold_build1 (NEGATE_EXPR
, sizetype
, elt
);
635 parts
->index
= mult_elt
;
636 parts
->step
= wide_int_to_tree (sizetype
, best_mult
);
639 /* Splits address ADDR for a memory access of type TYPE into PARTS.
640 If BASE_HINT is non-NULL, it specifies an SSA name to be used
641 preferentially as base of the reference, and IV_CAND is the selected
642 iv candidate used in ADDR.
644 TODO -- be more clever about the distribution of the elements of ADDR
645 to PARTS. Some architectures do not support anything but single
646 register in address, possibly with a small integer offset; while
647 create_mem_ref will simplify the address to an acceptable shape
648 later, it would be more efficient to know that asking for complicated
649 addressing modes is useless. */
652 addr_to_parts (tree type
, aff_tree
*addr
, tree iv_cand
,
653 tree base_hint
, struct mem_address
*parts
,
659 parts
->symbol
= NULL_TREE
;
660 parts
->base
= NULL_TREE
;
661 parts
->index
= NULL_TREE
;
662 parts
->step
= NULL_TREE
;
664 if (addr
->offset
!= 0)
665 parts
->offset
= wide_int_to_tree (sizetype
, addr
->offset
);
667 parts
->offset
= NULL_TREE
;
669 /* Try to find a symbol. */
670 move_fixed_address_to_symbol (parts
, addr
);
672 /* No need to do address parts reassociation if the number of parts
673 is <= 2 -- in that case, no loop invariant code motion can be
676 if (!base_hint
&& (addr
->n
> 2))
677 move_variant_to_index (parts
, addr
, iv_cand
);
679 /* First move the most expensive feasible multiplication
682 most_expensive_mult_to_index (type
, parts
, addr
, speed
);
684 /* Try to find a base of the reference. Since at the moment
685 there is no reliable way how to distinguish between pointer and its
686 offset, this is just a guess. */
687 if (!parts
->symbol
&& base_hint
)
688 move_hint_to_base (type
, parts
, base_hint
, addr
);
689 if (!parts
->symbol
&& !parts
->base
)
690 move_pointer_to_base (parts
, addr
);
692 /* Then try to process the remaining elements. */
693 for (i
= 0; i
< addr
->n
; i
++)
695 part
= fold_convert (sizetype
, addr
->elts
[i
].val
);
696 if (addr
->elts
[i
].coef
!= 1)
697 part
= fold_build2 (MULT_EXPR
, sizetype
, part
,
698 wide_int_to_tree (sizetype
, addr
->elts
[i
].coef
));
699 add_to_parts (parts
, part
);
702 add_to_parts (parts
, fold_convert (sizetype
, addr
->rest
));
705 /* Force the PARTS to register. */
708 gimplify_mem_ref_parts (gimple_stmt_iterator
*gsi
, struct mem_address
*parts
)
711 parts
->base
= force_gimple_operand_gsi_1 (gsi
, parts
->base
,
712 is_gimple_mem_ref_addr
, NULL_TREE
,
713 true, GSI_SAME_STMT
);
715 parts
->index
= force_gimple_operand_gsi (gsi
, parts
->index
,
717 true, GSI_SAME_STMT
);
720 /* Creates and returns a TARGET_MEM_REF for address ADDR. If necessary
721 computations are emitted in front of GSI. TYPE is the mode
722 of created memory reference. IV_CAND is the selected iv candidate in ADDR,
723 and BASE_HINT is non NULL if IV_CAND comes from a base address
727 create_mem_ref (gimple_stmt_iterator
*gsi
, tree type
, aff_tree
*addr
,
728 tree alias_ptr_type
, tree iv_cand
, tree base_hint
, bool speed
)
731 struct mem_address parts
;
733 addr_to_parts (type
, addr
, iv_cand
, base_hint
, &parts
, speed
);
734 gimplify_mem_ref_parts (gsi
, &parts
);
735 mem_ref
= create_mem_ref_raw (type
, alias_ptr_type
, &parts
, true);
739 /* The expression is too complicated. Try making it simpler. */
741 if (parts
.step
&& !integer_onep (parts
.step
))
743 /* Move the multiplication to index. */
744 gcc_assert (parts
.index
);
745 parts
.index
= force_gimple_operand_gsi (gsi
,
746 fold_build2 (MULT_EXPR
, sizetype
,
747 parts
.index
, parts
.step
),
748 true, NULL_TREE
, true, GSI_SAME_STMT
);
749 parts
.step
= NULL_TREE
;
751 mem_ref
= create_mem_ref_raw (type
, alias_ptr_type
, &parts
, true);
759 gcc_assert (is_gimple_val (tmp
));
761 /* Add the symbol to base, eventually forcing it to register. */
764 gcc_assert (useless_type_conversion_p
765 (sizetype
, TREE_TYPE (parts
.base
)));
769 parts
.base
= force_gimple_operand_gsi_1 (gsi
,
770 fold_build_pointer_plus (tmp
, parts
.base
),
771 is_gimple_mem_ref_addr
, NULL_TREE
, true, GSI_SAME_STMT
);
775 parts
.index
= parts
.base
;
781 parts
.symbol
= NULL_TREE
;
783 mem_ref
= create_mem_ref_raw (type
, alias_ptr_type
, &parts
, true);
790 /* Add index to base. */
793 parts
.base
= force_gimple_operand_gsi_1 (gsi
,
794 fold_build_pointer_plus (parts
.base
, parts
.index
),
795 is_gimple_mem_ref_addr
, NULL_TREE
, true, GSI_SAME_STMT
);
798 parts
.base
= parts
.index
;
799 parts
.index
= NULL_TREE
;
801 mem_ref
= create_mem_ref_raw (type
, alias_ptr_type
, &parts
, true);
806 if (parts
.offset
&& !integer_zerop (parts
.offset
))
808 /* Try adding offset to base. */
811 parts
.base
= force_gimple_operand_gsi_1 (gsi
,
812 fold_build_pointer_plus (parts
.base
, parts
.offset
),
813 is_gimple_mem_ref_addr
, NULL_TREE
, true, GSI_SAME_STMT
);
816 parts
.base
= parts
.offset
;
818 parts
.offset
= NULL_TREE
;
820 mem_ref
= create_mem_ref_raw (type
, alias_ptr_type
, &parts
, true);
825 /* Verify that the address is in the simplest possible shape
826 (only a register). If we cannot create such a memory reference,
827 something is really wrong. */
828 gcc_assert (parts
.symbol
== NULL_TREE
);
829 gcc_assert (parts
.index
== NULL_TREE
);
830 gcc_assert (!parts
.step
|| integer_onep (parts
.step
));
831 gcc_assert (!parts
.offset
|| integer_zerop (parts
.offset
));
835 /* Copies components of the address from OP to ADDR. */
838 get_address_description (tree op
, struct mem_address
*addr
)
840 if (TREE_CODE (TMR_BASE (op
)) == ADDR_EXPR
)
842 addr
->symbol
= TMR_BASE (op
);
843 addr
->base
= TMR_INDEX2 (op
);
847 addr
->symbol
= NULL_TREE
;
850 gcc_assert (integer_zerop (TMR_BASE (op
)));
851 addr
->base
= TMR_INDEX2 (op
);
854 addr
->base
= TMR_BASE (op
);
856 addr
->index
= TMR_INDEX (op
);
857 addr
->step
= TMR_STEP (op
);
858 addr
->offset
= TMR_OFFSET (op
);
861 /* Copies the reference information from OLD_REF to NEW_REF, where
862 NEW_REF should be either a MEM_REF or a TARGET_MEM_REF. */
865 copy_ref_info (tree new_ref
, tree old_ref
)
867 tree new_ptr_base
= NULL_TREE
;
869 gcc_assert (TREE_CODE (new_ref
) == MEM_REF
870 || TREE_CODE (new_ref
) == TARGET_MEM_REF
);
872 TREE_SIDE_EFFECTS (new_ref
) = TREE_SIDE_EFFECTS (old_ref
);
873 TREE_THIS_VOLATILE (new_ref
) = TREE_THIS_VOLATILE (old_ref
);
875 new_ptr_base
= TREE_OPERAND (new_ref
, 0);
877 /* We can transfer points-to information from an old pointer
878 or decl base to the new one. */
880 && TREE_CODE (new_ptr_base
) == SSA_NAME
881 && !SSA_NAME_PTR_INFO (new_ptr_base
))
883 tree base
= get_base_address (old_ref
);
886 else if ((TREE_CODE (base
) == MEM_REF
887 || TREE_CODE (base
) == TARGET_MEM_REF
)
888 && TREE_CODE (TREE_OPERAND (base
, 0)) == SSA_NAME
889 && SSA_NAME_PTR_INFO (TREE_OPERAND (base
, 0)))
891 struct ptr_info_def
*new_pi
;
892 unsigned int align
, misalign
;
894 duplicate_ssa_name_ptr_info
895 (new_ptr_base
, SSA_NAME_PTR_INFO (TREE_OPERAND (base
, 0)));
896 new_pi
= SSA_NAME_PTR_INFO (new_ptr_base
);
897 /* We have to be careful about transferring alignment information. */
898 if (get_ptr_info_alignment (new_pi
, &align
, &misalign
)
899 && TREE_CODE (old_ref
) == MEM_REF
900 && !(TREE_CODE (new_ref
) == TARGET_MEM_REF
901 && (TMR_INDEX2 (new_ref
)
902 || (TMR_STEP (new_ref
)
903 && (TREE_INT_CST_LOW (TMR_STEP (new_ref
))
906 unsigned int inc
= (mem_ref_offset (old_ref
).to_short_addr ()
907 - mem_ref_offset (new_ref
).to_short_addr ());
908 adjust_ptr_info_misalignment (new_pi
, inc
);
911 mark_ptr_info_alignment_unknown (new_pi
);
913 else if (TREE_CODE (base
) == VAR_DECL
914 || TREE_CODE (base
) == PARM_DECL
915 || TREE_CODE (base
) == RESULT_DECL
)
917 struct ptr_info_def
*pi
= get_ptr_info (new_ptr_base
);
918 pt_solution_set_var (&pi
->pt
, base
);
923 /* Move constants in target_mem_ref REF to offset. Returns the new target
924 mem ref if anything changes, NULL_TREE otherwise. */
927 maybe_fold_tmr (tree ref
)
929 struct mem_address addr
;
930 bool changed
= false;
933 get_address_description (ref
, &addr
);
936 && TREE_CODE (addr
.base
) == INTEGER_CST
937 && !integer_zerop (addr
.base
))
939 addr
.offset
= fold_binary_to_constant (PLUS_EXPR
,
940 TREE_TYPE (addr
.offset
),
941 addr
.offset
, addr
.base
);
942 addr
.base
= NULL_TREE
;
947 && TREE_CODE (TREE_OPERAND (addr
.symbol
, 0)) == MEM_REF
)
949 addr
.offset
= fold_binary_to_constant
950 (PLUS_EXPR
, TREE_TYPE (addr
.offset
),
952 TREE_OPERAND (TREE_OPERAND (addr
.symbol
, 0), 1));
953 addr
.symbol
= TREE_OPERAND (TREE_OPERAND (addr
.symbol
, 0), 0);
957 && handled_component_p (TREE_OPERAND (addr
.symbol
, 0)))
959 HOST_WIDE_INT offset
;
960 addr
.symbol
= build_fold_addr_expr
961 (get_addr_base_and_unit_offset
962 (TREE_OPERAND (addr
.symbol
, 0), &offset
));
963 addr
.offset
= int_const_binop (PLUS_EXPR
,
964 addr
.offset
, size_int (offset
));
968 if (addr
.index
&& TREE_CODE (addr
.index
) == INTEGER_CST
)
973 off
= fold_binary_to_constant (MULT_EXPR
, sizetype
,
975 addr
.step
= NULL_TREE
;
978 addr
.offset
= fold_binary_to_constant (PLUS_EXPR
,
979 TREE_TYPE (addr
.offset
),
981 addr
.index
= NULL_TREE
;
988 /* If we have propagated something into this TARGET_MEM_REF and thus
989 ended up folding it, always create a new TARGET_MEM_REF regardless
990 if it is valid in this for on the target - the propagation result
991 wouldn't be anyway. */
992 new_ref
= create_mem_ref_raw (TREE_TYPE (ref
),
993 TREE_TYPE (addr
.offset
), &addr
, false);
994 TREE_SIDE_EFFECTS (new_ref
) = TREE_SIDE_EFFECTS (ref
);
995 TREE_THIS_VOLATILE (new_ref
) = TREE_THIS_VOLATILE (ref
);
999 /* Dump PARTS to FILE. */
1001 extern void dump_mem_address (FILE *, struct mem_address
*);
1003 dump_mem_address (FILE *file
, struct mem_address
*parts
)
1007 fprintf (file
, "symbol: ");
1008 print_generic_expr (file
, TREE_OPERAND (parts
->symbol
, 0), TDF_SLIM
);
1009 fprintf (file
, "\n");
1013 fprintf (file
, "base: ");
1014 print_generic_expr (file
, parts
->base
, TDF_SLIM
);
1015 fprintf (file
, "\n");
1019 fprintf (file
, "index: ");
1020 print_generic_expr (file
, parts
->index
, TDF_SLIM
);
1021 fprintf (file
, "\n");
1025 fprintf (file
, "step: ");
1026 print_generic_expr (file
, parts
->step
, TDF_SLIM
);
1027 fprintf (file
, "\n");
1031 fprintf (file
, "offset: ");
1032 print_generic_expr (file
, parts
->offset
, TDF_SLIM
);
1033 fprintf (file
, "\n");
1037 #include "gt-tree-ssa-address.h"