1 /* Memory address lowering and addressing mode selection.
2 Copyright (C) 2004-2015 Free Software Foundation, Inc.
4 This file is part of GCC.
6 GCC is free software; you can redistribute it and/or modify it
7 under the terms of the GNU General Public License as published by the
8 Free Software Foundation; either version 3, or (at your option) any
11 GCC is distributed in the hope that it will be useful, but WITHOUT
12 ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
16 You should have received a copy of the GNU General Public License
17 along with GCC; see the file COPYING3. If not see
18 <http://www.gnu.org/licenses/>. */
20 /* Utility functions for manipulation with TARGET_MEM_REFs -- tree expressions
21 that directly map to addressing modes of the target. */
25 #include "coretypes.h"
34 #include "fold-const.h"
35 #include "stor-layout.h"
38 #include "hard-reg-set.h"
40 #include "basic-block.h"
41 #include "tree-pretty-print.h"
42 #include "tree-ssa-alias.h"
43 #include "internal-fn.h"
44 #include "gimple-expr.h"
47 #include "gimple-iterator.h"
48 #include "gimplify-me.h"
49 #include "stringpool.h"
50 #include "tree-ssanames.h"
51 #include "tree-ssa-loop-ivopts.h"
55 #include "statistics.h"
56 #include "insn-config.h"
67 #include "tree-inline.h"
68 #include "tree-affine.h"
70 /* FIXME: We compute address costs using RTL. */
73 #include "tree-ssa-address.h"
75 /* TODO -- handling of symbols (according to Richard Hendersons
76 comments, http://gcc.gnu.org/ml/gcc-patches/2005-04/msg00949.html):
78 There are at least 5 different kinds of symbols that we can run up against:
80 (1) binds_local_p, small data area.
81 (2) binds_local_p, eg local statics
82 (3) !binds_local_p, eg global variables
83 (4) thread local, local_exec
84 (5) thread local, !local_exec
86 Now, (1) won't appear often in an array context, but it certainly can.
87 All you have to do is set -GN high enough, or explicitly mark any
88 random object __attribute__((section (".sdata"))).
90 All of these affect whether or not a symbol is in fact a valid address.
91 The only one tested here is (3). And that result may very well
92 be incorrect for (4) or (5).
94 An incorrect result here does not cause incorrect results out the
95 back end, because the expander in expr.c validizes the address. However
   it would be nice to improve the handling here in order to produce more
   interesting symbolic constants.  */
99 /* A "template" for memory address, used to determine whether the address is
102 typedef struct GTY (()) mem_addr_template
{
103 rtx ref
; /* The template. */
104 rtx
* GTY ((skip
)) step_p
; /* The point in template where the step should be
106 rtx
* GTY ((skip
)) off_p
; /* The point in template where the offset should
111 /* The templates. Each of the low five bits of the index corresponds to one
112 component of TARGET_MEM_REF being present, while the high bits identify
113 the address space. See TEMPL_IDX. */
115 static GTY(()) vec
<mem_addr_template
, va_gc
> *mem_addr_template_list
;
/* Map an address space and the presence of each TARGET_MEM_REF component
   to a unique index into mem_addr_template_list.  */
#define TEMPL_IDX(AS, SYMBOL, BASE, INDEX, STEP, OFFSET) \
  (((int) (AS) << 5) \
   | ((SYMBOL != 0) << 4) \
   | ((BASE != 0) << 3) \
   | ((INDEX != 0) << 2) \
   | ((STEP != 0) << 1) \
   | (OFFSET != 0))
125 /* Stores address for memory reference with parameters SYMBOL, BASE, INDEX,
126 STEP and OFFSET to *ADDR using address mode ADDRESS_MODE. Stores pointers
127 to where step is placed to *STEP_P and offset to *OFFSET_P. */
130 gen_addr_rtx (machine_mode address_mode
,
131 rtx symbol
, rtx base
, rtx index
, rtx step
, rtx offset
,
132 rtx
*addr
, rtx
**step_p
, rtx
**offset_p
)
147 act_elem
= gen_rtx_MULT (address_mode
, act_elem
, step
);
150 *step_p
= &XEXP (act_elem
, 1);
156 if (base
&& base
!= const0_rtx
)
159 *addr
= simplify_gen_binary (PLUS
, address_mode
, base
, *addr
);
169 act_elem
= gen_rtx_PLUS (address_mode
, act_elem
, offset
);
172 *offset_p
= &XEXP (act_elem
, 1);
174 if (GET_CODE (symbol
) == SYMBOL_REF
175 || GET_CODE (symbol
) == LABEL_REF
176 || GET_CODE (symbol
) == CONST
)
177 act_elem
= gen_rtx_CONST (address_mode
, act_elem
);
181 *addr
= gen_rtx_PLUS (address_mode
, *addr
, act_elem
);
189 *addr
= gen_rtx_PLUS (address_mode
, *addr
, offset
);
191 *offset_p
= &XEXP (*addr
, 1);
205 /* Description of a memory address. */
209 tree symbol
, base
, index
, step
, offset
;
212 /* Returns address for TARGET_MEM_REF with parameters given by ADDR
214 If REALLY_EXPAND is false, just make fake registers instead
215 of really expanding the operands, and perform the expansion in-place
216 by using one of the "templates". */
219 addr_for_mem_ref (struct mem_address
*addr
, addr_space_t as
,
222 machine_mode address_mode
= targetm
.addr_space
.address_mode (as
);
223 machine_mode pointer_mode
= targetm
.addr_space
.pointer_mode (as
);
224 rtx address
, sym
, bse
, idx
, st
, off
;
225 struct mem_addr_template
*templ
;
227 if (addr
->step
&& !integer_onep (addr
->step
))
228 st
= immed_wide_int_const (addr
->step
, pointer_mode
);
232 if (addr
->offset
&& !integer_zerop (addr
->offset
))
234 offset_int dc
= offset_int::from (addr
->offset
, SIGNED
);
235 off
= immed_wide_int_const (dc
, pointer_mode
);
242 unsigned int templ_index
243 = TEMPL_IDX (as
, addr
->symbol
, addr
->base
, addr
->index
, st
, off
);
245 if (templ_index
>= vec_safe_length (mem_addr_template_list
))
246 vec_safe_grow_cleared (mem_addr_template_list
, templ_index
+ 1);
248 /* Reuse the templates for addresses, so that we do not waste memory. */
249 templ
= &(*mem_addr_template_list
)[templ_index
];
252 sym
= (addr
->symbol
?
253 gen_rtx_SYMBOL_REF (pointer_mode
, ggc_strdup ("test_symbol"))
256 gen_raw_REG (pointer_mode
, LAST_VIRTUAL_REGISTER
+ 1)
259 gen_raw_REG (pointer_mode
, LAST_VIRTUAL_REGISTER
+ 2)
262 gen_addr_rtx (pointer_mode
, sym
, bse
, idx
,
263 st
? const0_rtx
: NULL_RTX
,
264 off
? const0_rtx
: NULL_RTX
,
278 /* Otherwise really expand the expressions. */
280 ? expand_expr (addr
->symbol
, NULL_RTX
, pointer_mode
, EXPAND_NORMAL
)
283 ? expand_expr (addr
->base
, NULL_RTX
, pointer_mode
, EXPAND_NORMAL
)
286 ? expand_expr (addr
->index
, NULL_RTX
, pointer_mode
, EXPAND_NORMAL
)
289 gen_addr_rtx (pointer_mode
, sym
, bse
, idx
, st
, off
, &address
, NULL
, NULL
);
290 if (pointer_mode
!= address_mode
)
291 address
= convert_memory_address (address_mode
, address
);
295 /* implement addr_for_mem_ref() directly from a tree, which avoids exporting
296 the mem_address structure. */
299 addr_for_mem_ref (tree exp
, addr_space_t as
, bool really_expand
)
301 struct mem_address addr
;
302 get_address_description (exp
, &addr
);
303 return addr_for_mem_ref (&addr
, as
, really_expand
);
306 /* Returns address of MEM_REF in TYPE. */
309 tree_mem_ref_addr (tree type
, tree mem_ref
)
313 tree step
= TMR_STEP (mem_ref
), offset
= TMR_OFFSET (mem_ref
);
314 tree addr_base
= NULL_TREE
, addr_off
= NULL_TREE
;
316 addr_base
= fold_convert (type
, TMR_BASE (mem_ref
));
318 act_elem
= TMR_INDEX (mem_ref
);
322 act_elem
= fold_build2 (MULT_EXPR
, TREE_TYPE (act_elem
),
327 act_elem
= TMR_INDEX2 (mem_ref
);
331 addr_off
= fold_build2 (PLUS_EXPR
, TREE_TYPE (addr_off
),
337 if (offset
&& !integer_zerop (offset
))
340 addr_off
= fold_build2 (PLUS_EXPR
, TREE_TYPE (addr_off
), addr_off
,
341 fold_convert (TREE_TYPE (addr_off
), offset
));
347 addr
= fold_build_pointer_plus (addr_base
, addr_off
);
354 /* Returns true if a memory reference in MODE and with parameters given by
355 ADDR is valid on the current target. */
358 valid_mem_ref_p (machine_mode mode
, addr_space_t as
,
359 struct mem_address
*addr
)
363 address
= addr_for_mem_ref (addr
, as
, false);
367 return memory_address_addr_space_p (mode
, address
, as
);
370 /* Checks whether a TARGET_MEM_REF with type TYPE and parameters given by ADDR
371 is valid on the current target and if so, creates and returns the
372 TARGET_MEM_REF. If VERIFY is false omit the verification step. */
375 create_mem_ref_raw (tree type
, tree alias_ptr_type
, struct mem_address
*addr
,
381 && !valid_mem_ref_p (TYPE_MODE (type
), TYPE_ADDR_SPACE (type
), addr
))
384 if (addr
->step
&& integer_onep (addr
->step
))
385 addr
->step
= NULL_TREE
;
388 addr
->offset
= fold_convert (alias_ptr_type
, addr
->offset
);
390 addr
->offset
= build_int_cst (alias_ptr_type
, 0);
398 && POINTER_TYPE_P (TREE_TYPE (addr
->base
)))
405 base
= build_int_cst (ptr_type_node
, 0);
409 /* If possible use a plain MEM_REF instead of a TARGET_MEM_REF.
410 ??? As IVOPTs does not follow restrictions to where the base
411 pointer may point to create a MEM_REF only if we know that
413 if ((TREE_CODE (base
) == ADDR_EXPR
|| TREE_CODE (base
) == INTEGER_CST
)
414 && (!index2
|| integer_zerop (index2
))
415 && (!addr
->index
|| integer_zerop (addr
->index
)))
416 return fold_build2 (MEM_REF
, type
, base
, addr
->offset
);
418 return build5 (TARGET_MEM_REF
, type
,
419 base
, addr
->offset
, addr
->index
, addr
->step
, index2
);
422 /* Returns true if OBJ is an object whose address is a link time constant. */
425 fixed_address_object_p (tree obj
)
427 return (TREE_CODE (obj
) == VAR_DECL
428 && (TREE_STATIC (obj
)
429 || DECL_EXTERNAL (obj
))
430 && ! DECL_DLLIMPORT_P (obj
));
433 /* If ADDR contains an address of object that is a link time constant,
434 move it to PARTS->symbol. */
437 move_fixed_address_to_symbol (struct mem_address
*parts
, aff_tree
*addr
)
440 tree val
= NULL_TREE
;
442 for (i
= 0; i
< addr
->n
; i
++)
444 if (addr
->elts
[i
].coef
!= 1)
447 val
= addr
->elts
[i
].val
;
448 if (TREE_CODE (val
) == ADDR_EXPR
449 && fixed_address_object_p (TREE_OPERAND (val
, 0)))
457 aff_combination_remove_elt (addr
, i
);
460 /* If ADDR contains an instance of BASE_HINT, move it to PARTS->base. */
463 move_hint_to_base (tree type
, struct mem_address
*parts
, tree base_hint
,
467 tree val
= NULL_TREE
;
470 for (i
= 0; i
< addr
->n
; i
++)
472 if (addr
->elts
[i
].coef
!= 1)
475 val
= addr
->elts
[i
].val
;
476 if (operand_equal_p (val
, base_hint
, 0))
483 /* Cast value to appropriate pointer type. We cannot use a pointer
484 to TYPE directly, as the back-end will assume registers of pointer
485 type are aligned, and just the base itself may not actually be.
486 We use void pointer to the type's address space instead. */
487 qual
= ENCODE_QUAL_ADDR_SPACE (TYPE_ADDR_SPACE (type
));
488 type
= build_qualified_type (void_type_node
, qual
);
489 parts
->base
= fold_convert (build_pointer_type (type
), val
);
490 aff_combination_remove_elt (addr
, i
);
493 /* If ADDR contains an address of a dereferenced pointer, move it to
497 move_pointer_to_base (struct mem_address
*parts
, aff_tree
*addr
)
500 tree val
= NULL_TREE
;
502 for (i
= 0; i
< addr
->n
; i
++)
504 if (addr
->elts
[i
].coef
!= 1)
507 val
= addr
->elts
[i
].val
;
508 if (POINTER_TYPE_P (TREE_TYPE (val
)))
516 aff_combination_remove_elt (addr
, i
);
519 /* Moves the loop variant part V in linear address ADDR to be the index
523 move_variant_to_index (struct mem_address
*parts
, aff_tree
*addr
, tree v
)
526 tree val
= NULL_TREE
;
528 gcc_assert (!parts
->index
);
529 for (i
= 0; i
< addr
->n
; i
++)
531 val
= addr
->elts
[i
].val
;
532 if (operand_equal_p (val
, v
, 0))
539 parts
->index
= fold_convert (sizetype
, val
);
540 parts
->step
= wide_int_to_tree (sizetype
, addr
->elts
[i
].coef
);
541 aff_combination_remove_elt (addr
, i
);
544 /* Adds ELT to PARTS. */
547 add_to_parts (struct mem_address
*parts
, tree elt
)
553 parts
->index
= fold_convert (sizetype
, elt
);
563 /* Add ELT to base. */
564 type
= TREE_TYPE (parts
->base
);
565 if (POINTER_TYPE_P (type
))
566 parts
->base
= fold_build_pointer_plus (parts
->base
, elt
);
568 parts
->base
= fold_build2 (PLUS_EXPR
, type
,
572 /* Finds the most expensive multiplication in ADDR that can be
573 expressed in an addressing mode and move the corresponding
574 element(s) to PARTS. */
577 most_expensive_mult_to_index (tree type
, struct mem_address
*parts
,
578 aff_tree
*addr
, bool speed
)
580 addr_space_t as
= TYPE_ADDR_SPACE (type
);
581 machine_mode address_mode
= targetm
.addr_space
.address_mode (as
);
583 unsigned best_mult_cost
= 0, acost
;
584 tree mult_elt
= NULL_TREE
, elt
;
586 enum tree_code op_code
;
588 offset_int best_mult
= 0;
589 for (i
= 0; i
< addr
->n
; i
++)
591 if (!wi::fits_shwi_p (addr
->elts
[i
].coef
))
594 coef
= addr
->elts
[i
].coef
.to_shwi ();
596 || !multiplier_allowed_in_address_p (coef
, TYPE_MODE (type
), as
))
599 acost
= mult_by_coeff_cost (coef
, address_mode
, speed
);
601 if (acost
> best_mult_cost
)
603 best_mult_cost
= acost
;
604 best_mult
= offset_int::from (addr
->elts
[i
].coef
, SIGNED
);
611 /* Collect elements multiplied by best_mult. */
612 for (i
= j
= 0; i
< addr
->n
; i
++)
614 offset_int amult
= offset_int::from (addr
->elts
[i
].coef
, SIGNED
);
615 offset_int amult_neg
= -wi::sext (amult
, TYPE_PRECISION (addr
->type
));
617 if (amult
== best_mult
)
619 else if (amult_neg
== best_mult
)
620 op_code
= MINUS_EXPR
;
623 addr
->elts
[j
] = addr
->elts
[i
];
628 elt
= fold_convert (sizetype
, addr
->elts
[i
].val
);
630 mult_elt
= fold_build2 (op_code
, sizetype
, mult_elt
, elt
);
631 else if (op_code
== PLUS_EXPR
)
634 mult_elt
= fold_build1 (NEGATE_EXPR
, sizetype
, elt
);
638 parts
->index
= mult_elt
;
639 parts
->step
= wide_int_to_tree (sizetype
, best_mult
);
642 /* Splits address ADDR for a memory access of type TYPE into PARTS.
643 If BASE_HINT is non-NULL, it specifies an SSA name to be used
644 preferentially as base of the reference, and IV_CAND is the selected
645 iv candidate used in ADDR.
647 TODO -- be more clever about the distribution of the elements of ADDR
648 to PARTS. Some architectures do not support anything but single
649 register in address, possibly with a small integer offset; while
650 create_mem_ref will simplify the address to an acceptable shape
651 later, it would be more efficient to know that asking for complicated
652 addressing modes is useless. */
655 addr_to_parts (tree type
, aff_tree
*addr
, tree iv_cand
,
656 tree base_hint
, struct mem_address
*parts
,
662 parts
->symbol
= NULL_TREE
;
663 parts
->base
= NULL_TREE
;
664 parts
->index
= NULL_TREE
;
665 parts
->step
= NULL_TREE
;
667 if (addr
->offset
!= 0)
668 parts
->offset
= wide_int_to_tree (sizetype
, addr
->offset
);
670 parts
->offset
= NULL_TREE
;
672 /* Try to find a symbol. */
673 move_fixed_address_to_symbol (parts
, addr
);
675 /* No need to do address parts reassociation if the number of parts
676 is <= 2 -- in that case, no loop invariant code motion can be
679 if (!base_hint
&& (addr
->n
> 2))
680 move_variant_to_index (parts
, addr
, iv_cand
);
682 /* First move the most expensive feasible multiplication
685 most_expensive_mult_to_index (type
, parts
, addr
, speed
);
687 /* Try to find a base of the reference. Since at the moment
688 there is no reliable way how to distinguish between pointer and its
689 offset, this is just a guess. */
690 if (!parts
->symbol
&& base_hint
)
691 move_hint_to_base (type
, parts
, base_hint
, addr
);
692 if (!parts
->symbol
&& !parts
->base
)
693 move_pointer_to_base (parts
, addr
);
695 /* Then try to process the remaining elements. */
696 for (i
= 0; i
< addr
->n
; i
++)
698 part
= fold_convert (sizetype
, addr
->elts
[i
].val
);
699 if (addr
->elts
[i
].coef
!= 1)
700 part
= fold_build2 (MULT_EXPR
, sizetype
, part
,
701 wide_int_to_tree (sizetype
, addr
->elts
[i
].coef
));
702 add_to_parts (parts
, part
);
705 add_to_parts (parts
, fold_convert (sizetype
, addr
->rest
));
708 /* Force the PARTS to register. */
711 gimplify_mem_ref_parts (gimple_stmt_iterator
*gsi
, struct mem_address
*parts
)
714 parts
->base
= force_gimple_operand_gsi_1 (gsi
, parts
->base
,
715 is_gimple_mem_ref_addr
, NULL_TREE
,
716 true, GSI_SAME_STMT
);
718 parts
->index
= force_gimple_operand_gsi (gsi
, parts
->index
,
720 true, GSI_SAME_STMT
);
723 /* Creates and returns a TARGET_MEM_REF for address ADDR. If necessary
724 computations are emitted in front of GSI. TYPE is the mode
725 of created memory reference. IV_CAND is the selected iv candidate in ADDR,
726 and BASE_HINT is non NULL if IV_CAND comes from a base address
730 create_mem_ref (gimple_stmt_iterator
*gsi
, tree type
, aff_tree
*addr
,
731 tree alias_ptr_type
, tree iv_cand
, tree base_hint
, bool speed
)
734 struct mem_address parts
;
736 addr_to_parts (type
, addr
, iv_cand
, base_hint
, &parts
, speed
);
737 gimplify_mem_ref_parts (gsi
, &parts
);
738 mem_ref
= create_mem_ref_raw (type
, alias_ptr_type
, &parts
, true);
742 /* The expression is too complicated. Try making it simpler. */
744 if (parts
.step
&& !integer_onep (parts
.step
))
746 /* Move the multiplication to index. */
747 gcc_assert (parts
.index
);
748 parts
.index
= force_gimple_operand_gsi (gsi
,
749 fold_build2 (MULT_EXPR
, sizetype
,
750 parts
.index
, parts
.step
),
751 true, NULL_TREE
, true, GSI_SAME_STMT
);
752 parts
.step
= NULL_TREE
;
754 mem_ref
= create_mem_ref_raw (type
, alias_ptr_type
, &parts
, true);
762 gcc_assert (is_gimple_val (tmp
));
764 /* Add the symbol to base, eventually forcing it to register. */
767 gcc_assert (useless_type_conversion_p
768 (sizetype
, TREE_TYPE (parts
.base
)));
772 parts
.base
= force_gimple_operand_gsi_1 (gsi
,
773 fold_build_pointer_plus (tmp
, parts
.base
),
774 is_gimple_mem_ref_addr
, NULL_TREE
, true, GSI_SAME_STMT
);
778 parts
.index
= parts
.base
;
784 parts
.symbol
= NULL_TREE
;
786 mem_ref
= create_mem_ref_raw (type
, alias_ptr_type
, &parts
, true);
793 /* Add index to base. */
796 parts
.base
= force_gimple_operand_gsi_1 (gsi
,
797 fold_build_pointer_plus (parts
.base
, parts
.index
),
798 is_gimple_mem_ref_addr
, NULL_TREE
, true, GSI_SAME_STMT
);
801 parts
.base
= parts
.index
;
802 parts
.index
= NULL_TREE
;
804 mem_ref
= create_mem_ref_raw (type
, alias_ptr_type
, &parts
, true);
809 if (parts
.offset
&& !integer_zerop (parts
.offset
))
811 /* Try adding offset to base. */
814 parts
.base
= force_gimple_operand_gsi_1 (gsi
,
815 fold_build_pointer_plus (parts
.base
, parts
.offset
),
816 is_gimple_mem_ref_addr
, NULL_TREE
, true, GSI_SAME_STMT
);
819 parts
.base
= parts
.offset
;
821 parts
.offset
= NULL_TREE
;
823 mem_ref
= create_mem_ref_raw (type
, alias_ptr_type
, &parts
, true);
828 /* Verify that the address is in the simplest possible shape
829 (only a register). If we cannot create such a memory reference,
830 something is really wrong. */
831 gcc_assert (parts
.symbol
== NULL_TREE
);
832 gcc_assert (parts
.index
== NULL_TREE
);
833 gcc_assert (!parts
.step
|| integer_onep (parts
.step
));
834 gcc_assert (!parts
.offset
|| integer_zerop (parts
.offset
));
838 /* Copies components of the address from OP to ADDR. */
841 get_address_description (tree op
, struct mem_address
*addr
)
843 if (TREE_CODE (TMR_BASE (op
)) == ADDR_EXPR
)
845 addr
->symbol
= TMR_BASE (op
);
846 addr
->base
= TMR_INDEX2 (op
);
850 addr
->symbol
= NULL_TREE
;
853 gcc_assert (integer_zerop (TMR_BASE (op
)));
854 addr
->base
= TMR_INDEX2 (op
);
857 addr
->base
= TMR_BASE (op
);
859 addr
->index
= TMR_INDEX (op
);
860 addr
->step
= TMR_STEP (op
);
861 addr
->offset
= TMR_OFFSET (op
);
864 /* Copies the reference information from OLD_REF to NEW_REF, where
865 NEW_REF should be either a MEM_REF or a TARGET_MEM_REF. */
868 copy_ref_info (tree new_ref
, tree old_ref
)
870 tree new_ptr_base
= NULL_TREE
;
872 gcc_assert (TREE_CODE (new_ref
) == MEM_REF
873 || TREE_CODE (new_ref
) == TARGET_MEM_REF
);
875 TREE_SIDE_EFFECTS (new_ref
) = TREE_SIDE_EFFECTS (old_ref
);
876 TREE_THIS_VOLATILE (new_ref
) = TREE_THIS_VOLATILE (old_ref
);
878 new_ptr_base
= TREE_OPERAND (new_ref
, 0);
880 /* We can transfer points-to information from an old pointer
881 or decl base to the new one. */
883 && TREE_CODE (new_ptr_base
) == SSA_NAME
884 && !SSA_NAME_PTR_INFO (new_ptr_base
))
886 tree base
= get_base_address (old_ref
);
889 else if ((TREE_CODE (base
) == MEM_REF
890 || TREE_CODE (base
) == TARGET_MEM_REF
)
891 && TREE_CODE (TREE_OPERAND (base
, 0)) == SSA_NAME
892 && SSA_NAME_PTR_INFO (TREE_OPERAND (base
, 0)))
894 struct ptr_info_def
*new_pi
;
895 unsigned int align
, misalign
;
897 duplicate_ssa_name_ptr_info
898 (new_ptr_base
, SSA_NAME_PTR_INFO (TREE_OPERAND (base
, 0)));
899 new_pi
= SSA_NAME_PTR_INFO (new_ptr_base
);
900 /* We have to be careful about transferring alignment information. */
901 if (get_ptr_info_alignment (new_pi
, &align
, &misalign
)
902 && TREE_CODE (old_ref
) == MEM_REF
903 && !(TREE_CODE (new_ref
) == TARGET_MEM_REF
904 && (TMR_INDEX2 (new_ref
)
905 || (TMR_STEP (new_ref
)
906 && (TREE_INT_CST_LOW (TMR_STEP (new_ref
))
909 unsigned int inc
= (mem_ref_offset (old_ref
).to_short_addr ()
910 - mem_ref_offset (new_ref
).to_short_addr ());
911 adjust_ptr_info_misalignment (new_pi
, inc
);
914 mark_ptr_info_alignment_unknown (new_pi
);
916 else if (TREE_CODE (base
) == VAR_DECL
917 || TREE_CODE (base
) == PARM_DECL
918 || TREE_CODE (base
) == RESULT_DECL
)
920 struct ptr_info_def
*pi
= get_ptr_info (new_ptr_base
);
921 pt_solution_set_var (&pi
->pt
, base
);
926 /* Move constants in target_mem_ref REF to offset. Returns the new target
927 mem ref if anything changes, NULL_TREE otherwise. */
930 maybe_fold_tmr (tree ref
)
932 struct mem_address addr
;
933 bool changed
= false;
936 get_address_description (ref
, &addr
);
939 && TREE_CODE (addr
.base
) == INTEGER_CST
940 && !integer_zerop (addr
.base
))
942 addr
.offset
= fold_binary_to_constant (PLUS_EXPR
,
943 TREE_TYPE (addr
.offset
),
944 addr
.offset
, addr
.base
);
945 addr
.base
= NULL_TREE
;
950 && TREE_CODE (TREE_OPERAND (addr
.symbol
, 0)) == MEM_REF
)
952 addr
.offset
= fold_binary_to_constant
953 (PLUS_EXPR
, TREE_TYPE (addr
.offset
),
955 TREE_OPERAND (TREE_OPERAND (addr
.symbol
, 0), 1));
956 addr
.symbol
= TREE_OPERAND (TREE_OPERAND (addr
.symbol
, 0), 0);
960 && handled_component_p (TREE_OPERAND (addr
.symbol
, 0)))
962 HOST_WIDE_INT offset
;
963 addr
.symbol
= build_fold_addr_expr
964 (get_addr_base_and_unit_offset
965 (TREE_OPERAND (addr
.symbol
, 0), &offset
));
966 addr
.offset
= int_const_binop (PLUS_EXPR
,
967 addr
.offset
, size_int (offset
));
971 if (addr
.index
&& TREE_CODE (addr
.index
) == INTEGER_CST
)
976 off
= fold_binary_to_constant (MULT_EXPR
, sizetype
,
978 addr
.step
= NULL_TREE
;
981 addr
.offset
= fold_binary_to_constant (PLUS_EXPR
,
982 TREE_TYPE (addr
.offset
),
984 addr
.index
= NULL_TREE
;
991 /* If we have propagated something into this TARGET_MEM_REF and thus
992 ended up folding it, always create a new TARGET_MEM_REF regardless
993 if it is valid in this for on the target - the propagation result
994 wouldn't be anyway. */
995 new_ref
= create_mem_ref_raw (TREE_TYPE (ref
),
996 TREE_TYPE (addr
.offset
), &addr
, false);
997 TREE_SIDE_EFFECTS (new_ref
) = TREE_SIDE_EFFECTS (ref
);
998 TREE_THIS_VOLATILE (new_ref
) = TREE_THIS_VOLATILE (ref
);
1002 /* Dump PARTS to FILE. */
1004 extern void dump_mem_address (FILE *, struct mem_address
*);
1006 dump_mem_address (FILE *file
, struct mem_address
*parts
)
1010 fprintf (file
, "symbol: ");
1011 print_generic_expr (file
, TREE_OPERAND (parts
->symbol
, 0), TDF_SLIM
);
1012 fprintf (file
, "\n");
1016 fprintf (file
, "base: ");
1017 print_generic_expr (file
, parts
->base
, TDF_SLIM
);
1018 fprintf (file
, "\n");
1022 fprintf (file
, "index: ");
1023 print_generic_expr (file
, parts
->index
, TDF_SLIM
);
1024 fprintf (file
, "\n");
1028 fprintf (file
, "step: ");
1029 print_generic_expr (file
, parts
->step
, TDF_SLIM
);
1030 fprintf (file
, "\n");
1034 fprintf (file
, "offset: ");
1035 print_generic_expr (file
, parts
->offset
, TDF_SLIM
);
1036 fprintf (file
, "\n");
1040 #include "gt-tree-ssa-address.h"