git.ipfire.org Git - thirdparty/gcc.git/commitdiff
re PR middle-end/42834 (memcpy folding overeager)
author: Richard Guenther <rguenther@suse.de>
Thu, 1 Jul 2010 08:49:19 +0000 (08:49 +0000)
committer: Richard Biener <rguenth@gcc.gnu.org>
Thu, 1 Jul 2010 08:49:19 +0000 (08:49 +0000)
2010-07-01  Richard Guenther  <rguenther@suse.de>

PR middle-end/42834
PR middle-end/44468
* doc/gimple.texi (is_gimple_mem_ref_addr): Document.
* doc/generic.texi (References to storage): Document MEM_REF.
* tree-pretty-print.c (dump_generic_node): Handle MEM_REF.
(print_call_name): Likewise.
* tree.c (recompute_tree_invariant_for_addr_expr): Handle MEM_REF.
(build_simple_mem_ref_loc): New function.
(mem_ref_offset): Likewise.
* tree.h (build_simple_mem_ref_loc): Declare.
(build_simple_mem_ref): Define.
(mem_ref_offset): Declare.
* fold-const.c: Include tree-flow.h.
(operand_equal_p): Handle MEM_REF.
(build_fold_addr_expr_with_type_loc): Likewise.
(fold_comparison): Likewise.
(fold_unary_loc): Fold
VIEW_CONVERT_EXPR <T1, MEM_REF <T2, ...>> to MEM_REF <T1, ...>.
(fold_binary_loc): Fold MEM[&MEM[p, CST1], CST2] to MEM[p, CST1 + CST2],
fold MEM[&a.b, CST2] to MEM[&a, offsetof (a, b) + CST2].
* tree-ssa-alias.c (ptr_deref_may_alias_decl_p): Handle MEM_REF.
(ptr_deref_may_alias_ref_p_1): Likewise.
(ao_ref_base_alias_set): Properly differentiate base object for
offset and TBAA.
(ao_ref_init_from_ptr_and_size): Use MEM_REF.
(indirect_ref_may_alias_decl_p): Handle MEM_REFs properly.
(indirect_refs_may_alias_p): Likewise.
(refs_may_alias_p_1): Likewise.  Remove pointer SSA name def
chasing code.
(ref_maybe_used_by_call_p_1): Handle MEM_REF.
(call_may_clobber_ref_p_1): Likewise.
* dwarf2out.c (loc_list_from_tree): Handle MEM_REF.
* expr.c (expand_assignment): Handle MEM_REF.
(store_expr): Handle MEM_REFs from STRING_CSTs.
(store_field): If expanding a MEM_REF of a non-addressable
decl use bitfield operations.
(get_inner_reference): Handle MEM_REF.
(expand_expr_addr_expr_1): Likewise.
(expand_expr_real_1): Likewise.
* tree-eh.c (tree_could_trap_p): Handle MEM_REF.
* alias.c (ao_ref_from_mem): Handle MEM_REF.
(get_alias_set): Likewise.  Properly handle VIEW_CONVERT_EXPRs.
* tree-data-ref.c (dr_analyze_innermost): Handle MEM_REF.
(dr_analyze_indices): Likewise.
(dr_analyze_alias): Likewise.
(object_address_invariant_in_loop_p): Likewise.
* gimplify.c (mark_addressable): Handle MEM_REF.
(gimplify_cond_expr): Build MEM_REFs.
(gimplify_modify_expr_to_memcpy): Likewise.
(gimplify_init_ctor_preeval_1): Handle MEM_REF.
(gimple_fold_indirect_ref): Adjust.
(gimplify_expr): Handle MEM_REF.  Gimplify INDIRECT_REF to MEM_REF.
* tree.def (MEM_REF): New tree code.
* tree-dfa.c: Include toplev.h.
(get_ref_base_and_extent): Handle MEM_REF.
(get_addr_base_and_unit_offset): New function.
* emit-rtl.c (set_mem_attributes_minus_bitpos): Handle MEM_REF.
* gimple-fold.c (may_propagate_address_into_dereference): Handle
MEM_REF.
(maybe_fold_offset_to_array_ref): Allow possibly out-of bounds
accesses if the array has just one dimension.  Remove always true
parameter.  Do not require type compatibility here.
(maybe_fold_offset_to_component_ref): Remove.
(maybe_fold_stmt_indirect): Remove.
(maybe_fold_reference): Remove INDIRECT_REF handling.
Fold back to non-MEM_REF.
(maybe_fold_offset_to_address): Simplify.  Deal with type
mismatches here.
(maybe_fold_reference): Likewise.
(maybe_fold_stmt_addition): Likewise.  Also handle
&ARRAY + I in addition to &ARRAY[0] + I.
(fold_gimple_assign): Handle ADDR_EXPR of MEM_REFs.
(gimple_get_relevant_ref_binfo): Handle MEM_REF.
* cfgexpand.c (expand_debug_expr): Handle MEM_REF.
* tree-ssa.c (useless_type_conversion_p): Make most pointer
conversions useless.
(warn_uninitialized_var): Handle MEM_REF.
(maybe_rewrite_mem_ref_base): New function.
(execute_update_addresses_taken): Implement re-writing of MEM_REFs
to SSA form.
* tree-inline.c (remap_gimple_op_r): Handle MEM_REF, remove
INDIRECT_REF handling.
(copy_tree_body_r): Handle MEM_REF.
* gimple.c (is_gimple_addressable): Adjust.
(is_gimple_address): Likewise.
(is_gimple_invariant_address): ADDR_EXPRs of MEM_REFs with
invariant base are invariant.
(is_gimple_min_lval): Adjust.
(is_gimple_mem_ref_addr): New function.
(get_base_address): Handle MEM_REF.
(count_ptr_derefs): Likewise.
(get_base_loadstore): Likewise.
* gimple.h (is_gimple_mem_ref_addr): Declare.
(gimple_call_fndecl): Handle invariant MEM_REF addresses.
* tree-cfg.c (verify_address): New function, split out from ...
(verify_expr): ... here.  Use for verifying ADDR_EXPRs and
the address operand of MEM_REFs.  Verify MEM_REFs.  Reject
INDIRECT_REFs.
(verify_types_in_gimple_min_lval): Handle MEM_REF.  Disallow
INDIRECT_REF.  Allow conversions.
(verify_types_in_gimple_reference): Verify VIEW_CONVERT_EXPR of
a register does not change its size.
(verify_types_in_gimple_reference): Verify MEM_REF.
(verify_gimple_assign_single): Disallow INDIRECT_REF.
Handle MEM_REF.
* tree-ssa-operands.c (opf_non_addressable, opf_not_non_addressable):
New.
(mark_address_taken): Handle MEM_REF.
(get_indirect_ref_operands): Pass through opf_not_non_addressable.
(get_asm_expr_operands): Pass opf_not_non_addressable.
(get_expr_operands): Handle opf_[not_]non_addressable.
Handle MEM_REF.  Remove INDIRECT_REF handling.
* tree-vrp.c: (check_array_ref): Handle MEM_REF.
(search_for_addr_array): Likewise.
(check_array_bounds): Likewise.
(vrp_stmt_computes_nonzero): Adjust for MEM_REF.
* tree-ssa-loop-im.c (for_each_index): Handle MEM_REF.
(ref_always_accessed_p): Likewise.
(gen_lsm_tmp_name): Likewise.  Handle ADDR_EXPR.
* tree-complex.c (extract_component): Do not handle INDIRECT_REF.
Handle MEM_REF.
* cgraphbuild.c (mark_load): Properly check for NULL result
from get_base_address.
(mark_store): Likewise.
* tree-ssa-loop-niter.c (array_at_struct_end_p): Handle MEM_REF.
* tree-loop-distribution.c (generate_builtin): Exchange INDIRECT_REF
handling for MEM_REF.
* tree-scalar-evolution.c (follow_ssa_edge_expr): Handle
&MEM[ptr + CST] similar to POINTER_PLUS_EXPR.
* builtins.c (stabilize_va_list_loc): Use the function ABI
valist type if we couldn't canonicalize the argument type.
Always dereference with the canonical va-list type.
(maybe_emit_free_warning): Handle MEM_REF.
(fold_builtin_memory_op): Simplify and handle MEM_REFs in folding
memmove to memcpy.
* builtins.c (fold_builtin_memory_op): Use ref-all types
for all memcpy foldings.
* omp-low.c (build_receiver_ref): Adjust for MEM_REF.
(build_outer_var_ref): Likewise.
(scan_omp_1_op): Likewise.
(lower_rec_input_clauses): Likewise.
(lower_lastprivate_clauses): Likewise.
(lower_reduction_clauses): Likewise.
(lower_copyprivate_clauses): Likewise.
(expand_omp_atomic_pipeline): Likewise.
(expand_omp_atomic_mutex): Likewise.
(create_task_copyfn): Likewise.
* tree-ssa-sccvn.c (copy_reference_ops_from_ref): Handle MEM_REF.
Remove old union trick.  Initialize constant offsets.
(ao_ref_init_from_vn_reference): Likewise.  Do not handle
INDIRECT_REF.  Init base_alias_set properly.
(vn_reference_lookup_3): Replace INDIRECT_REF handling with
MEM_REF.
(vn_reference_fold_indirect): Adjust for MEM_REFs.
(valueize_refs): Fold MEM_REFs.  Re-evaluate constant offset
for ARRAY_REFs.
(may_insert): Remove.
(visit_reference_op_load): Do not test may_insert.
(run_scc_vn): Remove parameter, do not fiddle with may_insert.
* tree-ssa-sccvn.h (struct vn_reference_op_struct): Add
a field to store the constant offset this op applies.
(run_scc_vn): Adjust prototype.
* cgraphunit.c (thunk_adjust): Adjust for MEM_REF.
* tree-ssa-ccp.c (ccp_fold): Replace INDIRECT_REF folding with
MEM_REF.  Propagate &foo + CST as &MEM[&foo, CST].  Do not
bother about volatile qualifiers on pointers.
(fold_const_aggregate_ref): Handle MEM_REF, do not handle INDIRECT_REF.
* tree-ssa-loop-ivopts.c
* tree-ssa-loop-ivopts.c (determine_base_object): Adjust
for MEM_REF.
(strip_offset_1): Likewise.
(find_interesting_uses_address): Replace INDIRECT_REF handling with
MEM_REF handling.
(get_computation_cost_at): Likewise.
* ipa-pure-const.c (check_op): Handle MEM_REF.
* tree-stdarg.c (check_all_va_list_escapes): Adjust for MEM_REF.
* tree-ssa-sink.c (is_hidden_global_store): Handle MEM_REF
and constants.
* ipa-inline.c (likely_eliminated_by_inlining_p): Handle MEM_REF.
* tree-parloops.c (take_address_of): Adjust for MEM_REF.
(eliminate_local_variables_1): Likewise.
(create_call_for_reduction_1): Likewise.
(create_loads_for_reductions): Likewise.
(create_loads_and_stores_for_name): Likewise.
* matrix-reorg.c (may_flatten_matrices_1): Sanitize.
(ssa_accessed_in_tree): Handle MEM_REF.
(ssa_accessed_in_assign_rhs): Likewise.
(update_type_size): Likewise.
(analyze_accesses_for_call_stmt): Likewise.
(analyze_accesses_for_assign_stmt): Likewise.
(transform_access_sites): Likewise.
(transform_allocation_sites): Likewise.
* tree-affine.c (tree_to_aff_combination): Handle MEM_REF.
* tree-vect-data-refs.c (vect_create_addr_base_for_vector_ref): Do
not handle INDIRECT_REF.
* tree-ssa-phiopt.c (add_or_mark_expr): Handle MEM_REF.
(cond_store_replacement): Likewise.
* tree-ssa-pre.c (create_component_ref_by_pieces_1): Handle
MEM_REF, do not handle INDIRECT_REFs.
(insert_into_preds_of_block): Properly initialize avail.
(phi_translate_1): Fold MEM_REFs.  Re-evaluate constant offset
for ARRAY_REFs.  Properly handle reference lookups that
require a bit re-interpretation.
(can_PRE_operation): Do not handle INDIRECT_REF.  Handle MEM_REF.
* tree-sra.c
* tree-sra.c (build_access_from_expr_1): Handle MEM_REF.
(build_ref_for_offset_1): Remove.
(build_ref_for_offset): Build MEM_REFs.
(gate_intra_sra): Disable for now.
(sra_ipa_modify_expr): Handle MEM_REF.
(ipa_early_sra_gate): Disable for now.
* tree-sra.c (create_access): Swap INDIRECT_REF handling for
MEM_REF handling.
(disqualify_base_of_expr): Likewise.
(ptr_parm_has_direct_uses): Swap INDIRECT_REF handling for
MEM_REF handling.
(sra_ipa_modify_expr): Remove INDIRECT_REF handling.
Use mem_ref_offset.  Remove bogus folding.
(build_access_from_expr_1): Properly handle MEM_REF for
non IPA-SRA.
(make_fancy_name_1): Add support for MEM_REF.
* tree-predcom.c (ref_at_iteration): Handle MEM_REFs.
* tree-mudflap.c (mf_xform_derefs_1): Adjust for MEM_REF.
* ipa-prop.c (compute_complex_assign_jump_func): Handle MEM_REF.
(compute_complex_ancestor_jump_func): Likewise.
(ipa_analyze_virtual_call_uses): Likewise.
* tree-ssa-forwprop.c (forward_propagate_addr_expr_1): Replace
INDIRECT_REF folding with more generalized MEM_REF folding.
(tree_ssa_forward_propagate_single_use_vars): Adjust accordingly.
(forward_propagate_addr_into_variable_array_index): Also handle
&ARRAY + I in addition to &ARRAY[0] + I.
* tree-ssa-dce.c (ref_may_be_aliased): Handle MEM_REF.
* tree-ssa-ter.c (find_replaceable_in_bb): Avoid TER if that
creates assignments with overlap.
* tree-nested.c (get_static_chain): Adjust for MEM_REF.
(get_frame_field): Likewise.
(get_nonlocal_debug_decl): Likewise.
(convert_nonlocal_reference_op): Likewise.
(struct nesting_info): Add mem_refs pointer-set.
(create_nesting_tree): Allocate it.
(convert_local_reference_op): Insert to be folded mem-refs.
(fold_mem_refs): New function.
(finalize_nesting_tree_1): Perform deferred folding of mem-refs.
(free_nesting_tree): Free the pointer-set.
* tree-vect-stmts.c (vectorizable_store): Adjust for MEM_REF.
(vectorizable_load): Likewise.
* tree-ssa-phiprop.c (phiprop_insert_phi): Adjust for MEM_REF.
(propagate_with_phi): Likewise.
* tree-object-size.c (addr_object_size): Handle MEM_REFs
instead of INDIRECT_REFs.
(compute_object_offset): Handle MEM_REF.
(plus_stmt_object_size): Handle MEM_REF.
(collect_object_sizes_for): Dispatch to plus_stmt_object_size
for &MEM_REF.
* tree-flow.h (get_addr_base_and_unit_offset): Declare.
(symbol_marked_for_renaming): Likewise.
* Makefile.in (tree-dfa.o): Add $(TOPLEV_H).
(fold-const.o): Add $(TREE_FLOW_H).
* tree-ssa-structalias.c (get_constraint_for_1): Handle MEM_REF.
(find_func_clobbers): Likewise.
* ipa-struct-reorg.c (decompose_indirect_ref_acc): Handle MEM_REF.
(decompose_access): Likewise.
(replace_field_acc): Likewise.
(replace_field_access_stmt): Likewise.
(insert_new_var_in_stmt): Likewise.
(get_stmt_accesses): Likewise.
(reorg_structs_drive): Disable.
* config/i386/i386.c (ix86_va_start): Adjust for MEM_REF.
(ix86_canonical_va_list_type): Likewise.

cp/
* cp-gimplify.c (cp_gimplify_expr): Open-code the rhs
predicate we are looking for, allow non-gimplified
INDIRECT_REFs.

testsuite/
* gcc.c-torture/execute/20100316-1.c: New testcase.
* gcc.c-torture/execute/pr44468.c: Likewise.
* gcc.c-torture/compile/20100609-1.c: Likewise.
* gcc.dg/volatile2.c: Adjust.
* gcc.dg/plugin/selfassign.c: Likewise.
* gcc.dg/pr36902.c: Likewise.
* gcc.dg/tree-ssa/foldaddr-2.c: Remove.
* gcc.dg/tree-ssa/foldaddr-3.c: Likewise.
* gcc.dg/tree-ssa/forwprop-8.c: Adjust.
* gcc.dg/tree-ssa/pr17141-1.c: Likewise.
* gcc.dg/tree-ssa/ssa-fre-13.c: Likewise.
* gcc.dg/tree-ssa/ssa-fre-14.c: Likewise.
* gcc.dg/tree-ssa/ssa-ccp-21.c: Likewise.
* gcc.dg/tree-ssa/pta-ptrarith-1.c: Likewise.
* gcc.dg/tree-ssa/20030807-7.c: Likewise.
* gcc.dg/tree-ssa/forwprop-10.c: Likewise.
* gcc.dg/tree-ssa/ssa-fre-1.c: Likewise.
* gcc.dg/tree-ssa/pta-ptrarith-2.c: Likewise.
* gcc.dg/tree-ssa/ssa-ccp-23.c: Likewise.
* gcc.dg/tree-ssa/forwprop-1.c: Likewise.
* gcc.dg/tree-ssa/forwprop-2.c: Likewise.
* gcc.dg/tree-ssa/struct-aliasing-1.c: Likewise.
* gcc.dg/tree-ssa/ssa-ccp-25.c: Likewise.
* gcc.dg/tree-ssa/ssa-pre-26.c: Likewise.
* gcc.dg/tree-ssa/struct-aliasing-2.c: Likewise.
* gcc.dg/tree-ssa/ssa-ccp-26.c: Likewise.
* gcc.dg/tree-ssa/ssa-sccvn-4.c: Likewise.
* gcc.dg/tree-ssa/ssa-pre-7.c: Likewise.
* gcc.dg/tree-ssa/forwprop-5.c: Likewise.
* gcc.dg/struct/w_prof_two_strs.c: XFAIL.
* gcc.dg/struct/wo_prof_escape_arg_to_local.c: Likewise.
* gcc.dg/struct/wo_prof_global_var.c: Likewise.
* gcc.dg/struct/wo_prof_malloc_size_var.c: Likewise.
* gcc.dg/struct/w_prof_local_array.c: Likewise.
* gcc.dg/struct/w_prof_single_str_global.c: Likewise.
* gcc.dg/struct/wo_prof_escape_str_init.c: Likewise.
* gcc.dg/struct/wo_prof_array_through_pointer.c: Likewise.
* gcc.dg/struct/w_prof_global_array.c: Likewise.
* gcc.dg/struct/wo_prof_array_field.c: Likewise.
* gcc.dg/struct/wo_prof_single_str_local.c: Likewise.
* gcc.dg/struct/w_prof_local_var.c: Likewise.
* gcc.dg/struct/wo_prof_two_strs.c: Likewise.
* gcc.dg/struct/wo_prof_empty_str.c: Likewise.
* gcc.dg/struct/wo_prof_local_array.c: Likewise.
* gcc.dg/struct/w_prof_global_var.c: Likewise.
* gcc.dg/struct/wo_prof_single_str_global.c: Likewise.
* gcc.dg/struct/wo_prof_escape_substr_value.c: Likewise.
* gcc.dg/struct/wo_prof_global_array.c: Likewise.
* gcc.dg/struct/wo_prof_escape_return.c: Likewise.
* gcc.dg/struct/wo_prof_escape_substr_array.c: Likewise.
* gcc.dg/struct/wo_prof_double_malloc.c: Likewise.
* gcc.dg/struct/w_ratio_cold_str.c: Likewise.
* gcc.dg/struct/wo_prof_escape_substr_pointer.c: Likewise.
* gcc.dg/struct/wo_prof_local_var.c: Likewise.
* gcc.dg/tree-prof/stringop-1.c: Adjust.
* g++.dg/tree-ssa/pr31146.C: Likewise.
* g++.dg/tree-ssa/copyprop-1.C: Likewise.
* g++.dg/tree-ssa/pr33604.C: Likewise.
* g++.dg/plugin/selfassign.c: Likewise.
* gfortran.dg/array_memcpy_3.f90: Likewise.
* gfortran.dg/array_memcpy_4.f90: Likewise.
* c-c++-common/torture/pr42834.c: New testcase.

From-SVN: r161655

136 files changed:
gcc/ChangeLog
gcc/Makefile.in
gcc/alias.c
gcc/builtins.c
gcc/cfgexpand.c
gcc/cgraphbuild.c
gcc/cgraphunit.c
gcc/config/alpha/alpha.c
gcc/config/i386/i386.c
gcc/config/rs6000/rs6000.c
gcc/config/sh/sh.c
gcc/config/spu/spu.c
gcc/cp/ChangeLog
gcc/cp/cp-gimplify.c
gcc/doc/generic.texi
gcc/doc/gimple.texi
gcc/dwarf2out.c
gcc/emit-rtl.c
gcc/expr.c
gcc/fold-const.c
gcc/gimple-fold.c
gcc/gimple.c
gcc/gimple.h
gcc/gimplify.c
gcc/ipa-inline.c
gcc/ipa-prop.c
gcc/ipa-pure-const.c
gcc/ipa-struct-reorg.c
gcc/matrix-reorg.c
gcc/omp-low.c
gcc/testsuite/ChangeLog
gcc/testsuite/c-c++-common/torture/pr42834.c [new file with mode: 0644]
gcc/testsuite/g++.dg/plugin/selfassign.c
gcc/testsuite/g++.dg/tree-ssa/copyprop-1.C
gcc/testsuite/g++.dg/tree-ssa/pr31146.C
gcc/testsuite/g++.dg/tree-ssa/pr33604.C
gcc/testsuite/gcc.c-torture/compile/20100609-1.c [new file with mode: 0644]
gcc/testsuite/gcc.c-torture/execute/20100316-1.c [new file with mode: 0644]
gcc/testsuite/gcc.c-torture/execute/pr44468.c [new file with mode: 0644]
gcc/testsuite/gcc.dg/plugin/selfassign.c
gcc/testsuite/gcc.dg/pr36902.c
gcc/testsuite/gcc.dg/struct/w_prof_global_array.c
gcc/testsuite/gcc.dg/struct/w_prof_global_var.c
gcc/testsuite/gcc.dg/struct/w_prof_local_array.c
gcc/testsuite/gcc.dg/struct/w_prof_local_var.c
gcc/testsuite/gcc.dg/struct/w_prof_single_str_global.c
gcc/testsuite/gcc.dg/struct/w_prof_two_strs.c
gcc/testsuite/gcc.dg/struct/w_ratio_cold_str.c
gcc/testsuite/gcc.dg/struct/wo_prof_array_field.c
gcc/testsuite/gcc.dg/struct/wo_prof_array_through_pointer.c
gcc/testsuite/gcc.dg/struct/wo_prof_double_malloc.c
gcc/testsuite/gcc.dg/struct/wo_prof_empty_str.c
gcc/testsuite/gcc.dg/struct/wo_prof_escape_arg_to_local.c
gcc/testsuite/gcc.dg/struct/wo_prof_escape_return.c
gcc/testsuite/gcc.dg/struct/wo_prof_escape_str_init.c
gcc/testsuite/gcc.dg/struct/wo_prof_escape_substr_array.c
gcc/testsuite/gcc.dg/struct/wo_prof_escape_substr_pointer.c
gcc/testsuite/gcc.dg/struct/wo_prof_escape_substr_value.c
gcc/testsuite/gcc.dg/struct/wo_prof_global_array.c
gcc/testsuite/gcc.dg/struct/wo_prof_global_var.c
gcc/testsuite/gcc.dg/struct/wo_prof_local_array.c
gcc/testsuite/gcc.dg/struct/wo_prof_local_var.c
gcc/testsuite/gcc.dg/struct/wo_prof_malloc_size_var.c
gcc/testsuite/gcc.dg/struct/wo_prof_single_str_global.c
gcc/testsuite/gcc.dg/struct/wo_prof_single_str_local.c
gcc/testsuite/gcc.dg/struct/wo_prof_two_strs.c
gcc/testsuite/gcc.dg/tree-prof/stringop-1.c
gcc/testsuite/gcc.dg/tree-ssa/20030807-7.c
gcc/testsuite/gcc.dg/tree-ssa/foldaddr-2.c [deleted file]
gcc/testsuite/gcc.dg/tree-ssa/foldaddr-3.c [deleted file]
gcc/testsuite/gcc.dg/tree-ssa/forwprop-1.c
gcc/testsuite/gcc.dg/tree-ssa/forwprop-10.c
gcc/testsuite/gcc.dg/tree-ssa/forwprop-2.c
gcc/testsuite/gcc.dg/tree-ssa/forwprop-5.c
gcc/testsuite/gcc.dg/tree-ssa/forwprop-8.c
gcc/testsuite/gcc.dg/tree-ssa/loadpre6.c
gcc/testsuite/gcc.dg/tree-ssa/pr17141-1.c
gcc/testsuite/gcc.dg/tree-ssa/pta-ptrarith-1.c
gcc/testsuite/gcc.dg/tree-ssa/pta-ptrarith-2.c
gcc/testsuite/gcc.dg/tree-ssa/ssa-ccp-21.c
gcc/testsuite/gcc.dg/tree-ssa/ssa-ccp-23.c
gcc/testsuite/gcc.dg/tree-ssa/ssa-ccp-25.c
gcc/testsuite/gcc.dg/tree-ssa/ssa-ccp-26.c
gcc/testsuite/gcc.dg/tree-ssa/ssa-fre-1.c
gcc/testsuite/gcc.dg/tree-ssa/ssa-fre-13.c
gcc/testsuite/gcc.dg/tree-ssa/ssa-fre-14.c
gcc/testsuite/gcc.dg/tree-ssa/ssa-pre-26.c
gcc/testsuite/gcc.dg/tree-ssa/ssa-pre-7.c
gcc/testsuite/gcc.dg/tree-ssa/ssa-sccvn-4.c
gcc/testsuite/gcc.dg/tree-ssa/struct-aliasing-1.c
gcc/testsuite/gcc.dg/tree-ssa/struct-aliasing-2.c
gcc/testsuite/gcc.dg/volatile2.c
gcc/testsuite/gfortran.dg/array_memcpy_3.f90
gcc/testsuite/gfortran.dg/array_memcpy_4.f90
gcc/tree-affine.c
gcc/tree-cfg.c
gcc/tree-complex.c
gcc/tree-data-ref.c
gcc/tree-dfa.c
gcc/tree-eh.c
gcc/tree-flow.h
gcc/tree-inline.c
gcc/tree-into-ssa.c
gcc/tree-loop-distribution.c
gcc/tree-mudflap.c
gcc/tree-nested.c
gcc/tree-object-size.c
gcc/tree-parloops.c
gcc/tree-predcom.c
gcc/tree-pretty-print.c
gcc/tree-scalar-evolution.c
gcc/tree-sra.c
gcc/tree-ssa-alias.c
gcc/tree-ssa-ccp.c
gcc/tree-ssa-dce.c
gcc/tree-ssa-forwprop.c
gcc/tree-ssa-loop-im.c
gcc/tree-ssa-loop-ivopts.c
gcc/tree-ssa-loop-niter.c
gcc/tree-ssa-operands.c
gcc/tree-ssa-phiopt.c
gcc/tree-ssa-phiprop.c
gcc/tree-ssa-pre.c
gcc/tree-ssa-sccvn.c
gcc/tree-ssa-sccvn.h
gcc/tree-ssa-sink.c
gcc/tree-ssa-structalias.c
gcc/tree-ssa-ter.c
gcc/tree-ssa.c
gcc/tree-stdarg.c
gcc/tree-vect-data-refs.c
gcc/tree-vect-stmts.c
gcc/tree-vrp.c
gcc/tree.c
gcc/tree.def
gcc/tree.h

index af1148791b953cbfbb34f346a289ee214d6b4b2b..d36c585e27789cfd8d7bf09d928343f78aa71a42 100644 (file)
@@ -1,3 +1,275 @@
+2010-07-01  Richard Guenther  <rguenther@suse.de>
+
+       PR middle-end/42834
+       PR middle-end/44468
+       * doc/gimple.texi (is_gimple_mem_ref_addr): Document.
+       * doc/generic.texi (References to storage): Document MEM_REF.
+       * tree-pretty-print.c (dump_generic_node): Handle MEM_REF.
+       (print_call_name): Likewise.
+       * tree.c (recompute_tree_invariant_for_addr_expr): Handle MEM_REF.
+       (build_simple_mem_ref_loc): New function.
+       (mem_ref_offset): Likewise.
+       * tree.h (build_simple_mem_ref_loc): Declare.
+       (build_simple_mem_ref): Define.
+       (mem_ref_offset): Declare.
+       * fold-const.c: Include tree-flow.h.
+       (operand_equal_p): Handle MEM_REF.
+       (build_fold_addr_expr_with_type_loc): Likewise.
+       (fold_comparison): Likewise.
+       (fold_unary_loc): Fold
+       VIEW_CONVERT_EXPR <T1, MEM_REF <T2, ...>> to MEM_REF <T1, ...>.
+       (fold_binary_loc): Fold MEM[&MEM[p, CST1], CST2] to MEM[p, CST1 + CST2],
+       fold MEM[&a.b, CST2] to MEM[&a, offsetof (a, b) + CST2].
+       * tree-ssa-alias.c (ptr_deref_may_alias_decl_p): Handle MEM_REF.
+       (ptr_deref_may_alias_ref_p_1): Likewise.
+       (ao_ref_base_alias_set): Properly differentiate base object for
+       offset and TBAA.
+       (ao_ref_init_from_ptr_and_size): Use MEM_REF.
+       (indirect_ref_may_alias_decl_p): Handle MEM_REFs properly.
+       (indirect_refs_may_alias_p): Likewise.
+       (refs_may_alias_p_1): Likewise.  Remove pointer SSA name def
+       chasing code.
+       (ref_maybe_used_by_call_p_1): Handle MEM_REF.
+       (call_may_clobber_ref_p_1): Likewise.
+       * dwarf2out.c (loc_list_from_tree): Handle MEM_REF.
+       * expr.c (expand_assignment): Handle MEM_REF.
+       (store_expr): Handle MEM_REFs from STRING_CSTs.
+       (store_field): If expanding a MEM_REF of a non-addressable
+       decl use bitfield operations.
+       (get_inner_reference): Handle MEM_REF.
+       (expand_expr_addr_expr_1): Likewise.
+       (expand_expr_real_1): Likewise.
+       * tree-eh.c (tree_could_trap_p): Handle MEM_REF.
+       * alias.c (ao_ref_from_mem): Handle MEM_REF.
+       (get_alias_set): Likewise.  Properly handle VIEW_CONVERT_EXPRs.
+       * tree-data-ref.c (dr_analyze_innermost): Handle MEM_REF.
+       (dr_analyze_indices): Likewise.
+       (dr_analyze_alias): Likewise.
+       (object_address_invariant_in_loop_p): Likewise.
+       * gimplify.c (mark_addressable): Handle MEM_REF.
+       (gimplify_cond_expr): Build MEM_REFs.
+       (gimplify_modify_expr_to_memcpy): Likewise.
+       (gimplify_init_ctor_preeval_1): Handle MEM_REF.
+       (gimple_fold_indirect_ref): Adjust.
+       (gimplify_expr): Handle MEM_REF.  Gimplify INDIRECT_REF to MEM_REF.
+       * tree.def (MEM_REF): New tree code.
+       * tree-dfa.c: Include toplev.h.
+       (get_ref_base_and_extent): Handle MEM_REF.
+       (get_addr_base_and_unit_offset): New function.
+       * emit-rtl.c (set_mem_attributes_minus_bitpos): Handle MEM_REF.
+       * gimple-fold.c (may_propagate_address_into_dereference): Handle
+       MEM_REF.
+       (maybe_fold_offset_to_array_ref): Allow possibly out-of bounds
+       accesses if the array has just one dimension.  Remove always true
+       parameter.  Do not require type compatibility here.
+       (maybe_fold_offset_to_component_ref): Remove.
+       (maybe_fold_stmt_indirect): Remove.
+       (maybe_fold_reference): Remove INDIRECT_REF handling.
+       Fold back to non-MEM_REF.
+       (maybe_fold_offset_to_address): Simplify.  Deal with type
+       mismatches here.
+       (maybe_fold_reference): Likewise.
+       (maybe_fold_stmt_addition): Likewise.  Also handle
+       &ARRAY + I in addition to &ARRAY[0] + I.
+       (fold_gimple_assign): Handle ADDR_EXPR of MEM_REFs.
+       (gimple_get_relevant_ref_binfo): Handle MEM_REF.
+       * cfgexpand.c (expand_debug_expr): Handle MEM_REF.
+       * tree-ssa.c (useless_type_conversion_p): Make most pointer
+       conversions useless.
+       (warn_uninitialized_var): Handle MEM_REF.
+       (maybe_rewrite_mem_ref_base): New function.
+       (execute_update_addresses_taken): Implement re-writing of MEM_REFs
+       to SSA form.
+       * tree-inline.c (remap_gimple_op_r): Handle MEM_REF, remove
+       INDIRECT_REF handling.
+       (copy_tree_body_r): Handle MEM_REF.
+       * gimple.c (is_gimple_addressable): Adjust.
+       (is_gimple_address): Likewise.
+       (is_gimple_invariant_address): ADDR_EXPRs of MEM_REFs with
+       invariant base are invariant.
+       (is_gimple_min_lval): Adjust.
+       (is_gimple_mem_ref_addr): New function.
+       (get_base_address): Handle MEM_REF.
+       (count_ptr_derefs): Likewise.
+       (get_base_loadstore): Likewise.
+       * gimple.h (is_gimple_mem_ref_addr): Declare.
+       (gimple_call_fndecl): Handle invariant MEM_REF addresses.
+       * tree-cfg.c (verify_address): New function, split out from ...
+       (verify_expr): ... here.  Use for verifying ADDR_EXPRs and
+       the address operand of MEM_REFs.  Verify MEM_REFs.  Reject
+       INDIRECT_REFs.
+       (verify_types_in_gimple_min_lval): Handle MEM_REF.  Disallow
+       INDIRECT_REF.  Allow conversions.
+       (verify_types_in_gimple_reference): Verify VIEW_CONVERT_EXPR of
+       a register does not change its size.
+       (verify_types_in_gimple_reference): Verify MEM_REF.
+       (verify_gimple_assign_single): Disallow INDIRECT_REF.
+       Handle MEM_REF.
+       * tree-ssa-operands.c (opf_non_addressable, opf_not_non_addressable):
+       New.
+       (mark_address_taken): Handle MEM_REF.
+       (get_indirect_ref_operands): Pass through opf_not_non_addressable.
+       (get_asm_expr_operands): Pass opf_not_non_addressable.
+       (get_expr_operands): Handle opf_[not_]non_addressable.
+       Handle MEM_REF.  Remove INDIRECT_REF handling.
+       * tree-vrp.c: (check_array_ref): Handle MEM_REF.
+       (search_for_addr_array): Likewise.
+       (check_array_bounds): Likewise.
+       (vrp_stmt_computes_nonzero): Adjust for MEM_REF.
+       * tree-ssa-loop-im.c (for_each_index): Handle MEM_REF.
+       (ref_always_accessed_p): Likewise.
+       (gen_lsm_tmp_name): Likewise.  Handle ADDR_EXPR.
+       * tree-complex.c (extract_component): Do not handle INDIRECT_REF.
+       Handle MEM_REF.
+       * cgraphbuild.c (mark_load): Properly check for NULL result
+       from get_base_address.
+       (mark_store): Likewise.
+       * tree-ssa-loop-niter.c (array_at_struct_end_p): Handle MEM_REF.
+       * tree-loop-distribution.c (generate_builtin): Exchange INDIRECT_REF
+       handling for MEM_REF.
+       * tree-scalar-evolution.c (follow_ssa_edge_expr): Handle
+       &MEM[ptr + CST] similar to POINTER_PLUS_EXPR.
+       * builtins.c (stabilize_va_list_loc): Use the function ABI
+       valist type if we couldn't canonicalize the argument type.
+       Always dereference with the canonical va-list type.
+       (maybe_emit_free_warning): Handle MEM_REF.
+       (fold_builtin_memory_op): Simplify and handle MEM_REFs in folding
+       memmove to memcpy.
+       * builtins.c (fold_builtin_memory_op): Use ref-all types
+       for all memcpy foldings.
+       * omp-low.c (build_receiver_ref): Adjust for MEM_REF.
+       (build_outer_var_ref): Likewise.
+       (scan_omp_1_op): Likewise.
+       (lower_rec_input_clauses): Likewise.
+       (lower_lastprivate_clauses): Likewise.
+       (lower_reduction_clauses): Likewise.
+       (lower_copyprivate_clauses): Likewise.
+       (expand_omp_atomic_pipeline): Likewise.
+       (expand_omp_atomic_mutex): Likewise.
+       (create_task_copyfn): Likewise.
+       * tree-ssa-sccvn.c (copy_reference_ops_from_ref): Handle MEM_REF.
+       Remove old union trick.  Initialize constant offsets.
+       (ao_ref_init_from_vn_reference): Likewise.  Do not handle
+       INDIRECT_REF.  Init base_alias_set properly.
+       (vn_reference_lookup_3): Replace INDIRECT_REF handling with
+       MEM_REF.
+       (vn_reference_fold_indirect): Adjust for MEM_REFs.
+       (valueize_refs): Fold MEM_REFs.  Re-evaluate constant offset
+       for ARRAY_REFs.
+       (may_insert): Remove.
+       (visit_reference_op_load): Do not test may_insert.
+       (run_scc_vn): Remove parameter, do not fiddle with may_insert.
+       * tree-ssa-sccvn.h (struct vn_reference_op_struct): Add
+       a field to store the constant offset this op applies.
+       (run_scc_vn): Adjust prototype.
+       * cgraphunit.c (thunk_adjust): Adjust for MEM_REF.
+       * tree-ssa-ccp.c (ccp_fold): Replace INDIRECT_REF folding with
+       MEM_REF.  Propagate &foo + CST as &MEM[&foo, CST].  Do not
+       bother about volatile qualifiers on pointers.
+       (fold_const_aggregate_ref): Handle MEM_REF, do not handle INDIRECT_REF.
+       * tree-ssa-loop-ivopts.c
+       * tree-ssa-loop-ivopts.c (determine_base_object): Adjust
+       for MEM_REF.
+       (strip_offset_1): Likewise.
+       (find_interesting_uses_address): Replace INDIRECT_REF handling with
+       MEM_REF handling.
+       (get_computation_cost_at): Likewise.
+       * ipa-pure-const.c (check_op): Handle MEM_REF.
+       * tree-stdarg.c (check_all_va_list_escapes): Adjust for MEM_REF.
+       * tree-ssa-sink.c (is_hidden_global_store): Handle MEM_REF
+       and constants.
+       * ipa-inline.c (likely_eliminated_by_inlining_p): Handle MEM_REF.
+       * tree-parloops.c (take_address_of): Adjust for MEM_REF.
+       (eliminate_local_variables_1): Likewise.
+       (create_call_for_reduction_1): Likewise.
+       (create_loads_for_reductions): Likewise.
+       (create_loads_and_stores_for_name): Likewise.
+       * matrix-reorg.c (may_flatten_matrices_1): Sanitize.
+       (ssa_accessed_in_tree): Handle MEM_REF.
+       (ssa_accessed_in_assign_rhs): Likewise.
+       (update_type_size): Likewise.
+       (analyze_accesses_for_call_stmt): Likewise.
+       (analyze_accesses_for_assign_stmt): Likewise.
+       (transform_access_sites): Likewise.
+       (transform_allocation_sites): Likewise.
+       * tree-affine.c (tree_to_aff_combination): Handle MEM_REF.
+       * tree-vect-data-refs.c (vect_create_addr_base_for_vector_ref): Do
+       not handle INDIRECT_REF.
+       * tree-ssa-phiopt.c (add_or_mark_expr): Handle MEM_REF.
+       (cond_store_replacement): Likewise.
+       * tree-ssa-pre.c (create_component_ref_by_pieces_1): Handle
+       MEM_REF, do not handle INDIRECT_REFs.
+       (insert_into_preds_of_block): Properly initialize avail.
+       (phi_translate_1): Fold MEM_REFs.  Re-evaluate constant offset
+       for ARRAY_REFs.  Properly handle reference lookups that
+       require a bit re-interpretation.
+       (can_PRE_operation): Do not handle INDIRECT_REF.  Handle MEM_REF.
+       * tree-sra.c
+       * tree-sra.c (build_access_from_expr_1): Handle MEM_REF.
+       (build_ref_for_offset_1): Remove.
+       (build_ref_for_offset): Build MEM_REFs.
+       (gate_intra_sra): Disable for now.
+       (sra_ipa_modify_expr): Handle MEM_REF.
+       (ipa_early_sra_gate): Disable for now.
+       * tree-sra.c (create_access): Swap INDIRECT_REF handling for
+       MEM_REF handling.
+       (disqualify_base_of_expr): Likewise.
+       (ptr_parm_has_direct_uses): Swap INDIRECT_REF handling for
+       MEM_REF handling.
+       (sra_ipa_modify_expr): Remove INDIRECT_REF handling.
+       Use mem_ref_offset.  Remove bogus folding.
+       (build_access_from_expr_1): Properly handle MEM_REF for
+       non IPA-SRA.
+       (make_fancy_name_1): Add support for MEM_REF.
+       * tree-predcom.c (ref_at_iteration): Handle MEM_REFs.
+       * tree-mudflap.c (mf_xform_derefs_1): Adjust for MEM_REF.
+       * ipa-prop.c (compute_complex_assign_jump_func): Handle MEM_REF.
+       (compute_complex_ancestor_jump_func): Likewise.
+       (ipa_analyze_virtual_call_uses): Likewise.
+       * tree-ssa-forwprop.c (forward_propagate_addr_expr_1): Replace
+       INDIRECT_REF folding with more generalized MEM_REF folding.
+       (tree_ssa_forward_propagate_single_use_vars): Adjust accordingly.
+       (forward_propagate_addr_into_variable_array_index): Also handle
+       &ARRAY + I in addition to &ARRAY[0] + I.
+       * tree-ssa-dce.c (ref_may_be_aliased): Handle MEM_REF.
+       * tree-ssa-ter.c (find_replaceable_in_bb): Avoid TER if that
+       creates assignments with overlap.
+       * tree-nested.c (get_static_chain): Adjust for MEM_REF.
+       (get_frame_field): Likewise.
+       (get_nonlocal_debug_decl): Likewise.
+       (convert_nonlocal_reference_op): Likewise.
+       (struct nesting_info): Add mem_refs pointer-set.
+       (create_nesting_tree): Allocate it.
+       (convert_local_reference_op): Insert to be folded mem-refs.
+       (fold_mem_refs): New function.
+       (finalize_nesting_tree_1): Perform deferred folding of mem-refs.
+       (free_nesting_tree): Free the pointer-set.
+       * tree-vect-stmts.c (vectorizable_store): Adjust for MEM_REF.
+       (vectorizable_load): Likewise.
+       * tree-ssa-phiprop.c (phiprop_insert_phi): Adjust for MEM_REF.
+       (propagate_with_phi): Likewise.
+       * tree-object-size.c (addr_object_size): Handle MEM_REFs
+       instead of INDIRECT_REFs.
+       (compute_object_offset): Handle MEM_REF.
+       (plus_stmt_object_size): Handle MEM_REF.
+       (collect_object_sizes_for): Dispatch to plus_stmt_object_size
+       for &MEM_REF.
+       * tree-flow.h (get_addr_base_and_unit_offset): Declare.
+       (symbol_marked_for_renaming): Likewise.
+       * Makefile.in (tree-dfa.o): Add $(TOPLEV_H).
+       (fold-const.o): Add $(TREE_FLOW_H).
+       * tree-ssa-structalias.c (get_constraint_for_1): Handle MEM_REF.
+       (find_func_clobbers): Likewise.
+       * ipa-struct-reorg.c (decompose_indirect_ref_acc): Handle MEM_REF.
+       (decompose_access): Likewise.
+       (replace_field_acc): Likewise.
+       (replace_field_access_stmt): Likewise.
+       (insert_new_var_in_stmt): Likewise.
+       (get_stmt_accesses): Likewise.
+       (reorg_structs_drive): Disable.
+       * config/i386/i386.c (ix86_va_start): Adjust for MEM_REF.
+       (ix86_canonical_va_list_type): Likewise.
+
 2010-06-30  Joern Rennecke  <joern.rennecke@embecosm.com>
 
        PR other/44566
index a4214cb87df829be64ab1e01e4cca279e3b1ab30..d717332b3287ef7e6eacd3d4334a6e54d81252fc 100644 (file)
@@ -2510,7 +2510,7 @@ tree-dfa.o : tree-dfa.c $(TREE_FLOW_H) $(CONFIG_H) $(SYSTEM_H) \
    $(TREE_INLINE_H) $(HASHTAB_H) pointer-set.h $(FLAGS_H) $(FUNCTION_H) \
    $(TIMEVAR_H) convert.h $(TM_H) coretypes.h langhooks.h $(TREE_DUMP_H) \
    $(TREE_PASS_H) $(PARAMS_H) $(CGRAPH_H) $(BASIC_BLOCK_H) $(GIMPLE_H) \
-   tree-pretty-print.h
+   tree-pretty-print.h $(TOPLEV_H)
 tree-ssa-operands.o : tree-ssa-operands.c $(TREE_FLOW_H) $(CONFIG_H) \
    $(SYSTEM_H) $(TREE_H) $(GGC_H) $(DIAGNOSTIC_H) $(TREE_INLINE_H) \
    $(FLAGS_H) $(FUNCTION_H) $(TM_H) $(TIMEVAR_H) $(TREE_PASS_H) $(TOPLEV_H) \
@@ -2789,7 +2789,7 @@ tree-diagnostic.o : tree-diagnostic.c $(CONFIG_H) $(SYSTEM_H) coretypes.h \
 fold-const.o : fold-const.c $(CONFIG_H) $(SYSTEM_H) coretypes.h $(TM_H) \
    $(TREE_H) $(FLAGS_H) $(TOPLEV_H) $(HASHTAB_H) $(EXPR_H) $(RTL_H) \
    $(GGC_H) $(TM_P_H) langhooks.h $(MD5_H) intl.h $(TARGET_H) \
-   $(GIMPLE_H) realmpfr.h
+   $(GIMPLE_H) realmpfr.h $(TREE_FLOW_H)
 diagnostic.o : diagnostic.c $(CONFIG_H) $(SYSTEM_H) coretypes.h \
    version.h $(INPUT_H) intl.h $(DIAGNOSTIC_H) diagnostic.def
 opts.o : opts.c opts.h options.h $(TOPLEV_H) $(CONFIG_H) $(SYSTEM_H) \
index 08c38bf661601af8c749d541f088ff88c9ca9f7c..30717127be235356f43e76914880ed6cbee895dc 100644 (file)
@@ -279,7 +279,8 @@ ao_ref_from_mem (ao_ref *ref, const_rtx mem)
 
   /* If this is a pointer dereference of a non-SSA_NAME punt.
      ???  We could replace it with a pointer to anything.  */
-  if (INDIRECT_REF_P (base)
+  if ((INDIRECT_REF_P (base)
+       || TREE_CODE (base) == MEM_REF)
       && TREE_CODE (TREE_OPERAND (base, 0)) != SSA_NAME)
     return false;
 
@@ -293,10 +294,7 @@ ao_ref_from_mem (ao_ref *ref, const_rtx mem)
       void *namep;
       namep = pointer_map_contains (cfun->gimple_df->decls_to_pointers, base);
       if (namep)
-       {
-         ref->base_alias_set = get_alias_set (base);
-         ref->base = build1 (INDIRECT_REF, TREE_TYPE (base), *(tree *)namep);
-       }
+       ref->base = build_simple_mem_ref (*(tree *)namep);
     }
 
   ref->ref_alias_set = MEM_ALIAS_SET (mem);
@@ -648,8 +646,8 @@ get_alias_set (tree t)
     {
       tree inner;
 
-      /* Remove any nops, then give the language a chance to do
-        something with this tree before we look at it.  */
+      /* Give the language a chance to do something with this tree
+        before we look at it.  */
       STRIP_NOPS (t);
       set = lang_hooks.get_alias_set (t);
       if (set != -1)
@@ -659,21 +657,41 @@ get_alias_set (tree t)
       if (TREE_CODE (t) == TARGET_MEM_REF)
        t = TMR_ORIGINAL (t);
 
-      /* First see if the actual object referenced is an INDIRECT_REF from a
-        restrict-qualified pointer or a "void *".  */
+      /* Get the base object of the reference.  */
       inner = t;
       while (handled_component_p (inner))
        {
+         /* If there is a VIEW_CONVERT_EXPR in the chain we cannot use
+            the type of any component references that wrap it to
+            determine the alias-set.  */
+         if (TREE_CODE (inner) == VIEW_CONVERT_EXPR)
+           t = TREE_OPERAND (inner, 0);
          inner = TREE_OPERAND (inner, 0);
-         STRIP_NOPS (inner);
        }
 
+      /* Handle pointer dereferences here, they can override the
+        alias-set.  */
       if (INDIRECT_REF_P (inner))
        {
          set = get_deref_alias_set_1 (TREE_OPERAND (inner, 0));
          if (set != -1)
            return set;
        }
+      else if (TREE_CODE (inner) == MEM_REF)
+       {
+         set = get_deref_alias_set_1 (TREE_OPERAND (inner, 1));
+         if (set != -1)
+           return set;
+       }
+
+      /* If the innermost reference is a MEM_REF that has a
+        conversion embedded treat it like a VIEW_CONVERT_EXPR above,
+        using the memory access type for determining the alias-set.  */
+     if (TREE_CODE (inner) == MEM_REF
+        && (TYPE_MAIN_VARIANT (TREE_TYPE (inner))
+            != TYPE_MAIN_VARIANT
+                 (TREE_TYPE (TREE_TYPE (TREE_OPERAND (inner, 1))))))
+       return get_deref_alias_set (TREE_OPERAND (inner, 1));
 
       /* Otherwise, pick up the outermost object that we could have a pointer
         to, processing conversions as above.  */
index 8a3f4486e66d9f1ebab44423286a609193c4131b..ddbbd79dbe2be02e39edf51c88943269e6840666 100644 (file)
@@ -4455,7 +4455,10 @@ stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
 {
   tree vatype = targetm.canonical_va_list_type (TREE_TYPE (valist));
 
-  gcc_assert (vatype != NULL_TREE);
+  /* The current way of determining the type of valist is completely
+     bogus.  We should have the information on the va builtin instead.  */
+  if (!vatype)
+    vatype = targetm.fn_abi_va_list (cfun->decl);
 
   if (TREE_CODE (vatype) == ARRAY_TYPE)
     {
@@ -4474,21 +4477,21 @@ stabilize_va_list_loc (location_t loc, tree valist, int needs_lvalue)
     }
   else
     {
-      tree pt;
+      tree pt = build_pointer_type (vatype);
 
       if (! needs_lvalue)
        {
          if (! TREE_SIDE_EFFECTS (valist))
            return valist;
 
-         pt = build_pointer_type (vatype);
          valist = fold_build1_loc (loc, ADDR_EXPR, pt, valist);
          TREE_SIDE_EFFECTS (valist) = 1;
        }
 
       if (TREE_SIDE_EFFECTS (valist))
        valist = save_expr (valist);
-      valist = build_fold_indirect_ref_loc (loc, valist);
+      valist = fold_build2_loc (loc, MEM_REF,
+                               vatype, valist, build_int_cst (pt, 0));
     }
 
   return valist;
@@ -8346,6 +8349,7 @@ fold_builtin_memory_op (location_t loc, tree dest, tree src,
     {
       tree srctype, desttype;
       int src_align, dest_align;
+      tree off0;
 
       if (endp == 3)
        {
@@ -8371,37 +8375,26 @@ fold_builtin_memory_op (location_t loc, tree dest, tree src,
            }
 
          /* If *src and *dest can't overlap, optimize into memcpy as well.  */
-         srcvar = build_fold_indirect_ref_loc (loc, src);
-         destvar = build_fold_indirect_ref_loc (loc, dest);
-         if (srcvar
-             && !TREE_THIS_VOLATILE (srcvar)
-             && destvar
-             && !TREE_THIS_VOLATILE (destvar))
+         if (TREE_CODE (src) == ADDR_EXPR
+             && TREE_CODE (dest) == ADDR_EXPR)
            {
              tree src_base, dest_base, fn;
              HOST_WIDE_INT src_offset = 0, dest_offset = 0;
              HOST_WIDE_INT size = -1;
              HOST_WIDE_INT maxsize = -1;
 
-             src_base = srcvar;
-             if (handled_component_p (src_base))
-               src_base = get_ref_base_and_extent (src_base, &src_offset,
-                                                   &size, &maxsize);
-             dest_base = destvar;
-             if (handled_component_p (dest_base))
-               dest_base = get_ref_base_and_extent (dest_base, &dest_offset,
-                                                    &size, &maxsize);
+             srcvar = TREE_OPERAND (src, 0);
+             src_base = get_ref_base_and_extent (srcvar, &src_offset,
+                                                 &size, &maxsize);
+             destvar = TREE_OPERAND (dest, 0);
+             dest_base = get_ref_base_and_extent (destvar, &dest_offset,
+                                                  &size, &maxsize);
              if (host_integerp (len, 1))
-               {
-                 maxsize = tree_low_cst (len, 1);
-                 if (maxsize
-                     > INTTYPE_MAXIMUM (HOST_WIDE_INT) / BITS_PER_UNIT)
-                   maxsize = -1;
-                 else
-                   maxsize *= BITS_PER_UNIT;
-               }
+               maxsize = tree_low_cst (len, 1);
              else
                maxsize = -1;
+             src_offset /= BITS_PER_UNIT;
+             dest_offset /= BITS_PER_UNIT;
              if (SSA_VAR_P (src_base)
                  && SSA_VAR_P (dest_base))
                {
@@ -8410,13 +8403,25 @@ fold_builtin_memory_op (location_t loc, tree dest, tree src,
                                           dest_offset, maxsize))
                    return NULL_TREE;
                }
-             else if (TREE_CODE (src_base) == INDIRECT_REF
-                      && TREE_CODE (dest_base) == INDIRECT_REF)
+             else if (TREE_CODE (src_base) == MEM_REF
+                      && TREE_CODE (dest_base) == MEM_REF)
                {
+                 double_int off;
                  if (! operand_equal_p (TREE_OPERAND (src_base, 0),
-                                        TREE_OPERAND (dest_base, 0), 0)
-                     || ranges_overlap_p (src_offset, maxsize,
-                                          dest_offset, maxsize))
+                                        TREE_OPERAND (dest_base, 0), 0))
+                   return NULL_TREE;
+                 off = double_int_add (mem_ref_offset (src_base),
+                                       shwi_to_double_int (src_offset));
+                 if (!double_int_fits_in_shwi_p (off))
+                   return NULL_TREE;
+                 src_offset = off.low;
+                 off = double_int_add (mem_ref_offset (dest_base),
+                                       shwi_to_double_int (dest_offset));
+                 if (!double_int_fits_in_shwi_p (off))
+                   return NULL_TREE;
+                 dest_offset = off.low;
+                 if (ranges_overlap_p (src_offset, maxsize,
+                                       dest_offset, maxsize))
                    return NULL_TREE;
                }
              else
@@ -8472,12 +8477,12 @@ fold_builtin_memory_op (location_t loc, tree dest, tree src,
          dest = build1 (NOP_EXPR, build_pointer_type (desttype), dest);
        }
       if (!srctype || !desttype
+         || TREE_ADDRESSABLE (srctype)
+         || TREE_ADDRESSABLE (desttype)
          || !TYPE_SIZE_UNIT (srctype)
          || !TYPE_SIZE_UNIT (desttype)
          || TREE_CODE (TYPE_SIZE_UNIT (srctype)) != INTEGER_CST
-         || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST
-         || TYPE_VOLATILE (srctype)
-         || TYPE_VOLATILE (desttype))
+         || TREE_CODE (TYPE_SIZE_UNIT (desttype)) != INTEGER_CST)
        return NULL_TREE;
 
       src_align = get_pointer_alignment (src, BIGGEST_ALIGNMENT);
@@ -8489,97 +8494,44 @@ fold_builtin_memory_op (location_t loc, tree dest, tree src,
       if (!ignore)
         dest = builtin_save_expr (dest);
 
-      srcvar = NULL_TREE;
-      if (tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
-       {
-         srcvar = build_fold_indirect_ref_loc (loc, src);
-         if (TREE_THIS_VOLATILE (srcvar))
-           return NULL_TREE;
-         else if (!tree_int_cst_equal (tree_expr_size (srcvar), len))
-           srcvar = NULL_TREE;
-         /* With memcpy, it is possible to bypass aliasing rules, so without
-            this check i.e. execute/20060930-2.c would be misoptimized,
-            because it use conflicting alias set to hold argument for the
-            memcpy call.  This check is probably unnecessary with
-            -fno-strict-aliasing.  Similarly for destvar.  See also
-            PR29286.  */
-         else if (!var_decl_component_p (srcvar))
-           srcvar = NULL_TREE;
-       }
-
-      destvar = NULL_TREE;
-      if (tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
-       {
-         destvar = build_fold_indirect_ref_loc (loc, dest);
-         if (TREE_THIS_VOLATILE (destvar))
-           return NULL_TREE;
-         else if (!tree_int_cst_equal (tree_expr_size (destvar), len))
-           destvar = NULL_TREE;
-         else if (!var_decl_component_p (destvar))
-           destvar = NULL_TREE;
-       }
+      /* Build accesses at offset zero with a ref-all character type.  */
+      off0 = build_int_cst (build_pointer_type_for_mode (char_type_node,
+                                                        ptr_mode, true), 0);
+
+      destvar = dest;
+      STRIP_NOPS (destvar);
+      if (TREE_CODE (destvar) == ADDR_EXPR
+         && var_decl_component_p (TREE_OPERAND (destvar, 0))
+         && tree_int_cst_equal (TYPE_SIZE_UNIT (desttype), len))
+       destvar = fold_build2 (MEM_REF, desttype, destvar, off0);
+      else
+       destvar = NULL_TREE;
+
+      srcvar = src;
+      STRIP_NOPS (srcvar);
+      if (TREE_CODE (srcvar) == ADDR_EXPR
+         && var_decl_component_p (TREE_OPERAND (srcvar, 0))
+         && tree_int_cst_equal (TYPE_SIZE_UNIT (srctype), len))
+       srcvar = fold_build2 (MEM_REF, destvar ? desttype : srctype,
+                             srcvar, off0);
+      else
+       srcvar = NULL_TREE;
 
       if (srcvar == NULL_TREE && destvar == NULL_TREE)
        return NULL_TREE;
 
       if (srcvar == NULL_TREE)
        {
-         tree srcptype;
-         if (TREE_ADDRESSABLE (TREE_TYPE (destvar)))
-           return NULL_TREE;
-
-         srctype = build_qualified_type (desttype, 0);
-         if (src_align < (int) TYPE_ALIGN (srctype))
-           {
-             if (AGGREGATE_TYPE_P (srctype)
-                 || SLOW_UNALIGNED_ACCESS (TYPE_MODE (srctype), src_align))
-               return NULL_TREE;
-
-             srctype = build_variant_type_copy (srctype);
-             TYPE_ALIGN (srctype) = src_align;
-             TYPE_USER_ALIGN (srctype) = 1;
-             TYPE_PACKED (srctype) = 1;
-           }
-         srcptype = build_pointer_type_for_mode (srctype, ptr_mode, true);
-         src = fold_convert_loc (loc, srcptype, src);
-         srcvar = build_fold_indirect_ref_loc (loc, src);
+         STRIP_NOPS (src);
+         srcvar = fold_build2 (MEM_REF, desttype, src, off0);
        }
       else if (destvar == NULL_TREE)
        {
-         tree destptype;
-         if (TREE_ADDRESSABLE (TREE_TYPE (srcvar)))
-           return NULL_TREE;
-
-         desttype = build_qualified_type (srctype, 0);
-         if (dest_align < (int) TYPE_ALIGN (desttype))
-           {
-             if (AGGREGATE_TYPE_P (desttype)
-                 || SLOW_UNALIGNED_ACCESS (TYPE_MODE (desttype), dest_align))
-               return NULL_TREE;
+         STRIP_NOPS (dest);
+         destvar = fold_build2 (MEM_REF, srctype, dest, off0);
+       }
 
-             desttype = build_variant_type_copy (desttype);
-             TYPE_ALIGN (desttype) = dest_align;
-             TYPE_USER_ALIGN (desttype) = 1;
-             TYPE_PACKED (desttype) = 1;
-           }
-         destptype = build_pointer_type_for_mode (desttype, ptr_mode, true);
-         dest = fold_convert_loc (loc, destptype, dest);
-         destvar = build_fold_indirect_ref_loc (loc, dest);
-       }
-
-      if (srctype == desttype
-         || (gimple_in_ssa_p (cfun)
-             && useless_type_conversion_p (desttype, srctype)))
-       expr = srcvar;
-      else if ((INTEGRAL_TYPE_P (TREE_TYPE (srcvar))
-          || POINTER_TYPE_P (TREE_TYPE (srcvar)))
-         && (INTEGRAL_TYPE_P (TREE_TYPE (destvar))
-             || POINTER_TYPE_P (TREE_TYPE (destvar))))
-       expr = fold_convert_loc (loc, TREE_TYPE (destvar), srcvar);
-      else
-       expr = fold_build1_loc (loc, VIEW_CONVERT_EXPR,
-                           TREE_TYPE (destvar), srcvar);
-      expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, expr);
+      expr = build2 (MODIFY_EXPR, TREE_TYPE (destvar), destvar, srcvar);
     }
 
   if (ignore)
@@ -12068,7 +12020,7 @@ maybe_emit_free_warning (tree exp)
     return;
 
   arg = get_base_address (TREE_OPERAND (arg, 0));
-  if (arg == NULL || INDIRECT_REF_P (arg))
+  if (arg == NULL || INDIRECT_REF_P (arg) || TREE_CODE (arg) == MEM_REF)
     return;
 
   if (SSA_VAR_P (arg))
index 0d8026e75a15121344e4573f672c433145c76c9d..a76aeb23b3b24d4657613e2ee4deb7943bb14632 100644 (file)
@@ -2438,6 +2438,11 @@ expand_debug_expr (tree exp)
        return op0;
       }
 
+    case MEM_REF:
+      /* ??? FIXME.  */
+      if (!integer_zerop (TREE_OPERAND (exp, 1)))
+       return NULL;
+      /* Fallthru.  */
     case INDIRECT_REF:
     case ALIGN_INDIRECT_REF:
     case MISALIGNED_INDIRECT_REF:
index c63b5afc81c1c56adaa6526e144fb3e34076acc4..9dcb862316729760e36c15e16c5ea2edc40b5ed8 100644 (file)
@@ -275,7 +275,7 @@ mark_load (gimple stmt ATTRIBUTE_UNUSED, tree t,
           void *data ATTRIBUTE_UNUSED)
 {
   t = get_base_address (t);
-  if (TREE_CODE (t) == VAR_DECL
+  if (t && TREE_CODE (t) == VAR_DECL
       && (TREE_STATIC (t) || DECL_EXTERNAL (t)))
     {
       struct varpool_node *vnode = varpool_node (t);
@@ -300,7 +300,7 @@ mark_store (gimple stmt ATTRIBUTE_UNUSED, tree t,
            void *data ATTRIBUTE_UNUSED)
 {
   t = get_base_address (t);
-  if (TREE_CODE (t) == VAR_DECL
+  if (t && TREE_CODE (t) == VAR_DECL
       && (TREE_STATIC (t) || DECL_EXTERNAL (t)))
     {
       struct varpool_node *vnode = varpool_node (t);
index 27faead140333b0cc1c2259398ab4634e22a70bc..5a69afa5286f7f2a891aaf87725cd517fa93addf 100644 (file)
@@ -1364,8 +1364,7 @@ thunk_adjust (gimple_stmt_iterator * bsi,
       vtabletmp2 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp)),
                                   "vtableaddr");
       stmt = gimple_build_assign (vtabletmp2,
-                                 build1 (INDIRECT_REF,
-                                         TREE_TYPE (vtabletmp2), vtabletmp));
+                                 build_simple_mem_ref (vtabletmp));
       gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
       mark_symbols_for_renaming (stmt);
       find_referenced_vars_in (stmt);
@@ -1384,9 +1383,7 @@ thunk_adjust (gimple_stmt_iterator * bsi,
       vtabletmp3 = create_tmp_var (TREE_TYPE (TREE_TYPE (vtabletmp2)),
                                   "vcalloffset");
       stmt = gimple_build_assign (vtabletmp3,
-                                 build1 (INDIRECT_REF,
-                                         TREE_TYPE (vtabletmp3),
-                                         vtabletmp2));
+                                 build_simple_mem_ref (vtabletmp2));
       gsi_insert_after (bsi, stmt, GSI_NEW_STMT);
       mark_symbols_for_renaming (stmt);
       find_referenced_vars_in (stmt);
index 7345be4791b0197c88263845e8fe4502a325d8ab..a67097a3c2cc04b95edf2bfdc2f05c9df0f2de5c 100644 (file)
@@ -6025,7 +6025,7 @@ alpha_stdarg_optimize_hook (struct stdarg_info *si, const_gimple stmt)
   rhs = gimple_assign_rhs1 (stmt);
   while (handled_component_p (rhs))
     rhs = TREE_OPERAND (rhs, 0);
-  if (TREE_CODE (rhs) != INDIRECT_REF
+  if (TREE_CODE (rhs) != MEM_REF
       || TREE_CODE (TREE_OPERAND (rhs, 0)) != SSA_NAME)
     return false;
 
index 4f5afbec0a1d885f06e50bbda22ca24ae4fad719..ec2cdd38d83896624bcf280154330c8eb8febc17 100644 (file)
@@ -7093,11 +7093,17 @@ ix86_va_start (tree valist, rtx nextarg)
   f_ovf = TREE_CHAIN (f_fpr);
   f_sav = TREE_CHAIN (f_ovf);
 
-  valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
-  gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), valist, f_gpr, NULL_TREE);
-  fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), valist, f_fpr, NULL_TREE);
-  ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), valist, f_ovf, NULL_TREE);
-  sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), valist, f_sav, NULL_TREE);
+  valist = build_simple_mem_ref (valist);
+  TREE_TYPE (valist) = TREE_TYPE (sysv_va_list_type_node);
+  /* The following should be folded into the MEM_REF offset.  */
+  gpr = build3 (COMPONENT_REF, TREE_TYPE (f_gpr), unshare_expr (valist),
+               f_gpr, NULL_TREE);
+  fpr = build3 (COMPONENT_REF, TREE_TYPE (f_fpr), unshare_expr (valist),
+               f_fpr, NULL_TREE);
+  ovf = build3 (COMPONENT_REF, TREE_TYPE (f_ovf), unshare_expr (valist),
+               f_ovf, NULL_TREE);
+  sav = build3 (COMPONENT_REF, TREE_TYPE (f_sav), unshare_expr (valist),
+               f_sav, NULL_TREE);
 
   /* Count number of gp and fp argument registers used.  */
   words = crtl->args.info.words;
@@ -30619,10 +30625,12 @@ ix86_canonical_va_list_type (tree type)
   tree wtype, htype;
 
   /* Resolve references and pointers to va_list type.  */
-  if (INDIRECT_REF_P (type))
+  if (TREE_CODE (type) == MEM_REF)
     type = TREE_TYPE (type);
   else if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE(type)))
     type = TREE_TYPE (type);
+  else if (POINTER_TYPE_P (type) && TREE_CODE (TREE_TYPE (type)) == ARRAY_TYPE)
+    type = TREE_TYPE (type);
 
   if (TARGET_64BIT)
     {
index 70ff5e0f292b695810bbd896553a8ee4494ec7e8..36187c95cd5085139529c6508d08e864c6b72166 100644 (file)
@@ -13682,7 +13682,7 @@ rs6000_check_sdmode (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
     case RESULT_DECL:
     case SSA_NAME:
     case REAL_CST:
-    case INDIRECT_REF:
+    case MEM_REF:
     case ALIGN_INDIRECT_REF:
     case MISALIGNED_INDIRECT_REF:
     case VIEW_CONVERT_EXPR:
index c488ef4958b396ec0d1ea6a30a89a46338b931df..b8d2be17c36bc059c45b673d207e6a6a3db6da8c 100644 (file)
@@ -7886,7 +7886,7 @@ sh_gimplify_va_arg_expr (tree valist, tree type, gimple_seq *pre_p,
       lab_false = create_artificial_label (UNKNOWN_LOCATION);
       lab_over = create_artificial_label (UNKNOWN_LOCATION);
 
-      valist = build1 (INDIRECT_REF, ptr_type_node, addr);
+      valist = build_simple_mem_ref (addr);
 
       if (pass_as_float)
        {
index 8ae4e6053629da7814b5fe5cb1053e4a13c0aa21..4b7f9162395bf4f069327995d57c1c7f2b558973 100644 (file)
@@ -4171,7 +4171,7 @@ spu_gimplify_va_arg_expr (tree valist, tree type, gimple_seq * pre_p,
   f_args = TYPE_FIELDS (TREE_TYPE (va_list_type_node));
   f_skip = TREE_CHAIN (f_args);
 
-  valist = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (valist)), valist);
+  valist = build_simple_mem_ref (valist);
   args =
     build3 (COMPONENT_REF, TREE_TYPE (f_args), valist, f_args, NULL_TREE);
   skip =
index 3c6268de9b1218bf822663302b9a4865b64f2ac8..27e7f7a5ff3d44a78e5b6f96da812aa963914176 100644 (file)
@@ -1,3 +1,9 @@
+2010-07-01  Richard Guenther  <rguenther@suse.de>
+
+       * cp-gimplify.c (cp_gimplify_expr): Open-code the rhs
+       predicate we are looking for, allow non-gimplified
+       INDIRECT_REFs.
+
 2010-06-30  Paolo Carlini  <paolo.carlini@oracle.com>
 
        PR c++/44628
index fb7daeb3e818b2275f22ed63ca2e3c749443e78d..fa897bfa2b89f7c89c66ad84f967797dbd3cf60d 100644 (file)
@@ -575,7 +575,7 @@ cp_gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p)
          TREE_OPERAND (*expr_p, 1) = build1 (VIEW_CONVERT_EXPR,
                                              TREE_TYPE (op0), op1);
 
-       else if ((rhs_predicate_for (op0)) (op1)
+       else if ((is_gimple_lvalue (op1) || INDIRECT_REF_P (op1))
                 && !(TREE_CODE (op1) == CALL_EXPR
                      && CALL_EXPR_RETURN_SLOT_OPT (op1))
                 && is_really_empty_class (TREE_TYPE (op0)))
index 4d7a5d865bd750d4c1625d3d563d7203b5c129b9..7b3be30dc6a8328d31a2088954f4836cc9ebf21f 100644 (file)
@@ -1116,6 +1116,7 @@ target system bytes are not the same width as host system bytes.
 @subsection References to storage
 @tindex ADDR_EXPR
 @tindex INDIRECT_REF
+@tindex MEM_REF
 @tindex ARRAY_REF
 @tindex ARRAY_RANGE_REF
 @tindex TARGET_MEM_REF
@@ -1176,6 +1177,13 @@ These nodes are used to represent the object pointed to by a pointer.
 The operand is the pointer being dereferenced; it will always have
 pointer or reference type.
 
+@item MEM_REF
+These nodes are used to represent the object pointed to by a pointer
+offset by a constant.
+The first operand is the pointer being dereferenced; it will always have
+pointer or reference type.  The second operand is a pointer constant.
+Its type specifies the type to be used for type-based alias analysis.
+
 @item COMPONENT_REF
 These nodes represent non-static data member accesses.  The first
 operand is the object (rather than a pointer to it); the second operand
index 984f00bd65aa4d0a44b2495ff6a2d06e519abf9a..b778db387d91316bae3bf9407aa7d3e911eb6201 100644 (file)
@@ -452,8 +452,8 @@ becomes
 
 The same rule holds for arguments to a @code{GIMPLE_CALL}.
 
-The target of an assignment is usually a variable, but can also be an
-@code{INDIRECT_REF} or a compound lvalue as described below.
+The target of an assignment is usually a variable, but can also be a
+@code{MEM_REF} or a compound lvalue as described below.
 
 @menu
 * Compound Expressions::
@@ -664,6 +664,11 @@ Return true if t is a valid expression to use as the function
 called by a @code{GIMPLE_CALL}.
 @end deftypefn
 
+@deftypefn {GIMPLE function} is_gimple_mem_ref_addr (tree t)
+Return true if t is a valid expression to use as the first operand
+of a @code{MEM_REF} expression.
+@end deftypefn
+
 @deftypefn {GIMPLE function} is_gimple_constant (tree t)
 Return true if t is a valid gimple constant.
 @end deftypefn
index a72771df2f8cd5e112028910f0dcd720a9190f91..3faa175485999a907b2a45695b73b7f2ca84361c 100644 (file)
@@ -15160,6 +15160,11 @@ loc_list_from_tree (tree loc, int want_address)
       }
       break;
 
+    case MEM_REF:
+      /* ??? FIXME.  */
+      if (!integer_zerop (TREE_OPERAND (loc, 1)))
+       return 0;
+      /* Fallthru.  */
     case INDIRECT_REF:
     case ALIGN_INDIRECT_REF:
     case MISALIGNED_INDIRECT_REF:
index 55ed2054e4e703947b5bc75d4147aa982fc5b14e..4e38d89d34875f973f2b96aa90a8fb9be4abeb14 100644 (file)
@@ -1614,6 +1614,35 @@ set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
       || TREE_CODE (t) == ALIGN_INDIRECT_REF
       || TYPE_ALIGN_OK (type))
     align = MAX (align, TYPE_ALIGN (type));
+  else if (TREE_CODE (t) == MEM_REF)
+    {
+      HOST_WIDE_INT aoff = BITS_PER_UNIT;
+      if (host_integerp (TREE_OPERAND (t, 1), 1))
+       {
+         HOST_WIDE_INT ioff = TREE_INT_CST_LOW (TREE_OPERAND (t, 1));
+         aoff = (ioff & -ioff) * BITS_PER_UNIT;
+       }
+      if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
+         && DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
+       align = MAX (align,
+                    DECL_ALIGN (TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
+      else if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
+              && CONSTANT_CLASS_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
+       {
+         align = TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (t, 0), 0)));
+#ifdef CONSTANT_ALIGNMENT
+         align = CONSTANT_ALIGNMENT (TREE_OPERAND (TREE_OPERAND (t, 0), 0), align);
+#endif
+       }
+      else
+       /* This technically isn't correct.  We can't really derive
+          alignment information from types.  */
+       align = MAX (align,
+                    TYPE_ALIGN (TREE_TYPE (TREE_TYPE (TREE_OPERAND (t, 1)))));
+      if (!integer_zerop (TREE_OPERAND (t, 1))
+         && aoff < align)
+       align = aoff;
+    }
   else
     if (TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
       {
@@ -1654,6 +1683,9 @@ set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
             || TREE_CODE (base) == BIT_FIELD_REF)
        base = TREE_OPERAND (base, 0);
 
+      if (TREE_CODE (base) == MEM_REF
+         && TREE_CODE (TREE_OPERAND (base, 0)) == ADDR_EXPR)
+       base = TREE_OPERAND (TREE_OPERAND (base, 0), 0);
       if (DECL_P (base))
        {
          if (CODE_CONTAINS_STRUCT (TREE_CODE (base), TS_DECL_WITH_VIS))
@@ -1774,7 +1806,7 @@ set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
            }
 
          /* If this is an indirect reference, record it.  */
-         else if (TREE_CODE (t) == INDIRECT_REF
+         else if (TREE_CODE (t) == MEM_REF 
                   || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
            {
              expr = t;
@@ -1784,7 +1816,7 @@ set_mem_attributes_minus_bitpos (rtx ref, tree t, int objectp,
        }
 
       /* If this is an indirect reference, record it.  */
-      else if (TREE_CODE (t) == INDIRECT_REF
+      else if (TREE_CODE (t) == MEM_REF 
               || TREE_CODE (t) == MISALIGNED_INDIRECT_REF)
        {
          expr = t;
index 2763dc91991456b0415be526b3013f77d2ff05a0..a2a8054eb4ef69ea644197c47d991c1c140cbfaa 100644 (file)
@@ -4213,6 +4213,10 @@ expand_assignment (tree to, tree from, bool nontemporal)
      an array element in an unaligned packed structure field, has the same
      problem.  */
   if (handled_component_p (to)
+      /* ???  We only need to handle MEM_REF here if the access is not
+         a full access of the base object.  */
+      || (TREE_CODE (to) == MEM_REF
+         && TREE_CODE (TREE_OPERAND (to, 0)) == ADDR_EXPR)
       || TREE_CODE (TREE_TYPE (to)) == ARRAY_TYPE)
     {
       enum machine_mode mode1;
@@ -4686,6 +4690,51 @@ store_expr (tree exp, rtx target, int call_param_p, bool nontemporal)
                       BLOCK_OP_NORMAL);
       return NULL_RTX;
     }
+  else if (TREE_CODE (exp) == MEM_REF
+          && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
+          && TREE_CODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) == STRING_CST
+          && integer_zerop (TREE_OPERAND (exp, 1))
+          && !nontemporal && !call_param_p
+          && TYPE_MODE (TREE_TYPE (exp)) == BLKmode)
+    {
+      /* Optimize initialization of an array with a STRING_CST.  */
+      HOST_WIDE_INT exp_len, str_copy_len;
+      rtx dest_mem;
+      tree str = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
+
+      exp_len = int_expr_size (exp);
+      if (exp_len <= 0)
+       goto normal_expr;
+
+      str_copy_len = strlen (TREE_STRING_POINTER (str));
+      if (str_copy_len < TREE_STRING_LENGTH (str) - 1)
+       goto normal_expr;
+
+      str_copy_len = TREE_STRING_LENGTH (str);
+      if ((STORE_MAX_PIECES & (STORE_MAX_PIECES - 1)) == 0)
+       {
+         str_copy_len += STORE_MAX_PIECES - 1;
+         str_copy_len &= ~(STORE_MAX_PIECES - 1);
+       }
+      str_copy_len = MIN (str_copy_len, exp_len);
+      if (!can_store_by_pieces (str_copy_len, builtin_strncpy_read_str,
+                               CONST_CAST(char *, TREE_STRING_POINTER (str)),
+                               MEM_ALIGN (target), false))
+       goto normal_expr;
+
+      dest_mem = target;
+
+      dest_mem = store_by_pieces (dest_mem,
+                                 str_copy_len, builtin_strncpy_read_str,
+                                 CONST_CAST(char *, TREE_STRING_POINTER (str)),
+                                 MEM_ALIGN (target), false,
+                                 exp_len > str_copy_len ? 1 : 0);
+      if (exp_len > str_copy_len)
+       clear_storage (adjust_address (dest_mem, BLKmode, 0),
+                      GEN_INT (exp_len - str_copy_len),
+                      BLOCK_OP_NORMAL);
+      return NULL_RTX;
+    }
   else
     {
       rtx tmp_target;
@@ -5852,7 +5901,15 @@ store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
         operations.  */
       || (bitsize >= 0
          && TREE_CODE (TYPE_SIZE (TREE_TYPE (exp))) == INTEGER_CST
-         && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0))
+         && compare_tree_int (TYPE_SIZE (TREE_TYPE (exp)), bitsize) != 0)
+      /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
+         decl we must use bitfield operations.  */
+      || (bitsize >= 0
+         && TREE_CODE (exp) == MEM_REF
+         && TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR
+         && DECL_P (TREE_OPERAND (TREE_OPERAND (exp, 0), 0))
+         && !TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (exp, 0),0 ))
+         && DECL_MODE (TREE_OPERAND (TREE_OPERAND (exp, 0), 0)) != BLKmode))
     {
       rtx temp;
       gimple nop_def;
@@ -6113,6 +6170,24 @@ get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
            goto done;
          break;
 
+       case MEM_REF:
+         /* Hand back the decl for MEM[&decl, off].  */
+         if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
+           {
+             tree off = TREE_OPERAND (exp, 1);
+             if (!integer_zerop (off))
+               {
+                 double_int boff, coff = mem_ref_offset (exp);
+                 boff = double_int_lshift (coff,
+                                           BITS_PER_UNIT == 8
+                                           ? 3 : exact_log2 (BITS_PER_UNIT),
+                                           HOST_BITS_PER_DOUBLE_INT, true);
+                 bit_offset = double_int_add (bit_offset, boff);
+               }
+             exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
+           }
+         goto done;
+
        default:
          goto done;
        }
@@ -6873,6 +6948,16 @@ expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
       /* This case will happen via recursion for &a->b.  */
       return expand_expr (TREE_OPERAND (exp, 0), target, tmode, modifier);
 
+    case MEM_REF:
+      {
+       tree tem = TREE_OPERAND (exp, 0);
+       if (!integer_zerop (TREE_OPERAND (exp, 1)))
+         tem = build2 (POINTER_PLUS_EXPR, TREE_TYPE (TREE_OPERAND (exp, 1)),
+                       tem,
+                       double_int_to_tree (sizetype, mem_ref_offset (exp)));
+       return expand_expr (tem, target, tmode, modifier);
+      }
+
     case CONST_DECL:
       /* Expand the initializer like constants above.  */
       return XEXP (expand_expr_constant (DECL_INITIAL (exp), 0, modifier), 0);
@@ -8684,6 +8769,71 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
       }
       return temp;
 
+    case MEM_REF:
+      {
+       addr_space_t as
+         = TYPE_ADDR_SPACE (TREE_TYPE (TREE_TYPE (TREE_OPERAND (exp, 1))));
+       enum machine_mode address_mode;
+       tree base = TREE_OPERAND (exp, 0);
+       /* Handle expansion of non-aliased memory with non-BLKmode.  That
+          might end up in a register.  */
+       if (TREE_CODE (base) == ADDR_EXPR)
+         {
+           HOST_WIDE_INT offset = mem_ref_offset (exp).low;
+           tree bit_offset;
+           base = TREE_OPERAND (base, 0);
+           if (!DECL_P (base))
+             {
+               HOST_WIDE_INT off;
+               base = get_addr_base_and_unit_offset (base, &off);
+               gcc_assert (base);
+               offset += off;
+             }
+           /* If we are expanding a MEM_REF of a non-BLKmode non-addressable
+              decl we must use bitfield operations.  */
+           if (DECL_P (base)
+               && !TREE_ADDRESSABLE (base)
+               && DECL_MODE (base) != BLKmode
+               && DECL_RTL_SET_P (base)
+               && !MEM_P (DECL_RTL (base)))
+             {
+               tree bftype;
+               if (offset == 0
+                   && host_integerp (TYPE_SIZE (TREE_TYPE (exp)), 1)
+                   && (GET_MODE_BITSIZE (DECL_MODE (base))
+                       == TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp)))))
+                 return expand_expr (build1 (VIEW_CONVERT_EXPR,
+                                             TREE_TYPE (exp), base),
+                                     target, tmode, modifier);
+               bit_offset = bitsize_int (offset * BITS_PER_UNIT);
+               bftype = TREE_TYPE (base);
+               if (TYPE_MODE (TREE_TYPE (exp)) != BLKmode)
+                 bftype = TREE_TYPE (exp);
+               return expand_expr (build3 (BIT_FIELD_REF, bftype,
+                                           base,
+                                           TYPE_SIZE (TREE_TYPE (exp)),
+                                           bit_offset),
+                                   target, tmode, modifier);
+             }
+         }
+       address_mode = targetm.addr_space.address_mode (as);
+       op0 = expand_expr (TREE_OPERAND (exp, 0), NULL_RTX, address_mode,
+                          EXPAND_NORMAL);
+       if (!integer_zerop (TREE_OPERAND (exp, 1)))
+         {
+           rtx off;
+           off = immed_double_int_const (mem_ref_offset (exp), address_mode);
+           op0 = simplify_gen_binary (PLUS, address_mode, op0, off);
+         }
+       op0 = memory_address_addr_space (mode, op0, as);
+       temp = gen_rtx_MEM (mode, op0);
+       set_mem_attributes (temp, exp, 0);
+       set_mem_addr_space (temp, as);
+       if (TREE_THIS_VOLATILE (exp))
+         MEM_VOLATILE_P (temp) = 1;
+       return temp;
+      }
+
     case ARRAY_REF:
 
       {
index e2b30f90e8d1e7be03226e88f606fc171e8afe2f..1e3bae65405119467244975df5bcb7ae67ece765 100644 (file)
@@ -60,6 +60,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "langhooks.h"
 #include "md5.h"
 #include "gimple.h"
+#include "tree-flow.h"
 
 /* Nonzero if we are folding constants inside an initializer; zero
    otherwise.  */
@@ -2591,6 +2592,17 @@ operand_equal_p (const_tree arg0, const_tree arg1, unsigned int flags)
        case IMAGPART_EXPR:
          return OP_SAME (0);
 
+       case MEM_REF:
+         /* Require equal access sizes.  We can have incomplete types
+            for array references of variable-sized arrays from the
+            Fortran frontend though.  */
+         return ((TYPE_SIZE (TREE_TYPE (arg0)) == TYPE_SIZE (TREE_TYPE (arg1))
+                  || (TYPE_SIZE (TREE_TYPE (arg0))
+                      && TYPE_SIZE (TREE_TYPE (arg1))
+                      && operand_equal_p (TYPE_SIZE (TREE_TYPE (arg0)),
+                                          TYPE_SIZE (TREE_TYPE (arg1)), flags)))
+                 && OP_SAME (0) && OP_SAME (1));
+
        case ARRAY_REF:
        case ARRAY_RANGE_REF:
          /* Operands 2 and 3 may be null.
@@ -7596,6 +7608,9 @@ build_fold_addr_expr_with_type_loc (location_t loc, tree t, tree ptrtype)
          SET_EXPR_LOCATION (t, loc);
        }
     }
+  else if (TREE_CODE (t) == MEM_REF
+      && integer_zerop (TREE_OPERAND (t, 1)))
+    return TREE_OPERAND (t, 0);
   else if (TREE_CODE (t) == VIEW_CONVERT_EXPR)
     {
       t = build_fold_addr_expr_loc (loc, TREE_OPERAND (t, 0));
@@ -8014,6 +8029,9 @@ fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
       if (TREE_CODE (op0) == VIEW_CONVERT_EXPR)
        return fold_build1_loc (loc, VIEW_CONVERT_EXPR,
                            type, TREE_OPERAND (op0, 0));
+      if (TREE_CODE (op0) == MEM_REF)
+       return fold_build2_loc (loc, MEM_REF, type,
+                               TREE_OPERAND (op0, 0), TREE_OPERAND (op0, 1));
 
       /* For integral conversions with the same precision or pointer
         conversions use a NOP_EXPR instead.  */
@@ -8665,6 +8683,11 @@ fold_comparison (location_t loc, enum tree_code code, tree type,
       else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
        {
          base0 = TREE_OPERAND (arg0, 0);
+         if (TREE_CODE (base0) == ADDR_EXPR)
+           {
+             base0 = TREE_OPERAND (base0, 0);
+             indirect_base0 = true;
+           }
          offset0 = TREE_OPERAND (arg0, 1);
        }
 
@@ -8682,6 +8705,11 @@ fold_comparison (location_t loc, enum tree_code code, tree type,
       else if (TREE_CODE (arg1) == POINTER_PLUS_EXPR)
        {
          base1 = TREE_OPERAND (arg1, 0);
+         if (TREE_CODE (base1) == ADDR_EXPR)
+           {
+             base1 = TREE_OPERAND (base1, 0);
+             indirect_base1 = true;
+           }
          offset1 = TREE_OPERAND (arg1, 1);
        }
 
@@ -9524,6 +9552,36 @@ fold_binary_loc (location_t loc,
 
   switch (code)
     {
+    case MEM_REF:
+      /* MEM[&MEM[p, CST1], CST2] -> MEM[p, CST1 + CST2].  */
+      if (TREE_CODE (arg0) == ADDR_EXPR
+         && TREE_CODE (TREE_OPERAND (arg0, 0)) == MEM_REF)
+       {
+         tree iref = TREE_OPERAND (arg0, 0);
+         return fold_build2 (MEM_REF, type,
+                             TREE_OPERAND (iref, 0),
+                             int_const_binop (PLUS_EXPR, arg1,
+                                              TREE_OPERAND (iref, 1), 0));
+       }
+
+      /* MEM[&a.b, CST2] -> MEM[&a, offsetof (a, b) + CST2].  */
+      if (TREE_CODE (arg0) == ADDR_EXPR
+         && handled_component_p (TREE_OPERAND (arg0, 0)))
+       {
+         tree base;
+         HOST_WIDE_INT coffset;
+         base = get_addr_base_and_unit_offset (TREE_OPERAND (arg0, 0),
+                                               &coffset);
+         if (!base)
+           return NULL_TREE;
+         return fold_build2 (MEM_REF, type,
+                             build_fold_addr_expr (base),
+                             int_const_binop (PLUS_EXPR, arg1,
+                                              size_int (coffset), 0));
+       }
+
+      return NULL_TREE;
+
     case POINTER_PLUS_EXPR:
       /* 0 +p index -> (type)index */
       if (integer_zerop (arg0))
index a1fc0201473ee2d4d2dd6f6b34d121d31703fecf..6af651140006574818fca407c5e87bf5dff2a9e9 100644 (file)
@@ -82,7 +82,7 @@ get_symbol_constant_value (tree sym)
 bool
 may_propagate_address_into_dereference (tree addr, tree deref)
 {
-  gcc_assert (INDIRECT_REF_P (deref)
+  gcc_assert (TREE_CODE (deref) == MEM_REF
              && TREE_CODE (addr) == ADDR_EXPR);
 
   /* Don't propagate if ADDR's operand has incomplete type.  */
@@ -108,15 +108,12 @@ may_propagate_address_into_dereference (tree addr, tree deref)
 
 
 /* A subroutine of fold_stmt.  Attempts to fold *(A+O) to A[X].
-   BASE is an array type.  OFFSET is a byte displacement.  ORIG_TYPE
-   is the desired result type.
+   BASE is an array type.  OFFSET is a byte displacement.
 
    LOC is the location of the original expression.  */
 
 static tree
-maybe_fold_offset_to_array_ref (location_t loc, tree base, tree offset,
-                               tree orig_type,
-                               bool allow_negative_idx)
+maybe_fold_offset_to_array_ref (location_t loc, tree base, tree offset)
 {
   tree min_idx, idx, idx_type, elt_offset = integer_zero_node;
   tree array_type, elt_type, elt_size;
@@ -145,8 +142,6 @@ maybe_fold_offset_to_array_ref (location_t loc, tree base, tree offset,
   if (TREE_CODE (array_type) != ARRAY_TYPE)
     return NULL_TREE;
   elt_type = TREE_TYPE (array_type);
-  if (!useless_type_conversion_p (orig_type, elt_type))
-    return NULL_TREE;
 
   /* Use signed size type for intermediate computation on the index.  */
   idx_type = ssizetype;
@@ -219,34 +214,22 @@ maybe_fold_offset_to_array_ref (location_t loc, tree base, tree offset,
        char *(c[4]);
        c[3][2];
      should not be simplified into (*c)[14] or tree-vrp will
-     give false warnings.  The same is true for
-       struct A { long x; char d[0]; } *a;
-       (char *)a - 4;
-     which should be not folded to &a->d[-8].  */
-  if (domain_type
-      && TYPE_MAX_VALUE (domain_type)
-      && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST)
+     give false warnings.
+     This is only an issue for multi-dimensional arrays.  */
+  if (TREE_CODE (elt_type) == ARRAY_TYPE
+      && domain_type)
     {
-      tree up_bound = TYPE_MAX_VALUE (domain_type);
-
-      if (tree_int_cst_lt (up_bound, idx)
-         /* Accesses after the end of arrays of size 0 (gcc
-            extension) and 1 are likely intentional ("struct
-            hack").  */
-         && compare_tree_int (up_bound, 1) > 0)
+      if (TYPE_MAX_VALUE (domain_type)
+         && TREE_CODE (TYPE_MAX_VALUE (domain_type)) == INTEGER_CST
+         && tree_int_cst_lt (TYPE_MAX_VALUE (domain_type), idx))
        return NULL_TREE;
-    }
-  if (domain_type
-      && TYPE_MIN_VALUE (domain_type))
-    {
-      if (!allow_negative_idx
-         && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
-         && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
+      else if (TYPE_MIN_VALUE (domain_type)
+              && TREE_CODE (TYPE_MIN_VALUE (domain_type)) == INTEGER_CST
+              && tree_int_cst_lt (idx, TYPE_MIN_VALUE (domain_type)))
+       return NULL_TREE;
+      else if (compare_tree_int (idx, 0) < 0)
        return NULL_TREE;
     }
-  else if (!allow_negative_idx
-          && compare_tree_int (idx, 0) < 0)
-    return NULL_TREE;
 
   {
     tree t = build4 (ARRAY_REF, elt_type, base, idx, NULL_TREE, NULL_TREE);
@@ -256,340 +239,55 @@ maybe_fold_offset_to_array_ref (location_t loc, tree base, tree offset,
 }
 
 
-/* Attempt to fold *(S+O) to S.X.
-   BASE is a record type.  OFFSET is a byte displacement.  ORIG_TYPE
-   is the desired result type.
-
-   LOC is the location of the original expression.  */
-
-static tree
-maybe_fold_offset_to_component_ref (location_t loc, tree record_type,
-                                   tree base, tree offset, tree orig_type)
-{
-  tree f, t, field_type, tail_array_field, field_offset;
-  tree ret;
-  tree new_base;
-
-  if (TREE_CODE (record_type) != RECORD_TYPE
-      && TREE_CODE (record_type) != UNION_TYPE
-      && TREE_CODE (record_type) != QUAL_UNION_TYPE)
-    return NULL_TREE;
-
-  /* Short-circuit silly cases.  */
-  if (useless_type_conversion_p (record_type, orig_type))
-    return NULL_TREE;
-
-  tail_array_field = NULL_TREE;
-  for (f = TYPE_FIELDS (record_type); f ; f = TREE_CHAIN (f))
-    {
-      int cmp;
-
-      if (TREE_CODE (f) != FIELD_DECL)
-       continue;
-      if (DECL_BIT_FIELD (f))
-       continue;
-
-      if (!DECL_FIELD_OFFSET (f))
-       continue;
-      field_offset = byte_position (f);
-      if (TREE_CODE (field_offset) != INTEGER_CST)
-       continue;
-
-      /* ??? Java creates "interesting" fields for representing base classes.
-        They have no name, and have no context.  With no context, we get into
-        trouble with nonoverlapping_component_refs_p.  Skip them.  */
-      if (!DECL_FIELD_CONTEXT (f))
-       continue;
-
-      /* The previous array field isn't at the end.  */
-      tail_array_field = NULL_TREE;
-
-      /* Check to see if this offset overlaps with the field.  */
-      cmp = tree_int_cst_compare (field_offset, offset);
-      if (cmp > 0)
-       continue;
-
-      field_type = TREE_TYPE (f);
-
-      /* Here we exactly match the offset being checked.  If the types match,
-        then we can return that field.  */
-      if (cmp == 0
-         && useless_type_conversion_p (orig_type, field_type))
-       {
-         t = fold_build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
-         return t;
-       }
-
-      /* Don't care about offsets into the middle of scalars.  */
-      if (!AGGREGATE_TYPE_P (field_type))
-       continue;
-
-      /* Check for array at the end of the struct.  This is often
-        used as for flexible array members.  We should be able to
-        turn this into an array access anyway.  */
-      if (TREE_CODE (field_type) == ARRAY_TYPE)
-       tail_array_field = f;
-
-      /* Check the end of the field against the offset.  */
-      if (!DECL_SIZE_UNIT (f)
-         || TREE_CODE (DECL_SIZE_UNIT (f)) != INTEGER_CST)
-       continue;
-      t = int_const_binop (MINUS_EXPR, offset, field_offset, 1);
-      if (!tree_int_cst_lt (t, DECL_SIZE_UNIT (f)))
-       continue;
-
-      /* If we matched, then set offset to the displacement into
-        this field.  */
-      new_base = fold_build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
-      SET_EXPR_LOCATION (new_base, loc);
-
-      /* Recurse to possibly find the match.  */
-      ret = maybe_fold_offset_to_array_ref (loc, new_base, t, orig_type,
-                                           f == TYPE_FIELDS (record_type));
-      if (ret)
-       return ret;
-      ret = maybe_fold_offset_to_component_ref (loc, field_type, new_base, t,
-                                               orig_type);
-      if (ret)
-       return ret;
-    }
-
-  if (!tail_array_field)
-    return NULL_TREE;
-
-  f = tail_array_field;
-  field_type = TREE_TYPE (f);
-  offset = int_const_binop (MINUS_EXPR, offset, byte_position (f), 1);
-
-  /* If we get here, we've got an aggregate field, and a possibly
-     nonzero offset into them.  Recurse and hope for a valid match.  */
-  base = fold_build3 (COMPONENT_REF, field_type, base, f, NULL_TREE);
-  SET_EXPR_LOCATION (base, loc);
-
-  t = maybe_fold_offset_to_array_ref (loc, base, offset, orig_type,
-                                     f == TYPE_FIELDS (record_type));
-  if (t)
-    return t;
-  return maybe_fold_offset_to_component_ref (loc, field_type, base, offset,
-                                            orig_type);
-}
-
-/* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE->field_of_orig_type
-   or BASE[index] or by combination of those.
-
+/* Attempt to express (ORIG_TYPE)BASE+OFFSET as BASE[index].
    LOC is the location of original expression.
 
-   Before attempting the conversion strip off existing ADDR_EXPRs and
-   handled component refs.  */
+   Before attempting the conversion strip off existing ADDR_EXPRs.  */
 
 tree
 maybe_fold_offset_to_reference (location_t loc, tree base, tree offset,
                                tree orig_type)
 {
   tree ret;
-  tree type;
 
   STRIP_NOPS (base);
   if (TREE_CODE (base) != ADDR_EXPR)
     return NULL_TREE;
 
   base = TREE_OPERAND (base, 0);
-
-  /* Handle case where existing COMPONENT_REF pick e.g. wrong field of union,
-     so it needs to be removed and new COMPONENT_REF constructed.
-     The wrong COMPONENT_REF are often constructed by folding the
-     (type *)&object within the expression (type *)&object+offset  */
-  if (handled_component_p (base))
-    {
-      HOST_WIDE_INT sub_offset, size, maxsize;
-      tree newbase;
-      newbase = get_ref_base_and_extent (base, &sub_offset,
-                                        &size, &maxsize);
-      gcc_assert (newbase);
-      if (size == maxsize
-         && size != -1
-         && !(sub_offset & (BITS_PER_UNIT - 1)))
-       {
-         base = newbase;
-         if (sub_offset)
-           offset = int_const_binop (PLUS_EXPR, offset,
-                                     build_int_cst (TREE_TYPE (offset),
-                                                    sub_offset / BITS_PER_UNIT), 1);
-       }
-    }
-  if (useless_type_conversion_p (orig_type, TREE_TYPE (base))
+  if (types_compatible_p (orig_type, TREE_TYPE (base))
       && integer_zerop (offset))
     return base;
-  type = TREE_TYPE (base);
 
-  ret = maybe_fold_offset_to_component_ref (loc, type, base, offset, orig_type);
-  if (!ret)
-    ret = maybe_fold_offset_to_array_ref (loc, base, offset, orig_type, true);
-
-  return ret;
+  ret = maybe_fold_offset_to_array_ref (loc, base, offset);
+  if (ret && types_compatible_p (orig_type, TREE_TYPE (ret)))
+    return ret;
+  return NULL_TREE;
 }
 
-/* Attempt to express (ORIG_TYPE)&BASE+OFFSET as &BASE->field_of_orig_type
-   or &BASE[index] or by combination of those.
-
-   LOC is the location of the original expression.
-
-   Before attempting the conversion strip off existing component refs.  */
+/* Attempt to express (ORIG_TYPE)ADDR+OFFSET as (*ADDR)[index].
+   LOC is the location of the original expression.  */
 
 tree
 maybe_fold_offset_to_address (location_t loc, tree addr, tree offset,
                              tree orig_type)
 {
-  tree t;
+  tree base, ret;
 
-  gcc_assert (POINTER_TYPE_P (TREE_TYPE (addr))
-             && POINTER_TYPE_P (orig_type));
-
-  t = maybe_fold_offset_to_reference (loc, addr, offset,
-                                     TREE_TYPE (orig_type));
-  if (t != NULL_TREE)
-    {
-      tree orig = addr;
-      tree ptr_type;
-
-      /* For __builtin_object_size to function correctly we need to
-         make sure not to fold address arithmetic so that we change
-        reference from one array to another.  This would happen for
-        example for
-
-          struct X { char s1[10]; char s2[10] } s;
-          char *foo (void) { return &s.s2[-4]; }
-
-        where we need to avoid generating &s.s1[6].  As the C and
-        C++ frontends create different initial trees
-        (char *) &s.s1 + -4  vs.  &s.s1[-4]  we have to do some
-        sophisticated comparisons here.  Note that checking for the
-        condition after the fact is easier than trying to avoid doing
-        the folding.  */
-      STRIP_NOPS (orig);
-      if (TREE_CODE (orig) == ADDR_EXPR)
-       orig = TREE_OPERAND (orig, 0);
-      if ((TREE_CODE (orig) == ARRAY_REF
-          || (TREE_CODE (orig) == COMPONENT_REF
-              && TREE_CODE (TREE_TYPE (TREE_OPERAND (orig, 1))) == ARRAY_TYPE))
-         && (TREE_CODE (t) == ARRAY_REF
-             || TREE_CODE (t) == COMPONENT_REF)
-         && !operand_equal_p (TREE_CODE (orig) == ARRAY_REF
-                              ? TREE_OPERAND (orig, 0) : orig,
-                              TREE_CODE (t) == ARRAY_REF
-                              ? TREE_OPERAND (t, 0) : t, 0))
-       return NULL_TREE;
-
-      ptr_type = build_pointer_type (TREE_TYPE (t));
-      if (!useless_type_conversion_p (orig_type, ptr_type))
-       return NULL_TREE;
-      return build_fold_addr_expr_with_type_loc (loc, t, ptr_type);
-    }
-
-  return NULL_TREE;
-}
-
-/* A subroutine of fold_stmt.  Attempt to simplify *(BASE+OFFSET).
-   Return the simplified expression, or NULL if nothing could be done.  */
-
-static tree
-maybe_fold_stmt_indirect (tree expr, tree base, tree offset)
-{
-  tree t;
-  bool volatile_p = TREE_THIS_VOLATILE (expr);
-  location_t loc = EXPR_LOCATION (expr);
-
-  /* We may well have constructed a double-nested PLUS_EXPR via multiple
-     substitutions.  Fold that down to one.  Remove NON_LVALUE_EXPRs that
-     are sometimes added.  */
-  base = fold (base);
-  STRIP_TYPE_NOPS (base);
-  TREE_OPERAND (expr, 0) = base;
-
-  /* One possibility is that the address reduces to a string constant.  */
-  t = fold_read_from_constant_string (expr);
-  if (t)
-    return t;
-
-  /* Add in any offset from a POINTER_PLUS_EXPR.  */
-  if (TREE_CODE (base) == POINTER_PLUS_EXPR)
+  STRIP_NOPS (addr);
+  if (TREE_CODE (addr) != ADDR_EXPR)
+    return NULL_TREE;
+  base = TREE_OPERAND (addr, 0);
+  ret = maybe_fold_offset_to_array_ref (loc, base, offset);
+  if (ret)
     {
-      tree offset2;
-
-      offset2 = TREE_OPERAND (base, 1);
-      if (TREE_CODE (offset2) != INTEGER_CST)
+      ret = build_fold_addr_expr (ret);
+      if (!useless_type_conversion_p (orig_type, TREE_TYPE (ret)))
        return NULL_TREE;
-      base = TREE_OPERAND (base, 0);
-
-      offset = fold_convert (sizetype,
-                            int_const_binop (PLUS_EXPR, offset, offset2, 1));
+      SET_EXPR_LOCATION (ret, loc);
     }
 
-  if (TREE_CODE (base) == ADDR_EXPR)
-    {
-      tree base_addr = base;
-
-      /* Strip the ADDR_EXPR.  */
-      base = TREE_OPERAND (base, 0);
-
-      /* Fold away CONST_DECL to its value, if the type is scalar.  */
-      if (TREE_CODE (base) == CONST_DECL
-         && is_gimple_min_invariant (DECL_INITIAL (base)))
-       return DECL_INITIAL (base);
-
-      /* If there is no offset involved simply return the folded base.  */
-      if (integer_zerop (offset))
-       return base;
-
-      /* Try folding *(&B+O) to B.X.  */
-      t = maybe_fold_offset_to_reference (loc, base_addr, offset,
-                                         TREE_TYPE (expr));
-      if (t)
-       {
-         /* Preserve volatileness of the original expression.
-            We can end up with a plain decl here which is shared
-            and we shouldn't mess with its flags.  */
-         if (!SSA_VAR_P (t))
-           TREE_THIS_VOLATILE (t) = volatile_p;
-         return t;
-       }
-    }
-  else
-    {
-      /* We can get here for out-of-range string constant accesses,
-        such as "_"[3].  Bail out of the entire substitution search
-        and arrange for the entire statement to be replaced by a
-        call to __builtin_trap.  In all likelihood this will all be
-        constant-folded away, but in the meantime we can't leave with
-        something that get_expr_operands can't understand.  */
-
-      t = base;
-      STRIP_NOPS (t);
-      if (TREE_CODE (t) == ADDR_EXPR
-         && TREE_CODE (TREE_OPERAND (t, 0)) == STRING_CST)
-       {
-         /* FIXME: Except that this causes problems elsewhere with dead
-            code not being deleted, and we die in the rtl expanders
-            because we failed to remove some ssa_name.  In the meantime,
-            just return zero.  */
-         /* FIXME2: This condition should be signaled by
-            fold_read_from_constant_string directly, rather than
-            re-checking for it here.  */
-         return integer_zero_node;
-       }
-
-      /* Try folding *(B+O) to B->X.  Still an improvement.  */
-      if (POINTER_TYPE_P (TREE_TYPE (base)))
-       {
-          t = maybe_fold_offset_to_reference (loc, base, offset,
-                                             TREE_TYPE (expr));
-         if (t)
-           return t;
-       }
-    }
-
-  /* Otherwise we had an offset that we could not simplify.  */
-  return NULL_TREE;
+  return ret;
 }
 
 
@@ -622,18 +320,17 @@ maybe_fold_stmt_addition (location_t loc, tree res_type, tree op0, tree op1)
       /* Or op0 should now be A[0] and the non-constant offset defined
         via a multiplication by the array element size.  */
       if (TREE_CODE (op0) == ARRAY_REF
-         && integer_zerop (TREE_OPERAND (op0, 1))
-         && TREE_CODE (op1) == SSA_NAME
-         && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (op0)), 1))
-       {
-         gimple offset_def = SSA_NAME_DEF_STMT (op1);
-         if (!is_gimple_assign (offset_def))
-           return NULL_TREE;
-
          /* As we will end up creating a variable index array access
             in the outermost array dimension make sure there isn't
             a more inner array that the index could overflow to.  */
-         if (TREE_CODE (TREE_OPERAND (op0, 0)) == ARRAY_REF)
+         && TREE_CODE (TREE_OPERAND (op0, 0)) != ARRAY_REF
+         && integer_zerop (TREE_OPERAND (op0, 1))
+         && TREE_CODE (op1) == SSA_NAME)
+       {
+         gimple offset_def = SSA_NAME_DEF_STMT (op1);
+         tree elsz = TYPE_SIZE_UNIT (TREE_TYPE (op0));
+         if (!host_integerp (elsz, 1)
+             || !is_gimple_assign (offset_def))
            return NULL_TREE;
 
          /* Do not build array references of something that we can't
@@ -644,15 +341,14 @@ maybe_fold_stmt_addition (location_t loc, tree res_type, tree op0, tree op1)
 
          if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
              && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
-             && tree_int_cst_equal (gimple_assign_rhs2 (offset_def),
-                                    TYPE_SIZE_UNIT (TREE_TYPE (op0))))
+             && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), elsz))
            return build_fold_addr_expr
                          (build4 (ARRAY_REF, TREE_TYPE (op0),
                                   TREE_OPERAND (op0, 0),
                                   gimple_assign_rhs1 (offset_def),
                                   TREE_OPERAND (op0, 2),
                                   TREE_OPERAND (op0, 3)));
-         else if (integer_onep (TYPE_SIZE_UNIT (TREE_TYPE (op0)))
+         else if (integer_onep (elsz)
                   && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
            return build_fold_addr_expr
                          (build4 (ARRAY_REF, TREE_TYPE (op0),
@@ -661,6 +357,38 @@ maybe_fold_stmt_addition (location_t loc, tree res_type, tree op0, tree op1)
                                   TREE_OPERAND (op0, 2),
                                   TREE_OPERAND (op0, 3)));
        }
+      else if (TREE_CODE (TREE_TYPE (op0)) == ARRAY_TYPE
+              /* Ditto.  */
+              && TREE_CODE (TREE_TYPE (TREE_TYPE (op0))) != ARRAY_TYPE
+              && TREE_CODE (op1) == SSA_NAME)
+       {
+         gimple offset_def = SSA_NAME_DEF_STMT (op1);
+         tree elsz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (op0)));
+         if (!host_integerp (elsz, 1)
+             || !is_gimple_assign (offset_def))
+           return NULL_TREE;
+
+         /* Do not build array references of something that we can't
+            see the true number of array dimensions for.  */
+         if (!DECL_P (op0)
+             && !handled_component_p (op0))
+           return NULL_TREE;
+
+         if (gimple_assign_rhs_code (offset_def) == MULT_EXPR
+             && TREE_CODE (gimple_assign_rhs2 (offset_def)) == INTEGER_CST
+             && tree_int_cst_equal (gimple_assign_rhs2 (offset_def), elsz))
+           return build_fold_addr_expr
+                         (build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (op0)),
+                                  op0, gimple_assign_rhs1 (offset_def),
+                                  integer_zero_node, NULL_TREE));
+         else if (integer_onep (elsz)
+                  && gimple_assign_rhs_code (offset_def) != MULT_EXPR)
+           return build_fold_addr_expr
+                         (build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (op0)),
+                                  op0, op1,
+                                  integer_zero_node, NULL_TREE));
+       }
+
       return NULL_TREE;
     }
 
@@ -715,13 +443,12 @@ maybe_fold_stmt_addition (location_t loc, tree res_type, tree op0, tree op1)
     ptd_type = TREE_TYPE (TREE_TYPE (op0));
 
   /* At which point we can try some of the same things as for indirects.  */
-  t = maybe_fold_offset_to_array_ref (loc, op0, op1, ptd_type, true);
-  if (!t)
-    t = maybe_fold_offset_to_component_ref (loc, TREE_TYPE (op0), op0, op1,
-                                           ptd_type);
+  t = maybe_fold_offset_to_array_ref (loc, op0, op1);
   if (t)
     {
-      t = build1 (ADDR_EXPR, res_type, t);
+      t = build_fold_addr_expr (t);
+      if (!useless_type_conversion_p (res_type, TREE_TYPE (t)))
+       return NULL_TREE;
       SET_EXPR_LOCATION (t, loc);
     }
 
@@ -759,19 +486,39 @@ maybe_fold_reference (tree expr, bool is_lhs)
   while (handled_component_p (*t))
     t = &TREE_OPERAND (*t, 0);
 
-  if (TREE_CODE (*t) == INDIRECT_REF)
+  /* Fold back MEM_REFs to reference trees.  */
+  if (TREE_CODE (*t) == MEM_REF
+      && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
+      && integer_zerop (TREE_OPERAND (*t, 1))
+      && (TREE_THIS_VOLATILE (*t)
+         == TREE_THIS_VOLATILE (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
+      && !TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (*t, 1)))
+      && (TYPE_MAIN_VARIANT (TREE_TYPE (*t))
+         == TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (TREE_OPERAND (*t, 1)))))
+      /* We have to look out here to not drop a required conversion
+        from the rhs to the lhs if is_lhs, but we don't have the
+        rhs here to verify that.  Thus require strict type
+        compatibility.  */
+      && types_compatible_p (TREE_TYPE (*t),
+                            TREE_TYPE (TREE_OPERAND
+                                         (TREE_OPERAND (*t, 0), 0))))
     {
-      tree tem = maybe_fold_stmt_indirect (*t, TREE_OPERAND (*t, 0),
-                                          integer_zero_node);
-      /* Avoid folding *"abc" = 5 into 'a' = 5.  */
-      if (is_lhs && tem && CONSTANT_CLASS_P (tem))
-       tem = NULL_TREE;
-      if (!tem
-         && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR)
-       /* If we had a good reason for propagating the address here,
-          make sure we end up with valid gimple.  See PR34989.  */
-       tem = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
-
+      tree tem;
+      *t = TREE_OPERAND (TREE_OPERAND (*t, 0), 0);
+      tem = maybe_fold_reference (expr, is_lhs);
+      if (tem)
+       return tem;
+      return expr;
+    }
+  /* Canonicalize MEM_REFs invariant address operand.  */
+  else if (TREE_CODE (*t) == MEM_REF
+          && TREE_CODE (TREE_OPERAND (*t, 0)) == ADDR_EXPR
+          && !DECL_P (TREE_OPERAND (TREE_OPERAND (*t, 0), 0))
+          && !CONSTANT_CLASS_P (TREE_OPERAND (TREE_OPERAND (*t, 0), 0)))
+    {
+      tree tem = fold_binary (MEM_REF, TREE_TYPE (*t),
+                             TREE_OPERAND (*t, 0),
+                             TREE_OPERAND (*t, 1));
       if (tem)
        {
          *t = tem;
@@ -863,10 +610,18 @@ fold_gimple_assign (gimple_stmt_iterator *si)
 
        else if (TREE_CODE (rhs) == ADDR_EXPR)
          {
-           tree tem = maybe_fold_reference (TREE_OPERAND (rhs, 0), true);
-           if (tem)
+           tree ref = TREE_OPERAND (rhs, 0);
+           tree tem = maybe_fold_reference (ref, true);
+           if (tem
+               && TREE_CODE (tem) == MEM_REF
+               && integer_zerop (TREE_OPERAND (tem, 1)))
+             result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (tem, 0));
+           else if (tem)
              result = fold_convert (TREE_TYPE (rhs),
                                     build_fold_addr_expr_loc (loc, tem));
+           else if (TREE_CODE (ref) == MEM_REF
+                    && integer_zerop (TREE_OPERAND (ref, 1)))
+             result = fold_convert (TREE_TYPE (rhs), TREE_OPERAND (ref, 0));
          }
 
        else if (TREE_CODE (rhs) == CONSTRUCTOR
@@ -1580,7 +1335,7 @@ gimple_get_relevant_ref_binfo (tree ref, tree known_binfo)
        return TYPE_BINFO (TREE_TYPE (ref));
       else if (known_binfo
               && (TREE_CODE (ref) == SSA_NAME
-                  || TREE_CODE (ref) == INDIRECT_REF))
+                  || TREE_CODE (ref) == MEM_REF))
        return known_binfo;
       else
        return NULL_TREE;
index 0a5f6fb97d13982a35871a19249e2d9c91582f64..2359e0e954a4a1f2ae73080df77d9529382722e7 100644 (file)
@@ -2595,7 +2595,8 @@ is_gimple_condexpr (tree t)
 bool
 is_gimple_addressable (tree t)
 {
-  return (is_gimple_id (t) || handled_component_p (t) || INDIRECT_REF_P (t));
+  return (is_gimple_id (t) || handled_component_p (t)
+         || TREE_CODE (t) == MEM_REF);
 }
 
 /* Return true if T is a valid gimple constant.  */
@@ -2646,7 +2647,7 @@ is_gimple_address (const_tree t)
       op = TREE_OPERAND (op, 0);
     }
 
-  if (CONSTANT_CLASS_P (op) || INDIRECT_REF_P (op))
+  if (CONSTANT_CLASS_P (op) || TREE_CODE (op) == MEM_REF)
     return true;
 
   switch (TREE_CODE (op))
@@ -2706,8 +2707,18 @@ is_gimple_invariant_address (const_tree t)
     return false;
 
   op = strip_invariant_refs (TREE_OPERAND (t, 0));
+  if (!op)
+    return false;
 
-  return op && (CONSTANT_CLASS_P (op) || decl_address_invariant_p (op));
+  if (TREE_CODE (op) == MEM_REF)
+    {
+      const_tree op0 = TREE_OPERAND (op, 0);
+      return (TREE_CODE (op0) == ADDR_EXPR
+             && (CONSTANT_CLASS_P (TREE_OPERAND (op0, 0))
+                 || decl_address_invariant_p (TREE_OPERAND (op0, 0))));
+    }
+
+  return CONSTANT_CLASS_P (op) || decl_address_invariant_p (op);
 }
 
 /* Return true if T is a gimple invariant address at IPA level
@@ -2924,7 +2935,7 @@ is_gimple_min_lval (tree t)
 {
   if (!(t = CONST_CAST_TREE (strip_invariant_refs (t))))
     return false;
-  return (is_gimple_id (t) || TREE_CODE (t) == INDIRECT_REF);
+  return (is_gimple_id (t) || TREE_CODE (t) == MEM_REF);
 }
 
 /* Return true if T is a typecast operation.  */
@@ -2944,6 +2955,18 @@ is_gimple_call_addr (tree t)
   return (TREE_CODE (t) == OBJ_TYPE_REF || is_gimple_val (t));
 }
 
+/* Return true if T is a valid address operand of a MEM_REF.  */
+
+bool
+is_gimple_mem_ref_addr (tree t)
+{
+  return (is_gimple_reg (t)
+         || TREE_CODE (t) == INTEGER_CST
+         || (TREE_CODE (t) == ADDR_EXPR
+             && (CONSTANT_CLASS_P (TREE_OPERAND (t, 0))
+                 || decl_address_invariant_p (TREE_OPERAND (t, 0)))));
+}
+
 /* If T makes a function call, return the corresponding CALL_EXPR operand.
    Otherwise, return NULL_TREE.  */
 
@@ -2975,10 +2998,15 @@ get_base_address (tree t)
   while (handled_component_p (t))
     t = TREE_OPERAND (t, 0);
 
+  if (TREE_CODE (t) == MEM_REF
+      && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR)
+    t = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
+
   if (SSA_VAR_P (t)
       || TREE_CODE (t) == STRING_CST
       || TREE_CODE (t) == CONSTRUCTOR
-      || INDIRECT_REF_P (t))
+      || INDIRECT_REF_P (t)
+      || TREE_CODE (t) == MEM_REF)
     return t;
   else
     return NULL_TREE;
@@ -4418,7 +4446,7 @@ count_ptr_derefs (tree *tp, int *walk_subtrees, void *data)
       return NULL_TREE;
     }
 
-  if (INDIRECT_REF_P (*tp) && TREE_OPERAND (*tp, 0) == count_p->ptr)
+  if (TREE_CODE (*tp) == MEM_REF && TREE_OPERAND (*tp, 0) == count_p->ptr)
     {
       if (wi_p->is_lhs)
        count_p->num_stores++;
@@ -4491,6 +4519,7 @@ get_base_loadstore (tree op)
     op = TREE_OPERAND (op, 0);
   if (DECL_P (op)
       || INDIRECT_REF_P (op)
+      || TREE_CODE (op) == MEM_REF
       || TREE_CODE (op) == TARGET_MEM_REF)
     return op;
   return NULL_TREE;
index ffc344178d73c963a7cdddf410c22a63cc54c517..7d2289b5dddf0a8ae44307ee7413acf1730f4ef2 100644 (file)
@@ -933,6 +933,8 @@ extern bool is_gimple_ip_invariant (const_tree);
 extern bool is_gimple_val (tree);
 /* Returns true iff T is a GIMPLE asm statement input.  */
 extern bool is_gimple_asm_val (tree);
+/* Returns true iff T is a valid address operand of a MEM_REF.  */
+bool is_gimple_mem_ref_addr (tree);
 /* Returns true iff T is a valid rhs for a MODIFY_EXPR where the LHS is a
    GIMPLE temporary, a renamed user variable, or something else,
    respectively.  */
@@ -2037,7 +2039,18 @@ gimple_call_fndecl (const_gimple gs)
 {
   tree addr = gimple_call_fn (gs);
   if (TREE_CODE (addr) == ADDR_EXPR)
-    return TREE_OPERAND (addr, 0);
+    {
+      tree fndecl = TREE_OPERAND (addr, 0);
+      if (TREE_CODE (fndecl) == MEM_REF)
+       {
+         if (TREE_CODE (TREE_OPERAND (fndecl, 0)) == ADDR_EXPR
+             && integer_zerop (TREE_OPERAND (fndecl, 1)))
+           return TREE_OPERAND (TREE_OPERAND (fndecl, 0), 0);
+         else
+           return NULL_TREE;
+       }
+      return TREE_OPERAND (addr, 0);
+    }
   return NULL_TREE;
 }
 
@@ -4857,8 +4870,8 @@ void gimplify_and_update_call_from_tree (gimple_stmt_iterator *, tree);
 tree gimple_fold_builtin (gimple);
 bool fold_stmt (gimple_stmt_iterator *);
 bool fold_stmt_inplace (gimple);
-tree maybe_fold_offset_to_reference (location_t, tree, tree, tree);
 tree maybe_fold_offset_to_address (location_t, tree, tree, tree);
+tree maybe_fold_offset_to_reference (location_t, tree, tree, tree);
 tree maybe_fold_stmt_addition (location_t, tree, tree, tree);
 tree get_symbol_constant_value (tree);
 bool may_propagate_address_into_dereference (tree, tree);
index e1660a350bafbdd4d9182ec525d430becfa97536..a9c93ac972d0e17e997f69f462fb88904540950f 100644 (file)
@@ -110,10 +110,13 @@ mark_addressable (tree x)
 {
   while (handled_component_p (x))
     x = TREE_OPERAND (x, 0);
+  if (TREE_CODE (x) == MEM_REF
+      && TREE_CODE (TREE_OPERAND (x, 0)) == ADDR_EXPR)
+    x = TREE_OPERAND (TREE_OPERAND (x, 0), 0);
   if (TREE_CODE (x) != VAR_DECL
       && TREE_CODE (x) != PARM_DECL
       && TREE_CODE (x) != RESULT_DECL)
-    return ;
+    return;
   TREE_ADDRESSABLE (x) = 1;
 }
 
@@ -2961,7 +2964,7 @@ gimplify_cond_expr (tree *expr_p, gimple_seq *pre_p, fallback_t fallback)
            = build3 (COND_EXPR, type, TREE_OPERAND (expr, 0), then_, else_);
 
          tmp = create_tmp_var (type, "iftmp");
-         result = build_fold_indirect_ref_loc (loc, tmp);
+         result = build_simple_mem_ref_loc (loc, tmp);
        }
 
       /* Build the new then clause, `tmp = then_;'.  But don't build the
@@ -3185,7 +3188,7 @@ gimplify_modify_expr_to_memcpy (tree *expr_p, tree size, bool want_value,
       gimple_call_set_lhs (gs, t);
       gimplify_seq_add_stmt (seq_p, gs);
 
-      *expr_p = build1 (INDIRECT_REF, TREE_TYPE (to), t);
+      *expr_p = build_simple_mem_ref (t);
       return GS_ALL_DONE;
     }
 
@@ -3269,13 +3272,16 @@ gimplify_init_ctor_preeval_1 (tree *tp, int *walk_subtrees, void *xdata)
   /* If the constructor component is indirect, determine if we have a
      potential overlap with the lhs.  The only bits of information we
      have to go on at this point are addressability and alias sets.  */
-  if (TREE_CODE (t) == INDIRECT_REF
+  if ((INDIRECT_REF_P (t)
+       || TREE_CODE (t) == MEM_REF)
       && (!data->lhs_base_decl || TREE_ADDRESSABLE (data->lhs_base_decl))
       && alias_sets_conflict_p (data->lhs_alias_set, get_alias_set (t)))
     return t;
 
   /* If the constructor component is a call, determine if it can hide a
-     potential overlap with the lhs through an INDIRECT_REF like above.  */
+     potential overlap with the lhs through an INDIRECT_REF like above.
+     ??? Ugh - this is completely broken.  In fact this whole analysis
+     doesn't look conservative.  */
   if (TREE_CODE (t) == CALL_EXPR)
     {
       tree type, fntype = TREE_TYPE (TREE_TYPE (CALL_EXPR_FN (t)));
@@ -4004,7 +4010,7 @@ gimplify_init_constructor (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
 tree
 gimple_fold_indirect_ref (tree t)
 {
-  tree type = TREE_TYPE (TREE_TYPE (t));
+  tree ptype = TREE_TYPE (t), type = TREE_TYPE (ptype);
   tree sub = t;
   tree subtype;
 
@@ -4047,51 +4053,52 @@ gimple_fold_indirect_ref (tree t)
         }
     }
 
-  /* ((foo*)&vectorfoo)[1] => BIT_FIELD_REF<vectorfoo,...> */
+  /* *(p + CST) -> ...  */
   if (TREE_CODE (sub) == POINTER_PLUS_EXPR
       && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
     {
-      tree op00 = TREE_OPERAND (sub, 0);
-      tree op01 = TREE_OPERAND (sub, 1);
-      tree op00type;
+      tree addr = TREE_OPERAND (sub, 0);
+      tree off = TREE_OPERAND (sub, 1);
+      tree addrtype;
+
+      STRIP_NOPS (addr);
+      addrtype = TREE_TYPE (addr);
 
-      STRIP_NOPS (op00);
-      op00type = TREE_TYPE (op00);
-      if (TREE_CODE (op00) == ADDR_EXPR
-         && TREE_CODE (TREE_TYPE (op00type)) == VECTOR_TYPE
-         && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (op00type))))
+      /* ((foo*)&vectorfoo)[1] -> BIT_FIELD_REF<vectorfoo,...> */
+      if (TREE_CODE (addr) == ADDR_EXPR
+         && TREE_CODE (TREE_TYPE (addrtype)) == VECTOR_TYPE
+         && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
        {
-         HOST_WIDE_INT offset = tree_low_cst (op01, 0);
-         tree part_width = TYPE_SIZE (type);
-         unsigned HOST_WIDE_INT part_widthi
-           = tree_low_cst (part_width, 0) / BITS_PER_UNIT;
-         unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
-         tree index = bitsize_int (indexi);
-         if (offset / part_widthi
-             <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (op00type)))
-           return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (op00, 0),
-                               part_width, index);
+          HOST_WIDE_INT offset = tree_low_cst (off, 0);
+          tree part_width = TYPE_SIZE (type);
+          unsigned HOST_WIDE_INT part_widthi
+            = tree_low_cst (part_width, 0) / BITS_PER_UNIT;
+          unsigned HOST_WIDE_INT indexi = offset * BITS_PER_UNIT;
+          tree index = bitsize_int (indexi);
+          if (offset / part_widthi
+              <= TYPE_VECTOR_SUBPARTS (TREE_TYPE (addrtype)))
+            return fold_build3 (BIT_FIELD_REF, type, TREE_OPERAND (addr, 0),
+                                part_width, index);
        }
-    }
 
-  /* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
-  if (TREE_CODE (sub) == POINTER_PLUS_EXPR
-      && TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
-    {
-      tree op00 = TREE_OPERAND (sub, 0);
-      tree op01 = TREE_OPERAND (sub, 1);
-      tree op00type;
+      /* ((foo*)&complexfoo)[1] -> __imag__ complexfoo */
+      if (TREE_CODE (addr) == ADDR_EXPR
+         && TREE_CODE (TREE_TYPE (addrtype)) == COMPLEX_TYPE
+         && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (addrtype))))
+        {
+          tree size = TYPE_SIZE_UNIT (type);
+          if (tree_int_cst_equal (size, off))
+            return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (addr, 0));
+        }
 
-      STRIP_NOPS (op00);
-      op00type = TREE_TYPE (op00);
-      if (TREE_CODE (op00) == ADDR_EXPR
-         && TREE_CODE (TREE_TYPE (op00type)) == COMPLEX_TYPE
-         && useless_type_conversion_p (type, TREE_TYPE (TREE_TYPE (op00type))))
-       {
-         tree size = TYPE_SIZE_UNIT (type);
-         if (tree_int_cst_equal (size, op01))
-           return fold_build1 (IMAGPART_EXPR, type, TREE_OPERAND (op00, 0));
-       }
+      /* *(p + CST) -> MEM_REF <p, CST>.  */
+      if (TREE_CODE (addr) != ADDR_EXPR
+         || DECL_P (TREE_OPERAND (addr, 0)))
+       return fold_build2 (MEM_REF, type,
+                           addr,
+                           build_int_cst_wide (ptype,
+                                               TREE_INT_CST_LOW (off),
+                                               TREE_INT_CST_HIGH (off)));
     }
 
   /* *(foo *)fooarrptr => (*fooarrptr)[0] */
@@ -6558,7 +6565,8 @@ gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
            || gimple_test_f == is_gimple_mem_rhs_or_call
            || gimple_test_f == is_gimple_reg_rhs
            || gimple_test_f == is_gimple_reg_rhs_or_call
-           || gimple_test_f == is_gimple_asm_val)
+           || gimple_test_f == is_gimple_asm_val
+          || gimple_test_f == is_gimple_mem_ref_addr)
     gcc_assert (fallback & fb_rvalue);
   else if (gimple_test_f == is_gimple_min_lval
           || gimple_test_f == is_gimple_lvalue)
@@ -6764,19 +6772,57 @@ gimplify_expr (tree *expr_p, gimple_seq *pre_p, gimple_seq *post_p,
          recalculate_side_effects (*expr_p);
          break;
 
+       case ALIGN_INDIRECT_REF:
+       case MISALIGNED_INDIRECT_REF:
+         /* We can only reach this through re-gimplification from
+            tree optimizers.  */
+         ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
+                              is_gimple_reg, fb_rvalue);
+         recalculate_side_effects (*expr_p);
+         break;
+
        case INDIRECT_REF:
-         *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
-         if (*expr_p != save_expr)
+         {
+           bool volatilep = TREE_THIS_VOLATILE (*expr_p);
+           tree saved_ptr_type = TREE_TYPE (TREE_OPERAND (*expr_p, 0));
+
+           *expr_p = fold_indirect_ref_loc (input_location, *expr_p);
+           if (*expr_p != save_expr)
+             {
+               ret = GS_OK;
+               break;
+             }
+
+           ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
+                                is_gimple_reg, fb_rvalue);
+           recalculate_side_effects (*expr_p);
+
+           *expr_p = fold_build2_loc (input_location, MEM_REF,
+                                      TREE_TYPE (*expr_p),
+                                      TREE_OPERAND (*expr_p, 0),
+                                      build_int_cst (saved_ptr_type, 0));
+           TREE_THIS_VOLATILE (*expr_p) = volatilep;
+           ret = GS_OK;
+           break;
+         }
+
+       /* We arrive here through the various re-gimplification paths.  */
+       case MEM_REF:
+         /* First try re-folding the whole thing.  */
+         tmp = fold_binary (MEM_REF, TREE_TYPE (*expr_p),
+                            TREE_OPERAND (*expr_p, 0),
+                            TREE_OPERAND (*expr_p, 1));
+         if (tmp)
            {
+             *expr_p = tmp;
+             recalculate_side_effects (*expr_p);
              ret = GS_OK;
              break;
            }
-         /* else fall through.  */
-       case ALIGN_INDIRECT_REF:
-       case MISALIGNED_INDIRECT_REF:
          ret = gimplify_expr (&TREE_OPERAND (*expr_p, 0), pre_p, post_p,
-                              is_gimple_reg, fb_rvalue);
+                              is_gimple_mem_ref_addr, fb_rvalue);
          recalculate_side_effects (*expr_p);
+         ret = GS_ALL_DONE;
          break;
 
          /* Constants need not be gimplified.  */
index f9e4cf3cfa736c73b5591f314d99aaeecc7c39a5..691bf6ca771ca130cb7ce1e60882e8d28a87ca0f 100644 (file)
@@ -1832,10 +1832,12 @@ likely_eliminated_by_inlining_p (gimple stmt)
            bool rhs_free = false;
            bool lhs_free = false;
 
-           while (handled_component_p (inner_lhs) || TREE_CODE (inner_lhs) == INDIRECT_REF)
+           while (handled_component_p (inner_lhs)
+                  || TREE_CODE (inner_lhs) == MEM_REF)
              inner_lhs = TREE_OPERAND (inner_lhs, 0);
            while (handled_component_p (inner_rhs)
-                  || TREE_CODE (inner_rhs) == ADDR_EXPR || TREE_CODE (inner_rhs) == INDIRECT_REF)
+                  || TREE_CODE (inner_rhs) == ADDR_EXPR
+                  || TREE_CODE (inner_rhs) == MEM_REF)
              inner_rhs = TREE_OPERAND (inner_rhs, 0);
 
 
@@ -1855,7 +1857,8 @@ likely_eliminated_by_inlining_p (gimple stmt)
                || (TREE_CODE (inner_lhs) == SSA_NAME
                    && TREE_CODE (SSA_NAME_VAR (inner_lhs)) == RESULT_DECL))
              lhs_free = true;
-           if (lhs_free && (is_gimple_reg (rhs) || is_gimple_min_invariant (rhs)))
+           if (lhs_free
+               && (is_gimple_reg (rhs) || is_gimple_min_invariant (rhs)))
              rhs_free = true;
            if (lhs_free && rhs_free)
              return true;
index 997f8ec9612688fa4e074551d32cb1393c81207c..9bd07f039b98870499fe57ba4e50363cf1c72217 100644 (file)
@@ -405,11 +405,12 @@ compute_complex_assign_jump_func (struct ipa_node_params *info,
   if (TREE_CODE (type) != RECORD_TYPE)
     return;
   op1 = get_ref_base_and_extent (op1, &offset, &size, &max_size);
-  if (TREE_CODE (op1) != INDIRECT_REF
+  if (TREE_CODE (op1) != MEM_REF
       /* If this is a varying address, punt.  */
       || max_size == -1
       || max_size != size)
     return;
+  offset += mem_ref_offset (op1).low * BITS_PER_UNIT;
   op1 = TREE_OPERAND (op1, 0);
   if (TREE_CODE (op1) != SSA_NAME
       || !SSA_NAME_IS_DEFAULT_DEF (op1))
@@ -481,11 +482,12 @@ compute_complex_ancestor_jump_func (struct ipa_node_params *info,
   expr = TREE_OPERAND (expr, 0);
   expr = get_ref_base_and_extent (expr, &offset, &size, &max_size);
 
-  if (TREE_CODE (expr) != INDIRECT_REF
+  if (TREE_CODE (expr) != MEM_REF
       /* If this is a varying address, punt.  */
       || max_size == -1
       || max_size != size)
     return;
+  offset += mem_ref_offset (expr).low * BITS_PER_UNIT;
   parm = TREE_OPERAND (expr, 0);
   if (TREE_CODE (parm) != SSA_NAME
       || !SSA_NAME_IS_DEFAULT_DEF (parm))
@@ -1179,7 +1181,7 @@ ipa_analyze_virtual_call_uses (struct cgraph_node *node,
          obj = TREE_OPERAND (obj, 0);
        }
       while (TREE_CODE (obj) == COMPONENT_REF);
-      if (TREE_CODE (obj) != INDIRECT_REF)
+      if (TREE_CODE (obj) != MEM_REF)
        return;
       obj = TREE_OPERAND (obj, 0);
     }
index bc17b0c81bb0016c1d28dcd82eefa0902c599ba2..741742961cb60a57cb61a1f0defe8a3b7ad6aeb8 100644 (file)
@@ -324,7 +324,7 @@ check_op (funct_state local, tree t, bool checking_write)
       return;
     }
   else if (t
-          && INDIRECT_REF_P (t)
+          && (INDIRECT_REF_P (t) || TREE_CODE (t) == MEM_REF)
           && TREE_CODE (TREE_OPERAND (t, 0)) == SSA_NAME
           && !ptr_deref_may_alias_global_p (TREE_OPERAND (t, 0)))
     {
index 6ace2fcd0d79b8e16bb4293f5bfae8730f6e88ba..98ad91f8508d34c6033b4bcc6199636552139928 100644 (file)
@@ -421,6 +421,10 @@ decompose_indirect_ref_acc (tree str_decl, struct field_access_site *acc)
   if (!is_result_of_mult (before_cast, &acc->num, struct_size))
     return false;
 
+  /* ???  Add TREE_OPERAND (acc->ref, 1) to acc->offset.  */
+  if (!integer_zerop (TREE_OPERAND (acc->ref, 1)))
+    return false;
+
   return true;
 }
 
@@ -434,7 +438,7 @@ decompose_access (tree str_decl, struct field_access_site *acc)
 {
   gcc_assert (acc->ref);
 
-  if (TREE_CODE (acc->ref) == INDIRECT_REF)
+  if (TREE_CODE (acc->ref) == MEM_REF)
     return decompose_indirect_ref_acc (str_decl, acc);
   else if (TREE_CODE (acc->ref) == ARRAY_REF)
     return true;
@@ -969,12 +973,12 @@ replace_field_acc (struct field_access_site *acc, tree new_type)
   type_wrapper_t *wr_p = NULL;
   struct ref_pos r_pos;
 
-  while (TREE_CODE (ref_var) == INDIRECT_REF
+  while (TREE_CODE (ref_var) == MEM_REF
         || TREE_CODE (ref_var) == ARRAY_REF)
     {
       type_wrapper_t wr;
 
-      if ( TREE_CODE (ref_var) == INDIRECT_REF)
+      if (TREE_CODE (ref_var) == MEM_REF)
        {
          wr.wrap = 0;
          wr.domain = 0;
@@ -1001,7 +1005,7 @@ replace_field_acc (struct field_access_site *acc, tree new_type)
        new_ref = build4 (ARRAY_REF, type, new_ref,
                          wr_p->domain, NULL_TREE, NULL_TREE);
       else /* Pointer.  */
-       new_ref = build1 (INDIRECT_REF, type, new_ref);
+       new_ref = build_simple_mem_ref (new_ref);
       VEC_pop (type_wrapper_t, wrapper);
     }
 
@@ -1041,7 +1045,7 @@ static void
 replace_field_access_stmt (struct field_access_site *acc, tree new_type)
 {
 
-  if (TREE_CODE (acc->ref) == INDIRECT_REF
+  if (TREE_CODE (acc->ref) == MEM_REF
       ||TREE_CODE (acc->ref) == ARRAY_REF
       ||TREE_CODE (acc->ref) == VAR_DECL)
     replace_field_acc (acc, new_type);
@@ -1277,13 +1281,11 @@ insert_new_var_in_stmt (gimple stmt, tree var, tree new_var)
   pos = find_pos_in_stmt (stmt, var, &r_pos);
   gcc_assert (pos);
 
-  while (r_pos.container && (TREE_CODE(r_pos.container) == INDIRECT_REF
+  while (r_pos.container && (TREE_CODE(r_pos.container) == MEM_REF
                             || TREE_CODE(r_pos.container) == ADDR_EXPR))
     {
-      tree type = TREE_TYPE (TREE_TYPE (new_var));
-
-      if (TREE_CODE(r_pos.container) == INDIRECT_REF)
-       new_var = build1 (INDIRECT_REF, type, new_var);
+      if (TREE_CODE(r_pos.container) == MEM_REF)
+       new_var = build_simple_mem_ref (new_var);
       else
        new_var = build_fold_addr_expr (new_var);
       pos = find_pos_in_stmt (stmt, r_pos.container, &r_pos);
@@ -2530,7 +2532,7 @@ get_stmt_accesses (tree *tp, int *walk_subtrees, void *data)
        tree field_decl = TREE_OPERAND (t, 1);
 
 
-       if ((TREE_CODE (ref) == INDIRECT_REF
+       if ((TREE_CODE (ref) == MEM_REF
             || TREE_CODE (ref) == ARRAY_REF
             || TREE_CODE (ref) == VAR_DECL)
            && TREE_CODE (field_decl) == FIELD_DECL)
@@ -4031,7 +4033,10 @@ reorg_structs (void)
 static unsigned int
 reorg_structs_drive (void)
 {
-  reorg_structs ();
+  /* IPA struct-reorg is completely broken - its analysis phase is
+     non-conservative (which is not the only reason it is broken).  */
+  if (0)
+    reorg_structs ();
   return 0;
 }
 
index 4958762169f2822deb3bd4dd4919a7e90ceb1628..84c6e6ea3c2e2e69471efd56f0c4ee7f962ba9d0 100644 (file)
@@ -218,7 +218,7 @@ collect_data_for_malloc_call (gimple stmt, struct malloc_call_data *m_data)
    initial address and index of each dimension.  */
 struct access_site_info
 {
-  /* The statement (INDIRECT_REF or POINTER_PLUS_EXPR).  */
+  /* The statement (MEM_REF or POINTER_PLUS_EXPR).  */
   gimple stmt;
 
   /* In case of POINTER_PLUS_EXPR, what is the offset.  */
@@ -334,7 +334,7 @@ struct ssa_acc_in_tree
   /* The variable whose accesses in the tree we are looking for.  */
   tree ssa_var;
   /* The tree and code inside it the ssa_var is accessed, currently
-     it could be an INDIRECT_REF or CALL_EXPR.  */
+     it could be a MEM_REF or CALL_EXPR.  */
   enum tree_code t_code;
   tree t_tree;
   /* The place in the containing tree.  */
@@ -413,33 +413,18 @@ mtt_info_eq (const void *mtt1, const void *mtt2)
 static bool
 may_flatten_matrices_1 (gimple stmt)
 {
-  tree t;
-
   switch (gimple_code (stmt))
     {
     case GIMPLE_ASSIGN:
-      if (!gimple_assign_cast_p (stmt))
+    case GIMPLE_CALL:
+      if (!gimple_has_lhs (stmt))
        return true;
-
-      t = gimple_assign_rhs1 (stmt);
-      while (CONVERT_EXPR_P (t))
+      if (TREE_CODE (TREE_TYPE (gimple_get_lhs (stmt))) == VECTOR_TYPE)
        {
-         if (TREE_TYPE (t) && POINTER_TYPE_P (TREE_TYPE (t)))
-           {
-             tree pointee;
-
-             pointee = TREE_TYPE (t);
-             while (POINTER_TYPE_P (pointee))
-               pointee = TREE_TYPE (pointee);
-             if (TREE_CODE (pointee) == VECTOR_TYPE)
-               {
-                 if (dump_file)
-                   fprintf (dump_file,
-                            "Found vector type, don't flatten matrix\n");
-                 return false;
-               }
-           }
-         t = TREE_OPERAND (t, 0);
+         if (dump_file)
+           fprintf (dump_file,
+                    "Found vector type, don't flatten matrix\n");
+         return false;
        }
       break;
     case GIMPLE_ASM:
@@ -602,7 +587,7 @@ mark_min_matrix_escape_level (struct matrix_info *mi, int l, gimple s)
 /* Find if the SSA variable is accessed inside the
    tree and record the tree containing it.
    The only relevant uses are the case of SSA_NAME, or SSA inside
-   INDIRECT_REF, PLUS_EXPR, POINTER_PLUS_EXPR, MULT_EXPR.  */
+   MEM_REF, PLUS_EXPR, POINTER_PLUS_EXPR, MULT_EXPR.  */
 static void
 ssa_accessed_in_tree (tree t, struct ssa_acc_in_tree *a)
 {
@@ -613,7 +598,7 @@ ssa_accessed_in_tree (tree t, struct ssa_acc_in_tree *a)
       if (t == a->ssa_var)
        a->var_found = true;
       break;
-    case INDIRECT_REF:
+    case MEM_REF:
       if (SSA_VAR_P (TREE_OPERAND (t, 0))
          && TREE_OPERAND (t, 0) == a->ssa_var)
        a->var_found = true;
@@ -660,7 +645,7 @@ ssa_accessed_in_assign_rhs (gimple stmt, struct ssa_acc_in_tree *a)
       tree op1, op2;
 
     case SSA_NAME:
-    case INDIRECT_REF:
+    case MEM_REF:
     CASE_CONVERT:
     case VIEW_CONVERT_EXPR:
       ssa_accessed_in_tree (gimple_assign_rhs1 (stmt), a);
@@ -984,7 +969,7 @@ get_index_from_offset (tree offset, gimple def_stmt)
 
 /* update MI->dimension_type_size[CURRENT_INDIRECT_LEVEL] with the size
    of the type related to the SSA_VAR, or the type related to the
-   lhs of STMT, in the case that it is an INDIRECT_REF.  */
+   lhs of STMT, in the case that it is a MEM_REF.  */
 static void
 update_type_size (struct matrix_info *mi, gimple stmt, tree ssa_var,
                  int current_indirect_level)
@@ -992,9 +977,9 @@ update_type_size (struct matrix_info *mi, gimple stmt, tree ssa_var,
   tree lhs;
   HOST_WIDE_INT type_size;
 
-  /* Update type according to the type of the INDIRECT_REF expr.   */
+  /* Update type according to the type of the MEM_REF expr.   */
   if (is_gimple_assign (stmt)
-      && TREE_CODE (gimple_assign_lhs (stmt)) == INDIRECT_REF)
+      && TREE_CODE (gimple_assign_lhs (stmt)) == MEM_REF)
     {
       lhs = gimple_assign_lhs (stmt);
       gcc_assert (POINTER_TYPE_P
@@ -1073,7 +1058,7 @@ analyze_accesses_for_call_stmt (struct matrix_info *mi, tree ssa_var,
         at this level because in this case we cannot calculate the
         address correctly.  */
       if ((lhs_acc.var_found && rhs_acc.var_found
-          && lhs_acc.t_code == INDIRECT_REF)
+          && lhs_acc.t_code == MEM_REF)
          || (!rhs_acc.var_found && !lhs_acc.var_found))
        {
          mark_min_matrix_escape_level (mi, current_indirect_level, use_stmt);
@@ -1087,7 +1072,7 @@ analyze_accesses_for_call_stmt (struct matrix_info *mi, tree ssa_var,
        {
          int l = current_indirect_level + 1;
 
-         gcc_assert (lhs_acc.t_code == INDIRECT_REF);
+         gcc_assert (lhs_acc.t_code == MEM_REF);
          mark_min_matrix_escape_level (mi, l, use_stmt);
          return current_indirect_level;
        }
@@ -1213,7 +1198,7 @@ analyze_accesses_for_assign_stmt (struct matrix_info *mi, tree ssa_var,
      at this level because in this case we cannot calculate the
      address correctly.  */
   if ((lhs_acc.var_found && rhs_acc.var_found
-       && lhs_acc.t_code == INDIRECT_REF)
+       && lhs_acc.t_code == MEM_REF)
       || (!rhs_acc.var_found && !lhs_acc.var_found))
     {
       mark_min_matrix_escape_level (mi, current_indirect_level, use_stmt);
@@ -1227,7 +1212,7 @@ analyze_accesses_for_assign_stmt (struct matrix_info *mi, tree ssa_var,
     {
       int l = current_indirect_level + 1;
 
-      gcc_assert (lhs_acc.t_code == INDIRECT_REF);
+      gcc_assert (lhs_acc.t_code == MEM_REF);
 
       if (!(gimple_assign_copy_p (use_stmt)
            || gimple_assign_cast_p (use_stmt))
@@ -1248,7 +1233,7 @@ analyze_accesses_for_assign_stmt (struct matrix_info *mi, tree ssa_var,
      is used.  */
   if (rhs_acc.var_found)
     {
-      if (rhs_acc.t_code != INDIRECT_REF
+      if (rhs_acc.t_code != MEM_REF
          && rhs_acc.t_code != POINTER_PLUS_EXPR && rhs_acc.t_code != SSA_NAME)
        {
          mark_min_matrix_escape_level (mi, current_indirect_level, use_stmt);
@@ -1256,7 +1241,7 @@ analyze_accesses_for_assign_stmt (struct matrix_info *mi, tree ssa_var,
        }
       /* If the access in the RHS has an indirection increase the
          indirection level.  */
-      if (rhs_acc.t_code == INDIRECT_REF)
+      if (rhs_acc.t_code == MEM_REF)
        {
          if (record_accesses)
            record_access_alloc_site_info (mi, use_stmt, NULL_TREE,
@@ -1309,7 +1294,7 @@ analyze_accesses_for_assign_stmt (struct matrix_info *mi, tree ssa_var,
        }
       /* If we are storing this level of indirection mark it as
          escaping.  */
-      if (lhs_acc.t_code == INDIRECT_REF || TREE_CODE (lhs) != SSA_NAME)
+      if (lhs_acc.t_code == MEM_REF || TREE_CODE (lhs) != SSA_NAME)
        {
          int l = current_indirect_level;
 
@@ -1369,8 +1354,8 @@ analyze_matrix_accesses (struct matrix_info *mi, tree ssa_var,
     return;
 
 /* Now go over the uses of the SSA_NAME and check how it is used in
-   each one of them.  We are mainly looking for the pattern INDIRECT_REF,
-   then a POINTER_PLUS_EXPR, then INDIRECT_REF etc.  while in between there could
+   each one of them.  We are mainly looking for the pattern MEM_REF,
+   then a POINTER_PLUS_EXPR, then MEM_REF etc.  while in between there could
    be any number of copies and casts.  */
   gcc_assert (TREE_CODE (ssa_var) == SSA_NAME);
 
@@ -1856,7 +1841,7 @@ transform_access_sites (void **slot, void *data ATTRIBUTE_UNUSED)
                    gimple new_stmt;
 
                    gcc_assert (gimple_assign_rhs_code (acc_info->stmt)
-                               == INDIRECT_REF);
+                               == MEM_REF);
                    /* Emit convert statement to convert to type of use.  */
                    tmp = create_tmp_var (TREE_TYPE (lhs), "new");
                    add_referenced_var (tmp);
@@ -1878,10 +1863,10 @@ transform_access_sites (void **slot, void *data ATTRIBUTE_UNUSED)
          continue;
        }
       code = gimple_assign_rhs_code (acc_info->stmt);
-      if (code == INDIRECT_REF
+      if (code == MEM_REF
          && acc_info->level < min_escape_l - 1)
        {
-         /* Replace the INDIRECT_REF with NOP (cast) usually we are casting
+         /* Replace the MEM_REF with NOP (cast) usually we are casting
             from "pointer to type" to "type".  */
          tree t =
            build1 (NOP_EXPR, TREE_TYPE (gimple_assign_rhs1 (acc_info->stmt)),
@@ -2206,7 +2191,6 @@ transform_allocation_sites (void **slot, void *data ATTRIBUTE_UNUSED)
   for (i = 1; i < mi->min_indirect_level_escape; i++)
     {
       gimple_stmt_iterator gsi;
-      gimple use_stmt1 = NULL;
 
       gimple call_stmt = mi->malloc_for_level[i];
       gcc_assert (is_gimple_call (call_stmt));
@@ -2216,17 +2200,9 @@ transform_allocation_sites (void **slot, void *data ATTRIBUTE_UNUSED)
       gsi = gsi_for_stmt (call_stmt);
       /* Remove the call stmt.  */
       gsi_remove (&gsi, true);
-      /* remove the type cast stmt.  */
-      FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter,
-                            gimple_call_lhs (call_stmt))
-      {
-       use_stmt1 = use_stmt;
-       gsi = gsi_for_stmt (use_stmt);
-       gsi_remove (&gsi, true);
-      }
       /* Remove the assignment of the allocated area.  */
       FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter,
-                            gimple_get_lhs (use_stmt1))
+                            gimple_call_lhs (call_stmt))
       {
        gsi = gsi_for_stmt (use_stmt);
        gsi_remove (&gsi, true);
index 53309ad02a90c2bd6dfca6e58daa7ac80bda01f9..68bc84a881cddd3f0bcc34beb4e28376506387d6 100644 (file)
@@ -864,10 +864,10 @@ build_receiver_ref (tree var, bool by_ref, omp_context *ctx)
   if (x != NULL)
     field = x;
 
-  x = build_fold_indirect_ref (ctx->receiver_decl);
+  x = build_simple_mem_ref (ctx->receiver_decl);
   x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL);
   if (by_ref)
-    x = build_fold_indirect_ref (x);
+    x = build_simple_mem_ref (x);
 
   return x;
 }
@@ -887,7 +887,7 @@ build_outer_var_ref (tree var, omp_context *ctx)
     {
       x = TREE_OPERAND (DECL_VALUE_EXPR (var), 0);
       x = build_outer_var_ref (x, ctx);
-      x = build_fold_indirect_ref (x);
+      x = build_simple_mem_ref (x);
     }
   else if (is_taskreg_ctx (ctx))
     {
@@ -904,7 +904,7 @@ build_outer_var_ref (tree var, omp_context *ctx)
     gcc_unreachable ();
 
   if (is_reference (var))
-    x = build_fold_indirect_ref (x);
+    x = build_simple_mem_ref (x);
 
   return x;
 }
@@ -1916,7 +1916,18 @@ scan_omp_1_op (tree *tp, int *walk_subtrees, void *data)
        {
          *walk_subtrees = 1;
          if (ctx)
-           TREE_TYPE (t) = remap_type (TREE_TYPE (t), &ctx->cb);
+           {
+             tree tem = remap_type (TREE_TYPE (t), &ctx->cb);
+             if (tem != TREE_TYPE (t))
+               {
+                 if (TREE_CODE (t) == INTEGER_CST)
+                   *tp = build_int_cst_wide (tem,
+                                             TREE_INT_CST_LOW (t),
+                                             TREE_INT_CST_HIGH (t));
+                 else
+                   TREE_TYPE (t) = tem;
+               }
+           }
        }
       break;
     }
@@ -2337,7 +2348,7 @@ lower_rec_input_clauses (tree clauses, gimple_seq *ilist, gimple_seq *dlist,
              x = fold_convert_loc (clause_loc, TREE_TYPE (new_var), x);
              gimplify_assign (new_var, x, ilist);
 
-             new_var = build_fold_indirect_ref_loc (clause_loc, new_var);
+             new_var = build_simple_mem_ref_loc (clause_loc, new_var);
            }
          else if (c_kind == OMP_CLAUSE_REDUCTION
                   && OMP_CLAUSE_REDUCTION_PLACEHOLDER (c))
@@ -2555,7 +2566,7 @@ lower_lastprivate_clauses (tree clauses, tree predicate, gimple_seq *stmt_list,
 
          x = build_outer_var_ref (var, ctx);
          if (is_reference (var))
-           new_var = build_fold_indirect_ref_loc (clause_loc, new_var);
+           new_var = build_simple_mem_ref_loc (clause_loc, new_var);
          x = lang_hooks.decls.omp_clause_assign_op (c, x, new_var);
          gimplify_and_add (x, stmt_list);
        }
@@ -2622,7 +2633,7 @@ lower_reduction_clauses (tree clauses, gimple_seq *stmt_seqp, omp_context *ctx)
       var = OMP_CLAUSE_DECL (c);
       new_var = lookup_decl (var, ctx);
       if (is_reference (var))
-       new_var = build_fold_indirect_ref_loc (clause_loc, new_var);
+       new_var = build_simple_mem_ref_loc (clause_loc, new_var);
       ref = build_outer_var_ref (var, ctx);
       code = OMP_CLAUSE_REDUCTION_CODE (c);
 
@@ -2714,8 +2725,8 @@ lower_copyprivate_clauses (tree clauses, gimple_seq *slist, gimple_seq *rlist,
       if (is_reference (var))
        {
          ref = fold_convert_loc (clause_loc, TREE_TYPE (new_var), ref);
-         ref = build_fold_indirect_ref_loc (clause_loc, ref);
-         new_var = build_fold_indirect_ref_loc (clause_loc, new_var);
+         ref = build_simple_mem_ref_loc (clause_loc, ref);
+         new_var = build_simple_mem_ref_loc (clause_loc, new_var);
        }
       x = lang_hooks.decls.omp_clause_assign_op (c, new_var, ref);
       gimplify_and_add (x, rlist);
@@ -5067,8 +5078,12 @@ expand_omp_atomic_pipeline (basic_block load_bb, basic_block store_bb,
       loadedi = loaded_val;
     }
 
-  initial = force_gimple_operand_gsi (&si, build_fold_indirect_ref (iaddr),
-                                     true, NULL_TREE, true, GSI_SAME_STMT);
+  initial
+    = force_gimple_operand_gsi (&si,
+                               build2 (MEM_REF, TREE_TYPE (TREE_TYPE (iaddr)),
+                                       iaddr,
+                                       build_int_cst (TREE_TYPE (iaddr), 0)),
+                               true, NULL_TREE, true, GSI_SAME_STMT);
 
   /* Move the value to the LOADEDI temporary.  */
   if (gimple_in_ssa_p (cfun))
@@ -5212,15 +5227,15 @@ expand_omp_atomic_mutex (basic_block load_bb, basic_block store_bb,
   t = build_function_call_expr (UNKNOWN_LOCATION, t, 0);
   force_gimple_operand_gsi (&si, t, true, NULL_TREE, true, GSI_SAME_STMT);
 
-  stmt = gimple_build_assign (loaded_val, build_fold_indirect_ref (addr));
+  stmt = gimple_build_assign (loaded_val, build_simple_mem_ref (addr));
   gsi_insert_before (&si, stmt, GSI_SAME_STMT);
   gsi_remove (&si, true);
 
   si = gsi_last_bb (store_bb);
   gcc_assert (gimple_code (gsi_stmt (si)) == GIMPLE_OMP_ATOMIC_STORE);
 
-  stmt = gimple_build_assign (build_fold_indirect_ref (unshare_expr (addr)),
-                               stored_val);
+  stmt = gimple_build_assign (build_simple_mem_ref (unshare_expr (addr)),
+                             stored_val);
   gsi_insert_before (&si, stmt, GSI_SAME_STMT);
 
   t = built_in_decls[BUILT_IN_GOMP_ATOMIC_END];
@@ -6269,7 +6284,7 @@ create_task_copyfn (gimple task_stmt, omp_context *ctx)
          n = splay_tree_lookup (ctx->sfield_map, (splay_tree_key) decl);
          sf = (tree) n->value;
          sf = *(tree *) pointer_map_contains (tcctx.cb.decl_map, sf);
-         src = build_fold_indirect_ref_loc (loc, sarg);
+         src = build_simple_mem_ref_loc (loc, sarg);
          src = build3 (COMPONENT_REF, TREE_TYPE (sf), src, sf, NULL);
          t = build2 (MODIFY_EXPR, TREE_TYPE (*p), *p, src);
          append_to_statement_list (t, &list);
@@ -6292,9 +6307,9 @@ create_task_copyfn (gimple task_stmt, omp_context *ctx)
        sf = (tree) n->value;
        if (tcctx.cb.decl_map)
          sf = *(tree *) pointer_map_contains (tcctx.cb.decl_map, sf);
-       src = build_fold_indirect_ref_loc (loc, sarg);
+       src = build_simple_mem_ref_loc (loc, sarg);
        src = build3 (COMPONENT_REF, TREE_TYPE (sf), src, sf, NULL);
-       dst = build_fold_indirect_ref_loc (loc, arg);
+       dst = build_simple_mem_ref_loc (loc, arg);
        dst = build3 (COMPONENT_REF, TREE_TYPE (f), dst, f, NULL);
        t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
        append_to_statement_list (t, &list);
@@ -6315,14 +6330,14 @@ create_task_copyfn (gimple task_stmt, omp_context *ctx)
            sf = (tree) n->value;
            if (tcctx.cb.decl_map)
              sf = *(tree *) pointer_map_contains (tcctx.cb.decl_map, sf);
-           src = build_fold_indirect_ref_loc (loc, sarg);
+           src = build_simple_mem_ref_loc (loc, sarg);
            src = build3 (COMPONENT_REF, TREE_TYPE (sf), src, sf, NULL);
            if (use_pointer_for_field (decl, NULL) || is_reference (decl))
-             src = build_fold_indirect_ref_loc (loc, src);
+             src = build_simple_mem_ref_loc (loc, src);
          }
        else
          src = decl;
-       dst = build_fold_indirect_ref_loc (loc, arg);
+       dst = build_simple_mem_ref_loc (loc, arg);
        dst = build3 (COMPONENT_REF, TREE_TYPE (f), dst, f, NULL);
        t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
        append_to_statement_list (t, &list);
@@ -6341,14 +6356,14 @@ create_task_copyfn (gimple task_stmt, omp_context *ctx)
            sf = (tree) n->value;
            if (tcctx.cb.decl_map)
              sf = *(tree *) pointer_map_contains (tcctx.cb.decl_map, sf);
-           src = build_fold_indirect_ref_loc (loc, sarg);
+           src = build_simple_mem_ref_loc (loc, sarg);
            src = build3 (COMPONENT_REF, TREE_TYPE (sf), src, sf, NULL);
            if (use_pointer_for_field (decl, NULL))
-             src = build_fold_indirect_ref_loc (loc, src);
+             src = build_simple_mem_ref_loc (loc, src);
          }
        else
          src = decl;
-       dst = build_fold_indirect_ref_loc (loc, arg);
+       dst = build_simple_mem_ref_loc (loc, arg);
        dst = build3 (COMPONENT_REF, TREE_TYPE (f), dst, f, NULL);
        t = build2 (MODIFY_EXPR, TREE_TYPE (dst), dst, src);
        append_to_statement_list (t, &list);
@@ -6380,10 +6395,10 @@ create_task_copyfn (gimple task_stmt, omp_context *ctx)
                                 (splay_tree_key) TREE_OPERAND (ind, 0));
          sf = (tree) n->value;
          sf = *(tree *) pointer_map_contains (tcctx.cb.decl_map, sf);
-         src = build_fold_indirect_ref_loc (loc, sarg);
+         src = build_simple_mem_ref_loc (loc, sarg);
          src = build3 (COMPONENT_REF, TREE_TYPE (sf), src, sf, NULL);
-         src = build_fold_indirect_ref_loc (loc, src);
-         dst = build_fold_indirect_ref_loc (loc, arg);
+         src = build_simple_mem_ref_loc (loc, src);
+         dst = build_simple_mem_ref_loc (loc, arg);
          dst = build3 (COMPONENT_REF, TREE_TYPE (f), dst, f, NULL);
          t = lang_hooks.decls.omp_clause_copy_ctor (c, dst, src);
          append_to_statement_list (t, &list);
@@ -6391,7 +6406,7 @@ create_task_copyfn (gimple task_stmt, omp_context *ctx)
                                 (splay_tree_key) TREE_OPERAND (ind, 0));
          df = (tree) n->value;
          df = *(tree *) pointer_map_contains (tcctx.cb.decl_map, df);
-         ptr = build_fold_indirect_ref_loc (loc, arg);
+         ptr = build_simple_mem_ref_loc (loc, arg);
          ptr = build3 (COMPONENT_REF, TREE_TYPE (df), ptr, df, NULL);
          t = build2 (MODIFY_EXPR, TREE_TYPE (ptr), ptr,
                      build_fold_addr_expr_loc (loc, dst));
index 474e0963d753b3dc516787bfdccfeed25b0a4f36..0fd31d9bc604156b713f30482ad7bdd4584888ff 100644 (file)
@@ -1,3 +1,70 @@
+2010-07-01  Richard Guenther  <rguenther@suse.de>
+
+       PR middle-end/42834
+       PR middle-end/44468
+       * gcc.c-torture/execute/20100316-1.c: New testcase.
+       * gcc.c-torture/execute/pr44468.c: Likewise.
+       * gcc.c-torture/compile/20100609-1.c: Likewise.
+       * gcc.dg/volatile2.c: Adjust.
+       * gcc.dg/plugin/selfassign.c: Likewise.
+       * gcc.dg/pr36902.c: Likewise.
+       * gcc.dg/tree-ssa/foldaddr-2.c: Remove.
+       * gcc.dg/tree-ssa/foldaddr-3.c: Likewise.
+       * gcc.dg/tree-ssa/forwprop-8.c: Adjust.
+       * gcc.dg/tree-ssa/pr17141-1.c: Likewise.
+       * gcc.dg/tree-ssa/ssa-fre-13.c: Likewise.
+       * gcc.dg/tree-ssa/ssa-fre-14.c: Likewise.
+       * gcc.dg/tree-ssa/ssa-ccp-21.c: Likewise.
+       * gcc.dg/tree-ssa/pta-ptrarith-1.c: Likewise.
+       * gcc.dg/tree-ssa/20030807-7.c: Likewise.
+       * gcc.dg/tree-ssa/forwprop-10.c: Likewise.
+       * gcc.dg/tree-ssa/ssa-fre-1.c: Likewise.
+       * gcc.dg/tree-ssa/pta-ptrarith-2.c: Likewise.
+       * gcc.dg/tree-ssa/ssa-ccp-23.c: Likewise.
+       * gcc.dg/tree-ssa/forwprop-1.c: Likewise.
+       * gcc.dg/tree-ssa/forwprop-2.c: Likewise.
+       * gcc.dg/tree-ssa/struct-aliasing-1.c: Likewise.
+       * gcc.dg/tree-ssa/ssa-ccp-25.c: Likewise.
+       * gcc.dg/tree-ssa/ssa-pre-26.c: Likewise.
+       * gcc.dg/tree-ssa/struct-aliasing-2.c: Likewise.
+       * gcc.dg/tree-ssa/ssa-ccp-26.c: Likewise.
+       * gcc.dg/tree-ssa/ssa-sccvn-4.c: Likewise.
+       * gcc.dg/tree-ssa/ssa-pre-7.c: Likewise.
+       * gcc.dg/tree-ssa/forwprop-5.c: Likewise.
+       * gcc.dg/struct/w_prof_two_strs.c: XFAIL.
+       * gcc.dg/struct/wo_prof_escape_arg_to_local.c: Likewise.
+       * gcc.dg/struct/wo_prof_global_var.c: Likewise.
+       * gcc.dg/struct/wo_prof_malloc_size_var.c: Likewise.
+       * gcc.dg/struct/w_prof_local_array.c: Likewise.
+       * gcc.dg/struct/w_prof_single_str_global.c: Likewise.
+       * gcc.dg/struct/wo_prof_escape_str_init.c: Likewise.
+       * gcc.dg/struct/wo_prof_array_through_pointer.c: Likewise.
+       * gcc.dg/struct/w_prof_global_array.c: Likewise.
+       * gcc.dg/struct/wo_prof_array_field.c: Likewise.
+       * gcc.dg/struct/wo_prof_single_str_local.c: Likewise.
+       * gcc.dg/struct/w_prof_local_var.c: Likewise.
+       * gcc.dg/struct/wo_prof_two_strs.c: Likewise.
+       * gcc.dg/struct/wo_prof_empty_str.c: Likewise.
+       * gcc.dg/struct/wo_prof_local_array.c: Likewise.
+       * gcc.dg/struct/w_prof_global_var.c: Likewise.
+       * gcc.dg/struct/wo_prof_single_str_global.c: Likewise.
+       * gcc.dg/struct/wo_prof_escape_substr_value.c: Likewise.
+       * gcc.dg/struct/wo_prof_global_array.c: Likewise.
+       * gcc.dg/struct/wo_prof_escape_return.c: Likewise.
+       * gcc.dg/struct/wo_prof_escape_substr_array.c: Likewise.
+       * gcc.dg/struct/wo_prof_double_malloc.c: Likewise.
+       * gcc.dg/struct/w_ratio_cold_str.c: Likewise.
+       * gcc.dg/struct/wo_prof_escape_substr_pointer.c: Likewise.
+       * gcc.dg/struct/wo_prof_local_var.c: Likewise.
+       * gcc.dg/tree-prof/stringop-1.c: Adjust.
+       * g++.dg/tree-ssa/pr31146.C: Likewise.
+       * g++.dg/tree-ssa/copyprop-1.C: Likewise.
+       * g++.dg/tree-ssa/pr33604.C: Likewise.
+       * g++.dg/plugin/selfassign.c: Likewise.
+       * gfortran.dg/array_memcpy_3.f90: Likewise.
+       * gfortran.dg/array_memcpy_4.f90: Likewise.
+       * c-c++-common/torture/pr42834.c: New testcase.
+
 2010-06-30  Paolo Carlini  <paolo.carlini@oracle.com>
 
        PR c++/44628
diff --git a/gcc/testsuite/c-c++-common/torture/pr42834.c b/gcc/testsuite/c-c++-common/torture/pr42834.c
new file mode 100644 (file)
index 0000000..a1ba49e
--- /dev/null
@@ -0,0 +1,23 @@
+/* { dg-do run } */
+
+void __attribute__((noinline,noclone))
+foo(int *p, float *q) { __asm__ volatile ("" : : : "memory"); }
+
+int main()
+{
+  if (sizeof (int) == sizeof (float))
+    {
+      int i;
+      float f;
+      int *p;
+      /* Prevent i and f from being rewritten into SSA form.  */
+      foo (&i, &f);
+      i = 0;
+      f = 1.0;
+      p = (int *)&f;
+      __builtin_memcpy (&i, p, 4);
+      if (*(float *)&i != 1.0)
+       __builtin_abort ();
+    }
+  return 0;
+}
index df42abd27df1b3cef1077740454a41561afa91d3..eb8f24a45a8e301c432e94fb07a04ec8e675a38a 100644 (file)
@@ -52,9 +52,7 @@ get_real_ref_rhs (tree expr)
               /* We are only interested in an assignment with a single
                  rhs operand because if it is not, the original assignment
                  will not possibly be a self-assignment.  */
-              if (is_gimple_assign (def_stmt)
-                  && (get_gimple_rhs_class (gimple_assign_rhs_code (def_stmt))
-                      == GIMPLE_SINGLE_RHS))
+              if (gimple_assign_single_p (def_stmt))
                 return get_real_ref_rhs (gimple_assign_rhs1 (def_stmt));
               else
                 return NULL_TREE;
@@ -66,7 +64,7 @@ get_real_ref_rhs (tree expr)
       case PARM_DECL:
       case FIELD_DECL:
       case COMPONENT_REF:
-      case INDIRECT_REF:
+      case MEM_REF:
       case ARRAY_REF:
         return expr;
       default:
@@ -116,17 +114,18 @@ get_non_ssa_expr (tree expr)
           else
             return expr;
         }
-      case INDIRECT_REF:
+      case MEM_REF:
         {
           tree orig_base = TREE_OPERAND (expr, 0);
-          tree base = get_non_ssa_expr (orig_base);
-          if (!base)
-            return NULL_TREE;
-          /* If BASE is converted, build a new indirect reference tree.  */
-          if (base != orig_base)
-            return build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (base)), base);
-          else
-            return expr;
+         if (TREE_CODE (orig_base) == SSA_NAME)
+           {
+             tree base = get_non_ssa_expr (orig_base);
+             if (!base)
+               return NULL_TREE;
+             return fold_build2 (MEM_REF, TREE_TYPE (expr),
+                                 base, TREE_OPERAND (expr, 1));
+           }
+         return expr;
         }
       case ARRAY_REF:
         {
@@ -153,9 +152,7 @@ get_non_ssa_expr (tree expr)
               && !gimple_nop_p (SSA_NAME_DEF_STMT (expr)))
             {
               gimple def_stmt = SSA_NAME_DEF_STMT (expr);
-              if (is_gimple_assign (def_stmt)
-                  && (get_gimple_rhs_class (gimple_assign_rhs_code (def_stmt))
-                      == GIMPLE_SINGLE_RHS))
+              if (gimple_assign_single_p (def_stmt))
                 vdecl = gimple_assign_rhs1 (def_stmt);
             }
           return get_non_ssa_expr (vdecl);
@@ -201,9 +198,7 @@ warn_self_assign (gimple stmt)
   tree rhs, lhs;
 
   /* Check assigment statement.  */
-  if (is_gimple_assign (stmt)
-      && (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
-          == GIMPLE_SINGLE_RHS))
+  if (gimple_assign_single_p (stmt))
     {
       rhs = get_real_ref_rhs (gimple_assign_rhs1 (stmt));
       if (!rhs)
index 2be046915378e7308334105a6cbc284f0be89156..03f6b1207c7243dc8faa725cb933fba4164f438b 100644 (file)
@@ -25,5 +25,7 @@ int foo(Object&o)
   return o[0];
 }
 
-/* { dg-final { scan-tree-dump-not ".* = \[^>;\]*;" "dce2" } } */
+/* Remaining should be two loads.  */
+
+/* { dg-final { scan-tree-dump-times " = \[^\n\]*;" 2 "dce2" } } */
 /* { dg-final { cleanup-tree-dump "dce2" } } */
index 96790239693a8b43dd128ec76c4ffa096966bb70..478a488e3c2053e186274c556e8c642b7d19973a 100644 (file)
@@ -12,5 +12,5 @@ void foo (int j)
   *q = 1;
 }
 
-/* { dg-final { scan-tree-dump "i\\\[j.*\\\] =.* 1;" "forwprop1" } } */
+/* { dg-final { scan-tree-dump "MEM\\\[.*&i\\\]\\\[j.*\\\] =.* 1;" "forwprop1" } } */
 /* { dg-final { cleanup-tree-dump "forwprop?" } } */
index d78006220edd170c7760a793382dc54de100591c..7e820d3ef16fba00ab646117db124bc6b25296c3 100644 (file)
@@ -1,5 +1,5 @@
 /* { dg-do run } */
-/* { dg-options "-O -fdump-tree-forwprop1" } */
+/* { dg-options "-O -fdump-tree-optimized-vops" } */
 
 struct Value
 {
@@ -35,12 +35,14 @@ int main(int argc, char *argv[])
   return 0;
 }
 
-/* Check that we forward propagated
+/* Check that we propagate
      D.2182_13 = (struct Ref *) &D.2137.lhs;
    to
      D.2182_13->lhs.m ={v} &I;
    yielding
-     D.2137.lhs.m ={v} &I;  */
+     D.2137.lhs.m ={v} &I;
+   so that SRA can promote all locals to registers and we end up
+   referencing a single virtual operand at abort () after optimization.  */
 
-/* { dg-final { scan-tree-dump-times "D\\\.....\\\..hs\\\.m =" 2 "forwprop1" } } */
-/* { dg-final { cleanup-tree-dump "forwprop1" } } */
+/* { dg-final { scan-tree-dump-times ".MEM_\[0-9\]*\\\(D\\\)" 1 "optimized" } } */
+/* { dg-final { cleanup-tree-dump "optimized" } } */
diff --git a/gcc/testsuite/gcc.c-torture/compile/20100609-1.c b/gcc/testsuite/gcc.c-torture/compile/20100609-1.c
new file mode 100644 (file)
index 0000000..8e1175f
--- /dev/null
@@ -0,0 +1,8 @@
+extern unsigned long int strtoul (__const char *__restrict __nptr,       char **__restrict __endptr, int __base);
+int find_reloads (int i, char *p)
+{
+  int c;
+  while ((c = *p++))
+    return strtoul (p - 1, &p, 10); 
+  return 0;
+}
diff --git a/gcc/testsuite/gcc.c-torture/execute/20100316-1.c b/gcc/testsuite/gcc.c-torture/execute/20100316-1.c
new file mode 100644 (file)
index 0000000..8367d72
--- /dev/null
@@ -0,0 +1,24 @@
+struct Foo {
+  int i;
+  unsigned precision : 10;
+  unsigned blah : 3;
+} f;
+
+int __attribute__((noinline,noclone))
+foo (struct Foo *p)
+{
+  struct Foo *q = p;
+  return (*q).precision;
+}
+
+extern void abort (void);
+
+int main()
+{
+  f.i = -1;
+  f.precision = 0;
+  f.blah = -1;
+  if (foo (&f) != 0)
+    abort ();
+  return 0;
+}
diff --git a/gcc/testsuite/gcc.c-torture/execute/pr44468.c b/gcc/testsuite/gcc.c-torture/execute/pr44468.c
new file mode 100644 (file)
index 0000000..c4bd492
--- /dev/null
@@ -0,0 +1,60 @@
+#include <stddef.h>
+
+struct S {
+  int i;
+  int j;
+};
+struct R {
+  int k;
+  struct S a;
+};
+struct Q {
+  float k;
+  struct S a;
+};
+struct Q s;
+int __attribute__((noinline,noclone))
+test1 (void *q)
+{
+  struct S *b = (struct S *)((char *)q + sizeof (int));
+  s.a.i = 0;
+  b->i = 3;
+  return s.a.i;
+}
+int __attribute__((noinline,noclone))
+test2 (void *q)
+{
+  struct S *b = &((struct R *)q)->a;
+  s.a.i = 0;
+  b->i = 3;
+  return s.a.i;
+}
+int __attribute__((noinline,noclone))
+test3 (void *q)
+{
+  s.a.i = 0;
+  ((struct S *)((char *)q + sizeof (int)))->i = 3;
+  return s.a.i;
+}
+extern void abort (void);
+int
+main()
+{
+  if (sizeof (float) != sizeof (int)
+      || offsetof (struct R, a) != sizeof (int)
+      || offsetof (struct Q, a) != sizeof (int))
+    return 0;
+  s.a.i = 1;
+  s.a.j = 2;
+  if (test1 ((void *)&s) != 3)
+    abort ();
+  s.a.i = 1;
+  s.a.j = 2;
+  if (test2 ((void *)&s) != 3)
+    abort ();
+  s.a.i = 1;
+  s.a.j = 2;
+  if (test3 ((void *)&s) != 3)
+    abort ();
+  return 0;
+}
index df42abd27df1b3cef1077740454a41561afa91d3..eb8f24a45a8e301c432e94fb07a04ec8e675a38a 100644 (file)
@@ -52,9 +52,7 @@ get_real_ref_rhs (tree expr)
               /* We are only interested in an assignment with a single
                  rhs operand because if it is not, the original assignment
                  will not possibly be a self-assignment.  */
-              if (is_gimple_assign (def_stmt)
-                  && (get_gimple_rhs_class (gimple_assign_rhs_code (def_stmt))
-                      == GIMPLE_SINGLE_RHS))
+              if (gimple_assign_single_p (def_stmt))
                 return get_real_ref_rhs (gimple_assign_rhs1 (def_stmt));
               else
                 return NULL_TREE;
@@ -66,7 +64,7 @@ get_real_ref_rhs (tree expr)
       case PARM_DECL:
       case FIELD_DECL:
       case COMPONENT_REF:
-      case INDIRECT_REF:
+      case MEM_REF:
       case ARRAY_REF:
         return expr;
       default:
@@ -116,17 +114,18 @@ get_non_ssa_expr (tree expr)
           else
             return expr;
         }
-      case INDIRECT_REF:
+      case MEM_REF:
         {
           tree orig_base = TREE_OPERAND (expr, 0);
-          tree base = get_non_ssa_expr (orig_base);
-          if (!base)
-            return NULL_TREE;
-          /* If BASE is converted, build a new indirect reference tree.  */
-          if (base != orig_base)
-            return build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (base)), base);
-          else
-            return expr;
+         if (TREE_CODE (orig_base) == SSA_NAME)
+           {
+             tree base = get_non_ssa_expr (orig_base);
+             if (!base)
+               return NULL_TREE;
+             return fold_build2 (MEM_REF, TREE_TYPE (expr),
+                                 base, TREE_OPERAND (expr, 1));
+           }
+         return expr;
         }
       case ARRAY_REF:
         {
@@ -153,9 +152,7 @@ get_non_ssa_expr (tree expr)
               && !gimple_nop_p (SSA_NAME_DEF_STMT (expr)))
             {
               gimple def_stmt = SSA_NAME_DEF_STMT (expr);
-              if (is_gimple_assign (def_stmt)
-                  && (get_gimple_rhs_class (gimple_assign_rhs_code (def_stmt))
-                      == GIMPLE_SINGLE_RHS))
+              if (gimple_assign_single_p (def_stmt))
                 vdecl = gimple_assign_rhs1 (def_stmt);
             }
           return get_non_ssa_expr (vdecl);
@@ -201,9 +198,7 @@ warn_self_assign (gimple stmt)
   tree rhs, lhs;
 
   /* Check assigment statement.  */
-  if (is_gimple_assign (stmt)
-      && (get_gimple_rhs_class (gimple_assign_rhs_code (stmt))
-          == GIMPLE_SINGLE_RHS))
+  if (gimple_assign_single_p (stmt))
     {
       rhs = get_real_ref_rhs (gimple_assign_rhs1 (stmt));
       if (!rhs)
index 8b4ba6fcdb224fceb72cb59114d0677ea31566eb..43a2d14f981f18ad252a806a4cda3bf69e28f786 100644 (file)
@@ -44,7 +44,7 @@ foo2(unsigned char * to, const unsigned char * from, int n)
       *to = *from;
       break;
     case 5:
-      to[4] = from [4]; /* { dg-warning "20:array subscript is above array bounds" } */
+      to[4] = from [4]; /* { dg-warning "array subscript is above array bounds" } */
       break;
     }
   return to;
index cbfd0bc72bbefe0673fe9738f34fdf3e092b9fe4..ca3643e43de7558531f08e3dfb1a232658112ab6 100644 (file)
@@ -26,5 +26,5 @@ main ()
 }
 
 /*--------------------------------------------------------------------------*/
-/* { dg-final-use { scan-ipa-dump "Number of structures to transform is 1" "ipa_struct_reorg" } } */
+/* { dg-final-use { scan-ipa-dump "Number of structures to transform is 1" "ipa_struct_reorg" { xfail *-*-* } } } */
 /* { dg-final-use { cleanup-ipa-dump "*" } } */
index c51648c4ef070ce64c6430fb45465ca334d12159..baff45d917db1f7ccd7f1105abdcf3ae6da38788 100644 (file)
@@ -39,5 +39,5 @@ main ()
 }
 
 /*--------------------------------------------------------------------------*/
-/* { dg-final-use { scan-ipa-dump "Number of structures to transform is 1" "ipa_struct_reorg" { xfail { "avr-*-*" } } } } */
+/* { dg-final-use { scan-ipa-dump "Number of structures to transform is 1" "ipa_struct_reorg" { xfail *-*-* } } } */
 /* { dg-final-use { cleanup-ipa-dump "*" } } */
index 1a798e399d3f90cb8310111470009f4900c57b2d..8953264fea19d22047a5b814eccb88de4230baa8 100644 (file)
@@ -34,5 +34,5 @@ main ()
 }
 
 /*--------------------------------------------------------------------------*/
-/* { dg-final-use { scan-ipa-dump "Number of structures to transform is 1" "ipa_struct_reorg" { xfail { "avr-*-*" } } } } */
+/* { dg-final-use { scan-ipa-dump "Number of structures to transform is 1" "ipa_struct_reorg" { xfail *-*-* } } } */
 /* { dg-final-use { cleanup-ipa-dump "*" } } */
index 82f24a1a4b19ade2075b3944701ae5d42db6b426..9a23f8d53a8bd6c1a2f2c6258e1b1595b5b65f0b 100644 (file)
@@ -37,5 +37,5 @@ main ()
 }
 
 /*--------------------------------------------------------------------------*/
-/* { dg-final-use { scan-ipa-dump "Number of structures to transform is 1" "ipa_struct_reorg" } } */
+/* { dg-final-use { scan-ipa-dump "Number of structures to transform is 1" "ipa_struct_reorg" { xfail *-*-* } } } */
 /* { dg-final-use { cleanup-ipa-dump "*" } } */
index b2223c88b8bcce4d207078c5f21226aedb3d6d11..b020239910434fbe826908ff3f83b24a09640072 100644 (file)
@@ -28,6 +28,6 @@ main ()
 }
 
 /*--------------------------------------------------------------------------*/
-/* { dg-final-use { scan-ipa-dump "Number of structures to transform is 1" "ipa_struct_reorg" { xfail { "avr-*-*" } } } } */
+/* { dg-final-use { scan-ipa-dump "Number of structures to transform is 1" "ipa_struct_reorg" { xfail *-*-* } } } */
 /* { dg-final-use { cleanup-ipa-dump "*" } } */
 
index 308c30e4fd7ac4d6d8629deac13f971a53cef6ff..e9de98c3b8a9b7736712be7453a41609a7d7c69a 100644 (file)
@@ -61,6 +61,6 @@ main ()
 }
 
 /*--------------------------------------------------------------------------*/
-/* { dg-final-use { scan-ipa-dump "Number of structures to transform is 2" "ipa_struct_reorg" } } */
+/* { dg-final-use { scan-ipa-dump "Number of structures to transform is 2" "ipa_struct_reorg" { xfail *-*-* } } } */
 /* { dg-final-use { cleanup-ipa-dump "*" } } */
 
index 19de595e8ed815b43d62a0cd42c8b68c7f8993f5..3c26e3be7b67c5f110e966aab6f4a41a495383df 100644 (file)
@@ -39,5 +39,5 @@ main ()
 }
 
 /*--------------------------------------------------------------------------*/
-/* { dg-final-use { scan-ipa-dump "Number of structures to transform is 1" "ipa_struct_reorg" } } */
+/* { dg-final-use { scan-ipa-dump "Number of structures to transform is 1" "ipa_struct_reorg" { xfail *-*-* } } } */
 /* { dg-final-use { cleanup-ipa-dump "*" } } */
index 12c217aad97ec9f8924b2d8489e611691ddfa791..5d5e37b4f0e6b831bc06f4bed6e3c552adfeecac 100644 (file)
@@ -23,5 +23,5 @@ int main()
 }
 
 /*--------------------------------------------------------------------------*/
-/* { dg-final { scan-ipa-dump "Number of structures to transform is 1" "ipa_struct_reorg" { xfail { "avr-*-*" } } } } */
+/* { dg-final { scan-ipa-dump "Number of structures to transform is 1" "ipa_struct_reorg" { xfail *-*-* } } } */
 /* { dg-final { cleanup-ipa-dump "*" } } */
index abf50a54fec6328c91981403a7fd42e324ec5589..efb68be8bdd7da7883e1c01f594c24686c5dd689 100644 (file)
@@ -35,5 +35,5 @@ main ()
   return 0;
 }
 
-/* { dg-final { scan-ipa-dump "Number of structures to transform is 1" "ipa_struct_reorg" } } */
+/* { dg-final { scan-ipa-dump "Number of structures to transform is 1" "ipa_struct_reorg" { xfail *-*-* } } } */
 /* { dg-final { cleanup-ipa-dump "*" } } */
index b1508eec5c7e330f32da6e5461c853fd094063b6..77226b449c5d7fcd22e7b6569b5e500cd47972fd 100644 (file)
@@ -26,5 +26,5 @@ int main()
 }
 
 /*--------------------------------------------------------------------------*/
-/* { dg-final { scan-ipa-dump "Number of structures to transform is 1" "ipa_struct_reorg" { xfail { "avr-*-*" } } } } */
+/* { dg-final { scan-ipa-dump "Number of structures to transform is 1" "ipa_struct_reorg" { xfail *-*-* } } } */
 /* { dg-final { cleanup-ipa-dump "*" } } */
index db7f6b09cb5f7f84f319004908a9868515d79847..58411202767a8c718641828716e62adac55072b2 100644 (file)
@@ -43,5 +43,5 @@ main ()
 }
 
 /*--------------------------------------------------------------------------*/
-/* { dg-final { scan-ipa-dump "No structures to transform" "ipa_struct_reorg" { xfail { "avr-*-*" } } } } */
+/* { dg-final { scan-ipa-dump "No structures to transform" "ipa_struct_reorg" { xfail *-*-* } } } */
 /* { dg-final { cleanup-ipa-dump "*" } } */
index e6638b73a6057373f343886f029a88851794475d..781a847f9f735908a271ba9f63fe1e3c9fda77ed 100644 (file)
@@ -42,5 +42,5 @@ main ()
 }
 
 /*--------------------------------------------------------------------------*/
-/* { dg-final { scan-ipa-dump "is passed to local function...Excluded." "ipa_struct_reorg" } } */
+/* { dg-final { scan-ipa-dump "is passed to local function...Excluded." "ipa_struct_reorg" { xfail *-*-* } } } */
 /* { dg-final { cleanup-ipa-dump "*" } } */
index 8830c55601d6fe14fe9f6c008ab8e078a41c3175..8892fa99098ef52b2b0b841bf49b3e61ea16f2c1 100644 (file)
@@ -29,5 +29,5 @@ main ()
 }
 
 /*--------------------------------------------------------------------------*/
-/* { dg-final { scan-ipa-dump "is return type of function...Excluded" "ipa_struct_reorg" } } */
+/* { dg-final { scan-ipa-dump "is return type of function...Excluded" "ipa_struct_reorg" { xfail *-*-* } } } */
 /* { dg-final { cleanup-ipa-dump "*" } } */
index 5f634fc119cf2497280727e3c363fc6433d7624f..6da3420f6e88ac1e29a13d6ca13ecdce004badb7 100644 (file)
@@ -28,6 +28,6 @@ main ()
 }
 
 /*--------------------------------------------------------------------------*/
-/* { dg-final { scan-ipa-dump "is initialized...Excluded" "ipa_struct_reorg" } } */
+/* { dg-final { scan-ipa-dump "is initialized...Excluded" "ipa_struct_reorg" { xfail *-*-* } } } */
 /* { dg-final { cleanup-ipa-dump "*" } } */
 
index 55a644e197172e63342eac5be54b3db1d077bf54..bd03ec42f0534a905ce6ec570d1b5f796043eded 100644 (file)
@@ -30,5 +30,5 @@ main ()
 }
 
 /*--------------------------------------------------------------------------*/
-/* { dg-final { scan-ipa-dump "is a field in the structure" "ipa_struct_reorg" } } */
+/* { dg-final { scan-ipa-dump "is a field in the structure" "ipa_struct_reorg" { xfail *-*-* } } } */
 /* { dg-final { cleanup-ipa-dump "*" } } */
index 4cb94afc5df4065b82c70917c67a4b7707716763..59e0e48321a5052bb95504a794d1837944bfce43 100644 (file)
@@ -45,5 +45,5 @@ main (void)
 }
 
 /*--------------------------------------------------------------------------*/
-/* { dg-final { scan-ipa-dump "is a field in the structure" "ipa_struct_reorg" } } */
+/* { dg-final { scan-ipa-dump "is a field in the structure" "ipa_struct_reorg" { xfail *-*-* } } } */
 /* { dg-final { cleanup-ipa-dump "*" } } */
index bade315390903d46122b50d7d49494ead056df62..a6b32ddd688c3dd28c2e05569f7edaea60d273ce 100644 (file)
@@ -42,5 +42,5 @@ main ()
 }
 
 /*--------------------------------------------------------------------------*/
-/* { dg-final { scan-ipa-dump "is a field in the structure" "ipa_struct_reorg" } } */
+/* { dg-final { scan-ipa-dump "is a field in the structure" "ipa_struct_reorg" { xfail *-*-* } } } */
 /* { dg-final { cleanup-ipa-dump "*" } } */
index 4ffd4b5a265a521d67216eb1b7ba618fbf62e57c..b61e26b12484dbff4654afa2f7953e10afacbc21 100644 (file)
@@ -29,5 +29,5 @@ main ()
 }
 
 /*--------------------------------------------------------------------------*/
-/* { dg-final { scan-ipa-dump "Number of structures to transform is 1" "ipa_struct_reorg" { xfail { "avr-*-*" } } } } */
+/* { dg-final { scan-ipa-dump "Number of structures to transform is 1" "ipa_struct_reorg" { xfail *-*-* } } } */
 /* { dg-final { cleanup-ipa-dump "*" } } */
index 4c00d02434ab42ecf68e4e115981b1ed101d37e7..39351e072e2901af8898aaf0da55dc3f1a1255b4 100644 (file)
@@ -42,5 +42,5 @@ main ()
 }
 
 /*--------------------------------------------------------------------------*/
-/* { dg-final { scan-ipa-dump "Number of structures to transform is 1" "ipa_struct_reorg" { xfail { "avr-*-*" } } } } */
+/* { dg-final { scan-ipa-dump "Number of structures to transform is 1" "ipa_struct_reorg" { xfail *-*-* } } } */
 /* { dg-final { cleanup-ipa-dump "*" } } */
index 3d92da4051719cd9ff7fedc0ef7847f1c2192d50..18d5a734740e713be2503085bb04cbaff0d934d5 100644 (file)
@@ -37,5 +37,5 @@ main ()
 }
 
 /*--------------------------------------------------------------------------*/
-/* { dg-final { scan-ipa-dump "Number of structures to transform is 1" "ipa_struct_reorg" { xfail { "avr-*-*" } } } } */
+/* { dg-final { scan-ipa-dump "Number of structures to transform is 1" "ipa_struct_reorg" { xfail *-*-* } } } */
 /* { dg-final { cleanup-ipa-dump "*" } } */
index a6fd7be43716e7903d40bcea6cb11acba97842ae..26a9dbd731430d826e4a2291a4dde887c43dfe40 100644 (file)
@@ -40,5 +40,5 @@ main ()
 }
 
 /*--------------------------------------------------------------------------*/
-/* { dg-final { scan-ipa-dump "Number of structures to transform is 1" "ipa_struct_reorg" { xfail { "avr-*-*" } } } } */
+/* { dg-final { scan-ipa-dump "Number of structures to transform is 1" "ipa_struct_reorg" { xfail *-*-* } } } */
 /* { dg-final { cleanup-ipa-dump "*" } } */
index 04b8b6621fb0416208c194fd9adf1c8da0697c60..6caadcf12305ace627fcda1e117bd853cdc2a943 100644 (file)
@@ -44,5 +44,5 @@ main ()
 }
 
 /*--------------------------------------------------------------------------*/
-/* { dg-final { scan-ipa-dump "Number of structures to transform is 1" "ipa_struct_reorg" { xfail { "avr-*-*" } } } } */
+/* { dg-final { scan-ipa-dump "Number of structures to transform is 1" "ipa_struct_reorg" { xfail *-*-* } } } */
 /* { dg-final { cleanup-ipa-dump "*" } } */
index 22193f2ae57c5f93167f496c3b960e25bf3a1354..812763d53ff846f55814a4fb388a8daa1427a426 100644 (file)
@@ -31,5 +31,5 @@ main ()
 }
 
 /*--------------------------------------------------------------------------*/
-/* { dg-final { scan-ipa-dump "Number of structures to transform is 1" "ipa_struct_reorg" { xfail { "avr-*-*" } } } } */
+/* { dg-final { scan-ipa-dump "Number of structures to transform is 1" "ipa_struct_reorg" { xfail *-*-* } } } */
 /* { dg-final { cleanup-ipa-dump "*" } } */
index e17ca56ee8b5c830544fd6e8d8c75401537a4a96..313af1080c1d293c5c9049b769ffae064ce0a810 100644 (file)
@@ -31,5 +31,5 @@ main ()
 }
 
 /*--------------------------------------------------------------------------*/
-/* { dg-final { scan-ipa-dump "No structures to transform" "ipa_struct_reorg" { xfail { "avr-*-*" } } } } */
+/* { dg-final { scan-ipa-dump "No structures to transform" "ipa_struct_reorg" { xfail *-*-* } } } */
 /* { dg-final { cleanup-ipa-dump "*" } } */
index 24dae559829eb50bf84479c8e9dc615e50816191..92da94d29239df51ebc80178c6b6fc27c270b471 100644 (file)
@@ -64,5 +64,5 @@ main ()
 }
 
 /*--------------------------------------------------------------------------*/
-/* { dg-final { scan-ipa-dump "Number of structures to transform is 2" "ipa_struct_reorg" { xfail { "avr-*-*" } } } } */
+/* { dg-final { scan-ipa-dump "Number of structures to transform is 2" "ipa_struct_reorg" { xfail *-*-* } } } */
 /* { dg-final { cleanup-ipa-dump "*" } } */
index d32a6cedc7a64630b254bd686b5f9534aeabb19e..0f477b2376c06f672d583a2b87e4c7b0eedfb19d 100644 (file)
@@ -16,7 +16,7 @@ main()
 /* { dg-final-use { scan-tree-dump "Single value 4 stringop" "tree_profile"} } */
 /* Really this ought to simplify into assignment, but we are not there yet.  */
 /* a[0] = b[0] is what we fold the resulting memcpy into.  */
-/* { dg-final-use { scan-tree-dump "a.0. = " "optimized"} } */
-/* { dg-final-use { scan-tree-dump "= b.0." "optimized"} } */
+/* { dg-final-use { scan-tree-dump " = MEM.*&b" "optimized"} } */
+/* { dg-final-use { scan-tree-dump "MEM.*&a\\\] = " "optimized"} } */
 /* { dg-final-use { cleanup-tree-dump "optimized" } } */
 /* { dg-final-use { cleanup-tree-dump "tree_profile" } } */
index f9f5fb798efbb9d7ab59f82f9a14a59905791d9f..ed6f7c0d5d5d8e688e5de8dbe8dfff6bd20fe6ad 100644 (file)
@@ -33,5 +33,5 @@ simplify_condition (cond_p)
 }
 
 /* There should be exactly one IF conditional.  */
-/* { dg-final { scan-tree-dump-times "if " 1 "vrp1" } } */
+/* { dg-final { scan-tree-dump-times "if " 1 "vrp1" { xfail *-*-* } } } */
 /* { dg-final { cleanup-tree-dump "vrp1" } } */
diff --git a/gcc/testsuite/gcc.dg/tree-ssa/foldaddr-2.c b/gcc/testsuite/gcc.dg/tree-ssa/foldaddr-2.c
deleted file mode 100644 (file)
index dc764c3..0000000
+++ /dev/null
@@ -1,13 +0,0 @@
-/* { dg-do compile } */
-/* { dg-options "-O1 -fdump-tree-optimized" } */
-struct a{
-       int a;
-       int b;
-} a;
-int *
-t()
-{
-       return (int *)&a;
-}
-/* { dg-final { scan-tree-dump "a.a" "optimized"} } */
-/* { dg-final { cleanup-tree-dump "optimized" } } */
diff --git a/gcc/testsuite/gcc.dg/tree-ssa/foldaddr-3.c b/gcc/testsuite/gcc.dg/tree-ssa/foldaddr-3.c
deleted file mode 100644 (file)
index b764187..0000000
+++ /dev/null
@@ -1,28 +0,0 @@
-/* { dg-do compile } */
-/* { dg-options "-O1 -fdump-tree-optimized" } */
-union a
-{
-  struct s1
-  {
-    long long a;
-    long long b;
-  } s1;
-  struct s2
-  {
-    int c;
-    int d;
-  } s2;
-  struct s3
-  {
-    unsigned long long e;
-    unsigned long long f;
-  } s3;
-} a;
-int *
-t ()
-{
-  return (int *) &a;
-}
-
-/* { dg-final { scan-tree-dump "a.s2.c" "optimized"} } */
-/* { dg-final { cleanup-tree-dump "optimized" } } */
index ee3cb0ebfe52320f4be1d157647e4e0689700e75..7b07d7f488f4fa6f69abbf4636536d56b9a6da39 100644 (file)
@@ -15,5 +15,5 @@ void f(struct a * b, __SIZE_TYPE__ i)
   c[i] = 1;
 }
 
-/* { dg-final { scan-tree-dump-times "t\\\[i.*\\\] =.* 1;" 1 "forwprop1" } } */
+/* { dg-final { scan-tree-dump-times "t\\\[i.*\\\].* = 1;" 1 "forwprop1" } } */
 /* { dg-final { cleanup-tree-dump "forwprop1" } } */
index 0b865245a9659497a5358c349529e914cd9cc2bf..b46b8ece81606836484ee8a793db3a55e910090c 100644 (file)
@@ -1,5 +1,5 @@
 /* { dg-do compile } */
-/* { dg-options "-O -fdump-tree-forwprop1" } */
+/* { dg-options "-O -fdump-tree-fre-details" } */
 
 int b;
 unsigned a;
@@ -15,9 +15,8 @@ void test2(void)
 }
 
 /* The indirect load should be replaced by a load from a and a
-   conversion to int.  */
+   conversion to int.  FRE should then be able to replace
+   the rhs of the store to b by 1.  */
 
-/* { dg-final { scan-tree-dump "= a;" "forwprop1" } } */
-/* { dg-final { scan-tree-dump "= \\\(int\\\) " "forwprop1" } } */
-/* { dg-final { scan-tree-dump-not "= \\\*" "forwprop1" } } */
-/* { dg-final { cleanup-tree-dump "forwprop1" } } */
+/* { dg-final { scan-tree-dump "Replaced\[^\\n\]*with 1" "fre" } } */
+/* { dg-final { cleanup-tree-dump "fre" } } */
index fe04e66fa1a54b6655341d163edc98bea2815310..2aefb9ace63a2fb7ee4d6948c0fb561fb343ec2b 100644 (file)
@@ -17,5 +17,5 @@ void f(__SIZE_TYPE__ i)
   c[i] = 1;
 }
 
-/* { dg-final { scan-tree-dump-times "t\\\[i.*\\\] =.* 1;" 1 "forwprop1" } } */
+/* { dg-final { scan-tree-dump-times "t\\\[i.*\\\].* = 1;" 1 "forwprop1" } } */
 /* { dg-final { cleanup-tree-dump "forwprop?" } } */
index 710bc5dd1144b6c82db7d2eb6ebcd33ba6b656a6..1b68d5a35f02f48d18e717c61f3a6068f8463cec 100644 (file)
@@ -1,5 +1,5 @@
 /* { dg-do compile } */
-/* { dg-options "-O1 -fdump-tree-forwprop1 -w" } */
+/* { dg-options "-O1 -fdump-tree-esra -w" } */
 
 #define vector __attribute__((vector_size(16) ))
 struct VecClass
@@ -15,7 +15,8 @@ vector float foo( vector float v )
     return y.v;
 }
 
-/* We should be able to convert the cast to a VCE in forwprop1. */
-/* { dg-final { scan-tree-dump-times "VIEW_CONVERT_EXPR" 1 "forwprop1"} } */
-/* { dg-final { cleanup-tree-dump "forwprop1" } } */
-
+/* We should be able to remove the intermediate struct and directly
+   return x.  As we do not fold VIEW_CONVERT_EXPR<struct VecClass>(x).v
+   that doesn't happen right now.  */
+/* { dg-final { scan-tree-dump-times "VIEW_CONVERT_EXPR" 1 "esra"} } */
+/* { dg-final { cleanup-tree-dump "esra" } } */
index 4e0751f81199ac7cea4c19335cd5b59c08c1bc8a..fc74297242cce1b65f829545cf5779bb5be1aaae 100644 (file)
@@ -12,5 +12,5 @@ int foo(struct X *q)
 
 /* We should have propragated &q->a into (*pointer).  */
 /* { dg-final { scan-tree-dump-times "pointer" 0 "forwprop1"} } */
-/* { dg-final { scan-tree-dump "->a\\\[0\\\]" "forwprop1" } } */
+/* { dg-final { scan-tree-dump "\\\[0\\\]" "forwprop1" } } */
 /* { dg-final { cleanup-tree-dump "forwprop1" } } */
index 9733d8f99d84fb34493ea08fb4060c077c7402ef..4b8239e4531603aec8489353e8445c3e90bc25d4 100644 (file)
@@ -1,5 +1,5 @@
 /* { dg-do compile } */ 
-/* { dg-options "-O2 -fdump-tree-pre-stats" } */
+/* { dg-options "-O2 -fdump-tree-pre-stats -fdump-tree-fre" } */
 #include <stddef.h>
 
 union tree_node;
@@ -72,7 +72,9 @@ main (void)
   remove_useless_vars (&unexpanded_var_list, 0);
   return 0;
 }
-/* { dg-final { scan-tree-dump-times "Eliminated: 1" 1 "pre" } } */
+
+/* See PR44656.  The last elimination is only done by PRE.  */
+/* { dg-final { scan-tree-dump-not "= unexpanded_var_list;" "fre" { xfail *-*-* } } } */
+/* { dg-final { scan-tree-dump-times "Eliminated: 1" 2 "pre" } } */
 /* { dg-final { scan-tree-dump-times "Insertions: 2" 1 "pre" } } */
 /* { dg-final { cleanup-tree-dump "pre" } } */
-
index af86751500258af83b35d64ab828b0a22dfb5888..853fe70c6233b0d36d2e2b59088051478a11b39c 100644 (file)
@@ -1,5 +1,5 @@
 /* { dg-do compile } */
-/* { dg-options "-O2 -fdump-tree-forwprop1" } */
+/* { dg-options "-O -fdump-tree-forwprop1" } */
 
 struct A { int i; };
 int
@@ -11,5 +11,7 @@ foo(struct A *locp, int str)
   return locp->i;
 }
 
-/* { dg-final { scan-tree-dump "locp.*->i =" "forwprop1" } } */
+/* We should have propagated &locp->i into its dereference.  */
+
+/* { dg-final { scan-tree-dump "locp_\[^\\n\]* =" "forwprop1" } } */
 /* { dg-final { cleanup-tree-dump "forwprop1" } } */
index b61674dff3ac8967d3910e6071d768435e36cad6..6db9ba0d8719a74caf187aad32cf2d61a36bb09a 100644 (file)
@@ -1,5 +1,5 @@
 /* { dg-do compile } */
-/* { dg-options "-O2 -fno-tree-ccp -fdump-tree-alias" } */
+/* { dg-options "-O2 -fno-tree-ccp -fdump-tree-ealias" } */
 
 extern void abort (void);
 struct X {
@@ -22,5 +22,5 @@ foo(int i, int j, int k, int off)
   return *q;
 }
 
-/* { dg-final { scan-tree-dump "q_., points-to vars: { k }" "alias" } } */
-/* { dg-final { cleanup-tree-dump "alias" } } */
+/* { dg-final { scan-tree-dump "q_., points-to vars: { k }" "ealias" } } */
+/* { dg-final { cleanup-tree-dump "ealias" } } */
index adb01b2316527f5b460ae0706ce0bf4a9a59c5a8..85b96b190e211cdc23cae095f5f1c577b5ab9869 100644 (file)
@@ -1,5 +1,5 @@
 /* { dg-do compile } */
-/* { dg-options "-O2 -fno-tree-ccp -fdump-tree-alias" } */
+/* { dg-options "-O2 -fno-tree-ccp -fdump-tree-ealias" } */
 
 extern void abort (void);
 struct X {
@@ -22,5 +22,5 @@ foo(int i, int j, int k, int off)
   return *q;
 }
 
-/* { dg-final { scan-tree-dump "q_., points-to vars: { i }" "alias" } } */
-/* { dg-final { cleanup-tree-dump "alias" } } */
+/* { dg-final { scan-tree-dump "q_., points-to vars: { i }" "ealias" } } */
+/* { dg-final { cleanup-tree-dump "ealias" } } */
index 3b23c36238e7d57b34013992de28e59d3f21b6f3..3f4adf434833f477e299d96194b7e788a221f8ed 100644 (file)
@@ -21,5 +21,9 @@ int bar (void)
   return q->i;
 }
 
-/* { dg-final { scan-tree-dump-times "a.b.i" 2 "ccp1" } } */
+/* The first access is through struct A, so a.b.i is fine,
+   the second access needs to preserve the original access type struct B.  */
+
+/* { dg-final { scan-tree-dump-times "a.b.i" 1 "ccp1" } } */
+/* { dg-final { scan-tree-dump-times "MEM\\\[\\\(struct B \\\*\\\)&a\\\].i" 1 "ccp1" } } */
 /* { dg-final { cleanup-tree-dump "ccp1" } } */
index ac7f068cfd14eec9c09ab6c0294927962635c326..fb4af2d484dbe6ab243dcf0e1ec3514e30723075 100644 (file)
@@ -15,5 +15,5 @@ int foo (void)
   return *x;
 }
 
-/* { dg-final { scan-tree-dump "a.i\\\[1\\\]" "ccp1" } } */
+/* { dg-final { scan-tree-dump "MEM\\\[\\\(int \\\*\\\)&a \\\+ 4B\\\]" "ccp1" } } */
 /* { dg-final { cleanup-tree-dump "ccp1" } } */
index f29c4942ace0460a2dc1ddcce4cd3b191448f7f7..7912a57f09ed0f6f1fd649d026f36db74f067355 100644 (file)
@@ -9,6 +9,6 @@ int foo(int i)
 }
 
 /* { dg-final { scan-tree-dump "&a\\\[\[iD\]\\\." "ccp1" } } */
-/* { dg-final { scan-tree-dump "= a\\\[\[iD\]\\\." "forwprop1" } } */
+/* { dg-final { scan-tree-dump "= .*&a\\\]\\\[\[iD\]\\\." "forwprop1" } } */
 /* { dg-final { cleanup-tree-dump "ccp1" } } */
 /* { dg-final { cleanup-tree-dump "forwprop1" } } */
index 542c42939607b8434838a1a5130b1eb8061994a9..c0a548155cb4b011b8787683cd491d2ef47ef23a 100644 (file)
@@ -7,5 +7,5 @@ int foo(int i)
   return (a + 1)[i];
 }
 
-/* { dg-final { scan-tree-dump "= a\\\[D\\\." "forwprop1" } } */
+/* { dg-final { scan-tree-dump "=.*&a\\\]\\\[D\\\." "forwprop1" } } */
 /* { dg-final { cleanup-tree-dump "forwprop1" } } */
index 0cb63242c0e9a6f5079d0153e737b34a944596c3..b9ec6bc720f7ce752c260bcc7dc3352681fd407d 100644 (file)
@@ -11,6 +11,5 @@ int f(int *a)
   return *c + t;
 }
 
-/* { dg-final { scan-tree-dump "Replaced \\\(int \\\*\\\) b_.*with a_" "fre" } } */
-/* { dg-final { scan-tree-dump "Replaced \\\*c_.*with t_" "fre" } } */
+/* { dg-final { scan-tree-dump "Replaced \\\*a_\[^\n\].*with t_" "fre" } } */
 /* { dg-final { cleanup-tree-dump "fre" } } */
index ae9eb5a2002de2e4426d9527f5f4d197c4b5bcf4..972e6c69ef9f97391e1141187742a668647252f2 100644 (file)
@@ -23,6 +23,5 @@ void foo(double (*q)[4], struct Foo *tmp1)
     }
 }
 
-/* { dg-final { scan-tree-dump "Inserted .* &a" "fre" } } */
-/* { dg-final { scan-tree-dump "Replaced tmp1_.\\\(D\\\)->data" "fre" } } */
+/* { dg-final { scan-tree-dump "Replaced tmp1_.\\\(D\\\)->data with &a" "fre" } } */
 /* { dg-final { cleanup-tree-dump "fre" } } */
index 81b82fe48803cdbcc9bc8e77aad79163d1d95499..9d1b3f2ba92de5a413c2d36afcd6f3ada014af1a 100644 (file)
@@ -27,6 +27,5 @@ void foo(double (*q)[4])
   bar(a);
 }
 
-/* { dg-final { scan-tree-dump "Inserted .* &a" "fre" } } */
-/* { dg-final { scan-tree-dump "Replaced tmp1.data" "fre" } } */
+/* { dg-final { scan-tree-dump "Replaced tmp1.data with &a" "fre" } } */
 /* { dg-final { cleanup-tree-dump "fre" } } */
index 978b7abab3af2a00722b6d1c33f169dd0a39589e..b986bdd92a9f1955292a8ff6fef6806b227775f8 100644 (file)
@@ -1,5 +1,5 @@
 /* { dg-do compile } */
-/* { dg-options "-Os -fdump-tree-pre-details" } */
+/* { dg-options "-O -fdump-tree-fre-details" } */
 
 typedef union
 {
@@ -23,5 +23,5 @@ void foo(SA* pResult, SB* method, SC* self)
     pResult->data = pResult->data;
 }
 
-/* { dg-final { scan-tree-dump "Deleted redundant store" "pre" } } */
-/* { dg-final { cleanup-tree-dump "pre" } } */
+/* { dg-final { scan-tree-dump "Deleted redundant store" "fre" } } */
+/* { dg-final { cleanup-tree-dump "fre" } } */
index c87f62e69c6775cb1db43b290daffc7d3e5f7c07..f87476a3a9f323cc12c81f802dda0b803720fd2a 100644 (file)
@@ -7,6 +7,6 @@ foo (int *array)
           return array[1];
       return 0;
 }
-/* We should eliminate one address calculation, and one load.  */
-/* { dg-final { scan-tree-dump-times "Eliminated: 2" 1 "fre"} } */
+/* We should eliminate one load.  */
+/* { dg-final { scan-tree-dump-times "Eliminated: 1" 1 "fre"} } */
 /* { dg-final { cleanup-tree-dump "fre" } } */
index fe41966a6d4f4e8207c9994d9eca541638c2a9e1..7caf4eec6f8f948ee6dbe990f7cd8ecfdf100ee0 100644 (file)
@@ -23,5 +23,5 @@ int vnum_test8(int *data)
 } 
 /* We should eliminate m - n, n + k, set data[5] = 0, eliminate the
    address arithmetic for data[5], and set p = 0.
-/* { dg-final { scan-tree-dump-times "Eliminated: 7" 1 "fre"} } */
+/* { dg-final { scan-tree-dump-times "Eliminated: 5" 1 "fre"} } */
 /* { dg-final { cleanup-tree-dump "fre" } } */
index 78e92d769e4bde29c6b53082ac32cb06e653aab4..7dc7833647b9b1001fd13d091f90fa78ff072bde 100644 (file)
@@ -1,5 +1,5 @@
 /* { dg-do "compile" } */
-/* { dg-options "-O2 -fdump-tree-fre" } */
+/* { dg-options "-O2 -fdump-tree-fre-details" } */
 
 struct S { float f; };
 int __attribute__((noinline))
@@ -11,5 +11,5 @@ foo (float *r, struct S *p)
   return i + *q;
 }
 
-/* { dg-final { scan-tree-dump-times "\\\*q" 1 "fre" } } */
+/* { dg-final { scan-tree-dump "Replaced\[^\n\]*with i_." "fre" } } */
 /* { dg-final { cleanup-tree-dump "fre" } } */
index aa9e142bba07b5583d6f78eec64834bc2949cc72..812dac928da8136b6337789438216d7cbe70205a 100644 (file)
@@ -12,7 +12,8 @@ foo ( struct S *p)
 }
 
 
-/*  There should only be one load of p->f because fwprop can change *(int *)&p->f into just (int)p->f.  */
-/* { dg-final { scan-tree-dump-times "p_.\\\(D\\\)->f" 1 "fre" } } */
+/* There should only be one load of p->f because fwprop can change
+   *(int *)&p->f into just (int)p->f.  */
+/* { dg-final { scan-tree-dump-times "= \[^\n\]*p_.\\\(D\\\)" 1 "fre" } } */
 /* { dg-final { cleanup-tree-dump "fre" } } */
 
index 4bfc441c862f510c4c0710c427a5513ea91e0797..d663123c83db97e5437baa809ea72a9af56d2555 100644 (file)
@@ -1,5 +1,5 @@
 /* { dg-do compile } */
-/* { dg-options "-O2 -fdump-tree-gimple -fdump-tree-optimized" } */
+/* { dg-options "-O2 -fdump-tree-optimized" } */
 
 struct GTeth_desc
 {
@@ -12,14 +12,11 @@ struct GTeth_softc
 
 void foo(struct GTeth_softc *sc)
 {
-  /* Verify that we retain the cast to (volatile struct GTeth_desc *)
-     after gimplification and that we keep the volatileness on the
+  /* Verify that we retain the volatileness on the
      store until after optimization.  */
   volatile struct GTeth_desc *p = &sc->txq_desc[0];
   p->ed_cmdsts = 0;
 }
 
-/* { dg-final { scan-tree-dump "\\(volatile struct GTeth_desc \\*\\) D" "gimple" } } */
 /* { dg-final { scan-tree-dump "{v}" "optimized" } } */
-/* { dg-final { cleanup-tree-dump "gimple" } } */
 /* { dg-final { cleanup-tree-dump "optimized" } } */
index df6bd49ef26efca7a9507e09348e68a9fbbe9be0..78097308030300d4622286940219c58bc1f2ecfa 100644 (file)
@@ -11,5 +11,5 @@ subroutine bar(x)
   x = (/ 3, 1, 4, 1 /)
 end subroutine
 
-! { dg-final { scan-tree-dump-times "memcpy|ref-all" 2 "original" } }
+! { dg-final { scan-tree-dump-times "memcpy|(ref-all.*ref-all)" 2 "original" } }
 ! { dg-final { cleanup-tree-dump "original" } }
index 0f8b5cb15b0e5e1abd630fe3b6ba21d9bb66f65c..9f2279d881c7ead90b92c631aa70eb1cc8a6c61e 100644 (file)
@@ -9,5 +9,5 @@
 
   d = s
 end
-! { dg-final { scan-tree-dump-times "d = " 1 "original" } }
+! { dg-final { scan-tree-dump-times "MEM.*d\\\] = MEM" 1 "original" } }
 ! { dg-final { cleanup-tree-dump "original" } }
index 4d63de381c4a1c47cc384409db78aa8f22f4c0ef..a02d56cdc8d1fa742954e59648e3df89be23b275 100644 (file)
@@ -309,6 +309,15 @@ tree_to_aff_combination (tree expr, tree type, aff_tree *comb)
       return;
 
     case ADDR_EXPR:
+      /* Handle &MEM[ptr + CST] which is equivalent to POINTER_PLUS_EXPR.  */
+      if (TREE_CODE (TREE_OPERAND (expr, 0)) == MEM_REF)
+       {
+         expr = TREE_OPERAND (expr, 0);
+         tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
+         tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
+         aff_combination_add (comb, &tmp);
+         return;
+       }
       core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
                                  &toffset, &mode, &unsignedp, &volatilep,
                                  false);
@@ -331,6 +340,25 @@ tree_to_aff_combination (tree expr, tree type, aff_tree *comb)
        }
       return;
 
+    case MEM_REF:
+      if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
+       tree_to_aff_combination (TREE_OPERAND (TREE_OPERAND (expr, 0), 0),
+                                type, comb);
+      else if (integer_zerop (TREE_OPERAND (expr, 1)))
+       {
+         aff_combination_elt (comb, type, expr);
+         return;
+       }
+      else
+       aff_combination_elt (comb, type,
+                            build2 (MEM_REF, TREE_TYPE (expr),
+                                    TREE_OPERAND (expr, 0),
+                                    build_int_cst
+                                     (TREE_TYPE (TREE_OPERAND (expr, 1)), 0)));
+      tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
+      aff_combination_add (comb, &tmp);
+      return;
+
     default:
       break;
     }
index f28db37b7cb7499711068fdaa05dfabc9dede386..3b8ab9d9e7d5d45339417419e779e5dc78e69706 100644 (file)
@@ -2533,6 +2533,49 @@ gimple_split_edge (edge edge_in)
   return new_bb;
 }
 
+
+/* Verify properties of the address expression T with base object BASE.  */
+
+static tree
+verify_address (tree t, tree base)
+{
+  bool old_constant;
+  bool old_side_effects;
+  bool new_constant;
+  bool new_side_effects;
+
+  old_constant = TREE_CONSTANT (t);
+  old_side_effects = TREE_SIDE_EFFECTS (t);
+
+  recompute_tree_invariant_for_addr_expr (t);
+  new_side_effects = TREE_SIDE_EFFECTS (t);
+  new_constant = TREE_CONSTANT (t);
+
+  if (old_constant != new_constant)
+    {
+      error ("constant not recomputed when ADDR_EXPR changed");
+      return t;
+    }
+  if (old_side_effects != new_side_effects)
+    {
+      error ("side effects not recomputed when ADDR_EXPR changed");
+      return t;
+    }
+
+  if (!(TREE_CODE (base) == VAR_DECL
+       || TREE_CODE (base) == PARM_DECL
+       || TREE_CODE (base) == RESULT_DECL))
+    return NULL_TREE;
+
+  if (DECL_GIMPLE_REG_P (base))
+    {
+      error ("DECL_GIMPLE_REG_P set on a variable with address taken");
+      return base;
+    }
+
+  return NULL_TREE;
+}
+
 /* Callback for walk_tree, check that all elements with address taken are
    properly noticed as such.  The DATA is an int* that is 1 if TP was seen
    inside a PHI node.  */
@@ -2561,12 +2604,26 @@ verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
       break;
 
     case INDIRECT_REF:
+      error ("INDIRECT_REF in gimple IL");
+      return t;
+
+    case MEM_REF:
       x = TREE_OPERAND (t, 0);
-      if (!is_gimple_reg (x) && !is_gimple_min_invariant (x))
+      if (!is_gimple_mem_ref_addr (x))
        {
-         error ("Indirect reference's operand is not a register or a constant.");
+         error ("Invalid first operand of MEM_REF.");
          return x;
        }
+      if (TREE_CODE (TREE_OPERAND (t, 1)) != INTEGER_CST
+         || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 1))))
+       {
+         error ("Invalid offset operand of MEM_REF.");
+         return TREE_OPERAND (t, 1);
+       }
+      if (TREE_CODE (x) == ADDR_EXPR
+         && (x = verify_address (x, TREE_OPERAND (x, 0))))
+       return x;
+      *walk_subtrees = 0;
       break;
 
     case ASSERT_EXPR:
@@ -2584,31 +2641,10 @@ verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
 
     case ADDR_EXPR:
       {
-       bool old_constant;
-       bool old_side_effects;
-       bool new_constant;
-       bool new_side_effects;
+       tree tem;
 
        gcc_assert (is_gimple_address (t));
 
-       old_constant = TREE_CONSTANT (t);
-       old_side_effects = TREE_SIDE_EFFECTS (t);
-
-       recompute_tree_invariant_for_addr_expr (t);
-       new_side_effects = TREE_SIDE_EFFECTS (t);
-       new_constant = TREE_CONSTANT (t);
-
-        if (old_constant != new_constant)
-         {
-           error ("constant not recomputed when ADDR_EXPR changed");
-           return t;
-         }
-       if (old_side_effects != new_side_effects)
-         {
-           error ("side effects not recomputed when ADDR_EXPR changed");
-           return t;
-         }
-
        /* Skip any references (they will be checked when we recurse down the
           tree) and ensure that any variable used as a prefix is marked
           addressable.  */
@@ -2617,20 +2653,19 @@ verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
             x = TREE_OPERAND (x, 0))
          ;
 
+       if ((tem = verify_address (t, x)))
+         return tem;
+
        if (!(TREE_CODE (x) == VAR_DECL
              || TREE_CODE (x) == PARM_DECL
              || TREE_CODE (x) == RESULT_DECL))
          return NULL;
+
        if (!TREE_ADDRESSABLE (x))
          {
            error ("address taken, but ADDRESSABLE bit not set");
            return x;
          }
-       if (DECL_GIMPLE_REG_P (x))
-         {
-           error ("DECL_GIMPLE_REG_P set on a variable with address taken");
-           return x;
-         }
 
        break;
       }
@@ -2815,8 +2850,10 @@ verify_types_in_gimple_min_lval (tree expr)
   if (is_gimple_id (expr))
     return false;
 
-  if (!INDIRECT_REF_P (expr)
-      && TREE_CODE (expr) != TARGET_MEM_REF)
+  if (TREE_CODE (expr) != ALIGN_INDIRECT_REF
+      && TREE_CODE (expr) != MISALIGNED_INDIRECT_REF
+      && TREE_CODE (expr) != TARGET_MEM_REF
+      && TREE_CODE (expr) != MEM_REF)
     {
       error ("invalid expression for min lvalue");
       return true;
@@ -2833,14 +2870,7 @@ verify_types_in_gimple_min_lval (tree expr)
       debug_generic_stmt (op);
       return true;
     }
-  if (!useless_type_conversion_p (TREE_TYPE (expr),
-                                 TREE_TYPE (TREE_TYPE (op))))
-    {
-      error ("type mismatch in indirect reference");
-      debug_generic_stmt (TREE_TYPE (expr));
-      debug_generic_stmt (TREE_TYPE (TREE_TYPE (op)));
-      return true;
-    }
+  /* Memory references now generally can involve a value conversion.  */
 
   return false;
 }
@@ -2927,6 +2957,13 @@ verify_types_in_gimple_reference (tree expr, bool require_lvalue)
              debug_generic_stmt (expr);
              return true;
            }
+         else if (TREE_CODE (op) == SSA_NAME
+                  && TYPE_SIZE (TREE_TYPE (expr)) != TYPE_SIZE (TREE_TYPE (op)))
+           {
+             error ("Conversion of register to a different size.");
+             debug_generic_stmt (expr);
+             return true;
+           }
          else if (!handled_component_p (op))
            return false;
        }
@@ -2934,6 +2971,23 @@ verify_types_in_gimple_reference (tree expr, bool require_lvalue)
       expr = op;
     }
 
+  if (TREE_CODE (expr) == MEM_REF)
+    {
+      if (!is_gimple_mem_ref_addr (TREE_OPERAND (expr, 0)))
+       {
+         error ("Invalid address operand in MEM_REF.");
+         debug_generic_stmt (expr);
+         return true;
+       }
+      if (TREE_CODE (TREE_OPERAND (expr, 1)) != INTEGER_CST
+         || !POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 1))))
+       {
+         error ("Invalid offset operand in MEM_REF.");
+         debug_generic_stmt (expr);
+         return true;
+       }
+    }
+
   return ((require_lvalue || !is_gimple_min_invariant (expr))
          && verify_types_in_gimple_min_lval (expr));
 }
@@ -3642,9 +3696,12 @@ verify_gimple_assign_single (gimple stmt)
       }
 
     /* tcc_reference  */
+    case INDIRECT_REF:
+      error ("INDIRECT_REF in gimple IL");
+      return true;
+
     case COMPONENT_REF:
     case BIT_FIELD_REF:
-    case INDIRECT_REF:
     case ALIGN_INDIRECT_REF:
     case MISALIGNED_INDIRECT_REF:
     case ARRAY_REF:
@@ -3653,6 +3710,7 @@ verify_gimple_assign_single (gimple stmt)
     case REALPART_EXPR:
     case IMAGPART_EXPR:
     case TARGET_MEM_REF:
+    case MEM_REF:
       if (!is_gimple_reg (lhs)
          && is_gimple_reg_type (TREE_TYPE (lhs)))
        {
index c491ac8173c7f5d159c4551423da94edd42a0195..77b5c6a85fd4d43665dc324e56a986a6fb40937e 100644 (file)
@@ -596,10 +596,10 @@ extract_component (gimple_stmt_iterator *gsi, tree t, bool imagpart_p,
     case VAR_DECL:
     case RESULT_DECL:
     case PARM_DECL:
-    case INDIRECT_REF:
     case COMPONENT_REF:
     case ARRAY_REF:
     case VIEW_CONVERT_EXPR:
+    case MEM_REF:
       {
        tree inner_type = TREE_TYPE (TREE_TYPE (t));
 
index c41cf51f775e0b11e533db56f95638de49068a7d..7ab7779c5691b3c80ac0c92ef67d5bca728e2739 100644 (file)
@@ -746,7 +746,22 @@ dr_analyze_innermost (struct data_reference *dr)
       return false;
     }
 
-  base = build_fold_addr_expr (base);
+  if (TREE_CODE (base) == MEM_REF)
+    {
+      if (!integer_zerop (TREE_OPERAND (base, 1)))
+       {
+         if (!poffset)
+           {
+             double_int moff = mem_ref_offset (base);
+             poffset = double_int_to_tree (sizetype, moff);
+           }
+         else
+           poffset = size_binop (PLUS_EXPR, poffset, TREE_OPERAND (base, 1));
+       }
+      base = TREE_OPERAND (base, 0);
+    }
+  else
+    base = build_fold_addr_expr (base);
   if (in_loop)
     {
       if (!simple_iv (loop, loop_containing_stmt (stmt), base, &base_iv,
@@ -844,13 +859,18 @@ dr_analyze_indices (struct data_reference *dr, struct loop *nest)
       aref = TREE_OPERAND (aref, 0);
     }
 
-  if (nest && INDIRECT_REF_P (aref))
+  if (nest
+      && (INDIRECT_REF_P (aref)
+         || TREE_CODE (aref) == MEM_REF))
     {
       op = TREE_OPERAND (aref, 0);
       access_fn = analyze_scalar_evolution (loop, op);
       access_fn = instantiate_scev (before_loop, loop, access_fn);
       base = initial_condition (access_fn);
       split_constant_offset (base, &base, &off);
+      if (TREE_CODE (aref) == MEM_REF)
+       off = size_binop (PLUS_EXPR, off,
+                         fold_convert (ssizetype, TREE_OPERAND (aref, 1)));
       access_fn = chrec_replace_initial_condition (access_fn,
                        fold_convert (TREE_TYPE (base), off));
 
@@ -858,6 +878,22 @@ dr_analyze_indices (struct data_reference *dr, struct loop *nest)
       VEC_safe_push (tree, heap, access_fns, access_fn);
     }
 
+  if (TREE_CODE (aref) == MEM_REF)
+    TREE_OPERAND (aref, 1)
+      = build_int_cst (TREE_TYPE (TREE_OPERAND (aref, 1)), 0);
+
+  if (TREE_CODE (ref) == MEM_REF
+      && TREE_CODE (TREE_OPERAND (ref, 0)) == ADDR_EXPR
+      && integer_zerop (TREE_OPERAND (ref, 1)))
+    ref = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
+
+  /* For canonicalization purposes we'd like to strip all outermost
+     zero-offset component-refs.
+     ???  For now simply handle zero-index array-refs.  */
+  while (TREE_CODE (ref) == ARRAY_REF
+        && integer_zerop (TREE_OPERAND (ref, 1)))
+    ref = TREE_OPERAND (ref, 0);
+
   DR_BASE_OBJECT (dr) = ref;
   DR_ACCESS_FNS (dr) = access_fns;
 }
@@ -870,7 +906,8 @@ dr_analyze_alias (struct data_reference *dr)
   tree ref = DR_REF (dr);
   tree base = get_base_address (ref), addr;
 
-  if (INDIRECT_REF_P (base))
+  if (INDIRECT_REF_P (base)
+      || TREE_CODE (base) == MEM_REF)
     {
       addr = TREE_OPERAND (base, 0);
       if (TREE_CODE (addr) == SSA_NAME)
@@ -1188,7 +1225,8 @@ object_address_invariant_in_loop_p (const struct loop *loop, const_tree obj)
       obj = TREE_OPERAND (obj, 0);
     }
 
-  if (!INDIRECT_REF_P (obj))
+  if (!INDIRECT_REF_P (obj)
+      && TREE_CODE (obj) != MEM_REF)
     return true;
 
   return !chrec_contains_symbols_defined_in_loop (TREE_OPERAND (obj, 0),
index 6d016fd3b52160fa3a67f2462a8c858b1da565e8..467a6781d7c6d99ccd7bcc4eaae651701c76b56b 100644 (file)
@@ -23,6 +23,7 @@ along with GCC; see the file COPYING3.  If not see
 #include "system.h"
 #include "coretypes.h"
 #include "tm.h"
+#include "toplev.h"
 #include "hashtab.h"
 #include "pointer-set.h"
 #include "tree.h"
@@ -855,6 +856,29 @@ get_ref_base_and_extent (tree exp, HOST_WIDE_INT *poffset,
        case VIEW_CONVERT_EXPR:
          break;
 
+       case MEM_REF:
+         /* Hand back the decl for MEM[&decl, off].  */
+         if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
+           {
+             if (integer_zerop (TREE_OPERAND (exp, 1)))
+               exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
+             else
+               {
+                 double_int off = mem_ref_offset (exp);
+                 off = double_int_lshift (off,
+                                          BITS_PER_UNIT == 8
+                                          ? 3 : exact_log2 (BITS_PER_UNIT),
+                                          HOST_BITS_PER_DOUBLE_INT, true);
+                 off = double_int_add (off, shwi_to_double_int (bit_offset));
+                 if (double_int_fits_in_shwi_p (off))
+                   {
+                     bit_offset = double_int_to_shwi (off);
+                     exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
+                   }
+               }
+           }
+         goto done;
+
        default:
          goto done;
        }
@@ -901,6 +925,104 @@ get_ref_base_and_extent (tree exp, HOST_WIDE_INT *poffset,
   return exp;
 }
 
+/* Returns the base object and a constant BITS_PER_UNIT offset in *POFFSET that
+   denotes the starting address of the memory access EXP.
+   Returns NULL_TREE if the offset is not constant or any component
+   is not BITS_PER_UNIT-aligned.  */
+
+tree
+get_addr_base_and_unit_offset (tree exp, HOST_WIDE_INT *poffset)
+{
+  HOST_WIDE_INT byte_offset = 0;
+
+  /* Compute cumulative byte-offset for nested component-refs and array-refs,
+     and find the ultimate containing object.  */
+  while (1)
+    {
+      switch (TREE_CODE (exp))
+       {
+       case BIT_FIELD_REF:
+         return NULL_TREE;
+
+       case COMPONENT_REF:
+         {
+           tree field = TREE_OPERAND (exp, 1);
+           tree this_offset = component_ref_field_offset (exp);
+           HOST_WIDE_INT hthis_offset;
+
+           if (!this_offset
+               || TREE_CODE (this_offset) != INTEGER_CST
+               || (TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field))
+                   % BITS_PER_UNIT))
+             return NULL_TREE;
+
+           hthis_offset = TREE_INT_CST_LOW (this_offset);
+           hthis_offset += (TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field))
+                            / BITS_PER_UNIT);
+           byte_offset += hthis_offset;
+         }
+         break;
+
+       case ARRAY_REF:
+       case ARRAY_RANGE_REF:
+         {
+           tree index = TREE_OPERAND (exp, 1);
+           tree low_bound, unit_size;
+
+           /* If the resulting bit-offset is constant, track it.  */
+           if (TREE_CODE (index) == INTEGER_CST
+               && (low_bound = array_ref_low_bound (exp),
+                   TREE_CODE (low_bound) == INTEGER_CST)
+               && (unit_size = array_ref_element_size (exp),
+                   TREE_CODE (unit_size) == INTEGER_CST))
+             {
+               HOST_WIDE_INT hindex = TREE_INT_CST_LOW (index);
+
+               hindex -= TREE_INT_CST_LOW (low_bound);
+               hindex *= TREE_INT_CST_LOW (unit_size);
+               byte_offset += hindex;
+             }
+           else
+             return NULL_TREE;
+         }
+         break;
+
+       case REALPART_EXPR:
+         break;
+
+       case IMAGPART_EXPR:
+         byte_offset += TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (exp)));
+         break;
+
+       case VIEW_CONVERT_EXPR:
+         break;
+
+       case MEM_REF:
+         /* Hand back the decl for MEM[&decl, off].  */
+         if (TREE_CODE (TREE_OPERAND (exp, 0)) == ADDR_EXPR)
+           {
+             if (!integer_zerop (TREE_OPERAND (exp, 1)))
+               {
+                 double_int off = mem_ref_offset (exp);
+                 gcc_assert (off.high == -1 || off.high == 0);
+                 byte_offset += double_int_to_shwi (off);
+               }
+             exp = TREE_OPERAND (TREE_OPERAND (exp, 0), 0);
+           }
+         goto done;
+
+       default:
+         goto done;
+       }
+
+      exp = TREE_OPERAND (exp, 0);
+    }
+done:
+
+  *poffset = byte_offset;
+  return exp;
+}
+
 /* Returns true if STMT references an SSA_NAME that has
    SSA_NAME_OCCURS_IN_ABNORMAL_PHI set, otherwise false.  */
 
index 3a6292c1b2ffdee4b02e65805da73bc61932b6e9..c117013810dd42ebd4f5d66075a82f9cd8337bc7 100644 (file)
@@ -2437,6 +2437,10 @@ tree_could_trap_p (tree expr)
        return false;
       return !in_array_bounds_p (expr);
 
+    case MEM_REF:
+      if (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR)
+       return false;
+      /* Fallthru.  */
     case INDIRECT_REF:
     case ALIGN_INDIRECT_REF:
     case MISALIGNED_INDIRECT_REF:
index af3964119740efb500fd7d6c5e80f046eb8c7793..fb1373a1ad0a0af6c5073e11ef661db6be34e6ce 100644 (file)
@@ -491,6 +491,7 @@ extern tree gimple_default_def (struct function *, tree);
 extern bool stmt_references_abnormal_ssa_name (gimple);
 extern tree get_ref_base_and_extent (tree, HOST_WIDE_INT *,
                                     HOST_WIDE_INT *, HOST_WIDE_INT *);
+extern tree get_addr_base_and_unit_offset (tree, HOST_WIDE_INT *);
 extern void find_referenced_vars_in (gimple);
 
 /* In tree-phinodes.c  */
@@ -572,6 +573,7 @@ void release_ssa_name_after_update_ssa (tree);
 void compute_global_livein (bitmap, bitmap);
 void mark_sym_for_renaming (tree);
 void mark_set_for_renaming (bitmap);
+bool symbol_marked_for_renaming (tree);
 tree get_current_def (tree);
 void set_current_def (tree, tree);
 
index f1470d7a3848c3fdbb0c9f49e93da78615e7d90d..3b1c459128fb192974065b964c551971cf9dabc2 100644 (file)
@@ -811,48 +811,49 @@ remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
     {
       /* Otherwise, just copy the node.  Note that copy_tree_r already
         knows not to copy VAR_DECLs, etc., so this is safe.  */
-      if (TREE_CODE (*tp) == INDIRECT_REF)
+      if (TREE_CODE (*tp) == MEM_REF)
        {
-         /* Get rid of *& from inline substitutions that can happen when a
-            pointer argument is an ADDR_EXPR.  */
+         /* We need to re-canonicalize MEM_REFs from inline substitutions
+            that can happen when a pointer argument is an ADDR_EXPR.  */
          tree decl = TREE_OPERAND (*tp, 0);
          tree *n;
 
          n = (tree *) pointer_map_contains (id->decl_map, decl);
          if (n)
            {
-             tree type, new_tree, old;
-
-             /* If we happen to get an ADDR_EXPR in n->value, strip
-                it manually here as we'll eventually get ADDR_EXPRs
-                which lie about their types pointed to.  In this case
-                build_fold_indirect_ref wouldn't strip the
-                INDIRECT_REF, but we absolutely rely on that.  As
-                fold_indirect_ref does other useful transformations,
-                try that first, though.  */
-             type = TREE_TYPE (TREE_TYPE (*n));
-             new_tree = unshare_expr (*n);
-             old = *tp;
-             *tp = gimple_fold_indirect_ref (new_tree);
-             if (!*tp)
-               {
-                 if (TREE_CODE (new_tree) == ADDR_EXPR)
-                   {
-                     *tp = fold_indirect_ref_1 (EXPR_LOCATION (new_tree),
-                                                type, new_tree);
-                     /* ???  We should either assert here or build
-                        a VIEW_CONVERT_EXPR instead of blindly leaking
-                        incompatible types to our IL.  */
-                     if (! *tp)
-                       *tp = TREE_OPERAND (new_tree, 0);
-                   }
-                 else
-                   {
-                     *tp = build1 (INDIRECT_REF, type, new_tree);
-                     TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
-                     TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
-                   }
+             tree old = *tp;
+             tree ptr = unshare_expr (*n);
+             tree tem;
+             if ((tem = maybe_fold_offset_to_reference (EXPR_LOCATION (*tp),
+                                                        ptr,
+                                                        TREE_OPERAND (*tp, 1),
+                                                        TREE_TYPE (*tp)))
+                 && TREE_THIS_VOLATILE (tem) == TREE_THIS_VOLATILE (old))
+               {
+                 tree *tem_basep = &tem;
+                 while (handled_component_p (*tem_basep))
+                   tem_basep = &TREE_OPERAND (*tem_basep, 0);
+                 if (TREE_CODE (*tem_basep) == MEM_REF)
+                   *tem_basep
+                     = build2 (MEM_REF, TREE_TYPE (*tem_basep),
+                               TREE_OPERAND (*tem_basep, 0),
+                               fold_convert (TREE_TYPE (TREE_OPERAND (*tp, 1)),
+                                             TREE_OPERAND (*tem_basep, 1)));
+                 else
+                   *tem_basep
+                     = build2 (MEM_REF, TREE_TYPE (*tem_basep),
+                               build_fold_addr_expr (*tem_basep),
+                               build_int_cst
+                                 (TREE_TYPE (TREE_OPERAND (*tp, 1)), 0));
+                 *tp = tem;
+               }
+             else
+               {
+                 *tp = fold_build2 (MEM_REF, TREE_TYPE (*tp),
+                                    ptr, TREE_OPERAND (*tp, 1));
+                 TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
                }
+             TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
              *walk_subtrees = 0;
              return NULL;
            }
@@ -887,7 +888,7 @@ remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
       else if (TREE_CODE (*tp) == ADDR_EXPR)
        {
          /* Variable substitution need not be simple.  In particular,
-            the INDIRECT_REF substitution above.  Make sure that
+            the MEM_REF substitution above.  Make sure that
             TREE_CONSTANT and friends are up-to-date.  But make sure
             to not improperly set TREE_BLOCK on some sub-expressions.  */
          int invariant = is_gimple_min_invariant (*tp);
@@ -895,13 +896,7 @@ remap_gimple_op_r (tree *tp, int *walk_subtrees, void *data)
          id->block = NULL_TREE;
          walk_tree (&TREE_OPERAND (*tp, 0), remap_gimple_op_r, data, NULL);
          id->block = block;
-
-         /* Handle the case where we substituted an INDIRECT_REF
-            into the operand of the ADDR_EXPR.  */
-         if (TREE_CODE (TREE_OPERAND (*tp, 0)) == INDIRECT_REF)
-           *tp = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0);
-         else
-           recompute_tree_invariant_for_addr_expr (*tp);
+         recompute_tree_invariant_for_addr_expr (*tp);
 
          /* If this used to be invariant, but is not any longer,
             then regimplification is probably needed.  */
@@ -1092,6 +1087,25 @@ copy_tree_body_r (tree *tp, int *walk_subtrees, void *data)
              return NULL;
            }
        }
+      else if (TREE_CODE (*tp) == MEM_REF)
+       {
+         /* We need to re-canonicalize MEM_REFs from inline substitutions
+            that can happen when a pointer argument is an ADDR_EXPR.  */
+         tree decl = TREE_OPERAND (*tp, 0);
+         tree *n;
+
+         n = (tree *) pointer_map_contains (id->decl_map, decl);
+         if (n)
+           {
+             tree old = *tp;
+             *tp = fold_build2 (MEM_REF, TREE_TYPE (*tp),
+                                unshare_expr (*n), TREE_OPERAND (*tp, 1));
+             TREE_THIS_VOLATILE (*tp) = TREE_THIS_VOLATILE (old);
+             TREE_NO_WARNING (*tp) = TREE_NO_WARNING (old);
+             *walk_subtrees = 0;
+             return NULL;
+           }
+       }
 
       /* Here is the "usual case".  Copy this tree node, and then
         tweak some special cases.  */
index 05b313cbe3e05b8930bbdc0e585d9fce564ecbe7..e49fb035bc124064f13c1d9f3919785dc2397107 100644 (file)
@@ -556,7 +556,7 @@ set_livein_block (tree var, basic_block bb)
 
 /* Return true if symbol SYM is marked for renaming.  */
 
-static inline bool
+bool
 symbol_marked_for_renaming (tree sym)
 {
   return bitmap_bit_p (SYMS_TO_RENAME (cfun), DECL_UID (sym));
index be1a1ee2058bf1289bf6ba789ead2f5f551b3860..099a7fe479f727a44e41ab986751acbe034d30b5 100644 (file)
@@ -397,7 +397,7 @@ generate_builtin (struct loop *loop, bitmap partition, bool copy_p)
   op1 = gimple_assign_rhs1 (write);
 
   if (!(TREE_CODE (op0) == ARRAY_REF
-       || TREE_CODE (op0) == INDIRECT_REF))
+       || TREE_CODE (op0) == MEM_REF))
     goto end;
 
   /* The new statements will be placed before LOOP.  */
index 2437bc48470d15cda728b082dcd12cf0b8ad143d..e70524543c5db1af466a5dcecf75848fd5761001 100644 (file)
@@ -790,7 +790,8 @@ mf_xform_derefs_1 (gimple_stmt_iterator *iter, tree *tp,
               }
             else if (TREE_CODE (var) == COMPONENT_REF)
               var = TREE_OPERAND (var, 0);
-            else if (INDIRECT_REF_P (var))
+            else if (INDIRECT_REF_P (var)
+                    || TREE_CODE (var) == MEM_REF)
               {
                base = TREE_OPERAND (var, 0);
                 break;
@@ -868,6 +869,18 @@ mf_xform_derefs_1 (gimple_stmt_iterator *iter, tree *tp,
                           size_int (-1));
       break;
 
+    case MEM_REF:
+      addr = build2 (POINTER_PLUS_EXPR, TREE_TYPE (TREE_OPERAND (t, 1)),
+                    TREE_OPERAND (t, 0),
+                    fold_convert (sizetype, TREE_OPERAND (t, 1)));
+      base = addr;
+      limit = fold_build2_loc (location, POINTER_PLUS_EXPR, ptr_type_node,
+                          fold_build2_loc (location,
+                                       POINTER_PLUS_EXPR, ptr_type_node, base,
+                                       size),
+                          size_int (-1));
+      break;
+
     case TARGET_MEM_REF:
       addr = tree_mem_ref_addr (ptr_type_node, t);
       base = addr;
index cd1530740959d2864832985d2550d6f130eac296..db704b7905578f6b922e48c1a9c7b0abbdf33aa2 100644 (file)
@@ -84,6 +84,7 @@ struct nesting_info
 
   struct pointer_map_t *field_map;
   struct pointer_map_t *var_map;
+  struct pointer_set_t *mem_refs;
   bitmap suppress_expansion;
 
   tree context;
@@ -717,6 +718,7 @@ create_nesting_tree (struct cgraph_node *cgn)
   struct nesting_info *info = XCNEW (struct nesting_info);
   info->field_map = pointer_map_create ();
   info->var_map = pointer_map_create ();
+  info->mem_refs = pointer_set_create ();
   info->suppress_expansion = BITMAP_ALLOC (&nesting_info_bitmap_obstack);
   info->context = cgn->decl;
 
@@ -758,7 +760,7 @@ get_static_chain (struct nesting_info *info, tree target_context,
        {
          tree field = get_chain_field (i);
 
-         x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
+         x = build_simple_mem_ref (x);
          x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
          x = init_tmp_var (info, x, gsi);
        }
@@ -793,12 +795,12 @@ get_frame_field (struct nesting_info *info, tree target_context,
        {
          tree field = get_chain_field (i);
 
-         x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
+         x = build_simple_mem_ref (x);
          x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
          x = init_tmp_var (info, x, gsi);
        }
 
-      x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
+      x = build_simple_mem_ref (x);
     }
 
   x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
@@ -841,16 +843,16 @@ get_nonlocal_debug_decl (struct nesting_info *info, tree decl)
       for (i = info->outer; i->context != target_context; i = i->outer)
        {
          field = get_chain_field (i);
-         x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
+         x = build_simple_mem_ref (x);
          x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
        }
-      x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
+      x = build_simple_mem_ref (x);
     }
 
   field = lookup_field_for_decl (i, decl, INSERT);
   x = build3 (COMPONENT_REF, TREE_TYPE (field), x, field, NULL_TREE);
   if (use_pointer_in_frame (decl))
-    x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
+    x = build_simple_mem_ref (x);
 
   /* ??? We should be remapping types as well, surely.  */
   new_decl = build_decl (DECL_SOURCE_LOCATION (decl),
@@ -927,7 +929,7 @@ convert_nonlocal_reference_op (tree *tp, int *walk_subtrees, void *data)
              if (use_pointer_in_frame (t))
                {
                  x = init_tmp_var (info, x, &wi->gsi);
-                 x = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (x)), x);
+                 x = build_simple_mem_ref (x);
                }
            }
 
@@ -1498,6 +1500,21 @@ convert_local_reference_op (tree *tp, int *walk_subtrees, void *data)
       wi->val_only = save_val_only;
       break;
 
+    case MEM_REF:
+      save_val_only = wi->val_only;
+      wi->val_only = true;
+      wi->is_lhs = false;
+      walk_tree (&TREE_OPERAND (t, 0), convert_local_reference_op,
+                wi, NULL);
+      /* We need to re-fold the MEM_REF as component references as
+	 part of an ADDR_EXPR address are not allowed.  But we cannot
+	 fold here, as the chain record type is not yet finalized.  */
+      if (TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
+         && !DECL_P (TREE_OPERAND (TREE_OPERAND (t, 0), 0)))
+       pointer_set_insert (info->mem_refs, tp);
+      wi->val_only = save_val_only;
+      break;
+
     case VIEW_CONVERT_EXPR:
       /* Just request to look at the subtrees, leaving val_only and lhs
         untouched.  This might actually be for !val_only + lhs, in which
@@ -2247,6 +2264,15 @@ remap_vla_decls (tree block, struct nesting_info *root)
   pointer_map_destroy (id.cb.decl_map);
 }
 
+/* Fold the MEM_REF *E.  */
+static bool
+fold_mem_refs (const void *e, void *data ATTRIBUTE_UNUSED)
+{
+  tree *ref_p = CONST_CAST2(tree *, const tree *, (const tree *)e);
+  *ref_p = fold (*ref_p);
+  return true;
+}
+
 /* Do "everything else" to clean up or complete state collected by the
    various walking passes -- lay out the types and decls, generate code
    to initialize the frame decl, store critical expressions in the
@@ -2461,6 +2487,9 @@ finalize_nesting_tree_1 (struct nesting_info *root)
                     root->debug_var_chain);
     }
 
+  /* Fold the rewritten MEM_REF trees.  */
+  pointer_set_traverse (root->mem_refs, fold_mem_refs, NULL);
+
   /* Dump the translated tree function.  */
   if (dump_file)
     {
@@ -2514,6 +2543,7 @@ free_nesting_tree (struct nesting_info *root)
       next = iter_nestinfo_next (node);
       pointer_map_destroy (node->var_map);
       pointer_map_destroy (node->field_map);
+      pointer_set_destroy (node->mem_refs);
       free (node);
       node = next;
     }
index 5c7d6f599c8733fb546a8b806a107dbc26f654c3..58e8ee47a7d7a6f6a59affeccc6d9ff8edc23396 100644 (file)
@@ -141,6 +141,10 @@ compute_object_offset (const_tree expr, const_tree var)
       off = size_binop (MULT_EXPR, TYPE_SIZE_UNIT (TREE_TYPE (expr)), t);
       break;
 
+    case MEM_REF:
+      gcc_assert (TREE_CODE (TREE_OPERAND (expr, 0)) == ADDR_EXPR);
+      return TREE_OPERAND (expr, 1);
+
     default:
       return error_mark_node;
     }
@@ -166,15 +170,21 @@ addr_object_size (struct object_size_info *osi, const_tree ptr,
     pt_var = get_base_address (pt_var);
 
   if (pt_var
-      && TREE_CODE (pt_var) == INDIRECT_REF
+      && TREE_CODE (pt_var) == MEM_REF
       && TREE_CODE (TREE_OPERAND (pt_var, 0)) == SSA_NAME
       && POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (pt_var, 0))))
     {
       unsigned HOST_WIDE_INT sz;
 
       if (!osi || (object_size_type & 1) != 0)
-       sz = compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
-                                         object_size_type & ~1);
+       {
+         sz = compute_builtin_object_size (TREE_OPERAND (pt_var, 0),
+                                           object_size_type & ~1);
+         if (host_integerp (TREE_OPERAND (pt_var, 1), 0))
+           sz -= TREE_INT_CST_LOW (TREE_OPERAND (pt_var, 1));
+         else
+           sz = offset_limit;
+       }
       else
        {
          tree var = TREE_OPERAND (pt_var, 0);
@@ -185,6 +195,10 @@ addr_object_size (struct object_size_info *osi, const_tree ptr,
            sz = object_sizes[object_size_type][SSA_NAME_VERSION (var)];
          else
            sz = unknown[object_size_type];
+         if (host_integerp (TREE_OPERAND (pt_var, 1), 0))
+           sz -= TREE_INT_CST_LOW (TREE_OPERAND (pt_var, 1));
+         else
+           sz = offset_limit;
        }
 
       if (sz != unknown[object_size_type] && sz < offset_limit)
@@ -225,7 +239,7 @@ addr_object_size (struct object_size_info *osi, const_tree ptr,
                  && tree_int_cst_lt (pt_var_size,
                                      TYPE_SIZE_UNIT (TREE_TYPE (var)))))
            var = pt_var;
-         else if (var != pt_var && TREE_CODE (pt_var) == INDIRECT_REF)
+         else if (var != pt_var && TREE_CODE (pt_var) == MEM_REF)
            {
              tree v = var;
              /* For &X->fld, compute object size only if fld isn't the last
@@ -328,12 +342,14 @@ addr_object_size (struct object_size_info *osi, const_tree ptr,
        }
       if (var != pt_var
          && pt_var_size
-         && TREE_CODE (pt_var) == INDIRECT_REF
+         && TREE_CODE (pt_var) == MEM_REF
          && bytes != error_mark_node)
        {
          tree bytes2 = compute_object_offset (TREE_OPERAND (ptr, 0), pt_var);
          if (bytes2 != error_mark_node)
            {
+             bytes2 = size_binop (PLUS_EXPR, bytes2,
+                                  TREE_OPERAND (pt_var, 1));
              if (TREE_CODE (bytes2) == INTEGER_CST
                  && tree_int_cst_lt (pt_var_size, bytes2))
                bytes2 = size_zero_node;
@@ -746,10 +762,20 @@ plus_stmt_object_size (struct object_size_info *osi, tree var, gimple stmt)
   unsigned HOST_WIDE_INT bytes;
   tree op0, op1;
 
-  gcc_assert (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR);
-
-  op0 = gimple_assign_rhs1 (stmt);
-  op1 = gimple_assign_rhs2 (stmt);
+  if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
+    {
+      op0 = gimple_assign_rhs1 (stmt);
+      op1 = gimple_assign_rhs2 (stmt);
+    }
+  else if (gimple_assign_rhs_code (stmt) == ADDR_EXPR)
+    {
+      tree rhs = TREE_OPERAND (gimple_assign_rhs1 (stmt), 0);
+      gcc_assert (TREE_CODE (rhs) == MEM_REF);
+      op0 = TREE_OPERAND (rhs, 0);
+      op1 = TREE_OPERAND (rhs, 1);
+    }
+  else
+    gcc_unreachable ();
 
   if (object_sizes[object_size_type][varno] == unknown[object_size_type])
     return false;
@@ -897,13 +923,14 @@ collect_object_sizes_for (struct object_size_info *osi, tree var)
     {
     case GIMPLE_ASSIGN:
       {
-        if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
+       tree rhs = gimple_assign_rhs1 (stmt);
+        if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
+           || (gimple_assign_rhs_code (stmt) == ADDR_EXPR
+               && TREE_CODE (TREE_OPERAND (rhs, 0)) == MEM_REF))
           reexamine = plus_stmt_object_size (osi, var, stmt);
         else if (gimple_assign_single_p (stmt)
                  || gimple_assign_unary_nop_p (stmt))
           {
-            tree rhs = gimple_assign_rhs1 (stmt);
-
             if (TREE_CODE (rhs) == SSA_NAME
                 && POINTER_TYPE_P (TREE_TYPE (rhs)))
               reexamine = merge_object_sizes (osi, var, rhs, 0);
index a17655ed699ff7fe1515f46b285ddde9091a5575..af2708667d2a4c93d4087a8a0688799b17090826 100644 (file)
@@ -357,7 +357,7 @@ take_address_of (tree obj, tree type, edge entry, htab_t decl_address)
 
   if (var_p != &obj)
     {
-      *var_p = build1 (INDIRECT_REF, TREE_TYPE (*var_p), name);
+      *var_p = build_simple_mem_ref (name);
       name = force_gimple_operand (build_addr (obj, current_function_decl),
                                   &stmts, true, NULL_TREE);
       if (!gimple_seq_empty_p (stmts))
@@ -456,7 +456,7 @@ eliminate_local_variables_1 (tree *tp, int *walk_subtrees, void *data)
       type = TREE_TYPE (t);
       addr_type = build_pointer_type (type);
       addr = take_address_of (t, addr_type, dta->entry, dta->decl_address);
-      *tp = build1 (INDIRECT_REF, TREE_TYPE (*tp), addr);
+      *tp = build_simple_mem_ref (addr);
 
       dta->changed = true;
       return NULL_TREE;
@@ -857,7 +857,6 @@ create_call_for_reduction_1 (void **slot, void *data)
   struct clsn_data *const clsn_data = (struct clsn_data *) data;
   gimple_stmt_iterator gsi;
   tree type = TREE_TYPE (PHI_RESULT (reduc->reduc_phi));
-  tree struct_type = TREE_TYPE (TREE_TYPE (clsn_data->load));
   tree load_struct;
   basic_block bb;
   basic_block new_bb;
@@ -866,7 +865,7 @@ create_call_for_reduction_1 (void **slot, void *data)
   tree tmp_load, name;
   gimple load;
 
-  load_struct = fold_build1 (INDIRECT_REF, struct_type, clsn_data->load);
+  load_struct = build_simple_mem_ref (clsn_data->load);
   t = build3 (COMPONENT_REF, type, load_struct, reduc->field, NULL_TREE);
 
   addr = build_addr (t, current_function_decl);
@@ -925,13 +924,12 @@ create_loads_for_reductions (void **slot, void *data)
   gimple stmt;
   gimple_stmt_iterator gsi;
   tree type = TREE_TYPE (gimple_assign_lhs (red->reduc_stmt));
-  tree struct_type = TREE_TYPE (TREE_TYPE (clsn_data->load));
   tree load_struct;
   tree name;
   tree x;
 
   gsi = gsi_after_labels (clsn_data->load_bb);
-  load_struct = fold_build1 (INDIRECT_REF, struct_type, clsn_data->load);
+  load_struct = build_simple_mem_ref (clsn_data->load);
   load_struct = build3 (COMPONENT_REF, type, load_struct, red->field,
                        NULL_TREE);
 
@@ -1012,7 +1010,6 @@ create_loads_and_stores_for_name (void **slot, void *data)
   gimple stmt;
   gimple_stmt_iterator gsi;
   tree type = TREE_TYPE (elt->new_name);
-  tree struct_type = TREE_TYPE (TREE_TYPE (clsn_data->load));
   tree load_struct;
 
   gsi = gsi_last_bb (clsn_data->store_bb);
@@ -1022,7 +1019,7 @@ create_loads_and_stores_for_name (void **slot, void *data)
   gsi_insert_after (&gsi, stmt, GSI_NEW_STMT);
 
   gsi = gsi_last_bb (clsn_data->load_bb);
-  load_struct = fold_build1 (INDIRECT_REF, struct_type, clsn_data->load);
+  load_struct = build_simple_mem_ref (clsn_data->load);
   t = build3 (COMPONENT_REF, type, load_struct, elt->field, NULL_TREE);
   stmt = gimple_build_assign (elt->new_name, t);
   SSA_NAME_DEF_STMT (elt->new_name) = stmt;
index de147e7ec96269d49d4909ec96b27366afe9098b..4af074ff5e511cc858621213d26fc43aa6dac3e9 100644 (file)
@@ -1345,14 +1345,16 @@ ref_at_iteration (struct loop *loop, tree ref, int iter)
       if (!op0)
        return NULL_TREE;
     }
-  else if (!INDIRECT_REF_P (ref))
+  else if (!INDIRECT_REF_P (ref)
+          && TREE_CODE (ref) != MEM_REF)
     return unshare_expr (ref);
 
-  if (INDIRECT_REF_P (ref))
+  if (INDIRECT_REF_P (ref)
+      || TREE_CODE (ref) == MEM_REF)
     {
-      /* Take care for INDIRECT_REF and MISALIGNED_INDIRECT_REF at
+      /* Take care for MEM_REF and MISALIGNED_INDIRECT_REF at
          the same time.  */
-      ret = copy_node (ref);
+      ret = unshare_expr (ref);
       idx = TREE_OPERAND (ref, 0);
       idx_p = &TREE_OPERAND (ret, 0);
     }
index f7bbef43a1852668ac5f74e976a86f692eaf1f4d..03cc102458124d6fc50e3a70fd34500f46a9736f 100644 (file)
@@ -794,6 +794,55 @@ dump_generic_node (pretty_printer *buffer, tree node, int spc, int flags,
       NIY;
       break;
 
+    case MEM_REF:
+      {
+       if (integer_zerop (TREE_OPERAND (node, 1))
+           /* Same pointer types, but ignoring POINTER_TYPE vs.
+              REFERENCE_TYPE.  */
+           && (TREE_TYPE (TREE_TYPE (TREE_OPERAND (node, 0)))
+               == TREE_TYPE (TREE_TYPE (TREE_OPERAND (node, 1))))
+           && (TYPE_MODE (TREE_TYPE (TREE_OPERAND (node, 0)))
+               == TYPE_MODE (TREE_TYPE (TREE_OPERAND (node, 1))))
+           && (TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (node, 0)))
+               == TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (node, 1))))
+           && (TYPE_QUALS (TREE_TYPE (TREE_OPERAND (node, 0)))
+               == TYPE_QUALS (TREE_TYPE (TREE_OPERAND (node, 1))))
+           /* Same value types ignoring qualifiers.  */
+           && (TYPE_MAIN_VARIANT (TREE_TYPE (node))
+               == TYPE_MAIN_VARIANT
+                   (TREE_TYPE (TREE_TYPE (TREE_OPERAND (node, 1))))))
+         {
+           if (TREE_CODE (TREE_OPERAND (node, 0)) != ADDR_EXPR)
+             {
+               pp_string (buffer, "*");
+               dump_generic_node (buffer, TREE_OPERAND (node, 0),
+                                  spc, flags, false);
+             }
+           else
+             dump_generic_node (buffer,
+                                TREE_OPERAND (TREE_OPERAND (node, 0), 0),
+                                spc, flags, false);
+         }
+       else
+         {
+           pp_string (buffer, "MEM[");
+           pp_string (buffer, "(");
+           dump_generic_node (buffer, TREE_TYPE (TREE_OPERAND (node, 1)),
+                              spc, flags, false);
+           pp_string (buffer, ")");
+           dump_generic_node (buffer, TREE_OPERAND (node, 0),
+                              spc, flags, false);
+           if (!integer_zerop (TREE_OPERAND (node, 1)))
+             {
+               pp_string (buffer, " + ");
+               dump_generic_node (buffer, TREE_OPERAND (node, 1),
+                                  spc, flags, false);
+             }
+           pp_string (buffer, "]");
+         }
+       break;
+      }
+
     case TARGET_MEM_REF:
       {
        const char *sep = "";
@@ -1100,7 +1149,25 @@ dump_generic_node (pretty_printer *buffer, tree node, int spc, int flags,
     case COMPONENT_REF:
       op0 = TREE_OPERAND (node, 0);
       str = ".";
-      if (op0 && TREE_CODE (op0) == INDIRECT_REF)
+      if (op0
+         && (TREE_CODE (op0) == INDIRECT_REF
+             || (TREE_CODE (op0) == MEM_REF
+                 && TREE_CODE (TREE_OPERAND (op0, 0)) != ADDR_EXPR
+                 && integer_zerop (TREE_OPERAND (op0, 1))
+                 /* Same pointer types, but ignoring POINTER_TYPE vs.
+                    REFERENCE_TYPE.  */
+                 && (TREE_TYPE (TREE_TYPE (TREE_OPERAND (op0, 0)))
+                     == TREE_TYPE (TREE_TYPE (TREE_OPERAND (op0, 1))))
+                 && (TYPE_MODE (TREE_TYPE (TREE_OPERAND (op0, 0)))
+                     == TYPE_MODE (TREE_TYPE (TREE_OPERAND (op0, 1))))
+                 && (TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (op0, 0)))
+                     == TYPE_REF_CAN_ALIAS_ALL (TREE_TYPE (TREE_OPERAND (op0, 1))))
+                 && (TYPE_QUALS (TREE_TYPE (TREE_OPERAND (op0, 0)))
+                     == TYPE_QUALS (TREE_TYPE (TREE_OPERAND (op0, 1))))
+                 /* Same value types ignoring qualifiers.  */
+                 && (TYPE_MAIN_VARIANT (TREE_TYPE (op0))
+                     == TYPE_MAIN_VARIANT
+                         (TREE_TYPE (TREE_TYPE (TREE_OPERAND (op0, 1))))))))
        {
          op0 = TREE_OPERAND (op0, 0);
          str = "->";
@@ -2760,6 +2827,13 @@ print_call_name (pretty_printer *buffer, tree node, int flags)
        dump_generic_node (buffer, op0, 0, flags, false);
       break;
 
+    case MEM_REF:
+      if (integer_zerop (TREE_OPERAND (op0, 1)))
+       {
+         op0 = TREE_OPERAND (op0, 0);
+         goto again;
+       }
+      /* Fallthru.  */
     case COMPONENT_REF:
     case SSA_NAME:
     case OBJ_TYPE_REF:
index 13d897fd5ced418451fdc7f2bf709638288ac261..0e372567c8954e677e03026b93ac80b13d7dadd8 100644 (file)
@@ -1170,6 +1170,24 @@ follow_ssa_edge_expr (struct loop *loop, gimple at_stmt, tree expr,
                                    halting_phi, evolution_of_loop, limit);
       break;
 
+    case ADDR_EXPR:
+      /* Handle &MEM[ptr + CST] which is equivalent to POINTER_PLUS_EXPR.  */
+      if (TREE_CODE (TREE_OPERAND (expr, 0)) == MEM_REF)
+       {
+         expr = TREE_OPERAND (expr, 0);
+         rhs0 = TREE_OPERAND (expr, 0);
+         rhs1 = TREE_OPERAND (expr, 1);
+         type = TREE_TYPE (rhs0);
+         STRIP_USELESS_TYPE_CONVERSION (rhs0);
+         STRIP_USELESS_TYPE_CONVERSION (rhs1);
+         res = follow_ssa_edge_binary (loop, at_stmt, type,
+                                       rhs0, POINTER_PLUS_EXPR, rhs1,
+                                       halting_phi, evolution_of_loop, limit);
+       }
+      else
+       res = t_false;
+      break;
+
     case ASSERT_EXPR:
       /* This assignment is of the form: "a_1 = ASSERT_EXPR <a_2, ...>"
         It must be handled as a copy assignment of the form a_1 = a_2.  */
index 84d950f770f4935d96af9a3c45ce4295e1f4fb81..05e3cf800aadad6cdf625f37325394e430c3bcee 100644 (file)
@@ -751,7 +751,8 @@ create_access (tree expr, gimple stmt, bool write)
 
   base = get_ref_base_and_extent (expr, &offset, &size, &max_size);
 
-  if (sra_mode == SRA_MODE_EARLY_IPA && INDIRECT_REF_P (base))
+  if (sra_mode == SRA_MODE_EARLY_IPA
+      && TREE_CODE (base) == MEM_REF)
     {
       base = get_ssa_base_param (TREE_OPERAND (base, 0));
       if (!base)
@@ -885,15 +886,10 @@ completely_scalarize_record (tree base, tree decl, HOST_WIDE_INT offset)
 static void
 disqualify_base_of_expr (tree t, const char *reason)
 {
-  while (handled_component_p (t))
-    t = TREE_OPERAND (t, 0);
-
-  if (sra_mode == SRA_MODE_EARLY_IPA)
-    {
-      if (INDIRECT_REF_P (t))
-       t = TREE_OPERAND (t, 0);
-      t = get_ssa_base_param (t);
-    }
+  t = get_base_address (t);
+  if (sra_mode == SRA_MODE_EARLY_IPA
+      && TREE_CODE (t) == MEM_REF)
+    t = get_ssa_base_param (TREE_OPERAND (t, 0));
 
   if (t && DECL_P (t))
     disqualify_candidate (t, reason);
@@ -935,8 +931,9 @@ build_access_from_expr_1 (tree expr, gimple stmt, bool write)
 
   switch (TREE_CODE (expr))
     {
-    case INDIRECT_REF:
-      if (sra_mode != SRA_MODE_EARLY_IPA)
+    case MEM_REF:
+      if (TREE_CODE (TREE_OPERAND (expr, 0)) != ADDR_EXPR
+         && sra_mode != SRA_MODE_EARLY_IPA)
        return NULL;
       /* fall through */
     case VAR_DECL:
@@ -1285,7 +1282,21 @@ make_fancy_name_1 (tree expr)
        break;
       sprintf (buffer, HOST_WIDE_INT_PRINT_DEC, TREE_INT_CST_LOW (index));
       obstack_grow (&name_obstack, buffer, strlen (buffer));
+      break;
 
+    case ADDR_EXPR:
+      make_fancy_name_1 (TREE_OPERAND (expr, 0));
+      break;
+
+    case MEM_REF:
+      make_fancy_name_1 (TREE_OPERAND (expr, 0));
+      if (!integer_zerop (TREE_OPERAND (expr, 1)))
+       {
+         obstack_1grow (&name_obstack, '$');
+         sprintf (buffer, HOST_WIDE_INT_PRINT_DEC,
+                  TREE_INT_CST_LOW (TREE_OPERAND (expr, 1)));
+         obstack_grow (&name_obstack, buffer, strlen (buffer));
+       }
       break;
 
     case BIT_FIELD_REF:
@@ -1308,7 +1319,11 @@ make_fancy_name (tree expr)
   return XOBFINISH (&name_obstack, char *);
 }
 
-/* Helper function for build_ref_for_offset.  */
+/* Helper function for build_ref_for_offset.
+
+   FIXME: Eventually this should be rewritten to either re-use the
+   original access expression unshared (which is good for alias
+   analysis) or to build a MEM_REF expression.  */
 
 static bool
 build_ref_for_offset_1 (tree *res, tree type, HOST_WIDE_INT offset,
@@ -1406,12 +1421,7 @@ build_ref_for_offset_1 (tree *res, tree type, HOST_WIDE_INT offset,
    type TYPE at the given OFFSET of the type EXP_TYPE.  If EXPR is NULL, the
    function only determines whether it can build such a reference without
    actually doing it, otherwise, the tree it points to is unshared first and
-   then used as a base for furhter sub-references.
-
-   FIXME: Eventually this should be replaced with
-   maybe_fold_offset_to_reference() from tree-ssa-ccp.c but that requires a
-   minor rewrite of fold_stmt.
- */
+   then used as a base for further sub-references.  */
 
 bool
 build_ref_for_offset (tree *expr, tree type, HOST_WIDE_INT offset,
@@ -1426,7 +1436,7 @@ build_ref_for_offset (tree *expr, tree type, HOST_WIDE_INT offset,
     {
       type = TREE_TYPE (type);
       if (expr)
-       *expr = fold_build1_loc (loc, INDIRECT_REF, type, *expr);
+       *expr = build_simple_mem_ref_loc (loc, *expr);
     }
 
   return build_ref_for_offset_1 (expr, type, offset, exp_type);
@@ -3026,8 +3036,11 @@ ptr_parm_has_direct_uses (tree parm)
          tree lhs = gimple_get_lhs (stmt);
          while (handled_component_p (lhs))
            lhs = TREE_OPERAND (lhs, 0);
-         if (INDIRECT_REF_P (lhs)
-             && TREE_OPERAND (lhs, 0) == name)
+         if (TREE_CODE (lhs) == MEM_REF
+             && TREE_OPERAND (lhs, 0) == name
+             && integer_zerop (TREE_OPERAND (lhs, 1))
+             && types_compatible_p (TREE_TYPE (lhs),
+                                    TREE_TYPE (TREE_TYPE (name))))
            uses_ok++;
        }
       if (gimple_assign_single_p (stmt))
@@ -3035,8 +3048,11 @@ ptr_parm_has_direct_uses (tree parm)
          tree rhs = gimple_assign_rhs1 (stmt);
          while (handled_component_p (rhs))
            rhs = TREE_OPERAND (rhs, 0);
-         if (INDIRECT_REF_P (rhs)
-             && TREE_OPERAND (rhs, 0) == name)
+         if (TREE_CODE (rhs) == MEM_REF
+             && TREE_OPERAND (rhs, 0) == name
+             && integer_zerop (TREE_OPERAND (rhs, 1))
+             && types_compatible_p (TREE_TYPE (rhs),
+                                    TREE_TYPE (TREE_TYPE (name))))
            uses_ok++;
        }
       else if (is_gimple_call (stmt))
@@ -3047,8 +3063,11 @@ ptr_parm_has_direct_uses (tree parm)
              tree arg = gimple_call_arg (stmt, i);
              while (handled_component_p (arg))
                arg = TREE_OPERAND (arg, 0);
-             if (INDIRECT_REF_P (arg)
-                 && TREE_OPERAND (arg, 0) == name)
+             if (TREE_CODE (arg) == MEM_REF
+                 && TREE_OPERAND (arg, 0) == name
+                 && integer_zerop (TREE_OPERAND (arg, 1))
+                 && types_compatible_p (TREE_TYPE (arg),
+                                        TREE_TYPE (TREE_TYPE (name))))
                uses_ok++;
            }
        }
@@ -3917,8 +3936,11 @@ sra_ipa_modify_expr (tree *expr, bool convert,
   if (!base || size == -1 || max_size == -1)
     return false;
 
-  if (INDIRECT_REF_P (base))
-    base = TREE_OPERAND (base, 0);
+  if (TREE_CODE (base) == MEM_REF)
+    {
+      offset += mem_ref_offset (base).low * BITS_PER_UNIT;
+      base = TREE_OPERAND (base, 0);
+    }
 
   base = get_ssa_base_param (base);
   if (!base || TREE_CODE (base) != PARM_DECL)
@@ -3939,14 +3961,7 @@ sra_ipa_modify_expr (tree *expr, bool convert,
     return false;
 
   if (cand->by_ref)
-    {
-      tree folded;
-      src = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (cand->reduction)),
-                   cand->reduction);
-      folded = gimple_fold_indirect_ref (src);
-      if (folded)
-        src = folded;
-    }
+    src = build_simple_mem_ref (cand->reduction);
   else
     src = cand->reduction;
 
index 28b8fb779c2e74d9a01c0b11be4e7da0cdb8b27a..78eb362ca5c7f238ccd93529bc16b46a2a7dc793 100644 (file)
@@ -182,7 +182,8 @@ ptr_deref_may_alias_decl_p (tree ptr, tree decl)
     {
       tree base = get_base_address (TREE_OPERAND (ptr, 0));
       if (base
-         && INDIRECT_REF_P (base))
+         && (INDIRECT_REF_P (base)
+             || TREE_CODE (base) == MEM_REF))
        ptr = TREE_OPERAND (base, 0);
       else if (base
               && SSA_VAR_P (base))
@@ -238,7 +239,8 @@ ptr_derefs_may_alias_p (tree ptr1, tree ptr2)
     {
       tree base = get_base_address (TREE_OPERAND (ptr1, 0));
       if (base
-         && INDIRECT_REF_P (base))
+         && (INDIRECT_REF_P (base)
+             || TREE_CODE (base) == MEM_REF))
        ptr1 = TREE_OPERAND (base, 0);
       else if (base
               && SSA_VAR_P (base))
@@ -250,7 +252,8 @@ ptr_derefs_may_alias_p (tree ptr1, tree ptr2)
     {
       tree base = get_base_address (TREE_OPERAND (ptr2, 0));
       if (base
-         && INDIRECT_REF_P (base))
+         && (INDIRECT_REF_P (base)
+             || TREE_CODE (base) == MEM_REF))
        ptr2 = TREE_OPERAND (base, 0);
       else if (base
               && SSA_VAR_P (base))
@@ -299,7 +302,8 @@ ptr_deref_may_alias_ref_p_1 (tree ptr, ao_ref *ref)
 {
   tree base = ao_ref_base (ref);
 
-  if (INDIRECT_REF_P (base))
+  if (INDIRECT_REF_P (base)
+      || TREE_CODE (base) == MEM_REF)
     return ptr_derefs_may_alias_p (ptr, TREE_OPERAND (base, 0));
   else if (SSA_VAR_P (base))
     return ptr_deref_may_alias_decl_p (ptr, base);
@@ -470,12 +474,18 @@ ao_ref_base (ao_ref *ref)
 
 /* Returns the base object alias set of the memory reference *REF.  */
 
-static alias_set_type ATTRIBUTE_UNUSED
+static alias_set_type
 ao_ref_base_alias_set (ao_ref *ref)
 {
+  tree base_ref;
   if (ref->base_alias_set != -1)
     return ref->base_alias_set;
-  ref->base_alias_set = get_alias_set (ao_ref_base (ref));
+  if (!ref->ref)
+    return 0;
+  base_ref = ref->ref;
+  while (handled_component_p (base_ref))
+    base_ref = TREE_OPERAND (base_ref, 0);
+  ref->base_alias_set = get_alias_set (base_ref);
   return ref->base_alias_set;
 }
 
@@ -505,7 +515,8 @@ ao_ref_init_from_ptr_and_size (ao_ref *ref, tree ptr, tree size)
                                         &ref->offset, &t1, &t2);
   else
     {
-      ref->base = build1 (INDIRECT_REF, char_type_node, ptr);
+      ref->base = build2 (MEM_REF, char_type_node,
+                         ptr, build_int_cst (ptr_type_node, 0));
       ref->offset = 0;
     }
   if (size
@@ -665,33 +676,45 @@ decl_refs_may_alias_p (tree base1,
    if non-NULL are the complete memory reference trees.  */
 
 static bool
-indirect_ref_may_alias_decl_p (tree ref1, tree ptr1,
-                              HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
+indirect_ref_may_alias_decl_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
+                              HOST_WIDE_INT offset1,
+                              HOST_WIDE_INT max_size1 ATTRIBUTE_UNUSED,
                               alias_set_type ref1_alias_set,
                               alias_set_type base1_alias_set,
-                              tree ref2, tree base2,
+                              tree ref2 ATTRIBUTE_UNUSED, tree base2,
                               HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2,
                               alias_set_type ref2_alias_set,
-                              alias_set_type base2_alias_set)
+                              alias_set_type base2_alias_set, bool tbaa_p)
 {
+  tree ptr1 = TREE_OPERAND (base1, 0);
+  tree ptrtype1;
+  HOST_WIDE_INT offset1p = offset1;
+
+  if (TREE_CODE (base1) == MEM_REF)
+    offset1p = offset1 + mem_ref_offset (base1).low * BITS_PER_UNIT;
+
   /* If only one reference is based on a variable, they cannot alias if
      the pointer access is beyond the extent of the variable access.
      (the pointer base cannot validly point to an offset less than zero
      of the variable).
      They also cannot alias if the pointer may not point to the decl.  */
-  if (max_size2 != -1
-      && !ranges_overlap_p (offset1, max_size1, 0, offset2 + max_size2))
+  if (!ranges_overlap_p (MAX (0, offset1p), -1, offset2, max_size2))
     return false;
   if (!ptr_deref_may_alias_decl_p (ptr1, base2))
     return false;
 
   /* Disambiguations that rely on strict aliasing rules follow.  */
-  if (!flag_strict_aliasing)
+  if (!flag_strict_aliasing || !tbaa_p)
     return true;
 
+  if (TREE_CODE (base1) == MEM_REF)
+    ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
+  else
+    ptrtype1 = TREE_TYPE (ptr1);
+
   /* If the alias set for a pointer access is zero all bets are off.  */
   if (base1_alias_set == -1)
-    base1_alias_set = get_deref_alias_set (ptr1);
+    base1_alias_set = get_deref_alias_set (ptrtype1);
   if (base1_alias_set == 0)
     return true;
   if (base2_alias_set == -1)
@@ -699,22 +722,52 @@ indirect_ref_may_alias_decl_p (tree ref1, tree ptr1,
 
   /* If both references are through the same type, they do not alias
      if the accesses do not overlap.  This does extra disambiguation
-     for mixed/pointer accesses but requires strict aliasing.  */
-  if (same_type_for_tbaa (TREE_TYPE (TREE_TYPE (ptr1)),
-                         TREE_TYPE (base2)) == 1)
+     for mixed/pointer accesses but requires strict aliasing.
+     For MEM_REFs we require that the component-ref offset we computed
+     is relative to the start of the type which we ensure by
+     comparing rvalue and access type and disregarding the constant
+     pointer offset.  */
+  if ((TREE_CODE (base1) != MEM_REF
+       || same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) == 1)
+      && same_type_for_tbaa (TREE_TYPE (ptrtype1), TREE_TYPE (base2)) == 1)
     return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
 
-  /* The only way to access a variable is through a pointer dereference
-     of the same alias set or a subset of it.  */
+  /* When we are trying to disambiguate an access with a pointer dereference
+     as base versus one with a decl as base we can use both the size
+     of the decl and its dynamic type for extra disambiguation.
+     ???  We do not know anything about the dynamic type of the decl
+     other than that its alias-set contains base2_alias_set as a subset
+     which does not help us here.  */
+  /* As we know nothing useful about the dynamic type of the decl just
+     use the usual conflict check rather than a subset test.
+     ???  We could introduce -fvery-strict-aliasing when the language
+     does not allow decls to have a dynamic type that differs from their
+     static type.  Then we can check 
+     !alias_set_subset_of (base1_alias_set, base2_alias_set) instead.  */
   if (base1_alias_set != base2_alias_set
-      && !alias_set_subset_of (base1_alias_set, base2_alias_set))
+      && !alias_sets_conflict_p (base1_alias_set, base2_alias_set))
+    return false;
+  /* If the size of the access relevant for TBAA through the pointer
+     is bigger than the size of the decl we can't possibly access the
+     decl via that pointer.  */
+  if (DECL_SIZE (base2) && COMPLETE_TYPE_P (TREE_TYPE (ptrtype1))
+      && TREE_CODE (DECL_SIZE (base2)) == INTEGER_CST
+      && TREE_CODE (TYPE_SIZE (TREE_TYPE (ptrtype1))) == INTEGER_CST
+      /* ???  This in turn may run afoul when a decl of type T which is
+        a member of union type U is accessed through a pointer to
+        type U and sizeof T is smaller than sizeof U.  */
+      && TREE_CODE (TREE_TYPE (ptrtype1)) != UNION_TYPE
+      && TREE_CODE (TREE_TYPE (ptrtype1)) != QUAL_UNION_TYPE
+      && tree_int_cst_lt (DECL_SIZE (base2), TYPE_SIZE (TREE_TYPE (ptrtype1))))
     return false;
 
   /* Do access-path based disambiguation.  */
   if (ref1 && ref2
       && handled_component_p (ref1)
-      && handled_component_p (ref2))
-    return aliasing_component_refs_p (ref1, TREE_TYPE (TREE_TYPE (ptr1)),
+      && handled_component_p (ref2)
+      && (TREE_CODE (base1) != MEM_REF
+         || same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) == 1))
+    return aliasing_component_refs_p (ref1, TREE_TYPE (ptrtype1),
                                      ref1_alias_set, base1_alias_set,
                                      offset1, max_size1,
                                      ref2, TREE_TYPE (base2),
@@ -732,42 +785,65 @@ indirect_ref_may_alias_decl_p (tree ref1, tree ptr1,
    if non-NULL are the complete memory reference trees. */
 
 static bool
-indirect_refs_may_alias_p (tree ref1, tree ptr1,
+indirect_refs_may_alias_p (tree ref1 ATTRIBUTE_UNUSED, tree base1,
                           HOST_WIDE_INT offset1, HOST_WIDE_INT max_size1,
                           alias_set_type ref1_alias_set,
                           alias_set_type base1_alias_set,
-                          tree ref2, tree ptr2,
+                          tree ref2 ATTRIBUTE_UNUSED, tree base2,
                           HOST_WIDE_INT offset2, HOST_WIDE_INT max_size2,
                           alias_set_type ref2_alias_set,
-                          alias_set_type base2_alias_set)
+                          alias_set_type base2_alias_set, bool tbaa_p)
 {
+  tree ptr1 = TREE_OPERAND (base1, 0);
+  tree ptr2 = TREE_OPERAND (base2, 0);
+  tree ptrtype1, ptrtype2;
+
   /* If both bases are based on pointers they cannot alias if they may not
      point to the same memory object or if they point to the same object
      and the accesses do not overlap.  */
   if (operand_equal_p (ptr1, ptr2, 0))
-    return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
+    {
+      if (TREE_CODE (base1) == MEM_REF)
+       offset1 += mem_ref_offset (base1).low * BITS_PER_UNIT;
+      if (TREE_CODE (base2) == MEM_REF)
+       offset2 += mem_ref_offset (base2).low * BITS_PER_UNIT;
+      return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
+    }
   if (!ptr_derefs_may_alias_p (ptr1, ptr2))
     return false;
 
   /* Disambiguations that rely on strict aliasing rules follow.  */
-  if (!flag_strict_aliasing)
+  if (!flag_strict_aliasing || !tbaa_p)
     return true;
 
+  if (TREE_CODE (base1) == MEM_REF)
+    ptrtype1 = TREE_TYPE (TREE_OPERAND (base1, 1));
+  else
+    ptrtype1 = TREE_TYPE (ptr1);
+  if (TREE_CODE (base2) == MEM_REF)
+    ptrtype2 = TREE_TYPE (TREE_OPERAND (base2, 1));
+  else
+    ptrtype2 = TREE_TYPE (ptr2);
+
   /* If the alias set for a pointer access is zero all bets are off.  */
   if (base1_alias_set == -1)
-    base1_alias_set = get_deref_alias_set (ptr1);
+    base1_alias_set = get_deref_alias_set (ptrtype1);
   if (base1_alias_set == 0)
     return true;
   if (base2_alias_set == -1)
-    base2_alias_set = get_deref_alias_set (ptr2);
+    base2_alias_set = get_deref_alias_set (ptrtype2);
   if (base2_alias_set == 0)
     return true;
 
   /* If both references are through the same type, they do not alias
      if the accesses do not overlap.  This does extra disambiguation
      for mixed/pointer accesses but requires strict aliasing.  */
-  if (same_type_for_tbaa (TREE_TYPE (TREE_TYPE (ptr1)),
-                         TREE_TYPE (TREE_TYPE (ptr2))) == 1)
+  if ((TREE_CODE (base1) != MEM_REF
+       || same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) == 1)
+      && (TREE_CODE (base2) != MEM_REF
+         || same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) == 1)
+      && same_type_for_tbaa (TREE_TYPE (ptrtype1),
+                            TREE_TYPE (ptrtype2)) == 1)
     return ranges_overlap_p (offset1, max_size1, offset2, max_size2);
 
   /* Do type-based disambiguation.  */
@@ -778,11 +854,15 @@ indirect_refs_may_alias_p (tree ref1, tree ptr1,
   /* Do access-path based disambiguation.  */
   if (ref1 && ref2
       && handled_component_p (ref1)
-      && handled_component_p (ref2))
-    return aliasing_component_refs_p (ref1, TREE_TYPE (TREE_TYPE (ptr1)),
+      && handled_component_p (ref2)
+      && (TREE_CODE (base1) != MEM_REF
+         || same_type_for_tbaa (TREE_TYPE (base1), TREE_TYPE (ptrtype1)) == 1)
+      && (TREE_CODE (base2) != MEM_REF
+         || same_type_for_tbaa (TREE_TYPE (base2), TREE_TYPE (ptrtype2)) == 1))
+    return aliasing_component_refs_p (ref1, TREE_TYPE (ptrtype1),
                                      ref1_alias_set, base1_alias_set,
                                      offset1, max_size1,
-                                     ref2, TREE_TYPE (TREE_TYPE (ptr2)),
+                                     ref2, TREE_TYPE (ptrtype2),
                                      ref2_alias_set, base2_alias_set,
                                      offset2, max_size2, false);
 
@@ -798,19 +878,20 @@ refs_may_alias_p_1 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
   HOST_WIDE_INT offset1 = 0, offset2 = 0;
   HOST_WIDE_INT max_size1 = -1, max_size2 = -1;
   bool var1_p, var2_p, ind1_p, ind2_p;
-  alias_set_type set;
 
   gcc_checking_assert ((!ref1->ref
                        || TREE_CODE (ref1->ref) == SSA_NAME
                        || DECL_P (ref1->ref)
                        || handled_component_p (ref1->ref)
                        || INDIRECT_REF_P (ref1->ref)
+                       || TREE_CODE (ref1->ref) == MEM_REF
                        || TREE_CODE (ref1->ref) == TARGET_MEM_REF)
                       && (!ref2->ref
                           || TREE_CODE (ref2->ref) == SSA_NAME
                           || DECL_P (ref2->ref)
                           || handled_component_p (ref2->ref)
                           || INDIRECT_REF_P (ref2->ref)
+                          || TREE_CODE (ref2->ref) == MEM_REF
                           || TREE_CODE (ref2->ref) == TARGET_MEM_REF));
 
   /* Decompose the references into their base objects and the access.  */
@@ -851,8 +932,9 @@ refs_may_alias_p_1 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
     return decl_refs_may_alias_p (base1, offset1, max_size1,
                                  base2, offset2, max_size2);
 
-  ind1_p = INDIRECT_REF_P (base1);
-  ind2_p = INDIRECT_REF_P (base2);
+  ind1_p = INDIRECT_REF_P (base1) || (TREE_CODE (base1) == MEM_REF);
+  ind2_p = INDIRECT_REF_P (base2) || (TREE_CODE (base2) == MEM_REF);
+
   /* Canonicalize the pointer-vs-decl case.  */
   if (ind1_p && var2_p)
     {
@@ -869,59 +951,6 @@ refs_may_alias_p_1 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
       ind2_p = true;
     }
 
-  /* If we are about to disambiguate pointer-vs-decl try harder to
-     see must-aliases and give leeway to some invalid cases.
-     This covers a pretty minimal set of cases only and does not
-     when called from the RTL oracle.  It handles cases like
-
-       int i = 1;
-       return *(float *)&i;
-
-     and also fixes gfortran.dg/lto/pr40725.  */
-  if (var1_p && ind2_p
-      && cfun
-      && gimple_in_ssa_p (cfun)
-      && TREE_CODE (TREE_OPERAND (base2, 0)) == SSA_NAME)
-    {
-      gimple def_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (base2, 0));
-      while (is_gimple_assign (def_stmt)
-            && (gimple_assign_rhs_code (def_stmt) == SSA_NAME
-                || CONVERT_EXPR_CODE_P (gimple_assign_rhs_code (def_stmt))))
-       {
-         tree rhs = gimple_assign_rhs1 (def_stmt);
-         HOST_WIDE_INT offset, size, max_size;
-
-         /* Look through SSA name copies and pointer conversions.  */
-         if (TREE_CODE (rhs) == SSA_NAME
-             && POINTER_TYPE_P (TREE_TYPE (rhs)))
-           {
-             def_stmt = SSA_NAME_DEF_STMT (rhs);
-             continue;
-           }
-         if (TREE_CODE (rhs) != ADDR_EXPR)
-           break;
-
-         /* If the pointer is defined as an address based on a decl
-            use plain offset disambiguation and ignore TBAA.  */
-         rhs = TREE_OPERAND (rhs, 0);
-         rhs = get_ref_base_and_extent (rhs, &offset, &size, &max_size);
-         if (SSA_VAR_P (rhs))
-           {
-             base2 = rhs;
-             offset2 += offset;
-             if (size != max_size
-                 || max_size == -1)
-               max_size2 = -1;
-             return decl_refs_may_alias_p (base1, offset1, max_size1,
-                                           base2, offset2, max_size2);
-           }
-
-         /* Do not continue looking through &p->x to limit time
-            complexity.  */
-         break;
-       }
-    }
-
   /* First defer to TBAA if possible.  */
   if (tbaa_p
       && flag_strict_aliasing
@@ -937,21 +966,23 @@ refs_may_alias_p_1 (ao_ref *ref1, ao_ref *ref2, bool tbaa_p)
     return true;
 
   /* Dispatch to the pointer-vs-decl or pointer-vs-pointer disambiguators.  */
-  set = tbaa_p ? -1 : 0;
   if (var1_p && ind2_p)
-    return indirect_ref_may_alias_decl_p (ref2->ref, TREE_OPERAND (base2, 0),
+    return indirect_ref_may_alias_decl_p (ref2->ref, base2,
                                          offset2, max_size2,
-                                         ao_ref_alias_set (ref2), set,
+                                         ao_ref_alias_set (ref2), -1,
                                          ref1->ref, base1,
                                          offset1, max_size1,
-                                         ao_ref_alias_set (ref1), set);
+                                         ao_ref_alias_set (ref1),
+                                         ao_ref_base_alias_set (ref1),
+                                         tbaa_p);
   else if (ind1_p && ind2_p)
-    return indirect_refs_may_alias_p (ref1->ref, TREE_OPERAND (base1, 0),
+    return indirect_refs_may_alias_p (ref1->ref, base1,
                                      offset1, max_size1,
-                                     ao_ref_alias_set (ref1), set,
-                                     ref2->ref, TREE_OPERAND (base2, 0),
+                                     ao_ref_alias_set (ref1), -1,
+                                     ref2->ref, base2,
                                      offset2, max_size2,
-                                     ao_ref_alias_set (ref2), set);
+                                     ao_ref_alias_set (ref2), -1,
+                                     tbaa_p);
 
   gcc_unreachable ();
 }
@@ -1110,7 +1141,8 @@ ref_maybe_used_by_call_p_1 (gimple call, ao_ref *ref)
       if (pt_solution_includes (gimple_call_use_set (call), base))
        return true;
     }
-  else if (INDIRECT_REF_P (base)
+  else if ((INDIRECT_REF_P (base)
+           || TREE_CODE (base) == MEM_REF)
           && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
     {
       struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
@@ -1281,7 +1313,8 @@ call_may_clobber_ref_p_1 (gimple call, ao_ref *ref)
              if (DECL_P (base)
                  && !TREE_STATIC (base))
                return true;
-             else if (INDIRECT_REF_P (base)
+             else if ((INDIRECT_REF_P (base)
+                       || TREE_CODE (base) == MEM_REF)
                       && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME
                       && (pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0))))
                return pi->pt.anything || pi->pt.nonlocal;
@@ -1360,7 +1393,8 @@ call_may_clobber_ref_p_1 (gimple call, ao_ref *ref)
   /* Check if the base variable is call-clobbered.  */
   if (DECL_P (base))
     return pt_solution_includes (gimple_call_clobber_set (call), base);
-  else if (INDIRECT_REF_P (base)
+  else if ((INDIRECT_REF_P (base)
+           || TREE_CODE (base) == MEM_REF)
           && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
     {
       struct ptr_info_def *pi = SSA_NAME_PTR_INFO (TREE_OPERAND (base, 0));
index be4509c237c255e5037fd79809e7228c9f510e71..912c5059363aa9d287dda98bc4b0ecbf20a5179d 100644 (file)
@@ -896,20 +896,22 @@ ccp_fold (gimple stmt)
                  base = &TREE_OPERAND (rhs, 0);
                  while (handled_component_p (*base))
                    base = &TREE_OPERAND (*base, 0);
-                 if (TREE_CODE (*base) == INDIRECT_REF
+                 if (TREE_CODE (*base) == MEM_REF
                      && TREE_CODE (TREE_OPERAND (*base, 0)) == SSA_NAME)
                    {
                      prop_value_t *val = get_value (TREE_OPERAND (*base, 0));
                      if (val->lattice_val == CONSTANT
-                         && TREE_CODE (val->value) == ADDR_EXPR
-                         && may_propagate_address_into_dereference
-                              (val->value, *base))
+                         && TREE_CODE (val->value) == ADDR_EXPR)
                        {
+                         tree ret, save = *base;
+                         tree new_base;
+                         new_base = fold_build2 (MEM_REF, TREE_TYPE (*base),
+                                                 unshare_expr (val->value),
+                                                 TREE_OPERAND (*base, 1));
                          /* We need to return a new tree, not modify the IL
                             or share parts of it.  So play some tricks to
                             avoid manually building it.  */
-                         tree ret, save = *base;
-                         *base = TREE_OPERAND (val->value, 0);
+                         *base = new_base;
                          ret = unshare_expr (rhs);
                          recompute_tree_invariant_for_addr_expr (ret);
                          *base = save;
@@ -955,15 +957,19 @@ ccp_fold (gimple stmt)
                                           TREE_CODE (rhs),
                                           TREE_TYPE (rhs), val->value);
                    }
-                 else if (TREE_CODE (rhs) == INDIRECT_REF
+                 else if (TREE_CODE (rhs) == MEM_REF
                           && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME)
                    {
                      prop_value_t *val = get_value (TREE_OPERAND (rhs, 0));
                      if (val->lattice_val == CONSTANT
-                         && TREE_CODE (val->value) == ADDR_EXPR
-                         && useless_type_conversion_p (TREE_TYPE (rhs),
-                                                       TREE_TYPE (TREE_TYPE (val->value))))
-                       rhs = TREE_OPERAND (val->value, 0);
+                         && TREE_CODE (val->value) == ADDR_EXPR)
+                       {
+                         tree tem = fold_build2 (MEM_REF, TREE_TYPE (rhs),
+                                                 unshare_expr (val->value),
+                                                 TREE_OPERAND (rhs, 1));
+                         if (tem)
+                           rhs = tem;
+                       }
                    }
                  return fold_const_aggregate_ref (rhs);
                }
@@ -987,16 +993,10 @@ ccp_fold (gimple stmt)
                 allowed places.  */
              if (CONVERT_EXPR_CODE_P (subcode)
                  && POINTER_TYPE_P (TREE_TYPE (lhs))
-                 && POINTER_TYPE_P (TREE_TYPE (op0))
-                 /* Do not allow differences in volatile qualification
-                    as this might get us confused as to whether a
-                    propagation destination statement is volatile
-                    or not.  See PR36988.  */
-                 && (TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (lhs)))
-                     == TYPE_VOLATILE (TREE_TYPE (TREE_TYPE (op0)))))
+                 && POINTER_TYPE_P (TREE_TYPE (op0)))
                {
                  tree tem;
-                 /* Still try to generate a constant of correct type.  */
+                 /* Try to re-construct array references on-the-fly.  */
                  if (!useless_type_conversion_p (TREE_TYPE (lhs),
                                                  TREE_TYPE (op0))
                      && ((tem = maybe_fold_offset_to_address
@@ -1018,19 +1018,21 @@ ccp_fold (gimple stmt)
               tree op0 = get_rhs_assign_op_for_ccp (stmt, 1);
               tree op1 = get_rhs_assign_op_for_ccp (stmt, 2);
 
-             /* Fold &foo + CST into an invariant reference if possible.  */
+             /* Translate &x + CST into an invariant form suitable for
+                further propagation.  */
              if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
                  && TREE_CODE (op0) == ADDR_EXPR
                  && TREE_CODE (op1) == INTEGER_CST)
                {
-                 tree tem = maybe_fold_offset_to_address
-                   (loc, op0, op1, TREE_TYPE (op0));
-                 if (tem != NULL_TREE)
-                   return tem;
+                 tree off = fold_convert (ptr_type_node, op1);
+                 return build_fold_addr_expr
+                          (fold_build2 (MEM_REF,
+                                        TREE_TYPE (TREE_TYPE (op0)),
+                                        unshare_expr (op0), off));
                }
 
               return fold_binary_loc (loc, subcode,
-                                 gimple_expr_type (stmt), op0, op1);
+                                     gimple_expr_type (stmt), op0, op1);
             }
 
           case GIMPLE_TERNARY_RHS:
@@ -1299,18 +1301,97 @@ fold_const_aggregate_ref (tree t)
        break;
       }
 
-    case INDIRECT_REF:
-      {
-       tree base = TREE_OPERAND (t, 0);
-       if (TREE_CODE (base) == SSA_NAME
-           && (value = get_value (base))
-           && value->lattice_val == CONSTANT
-           && TREE_CODE (value->value) == ADDR_EXPR
-           && useless_type_conversion_p (TREE_TYPE (t),
-                                         TREE_TYPE (TREE_TYPE (value->value))))
-         return fold_const_aggregate_ref (TREE_OPERAND (value->value, 0));
-       break;
-      }
+    case MEM_REF:
+      /* Get the base object we are accessing.  */
+      base = TREE_OPERAND (t, 0);
+      if (TREE_CODE (base) == SSA_NAME
+         && (value = get_value (base))
+         && value->lattice_val == CONSTANT)
+       base = value->value;
+      if (TREE_CODE (base) != ADDR_EXPR)
+       return NULL_TREE;
+      base = TREE_OPERAND (base, 0);
+      switch (TREE_CODE (base))
+       {
+       case VAR_DECL:
+         if (DECL_P (base)
+             && !AGGREGATE_TYPE_P (TREE_TYPE (base))
+             && integer_zerop (TREE_OPERAND (t, 1)))
+           return get_symbol_constant_value (base);
+
+         if (!TREE_READONLY (base)
+             || TREE_CODE (TREE_TYPE (base)) != ARRAY_TYPE
+             || !targetm.binds_local_p (base))
+           return NULL_TREE;
+
+         ctor = DECL_INITIAL (base);
+         break;
+
+       case STRING_CST:
+       case CONSTRUCTOR:
+         ctor = base;
+         break;
+
+       default:
+         return NULL_TREE;
+       }
+
+      if (ctor == NULL_TREE
+         || (TREE_CODE (ctor) != CONSTRUCTOR
+             && TREE_CODE (ctor) != STRING_CST)
+         || !TREE_STATIC (ctor))
+       return NULL_TREE;
+
+      /* Get the byte offset.  */
+      idx = TREE_OPERAND (t, 1);
+
+      /* Fold read from constant string.  */
+      if (TREE_CODE (ctor) == STRING_CST)
+       {
+         if ((TYPE_MODE (TREE_TYPE (t))
+              == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
+             && (GET_MODE_CLASS (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
+                 == MODE_INT)
+             && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor)))) == 1
+             && compare_tree_int (idx, TREE_STRING_LENGTH (ctor)) < 0)
+           return build_int_cst_type (TREE_TYPE (t),
+                                      (TREE_STRING_POINTER (ctor)
+                                       [TREE_INT_CST_LOW (idx)]));
+         return NULL_TREE;
+       }
+
+      /* ???  Implement byte-offset indexing into a non-array CONSTRUCTOR.  */
+      if (TREE_CODE (TREE_TYPE (ctor)) == ARRAY_TYPE
+         && (TYPE_MODE (TREE_TYPE (t))
+             == TYPE_MODE (TREE_TYPE (TREE_TYPE (ctor))))
+         && GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (t))) != 0
+         && integer_zerop
+              (int_const_binop
+                 (TRUNC_MOD_EXPR, idx,
+                  size_int (GET_MODE_SIZE (TYPE_MODE (TREE_TYPE (t)))), 0)))
+       {
+         idx = int_const_binop (TRUNC_DIV_EXPR, idx,
+                                size_int (GET_MODE_SIZE
+                                            (TYPE_MODE (TREE_TYPE (t)))), 0);
+         FOR_EACH_CONSTRUCTOR_ELT (CONSTRUCTOR_ELTS (ctor), cnt, cfield, cval)
+           if (tree_int_cst_equal (cfield, idx))
+             {
+               STRIP_NOPS (cval);
+               if (TREE_CODE (cval) == ADDR_EXPR)
+                 {
+                   tree base = get_base_address (TREE_OPERAND (cval, 0));
+                   if (base && TREE_CODE (base) == VAR_DECL)
+                     add_referenced_var (base);
+                 }
+               if (useless_type_conversion_p (TREE_TYPE (t), TREE_TYPE (cval)))
+                 return cval;
+               else if (CONSTANT_CLASS_P (cval))
+                 return fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (t), cval);
+               else
+                 return NULL_TREE;
+             }
+       }
+      break;
 
     default:
       break;
@@ -1498,7 +1579,7 @@ ccp_fold_stmt (gimple_stmt_iterator *gsi)
          {
            tree rhs = unshare_expr (val->value);
            if (!useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (rhs)))
-             rhs = fold_convert (TREE_TYPE (lhs), rhs);
+             rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (lhs), rhs);
            gimple_assign_set_rhs_from_tree (gsi, rhs);
            return true;
          }
index cfbc26a7ee73b3ac1d4f2d86635b12504646ebae..edec49d374ffbcd85f7ae0cd78dd8a2f4a32455e 100644 (file)
@@ -499,6 +499,9 @@ ref_may_be_aliased (tree ref)
 {
   while (handled_component_p (ref))
     ref = TREE_OPERAND (ref, 0);
+  if (TREE_CODE (ref) == MEM_REF
+      && TREE_CODE (TREE_OPERAND (ref, 0)) == ADDR_EXPR)
+    ref = TREE_OPERAND (TREE_OPERAND (ref, 0), 0);
   return !(DECL_P (ref)
           && !may_be_aliased (ref));
 }
index eb6c831f7c8e98625275d18656da1ed00c6f7341..5044afffcd9d1655bf999f0df4cb7a455a089dd4 100644 (file)
@@ -628,9 +628,14 @@ forward_propagate_addr_into_variable_array_index (tree offset,
 {
   tree index, tunit;
   gimple offset_def, use_stmt = gsi_stmt (*use_stmt_gsi);
-  tree tmp;
+  tree new_rhs, tmp;
 
-  tunit = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (def_rhs)));
+  if (TREE_CODE (TREE_OPERAND (def_rhs, 0)) == ARRAY_REF)
+    tunit = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (def_rhs)));
+  else if (TREE_CODE (TREE_TYPE (TREE_OPERAND (def_rhs, 0))) == ARRAY_TYPE)
+    tunit = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (TREE_TYPE (def_rhs))));
+  else
+    return false;
   if (!host_integerp (tunit, 1))
     return false;
 
@@ -697,10 +702,28 @@ forward_propagate_addr_into_variable_array_index (tree offset,
   /* Replace the pointer addition with array indexing.  */
   index = force_gimple_operand_gsi (use_stmt_gsi, index, true, NULL_TREE,
                                    true, GSI_SAME_STMT);
-  gimple_assign_set_rhs_from_tree (use_stmt_gsi, unshare_expr (def_rhs));
+  if (TREE_CODE (TREE_OPERAND (def_rhs, 0)) == ARRAY_REF)
+    {
+      new_rhs = unshare_expr (def_rhs);
+      TREE_OPERAND (TREE_OPERAND (new_rhs, 0), 1) = index;
+    }
+  else
+    {
+      new_rhs = build4 (ARRAY_REF, TREE_TYPE (TREE_TYPE (TREE_TYPE (def_rhs))),
+                       unshare_expr (TREE_OPERAND (def_rhs, 0)),
+                       index, integer_zero_node, NULL_TREE);
+      new_rhs = build_fold_addr_expr (new_rhs);
+      if (!useless_type_conversion_p (TREE_TYPE (gimple_assign_lhs (use_stmt)),
+                                     TREE_TYPE (new_rhs)))
+       {
+         new_rhs = force_gimple_operand_gsi (use_stmt_gsi, new_rhs, true,
+                                             NULL_TREE, true, GSI_SAME_STMT);
+         new_rhs = fold_convert (TREE_TYPE (gimple_assign_lhs (use_stmt)),
+                                 new_rhs);
+       }
+    }
+  gimple_assign_set_rhs_from_tree (use_stmt_gsi, new_rhs);
   use_stmt = gsi_stmt (*use_stmt_gsi);
-  TREE_OPERAND (TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 0), 1)
-    = index;
 
   /* That should have created gimple, so there is no need to
      record information to undo the propagation.  */
@@ -725,11 +748,9 @@ forward_propagate_addr_expr_1 (tree name, tree def_rhs,
                               bool single_use_p)
 {
   tree lhs, rhs, rhs2, array_ref;
-  tree *rhsp, *lhsp;
   gimple use_stmt = gsi_stmt (*use_stmt_gsi);
   enum tree_code rhs_code;
   bool res = true;
-  bool addr_p = false;
 
   gcc_assert (TREE_CODE (def_rhs) == ADDR_EXPR);
 
@@ -767,31 +788,120 @@ forward_propagate_addr_expr_1 (tree name, tree def_rhs,
       return true;
     }
 
+  /* Propagate through constant pointer adjustments.  */
+  if (TREE_CODE (lhs) == SSA_NAME
+      && rhs_code == POINTER_PLUS_EXPR
+      && rhs == name
+      && TREE_CODE (gimple_assign_rhs2 (use_stmt)) == INTEGER_CST)
+    {
+      tree new_def_rhs;
+      /* As we come here with non-invariant addresses in def_rhs we need
+         to make sure we can build a valid constant offsetted address
+        for further propagation.  Simply rely on fold building that
+        and check after the fact.  */
+      new_def_rhs = fold_build2 (MEM_REF, TREE_TYPE (TREE_TYPE (rhs)),
+                                def_rhs,
+                                fold_convert (ptr_type_node,
+                                              gimple_assign_rhs2 (use_stmt)));
+      if (TREE_CODE (new_def_rhs) == MEM_REF
+         && TREE_CODE (TREE_OPERAND (new_def_rhs, 0)) == ADDR_EXPR
+         && !DECL_P (TREE_OPERAND (TREE_OPERAND (new_def_rhs, 0), 0))
+         && !CONSTANT_CLASS_P (TREE_OPERAND (TREE_OPERAND (new_def_rhs, 0), 0)))
+       return false;
+      new_def_rhs = build_fold_addr_expr_with_type (new_def_rhs,
+                                                   TREE_TYPE (rhs));
+
+      /* Recurse.  If we could propagate into all uses of lhs do not
+        bother to replace into the current use but just pretend we did.  */
+      if (TREE_CODE (new_def_rhs) == ADDR_EXPR
+         && forward_propagate_addr_expr (lhs, new_def_rhs))
+       return true;
+
+      if (useless_type_conversion_p (TREE_TYPE (lhs), TREE_TYPE (new_def_rhs)))
+       gimple_assign_set_rhs_with_ops (use_stmt_gsi, TREE_CODE (new_def_rhs),
+                                       new_def_rhs, NULL_TREE);
+      else if (is_gimple_min_invariant (new_def_rhs))
+       gimple_assign_set_rhs_with_ops (use_stmt_gsi, NOP_EXPR,
+                                       new_def_rhs, NULL_TREE);
+      else
+       return false;
+      gcc_assert (gsi_stmt (*use_stmt_gsi) == use_stmt);
+      update_stmt (use_stmt);
+      return true;
+    }
+
   /* Now strip away any outer COMPONENT_REF/ARRAY_REF nodes from the LHS.
      ADDR_EXPR will not appear on the LHS.  */
-  lhsp = gimple_assign_lhs_ptr (use_stmt);
-  while (handled_component_p (*lhsp))
-    lhsp = &TREE_OPERAND (*lhsp, 0);
-  lhs = *lhsp;
+  lhs = gimple_assign_lhs (use_stmt);
+  while (handled_component_p (lhs))
+    lhs = TREE_OPERAND (lhs, 0);
 
-  /* Now see if the LHS node is an INDIRECT_REF using NAME.  If so,
+  /* Now see if the LHS node is a MEM_REF using NAME.  If so,
      propagate the ADDR_EXPR into the use of NAME and fold the result.  */
-  if (TREE_CODE (lhs) == INDIRECT_REF
+  if (TREE_CODE (lhs) == MEM_REF
       && TREE_OPERAND (lhs, 0) == name)
     {
-      if (may_propagate_address_into_dereference (def_rhs, lhs)
-         && (lhsp != gimple_assign_lhs_ptr (use_stmt)
-             || useless_type_conversion_p
-                  (TREE_TYPE (TREE_OPERAND (def_rhs, 0)), TREE_TYPE (rhs))))
+      tree def_rhs_base;
+      HOST_WIDE_INT def_rhs_offset;
+      /* If the address is invariant we can always fold it.  */
+      if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
+                                                        &def_rhs_offset)))
        {
-         *lhsp = unshare_expr (TREE_OPERAND (def_rhs, 0));
-         fold_stmt_inplace (use_stmt);
+         double_int off = mem_ref_offset (lhs);
+         tree new_ptr;
+         off = double_int_add (off,
+                               shwi_to_double_int (def_rhs_offset));
+         if (TREE_CODE (def_rhs_base) == MEM_REF)
+           {
+             off = double_int_add (off, mem_ref_offset (def_rhs_base));
+             new_ptr = TREE_OPERAND (def_rhs_base, 0);
+           }
+         else
+           new_ptr = build_fold_addr_expr (def_rhs_base);
+         TREE_OPERAND (lhs, 0) = new_ptr;
+         TREE_OPERAND (lhs, 1)
+           = double_int_to_tree (TREE_TYPE (TREE_OPERAND (lhs, 1)), off);
          tidy_after_forward_propagate_addr (use_stmt);
-
          /* Continue propagating into the RHS if this was not the only use.  */
          if (single_use_p)
            return true;
        }
+      /* If the LHS is a plain dereference and the value type is the same as
+         that of the pointed-to type of the address we can put the
+        dereferenced address on the LHS preserving the original alias-type.  */
+      else if (gimple_assign_lhs (use_stmt) == lhs
+              && useless_type_conversion_p
+                   (TREE_TYPE (TREE_OPERAND (def_rhs, 0)),
+                    TREE_TYPE (gimple_assign_rhs1 (use_stmt))))
+       {
+         tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
+         tree new_offset, new_base, saved;
+         while (handled_component_p (*def_rhs_basep))
+           def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
+         saved = *def_rhs_basep;
+         if (TREE_CODE (*def_rhs_basep) == MEM_REF)
+           {
+             new_base = TREE_OPERAND (*def_rhs_basep, 0);
+             new_offset
+               = int_const_binop (PLUS_EXPR, TREE_OPERAND (lhs, 1),
+                                  TREE_OPERAND (*def_rhs_basep, 1), 0);
+           }
+         else
+           {
+             new_base = build_fold_addr_expr (*def_rhs_basep);
+             new_offset = TREE_OPERAND (lhs, 1);
+           }
+         *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
+                                  new_base, new_offset);
+         gimple_assign_set_lhs (use_stmt,
+                                unshare_expr (TREE_OPERAND (def_rhs, 0)));
+         *def_rhs_basep = saved;
+         tidy_after_forward_propagate_addr (use_stmt);
+         /* Continue propagating into the RHS if this was not the
+            only use.  */
+         if (single_use_p)
+           return true;
+       }
       else
        /* We can have a struct assignment dereferencing our name twice.
           Note that we didn't propagate into the lhs to not falsely
@@ -801,79 +911,76 @@ forward_propagate_addr_expr_1 (tree name, tree def_rhs,
 
   /* Strip away any outer COMPONENT_REF, ARRAY_REF or ADDR_EXPR
      nodes from the RHS.  */
-  rhsp = gimple_assign_rhs1_ptr (use_stmt);
-  if (TREE_CODE (*rhsp) == ADDR_EXPR)
-    {
-      rhsp = &TREE_OPERAND (*rhsp, 0);
-      addr_p = true;
-    }
-  while (handled_component_p (*rhsp))
-    rhsp = &TREE_OPERAND (*rhsp, 0);
-  rhs = *rhsp;
+  rhs = gimple_assign_rhs1 (use_stmt);
+  if (TREE_CODE (rhs) == ADDR_EXPR)
+    rhs = TREE_OPERAND (rhs, 0);
+  while (handled_component_p (rhs))
+    rhs = TREE_OPERAND (rhs, 0);
 
-  /* Now see if the RHS node is an INDIRECT_REF using NAME.  If so,
+  /* Now see if the RHS node is a MEM_REF using NAME.  If so,
      propagate the ADDR_EXPR into the use of NAME and fold the result.  */
-  if (TREE_CODE (rhs) == INDIRECT_REF
-      && TREE_OPERAND (rhs, 0) == name
-      && may_propagate_address_into_dereference (def_rhs, rhs))
+  if (TREE_CODE (rhs) == MEM_REF
+      && TREE_OPERAND (rhs, 0) == name)
     {
-      *rhsp = unshare_expr (TREE_OPERAND (def_rhs, 0));
-      fold_stmt_inplace (use_stmt);
-      tidy_after_forward_propagate_addr (use_stmt);
-      return res;
+      tree def_rhs_base;
+      HOST_WIDE_INT def_rhs_offset;
+      if ((def_rhs_base = get_addr_base_and_unit_offset (TREE_OPERAND (def_rhs, 0),
+                                                        &def_rhs_offset)))
+       {
+         double_int off = mem_ref_offset (rhs);
+         tree new_ptr;
+         off = double_int_add (off,
+                               shwi_to_double_int (def_rhs_offset));
+         if (TREE_CODE (def_rhs_base) == MEM_REF)
+           {
+             off = double_int_add (off, mem_ref_offset (def_rhs_base));
+             new_ptr = TREE_OPERAND (def_rhs_base, 0);
+           }
+         else
+           new_ptr = build_fold_addr_expr (def_rhs_base);
+         TREE_OPERAND (rhs, 0) = new_ptr;
+         TREE_OPERAND (rhs, 1)
+           = double_int_to_tree (TREE_TYPE (TREE_OPERAND (rhs, 1)), off);
+         fold_stmt_inplace (use_stmt);
+         tidy_after_forward_propagate_addr (use_stmt);
+         return res;
+       }
+      /* If the LHS is a plain dereference and the value type is the same as
+         that of the pointed-to type of the address we can put the
+        dereferenced address on the LHS preserving the original alias-type.  */
+      else if (gimple_assign_rhs1 (use_stmt) == rhs
+              && useless_type_conversion_p
+                   (TREE_TYPE (gimple_assign_lhs (use_stmt)),
+                    TREE_TYPE (TREE_OPERAND (def_rhs, 0))))
+       {
+         tree *def_rhs_basep = &TREE_OPERAND (def_rhs, 0);
+         tree new_offset, new_base, saved;
+         while (handled_component_p (*def_rhs_basep))
+           def_rhs_basep = &TREE_OPERAND (*def_rhs_basep, 0);
+         saved = *def_rhs_basep;
+         if (TREE_CODE (*def_rhs_basep) == MEM_REF)
+           {
+             new_base = TREE_OPERAND (*def_rhs_basep, 0);
+             new_offset
+               = int_const_binop (PLUS_EXPR, TREE_OPERAND (rhs, 1),
+                                  TREE_OPERAND (*def_rhs_basep, 1), 0);
+           }
+         else
+           {
+             new_base = build_fold_addr_expr (*def_rhs_basep);
+             new_offset = TREE_OPERAND (rhs, 1);
+           }
+         *def_rhs_basep = build2 (MEM_REF, TREE_TYPE (*def_rhs_basep),
+                                  new_base, new_offset);
+         gimple_assign_set_rhs1 (use_stmt,
+                                 unshare_expr (TREE_OPERAND (def_rhs, 0)));
+         *def_rhs_basep = saved;
+         fold_stmt_inplace (use_stmt);
+         tidy_after_forward_propagate_addr (use_stmt);
+         return res;
+       }
     }
 
-  /* Now see if the RHS node is an INDIRECT_REF using NAME.  If so,
-     propagate the ADDR_EXPR into the use of NAME and try to
-     create a VCE and fold the result.  */
-  if (TREE_CODE (rhs) == INDIRECT_REF
-      && TREE_OPERAND (rhs, 0) == name
-      && TYPE_SIZE (TREE_TYPE (rhs))
-      && TYPE_SIZE (TREE_TYPE (TREE_OPERAND (def_rhs, 0)))
-      /* Function decls should not be used for VCE either as it could be a
-         function descriptor that we want and not the actual function code.  */
-      && TREE_CODE (TREE_OPERAND (def_rhs, 0)) != FUNCTION_DECL
-      /* We should not convert volatile loads to non volatile loads. */
-      && !TYPE_VOLATILE (TREE_TYPE (rhs))
-      && !TYPE_VOLATILE (TREE_TYPE (TREE_OPERAND (def_rhs, 0)))
-      && operand_equal_p (TYPE_SIZE (TREE_TYPE (rhs)),
-                         TYPE_SIZE (TREE_TYPE (TREE_OPERAND (def_rhs, 0))), 0)
-      /* Make sure we only do TBAA compatible replacements.  */
-      && get_alias_set (TREE_OPERAND (def_rhs, 0)) == get_alias_set (rhs))
-   {
-     tree def_rhs_base, new_rhs = unshare_expr (TREE_OPERAND (def_rhs, 0));
-     new_rhs = fold_build1 (VIEW_CONVERT_EXPR, TREE_TYPE (rhs), new_rhs);
-     if (TREE_CODE (new_rhs) != VIEW_CONVERT_EXPR)
-       {
-        /* If we have folded the VIEW_CONVERT_EXPR then the result is only
-           valid if we can replace the whole rhs of the use statement.  */
-        if (rhs != gimple_assign_rhs1 (use_stmt))
-          return false;
-        new_rhs = force_gimple_operand_gsi (use_stmt_gsi, new_rhs, true, NULL,
-                                            true, GSI_NEW_STMT);
-        gimple_assign_set_rhs1 (use_stmt, new_rhs);
-        tidy_after_forward_propagate_addr (use_stmt);
-        return res;
-       }
-     /* If the defining rhs comes from an indirect reference, then do not
-        convert into a VIEW_CONVERT_EXPR.  Likewise if we'll end up taking
-       the address of a V_C_E of a constant.  */
-     def_rhs_base = TREE_OPERAND (def_rhs, 0);
-     while (handled_component_p (def_rhs_base))
-       def_rhs_base = TREE_OPERAND (def_rhs_base, 0);
-     if (!INDIRECT_REF_P (def_rhs_base)
-        && (!addr_p
-            || !is_gimple_min_invariant (def_rhs)))
-       {
-        /* We may have arbitrary VIEW_CONVERT_EXPRs in a nested component
-           reference.  Place it there and fold the thing.  */
-        *rhsp = new_rhs;
-        fold_stmt_inplace (use_stmt);
-        tidy_after_forward_propagate_addr (use_stmt);
-        return res;
-       }
-   }
-
   /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
      is nothing to do. */
   if (gimple_assign_rhs_code (use_stmt) != POINTER_PLUS_EXPR
@@ -885,9 +992,10 @@ forward_propagate_addr_expr_1 (tree name, tree def_rhs,
      element zero in an array.  If that is not the case then there
      is nothing to do.  */
   array_ref = TREE_OPERAND (def_rhs, 0);
-  if (TREE_CODE (array_ref) != ARRAY_REF
-      || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
-      || TREE_CODE (TREE_OPERAND (array_ref, 1)) != INTEGER_CST)
+  if ((TREE_CODE (array_ref) != ARRAY_REF
+       || TREE_CODE (TREE_TYPE (TREE_OPERAND (array_ref, 0))) != ARRAY_TYPE
+       || TREE_CODE (TREE_OPERAND (array_ref, 1)) != INTEGER_CST)
+      && TREE_CODE (TREE_TYPE (array_ref)) != ARRAY_TYPE)
     return false;
 
   rhs2 = gimple_assign_rhs2 (use_stmt);
@@ -923,7 +1031,8 @@ forward_propagate_addr_expr_1 (tree name, tree def_rhs,
      array elements, then the result is converted into the proper
      type for the arithmetic.  */
   if (TREE_CODE (rhs2) == SSA_NAME
-      && integer_zerop (TREE_OPERAND (array_ref, 1))
+      && (TREE_CODE (array_ref) != ARRAY_REF
+         || integer_zerop (TREE_OPERAND (array_ref, 1)))
       && useless_type_conversion_p (TREE_TYPE (name), TREE_TYPE (def_rhs))
       /* Avoid problems with IVopts creating PLUS_EXPRs with a
         different type than their operands.  */
@@ -1300,13 +1409,35 @@ tree_ssa_forward_propagate_single_use_vars (void)
                  else
                    gsi_next (&gsi);
                }
-             else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR
-                      && is_gimple_min_invariant (rhs))
+             else if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
                {
-                 /* Make sure to fold &a[0] + off_1 here.  */
-                 fold_stmt_inplace (stmt);
-                 update_stmt (stmt);
-                 if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
+                 if (TREE_CODE (gimple_assign_rhs2 (stmt)) == INTEGER_CST
+                     /* ???  Better adjust the interface to that function
+                        instead of building new trees here.  */
+                     && forward_propagate_addr_expr
+                          (lhs,
+                           build1 (ADDR_EXPR,
+                                   TREE_TYPE (rhs),
+                                   fold_build2 (MEM_REF,
+                                                TREE_TYPE (TREE_TYPE (rhs)),
+                                                rhs,
+                                                fold_convert
+                                                  (ptr_type_node,
+                                                   gimple_assign_rhs2 (stmt))))))
+                   {
+                     release_defs (stmt);
+                     todoflags |= TODO_remove_unused_locals;
+                     gsi_remove (&gsi, true);
+                   }
+                 else if (is_gimple_min_invariant (rhs))
+                   {
+                     /* Make sure to fold &a[0] + off_1 here.  */
+                     fold_stmt_inplace (stmt);
+                     update_stmt (stmt);
+                     if (gimple_assign_rhs_code (stmt) == POINTER_PLUS_EXPR)
+                       gsi_next (&gsi);
+                   }
+                 else
                    gsi_next (&gsi);
                }
              else if ((gimple_assign_rhs_code (stmt) == BIT_NOT_EXPR
index ce8bc34ebbac84a68a15656a9ff31a3bca76dee0..6aaeaa09b5977a493d6c91f01c11177e4d071710 100644 (file)
@@ -274,7 +274,7 @@ for_each_index (tree *addr_p, bool (*cbck) (tree, tree *, void *), void *data)
 
        case MISALIGNED_INDIRECT_REF:
        case ALIGN_INDIRECT_REF:
-       case INDIRECT_REF:
+       case MEM_REF:
          nxt = &TREE_OPERAND (*addr_p, 0);
          return cbck (*addr_p, nxt, data);
 
@@ -1985,11 +1985,15 @@ gen_lsm_tmp_name (tree ref)
     {
     case MISALIGNED_INDIRECT_REF:
     case ALIGN_INDIRECT_REF:
-    case INDIRECT_REF:
+    case MEM_REF:
       gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
       lsm_tmp_name_add ("_");
       break;
 
+    case ADDR_EXPR:
+      gen_lsm_tmp_name (TREE_OPERAND (ref, 0));
+      break;
+
     case BIT_FIELD_REF:
     case VIEW_CONVERT_EXPR:
     case ARRAY_RANGE_REF:
@@ -2150,7 +2154,8 @@ ref_always_accessed_p (struct loop *loop, mem_ref_p ref, bool stored_p)
   tree base;
 
   base = get_base_address (ref->mem);
-  if (INDIRECT_REF_P (base))
+  if (INDIRECT_REF_P (base)
+      || TREE_CODE (base) == MEM_REF)
     base = TREE_OPERAND (base, 0);
 
   get_all_locs_in_loop (loop, ref, &locs);
@@ -2169,7 +2174,8 @@ ref_always_accessed_p (struct loop *loop, mem_ref_p ref, bool stored_p)
          lhs = get_base_address (gimple_get_lhs (loc->stmt));
          if (!lhs)
            continue;
-         if (INDIRECT_REF_P (lhs))
+         if (INDIRECT_REF_P (lhs)
+             || TREE_CODE (lhs) == MEM_REF)
            lhs = TREE_OPERAND (lhs, 0);
          if (lhs != base)
            continue;
index 32b8935265388a3697aea55ff0fd5a39e3cb6f84..46356588c94b046db0d92de3608afdedcf1c5ba0 100644 (file)
@@ -813,7 +813,7 @@ determine_base_object (tree expr)
       if (!base)
        return expr;
 
-      if (TREE_CODE (base) == INDIRECT_REF)
+      if (TREE_CODE (base) == MEM_REF)
        return determine_base_object (TREE_OPERAND (base, 0));
 
       return fold_convert (ptr_type_node,
@@ -1694,9 +1694,11 @@ find_interesting_uses_address (struct ivopts_data *data, gimple stmt, tree *op_p
          tree *ref = &TREE_OPERAND (base, 0);
          while (handled_component_p (*ref))
            ref = &TREE_OPERAND (*ref, 0);
-         if (TREE_CODE (*ref) == INDIRECT_REF)
+         if (TREE_CODE (*ref) == MEM_REF)
            {
-             tree tem = gimple_fold_indirect_ref (TREE_OPERAND (*ref, 0));
+             tree tem = fold_binary (MEM_REF, TREE_TYPE (*ref),
+                                     TREE_OPERAND (*ref, 0),
+                                     TREE_OPERAND (*ref, 1));
              if (tem)
                *ref = tem;
            }
@@ -2018,7 +2020,8 @@ strip_offset_1 (tree expr, bool inside_addr, bool top_compref,
       expr = build_fold_addr_expr (op0);
       return fold_convert (orig_type, expr);
 
-    case INDIRECT_REF:
+    case MEM_REF:
+      /* ???  Offset operand?  */
       inside_addr = false;
       break;
 
@@ -3889,7 +3892,7 @@ fallback:
       return infinite_cost;
 
     if (address_p)
-      comp = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (comp)), comp);
+      comp = build_simple_mem_ref (comp);
 
     return new_cost (computation_cost (comp, speed), 0);
   }
index c0c89113d41496a425c77165d345131e896717fb..9956cc551b4153b2454e2303943353d682f2be9c 100644 (file)
@@ -2625,7 +2625,7 @@ array_at_struct_end_p (tree ref)
 
   /* Unless the reference is through a pointer, the size of the array matches
      its declaration.  */
-  if (!base || !INDIRECT_REF_P (base))
+  if (!base || (!INDIRECT_REF_P (base) && TREE_CODE (base) != MEM_REF))
     return false;
 
   for (;handled_component_p (ref); ref = parent)
@@ -2651,7 +2651,6 @@ array_at_struct_end_p (tree ref)
         Therefore, continue checking.  */
     }
 
-  gcc_assert (INDIRECT_REF_P (ref));
   return true;
 }
 
index b784a68b18eeeaec2eaf23f03af782439ccbdca9..fdc906a8b1ba3a390cdd8e658344d774c78e23ab 100644 (file)
@@ -127,6 +127,12 @@ static struct
    clobbering sites like function calls or ASM_EXPRs.  */
 #define opf_implicit   (1 << 2)
 
+/* Operand is in a place where address-taken does not imply addressable.  */
+#define opf_non_addressable (1 << 3)
+
+/* Operand is in a place where opf_non_addressable does not apply.  */
+#define opf_not_non_addressable (1 << 4)
+
 /* Array for building all the def operands.  */
 static VEC(tree,heap) *build_defs;
 
@@ -693,15 +699,22 @@ mark_address_taken (tree ref)
      be referenced using pointer arithmetic.  See PR 21407 and the
      ensuing mailing list discussion.  */
   var = get_base_address (ref);
-  if (var && DECL_P (var))
-    TREE_ADDRESSABLE (var) = 1;
+  if (var)
+    {
+      if (DECL_P (var))
+       TREE_ADDRESSABLE (var) = 1;
+      else if (TREE_CODE (var) == MEM_REF
+              && TREE_CODE (TREE_OPERAND (var, 0)) == ADDR_EXPR
+              && DECL_P (TREE_OPERAND (TREE_OPERAND (var, 0), 0)))
+       TREE_ADDRESSABLE (TREE_OPERAND (TREE_OPERAND (var, 0), 0)) = 1;
+    }
 }
 
 
-/* A subroutine of get_expr_operands to handle INDIRECT_REF,
+/* A subroutine of get_expr_operands to handle MEM_REF,
    ALIGN_INDIRECT_REF and MISALIGNED_INDIRECT_REF.
 
-   STMT is the statement being processed, EXPR is the INDIRECT_REF
+   STMT is the statement being processed, EXPR is the MEM_REF
       that got us here.
 
    FLAGS is as in get_expr_operands.
@@ -725,7 +738,8 @@ get_indirect_ref_operands (gimple stmt, tree expr, int flags,
   /* If requested, add a USE operand for the base pointer.  */
   if (recurse_on_base)
     get_expr_operands (stmt, pptr,
-                      opf_use | (flags & opf_no_vops));
+                      opf_non_addressable | opf_use
+                      | (flags & (opf_no_vops|opf_not_non_addressable)));
 }
 
 
@@ -802,7 +816,7 @@ get_asm_expr_operands (gimple stmt)
       if (!allows_reg && allows_mem)
        mark_address_taken (TREE_VALUE (link));
 
-      get_expr_operands (stmt, &TREE_VALUE (link), opf_def);
+      get_expr_operands (stmt, &TREE_VALUE (link), opf_def | opf_not_non_addressable);
     }
 
   /* Gather all input operands.  */
@@ -818,7 +832,7 @@ get_asm_expr_operands (gimple stmt)
       if (!allows_reg && allows_mem)
        mark_address_taken (TREE_VALUE (link));
 
-      get_expr_operands (stmt, &TREE_VALUE (link), 0);
+      get_expr_operands (stmt, &TREE_VALUE (link), opf_not_non_addressable);
     }
 
   /* Clobber all memory and addressable symbols for asm ("" : : : "memory");  */
@@ -862,7 +876,9 @@ get_expr_operands (gimple stmt, tree *expr_p, int flags)
         reference to it, but the fact that the statement takes its
         address will be of interest to some passes (e.g. alias
         resolution).  */
-      if (!is_gimple_debug (stmt))
+      if ((!(flags & opf_non_addressable)
+          || (flags & opf_not_non_addressable))
+         && !is_gimple_debug (stmt))
        mark_address_taken (TREE_OPERAND (expr, 0));
 
       /* If the address is invariant, there may be no interesting
@@ -876,7 +892,8 @@ get_expr_operands (gimple stmt, tree *expr_p, int flags)
         here are ARRAY_REF indices which will always be real operands
         (GIMPLE does not allow non-registers as array indices).  */
       flags |= opf_no_vops;
-      get_expr_operands (stmt, &TREE_OPERAND (expr, 0), flags);
+      get_expr_operands (stmt, &TREE_OPERAND (expr, 0),
+                        flags | opf_not_non_addressable);
       return;
 
     case SSA_NAME:
@@ -898,7 +915,7 @@ get_expr_operands (gimple stmt, tree *expr_p, int flags)
       /* fall through */
 
     case ALIGN_INDIRECT_REF:
-    case INDIRECT_REF:
+    case MEM_REF:
       get_indirect_ref_operands (stmt, expr, flags, true);
       return;
 
index 53ab31a15df3b07477fb74b9d2f36f1e58d81f5e..482f267a6483d9441a04f2f867a7c8098c6b1c5d 100644 (file)
@@ -994,10 +994,10 @@ abs_replacement (basic_block cond_bb, basic_block middle_bb,
 
 /* Auxiliary functions to determine the set of memory accesses which
    can't trap because they are preceded by accesses to the same memory
-   portion.  We do that for INDIRECT_REFs, so we only need to track
+   portion.  We do that for MEM_REFs, so we only need to track
    the SSA_NAME of the pointer indirectly referenced.  The algorithm
    simply is a walk over all instructions in dominator order.  When
-   we see an INDIRECT_REF we determine if we've already seen a same
+   we see a MEM_REF we determine if we've already seen a same
    ref anywhere up to the root of the dominator tree.  If we do the
    current access can't trap.  If we don't see any dominating access
    the current access might trap, but might also make later accesses
@@ -1011,7 +1011,7 @@ abs_replacement (basic_block cond_bb, basic_block middle_bb,
    trap even if a store doesn't (write-only memory).  This probably is
    overly conservative.  */
 
-/* A hash-table of SSA_NAMEs, and in which basic block an INDIRECT_REF
+/* A hash-table of SSA_NAMEs, and in which basic block a MEM_REF
    through it was seen, which would constitute a no-trap region for
    same accesses.  */
 struct name_to_bb
@@ -1024,7 +1024,7 @@ struct name_to_bb
 /* The hash table for remembering what we've seen.  */
 static htab_t seen_ssa_names;
 
-/* The set of INDIRECT_REFs which can't trap.  */
+/* The set of MEM_REFs which can't trap.  */
 static struct pointer_set_t *nontrap_set;
 
 /* The hash function, based on the pointer to the pointer SSA_NAME.  */
@@ -1047,7 +1047,7 @@ name_to_bb_eq (const void *p1, const void *p2)
 }
 
 /* We see the expression EXP in basic block BB.  If it's an interesting
-   expression (an INDIRECT_REF through an SSA_NAME) possibly insert the
+   expression (a MEM_REF through an SSA_NAME) possibly insert the
    expression into the set NONTRAP or the hash table of seen expressions.
    STORE is true if this expression is on the LHS, otherwise it's on
    the RHS.  */
@@ -1055,7 +1055,7 @@ static void
 add_or_mark_expr (basic_block bb, tree exp,
                  struct pointer_set_t *nontrap, bool store)
 {
-  if (INDIRECT_REF_P (exp)
+  if (TREE_CODE (exp) == MEM_REF
       && TREE_CODE (TREE_OPERAND (exp, 0)) == SSA_NAME)
     {
       tree name = TREE_OPERAND (exp, 0);
@@ -1064,7 +1064,7 @@ add_or_mark_expr (basic_block bb, tree exp,
       struct name_to_bb *n2bb;
       basic_block found_bb = 0;
 
-      /* Try to find the last seen INDIRECT_REF through the same
+      /* Try to find the last seen MEM_REF through the same
          SSA_NAME, which can trap.  */
       map.ssa_name = name;
       map.bb = 0;
@@ -1074,7 +1074,7 @@ add_or_mark_expr (basic_block bb, tree exp,
       if (n2bb)
         found_bb = n2bb->bb;
 
-      /* If we've found a trapping INDIRECT_REF, _and_ it dominates EXP
+      /* If we've found a trapping MEM_REF, _and_ it dominates EXP
          (it's in a basic block on the path from us to the dominator root)
         then we can't trap.  */
       if (found_bb && found_bb->aux == (void *)1)
@@ -1135,7 +1135,7 @@ nt_fini_block (struct dom_walk_data *data ATTRIBUTE_UNUSED, basic_block bb)
 /* This is the entry point of gathering non trapping memory accesses.
    It will do a dominator walk over the whole function, and it will
    make use of the bb->aux pointers.  It returns a set of trees
-   (the INDIRECT_REFs itself) which can't trap.  */
+   (the MEM_REFs themselves) which can't trap.  */
 static struct pointer_set_t *
 get_non_trapping (void)
 {
@@ -1200,7 +1200,8 @@ cond_store_replacement (basic_block middle_bb, basic_block join_bb,
   locus = gimple_location (assign);
   lhs = gimple_assign_lhs (assign);
   rhs = gimple_assign_rhs1 (assign);
-  if (!INDIRECT_REF_P (lhs))
+  if (TREE_CODE (lhs) != MEM_REF
+      || TREE_CODE (TREE_OPERAND (lhs, 0)) != SSA_NAME)
     return false;
 
   /* RHS is either a single SSA_NAME or a constant. */
index dc1cb28f7c34be26fe2d5ecef0e9e11d1bd4448f..6595515e1ada03196b37c21ff50a1707f86a9dd2 100644 (file)
@@ -139,7 +139,7 @@ phiprop_insert_phi (basic_block bb, gimple phi, gimple use_stmt,
   edge e;
 
   gcc_assert (is_gimple_assign (use_stmt)
-             && gimple_assign_rhs_code (use_stmt) == INDIRECT_REF);
+             && gimple_assign_rhs_code (use_stmt) == MEM_REF);
 
   /* Build a new PHI node to replace the definition of
      the indirect reference lhs.  */
@@ -295,8 +295,11 @@ propagate_with_phi (basic_block bb, gimple phi, struct phiprop_d *phivn,
       /* Check whether this is a load of *ptr.  */
       if (!(is_gimple_assign (use_stmt)
            && TREE_CODE (gimple_assign_lhs (use_stmt)) == SSA_NAME
-           && gimple_assign_rhs_code (use_stmt) == INDIRECT_REF
+           && gimple_assign_rhs_code (use_stmt) == MEM_REF
            && TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 0) == ptr
+           && integer_zerop (TREE_OPERAND (gimple_assign_rhs1 (use_stmt), 1))
+           && types_compatible_p (TREE_TYPE (gimple_assign_rhs1 (use_stmt)),
+                                  TREE_TYPE (TREE_TYPE (ptr)))
            /* We cannot replace a load that may throw or is volatile.  */
            && !stmt_can_throw_internal (use_stmt)))
        continue;
index a62439f2af23e6469076ebc4638422c9b8fa8f71..97eb7d2c1887eab162607810e6b1e976ede47416 100644 (file)
@@ -1629,12 +1629,28 @@ phi_translate_1 (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
            newop.op0 = op0;
            newop.op1 = op1;
            newop.op2 = op2;
+           /* If it transforms a non-constant ARRAY_REF into a constant
+              one, adjust the constant offset.  */
+           if (newop.opcode == ARRAY_REF
+               && newop.off == -1
+               && TREE_CODE (op0) == INTEGER_CST
+               && TREE_CODE (op1) == INTEGER_CST
+               && TREE_CODE (op2) == INTEGER_CST)
+             {
+               double_int off = tree_to_double_int (op0);
+               off = double_int_add (off,
+                                     double_int_neg
+                                       (tree_to_double_int (op1)));
+               off = double_int_mul (off, tree_to_double_int (op2));
+               if (double_int_fits_in_shwi_p (off))
+                 newop.off = off.low;
+             }
            VEC_replace (vn_reference_op_s, newoperands, j, &newop);
            /* If it transforms from an SSA_NAME to an address, fold with
               a preceding indirect reference.  */
            if (j > 0 && op0 && TREE_CODE (op0) == ADDR_EXPR
                && VEC_index (vn_reference_op_s,
-                             newoperands, j - 1)->opcode == INDIRECT_REF)
+                             newoperands, j - 1)->opcode == MEM_REF)
              vn_reference_fold_indirect (&newoperands, &j);
          }
        if (i != VEC_length (vn_reference_op_s, operands))
@@ -1661,6 +1677,7 @@ phi_translate_1 (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
          {
            unsigned int new_val_id;
            pre_expr constant;
+           bool converted = false;
 
            tree result = vn_reference_lookup_pieces (newvuse, ref->set,
                                                      ref->type,
@@ -1669,6 +1686,13 @@ phi_translate_1 (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
            if (result)
              VEC_free (vn_reference_op_s, heap, newoperands);
 
+           if (result
+               && !useless_type_conversion_p (ref->type, TREE_TYPE (result)))
+             {
+               result = fold_build1 (VIEW_CONVERT_EXPR, ref->type, result);
+               converted = true;
+             }
+
            if (result && is_gimple_min_invariant (result))
              {
                gcc_assert (!newoperands);
@@ -1679,7 +1703,54 @@ phi_translate_1 (pre_expr expr, bitmap_set_t set1, bitmap_set_t set2,
            expr->kind = REFERENCE;
            expr->id = 0;
 
-           if (newref)
+           if (converted)
+             {
+               vn_nary_op_t nary;
+               tree nresult;
+
+               gcc_assert (CONVERT_EXPR_P (result)
+                           || TREE_CODE (result) == VIEW_CONVERT_EXPR);
+
+               nresult = vn_nary_op_lookup_pieces (1, TREE_CODE (result),
+                                                   TREE_TYPE (result),
+                                                   TREE_OPERAND (result, 0),
+                                                   NULL_TREE, NULL_TREE,
+                                                   NULL_TREE,
+                                                   &nary);
+               if (nresult && is_gimple_min_invariant (nresult))
+                 return get_or_alloc_expr_for_constant (nresult);
+
+               expr->kind = NARY;
+               if (nary)
+                 {
+                   PRE_EXPR_NARY (expr) = nary;
+                   constant = fully_constant_expression (expr);
+                   if (constant != expr)
+                     return constant;
+
+                   new_val_id = nary->value_id;
+                   get_or_alloc_expression_id (expr);
+                 }
+               else
+                 {
+                   new_val_id = get_next_value_id ();
+                   VEC_safe_grow_cleared (bitmap_set_t, heap,
+                                          value_expressions,
+                                          get_max_value_id() + 1);
+                   nary = vn_nary_op_insert_pieces (1, TREE_CODE (result),
+                                                    TREE_TYPE (result),
+                                                    TREE_OPERAND (result, 0),
+                                                    NULL_TREE, NULL_TREE,
+                                                    NULL_TREE, NULL_TREE,
+                                                    new_val_id);
+                   PRE_EXPR_NARY (expr) = nary;
+                   constant = fully_constant_expression (expr);
+                   if (constant != expr)
+                     return constant;
+                   get_or_alloc_expression_id (expr);
+                 }
+             }
+           else if (newref)
              {
                PRE_EXPR_REFERENCE (expr) = newref;
                constant = fully_constant_expression (expr);
@@ -2598,7 +2669,7 @@ can_PRE_operation (tree op)
   return UNARY_CLASS_P (op)
     || BINARY_CLASS_P (op)
     || COMPARISON_CLASS_P (op)
-    || TREE_CODE (op) == INDIRECT_REF
+    || TREE_CODE (op) == MEM_REF 
     || TREE_CODE (op) == COMPONENT_REF
     || TREE_CODE (op) == VIEW_CONVERT_EXPR
     || TREE_CODE (op) == CALL_EXPR
@@ -2674,6 +2745,29 @@ create_component_ref_by_pieces_1 (basic_block block, vn_reference_t ref,
        return folded;
       }
       break;
+    case MEM_REF:
+      {
+       tree baseop = create_component_ref_by_pieces_1 (block, ref, operand,
+                                                       stmts, domstmt);
+       tree offset = currop->op0;
+       if (!baseop)
+         return NULL_TREE;
+       if (TREE_CODE (baseop) == ADDR_EXPR
+           && handled_component_p (TREE_OPERAND (baseop, 0)))
+         {
+           HOST_WIDE_INT off;
+           tree base;
+           base = get_addr_base_and_unit_offset (TREE_OPERAND (baseop, 0),
+                                                 &off);
+           gcc_assert (base);
+           offset = int_const_binop (PLUS_EXPR, offset,
+                                     build_int_cst (TREE_TYPE (offset),
+                                                    off), 0);
+           baseop = build_fold_addr_expr (base);
+         }
+       return fold_build2 (MEM_REF, currop->type, baseop, offset);
+      }
+      break;
     case TARGET_MEM_REF:
       {
        vn_reference_op_t nextop = VEC_index (vn_reference_op_s, ref->operands,
@@ -2728,7 +2822,6 @@ create_component_ref_by_pieces_1 (basic_block block, vn_reference_t ref,
       break;
     case ALIGN_INDIRECT_REF:
     case MISALIGNED_INDIRECT_REF:
-    case INDIRECT_REF:
       {
        tree folded;
        tree genop1 = create_component_ref_by_pieces_1 (block, ref,
@@ -2880,7 +2973,7 @@ create_component_ref_by_pieces_1 (basic_block block, vn_reference_t ref,
 }
 
 /* For COMPONENT_REF's and ARRAY_REF's, we can't have any intermediates for the
-   COMPONENT_REF or INDIRECT_REF or ARRAY_REF portion, because we'd end up with
+   COMPONENT_REF or MEM_REF or ARRAY_REF portion, because we'd end up with
    trying to rename aggregates into ssa form directly, which is a no no.
 
    Thus, this routine doesn't create temporaries, it just builds a
@@ -3131,7 +3224,7 @@ create_expression_by_pieces (basic_block block, pre_expr expr,
   VN_INFO (name)->value_id = value_id;
   nameexpr = get_or_alloc_expr_for_name (name);
   add_to_value (value_id, nameexpr);
-  if (!in_fre)
+  if (NEW_SETS (block))
     bitmap_value_replace_in_set (NEW_SETS (block), nameexpr);
   bitmap_value_replace_in_set (AVAIL_OUT (block), nameexpr);
 
@@ -3310,6 +3403,8 @@ insert_into_preds_of_block (basic_block block, unsigned int exprnum,
                      avail[bprime->index] = get_or_alloc_expr_for_name (forcedexpr);
                    }
                }
+             else
+               avail[bprime->index] = get_or_alloc_expr_for_constant (builtexpr);
            }
        }
       else if (eprime->kind == NAME)
@@ -4723,7 +4818,7 @@ execute_pre (bool do_fre)
   if (!do_fre)
     loop_optimizer_init (LOOPS_NORMAL);
 
-  if (!run_scc_vn (do_fre))
+  if (!run_scc_vn ())
     {
       if (!do_fre)
        loop_optimizer_finalize ();
index b27fe0c0bc54d9312dcb4f67c15fefbae86ef1b0..cc667207ee0e44eb450f3dbe70a2dca13de508b2 100644 (file)
@@ -156,8 +156,6 @@ static unsigned int next_value_id;
 static unsigned int next_dfs_num;
 static VEC (tree, heap) *sccstack;
 
-static bool may_insert;
-
 
 DEF_VEC_P(vn_ssa_aux_t);
 DEF_VEC_ALLOC_P(vn_ssa_aux_t, heap);
@@ -431,9 +429,41 @@ vn_reference_compute_hash (const vn_reference_t vr1)
   hashval_t result = 0;
   int i;
   vn_reference_op_t vro;
+  HOST_WIDE_INT off = -1;
+  bool deref = false;
 
   for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
-    result = vn_reference_op_compute_hash (vro, result);
+    {
+      if (vro->opcode == MEM_REF)
+       deref = true;
+      else if (vro->opcode != ADDR_EXPR)
+       deref = false;
+      if (vro->off != -1)
+       {
+         if (off == -1)
+           off = 0;
+         off += vro->off;
+       }
+      else
+       {
+         if (off != -1
+             && off != 0)
+           result = iterative_hash_hashval_t (off, result);
+         off = -1;
+         if (deref
+             && vro->opcode == ADDR_EXPR)
+           {
+             if (vro->op0)
+               {
+                 tree op = TREE_OPERAND (vro->op0, 0);
+                 result = iterative_hash_hashval_t (TREE_CODE (op), result);
+                 result = iterative_hash_expr (op, result);
+               }
+           }
+         else
+           result = vn_reference_op_compute_hash (vro, result);
+       }
+    }
   if (vr1->vuse)
     result += SSA_NAME_VERSION (vr1->vuse);
 
@@ -446,8 +476,7 @@ vn_reference_compute_hash (const vn_reference_t vr1)
 int
 vn_reference_eq (const void *p1, const void *p2)
 {
-  int i;
-  vn_reference_op_t vro;
+  unsigned i, j;
 
   const_vn_reference_t const vr1 = (const_vn_reference_t) p1;
   const_vn_reference_t const vr2 = (const_vn_reference_t) p2;
@@ -466,17 +495,58 @@ vn_reference_eq (const void *p1, const void *p2)
   if (vr1->operands == vr2->operands)
     return true;
 
-  /* We require that address operands be canonicalized in a way that
-     two memory references will have the same operands if they are
-     equivalent.  */
-  if (VEC_length (vn_reference_op_s, vr1->operands)
-      != VEC_length (vn_reference_op_s, vr2->operands))
+  if (!expressions_equal_p (TYPE_SIZE (vr1->type), TYPE_SIZE (vr2->type)))
     return false;
 
-  for (i = 0; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro); i++)
-    if (!vn_reference_op_eq (VEC_index (vn_reference_op_s, vr2->operands, i),
-                            vro))
-      return false;
+  i = 0;
+  j = 0;
+  do
+    {
+      HOST_WIDE_INT off1 = 0, off2 = 0;
+      vn_reference_op_t vro1, vro2;
+      vn_reference_op_s tem1, tem2;
+      bool deref1 = false, deref2 = false;
+      for (; VEC_iterate (vn_reference_op_s, vr1->operands, i, vro1); i++)
+       {
+         if (vro1->opcode == MEM_REF)
+           deref1 = true;
+         if (vro1->off == -1)
+           break;
+         off1 += vro1->off;
+       }
+      for (; VEC_iterate (vn_reference_op_s, vr2->operands, j, vro2); j++)
+       {
+         if (vro2->opcode == MEM_REF)
+           deref2 = true;
+         if (vro2->off == -1)
+           break;
+         off2 += vro2->off;
+       }
+      if (off1 != off2)
+       return false;
+      if (deref1 && vro1->opcode == ADDR_EXPR)
+       {
+         memset (&tem1, 0, sizeof (tem1));
+         tem1.op0 = TREE_OPERAND (vro1->op0, 0);
+         tem1.type = TREE_TYPE (tem1.op0);
+         tem1.opcode = TREE_CODE (tem1.op0);
+         vro1 = &tem1;
+       }
+      if (deref2 && vro2->opcode == ADDR_EXPR)
+       {
+         memset (&tem2, 0, sizeof (tem2));
+         tem2.op0 = TREE_OPERAND (vro2->op0, 0);
+         tem2.type = TREE_TYPE (tem2.op0);
+         tem2.opcode = TREE_CODE (tem2.op0);
+         vro2 = &tem2;
+       }
+      if (!vn_reference_op_eq (vro1, vro2))
+       return false;
+      ++j;
+      ++i;
+    }
+  while (VEC_length (vn_reference_op_s, vr1->operands) != i
+        || VEC_length (vn_reference_op_s, vr2->operands) != j);
 
   return true;
 }
@@ -503,6 +573,7 @@ copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
       temp.op0 = TMR_INDEX (ref);
       temp.op1 = TMR_STEP (ref);
       temp.op2 = TMR_OFFSET (ref);
+      temp.off = -1;
       VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
 
       memset (&temp, 0, sizeof (temp));
@@ -510,6 +581,7 @@ copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
       temp.opcode = TREE_CODE (base);
       temp.op0 = base;
       temp.op1 = TMR_ORIGINAL (ref);
+      temp.off = -1;
       VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
       return;
     }
@@ -524,17 +596,23 @@ copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
       /* We do not care for spurious type qualifications.  */
       temp.type = TYPE_MAIN_VARIANT (TREE_TYPE (ref));
       temp.opcode = TREE_CODE (ref);
+      temp.off = -1;
 
       switch (temp.opcode)
        {
        case ALIGN_INDIRECT_REF:
-       case INDIRECT_REF:
          /* The only operand is the address, which gets its own
             vn_reference_op_s structure.  */
          break;
        case MISALIGNED_INDIRECT_REF:
          temp.op0 = TREE_OPERAND (ref, 1);
          break;
+       case MEM_REF:
+         /* The base address gets its own vn_reference_op_s structure.  */
+         temp.op0 = TREE_OPERAND (ref, 1);
+         if (host_integerp (TREE_OPERAND (ref, 1), 0))
+           temp.off = TREE_INT_CST_LOW (TREE_OPERAND (ref, 1));
+         break;
        case BIT_FIELD_REF:
          /* Record bits and position.  */
          temp.op0 = TREE_OPERAND (ref, 1);
@@ -547,17 +625,25 @@ copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
          temp.type = NULL_TREE;
          temp.op0 = TREE_OPERAND (ref, 1);
          temp.op1 = TREE_OPERAND (ref, 2);
-         /* If this is a reference to a union member, record the union
-            member size as operand.  Do so only if we are doing
-            expression insertion (during FRE), as PRE currently gets
-            confused with this.  */
-         if (may_insert
-             && temp.op1 == NULL_TREE
-             && TREE_CODE (DECL_CONTEXT (temp.op0)) == UNION_TYPE
-             && integer_zerop (DECL_FIELD_OFFSET (temp.op0))
-             && integer_zerop (DECL_FIELD_BIT_OFFSET (temp.op0))
-             && host_integerp (DECL_SIZE (temp.op0), 0))
-           temp.op0 = DECL_SIZE (temp.op0);
+         {
+           tree this_offset = component_ref_field_offset (ref);
+           if (this_offset
+               && TREE_CODE (this_offset) == INTEGER_CST)
+             {
+               tree bit_offset = DECL_FIELD_BIT_OFFSET (TREE_OPERAND (ref, 1));
+               if (TREE_INT_CST_LOW (bit_offset) % BITS_PER_UNIT == 0)
+                 {
+                   double_int off
+                     = double_int_add (tree_to_double_int (this_offset),
+                                       double_int_sdiv
+                                         (tree_to_double_int (bit_offset),
+                                          uhwi_to_double_int (BITS_PER_UNIT),
+                                          TRUNC_DIV_EXPR));
+                   if (double_int_fits_in_shwi_p (off))
+                     temp.off = off.low;
+                 }
+             }
+         }
          break;
        case ARRAY_RANGE_REF:
        case ARRAY_REF:
@@ -566,6 +652,18 @@ copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
          /* Always record lower bounds and element size.  */
          temp.op1 = array_ref_low_bound (ref);
          temp.op2 = array_ref_element_size (ref);
+         if (TREE_CODE (temp.op0) == INTEGER_CST
+             && TREE_CODE (temp.op1) == INTEGER_CST
+             && TREE_CODE (temp.op2) == INTEGER_CST)
+           {
+             double_int off = tree_to_double_int (temp.op0);
+             off = double_int_add (off,
+                                   double_int_neg
+                                     (tree_to_double_int (temp.op1)));
+             off = double_int_mul (off, tree_to_double_int (temp.op2));
+             if (double_int_fits_in_shwi_p (off))
+               temp.off = off.low;
+           }
          break;
        case STRING_CST:
        case INTEGER_CST:
@@ -592,9 +690,13 @@ copy_reference_ops_from_ref (tree ref, VEC(vn_reference_op_s, heap) **result)
             ref in the chain of references (IE they require an
             operand), so we don't have to put anything
             for op* as it will be handled by the iteration  */
-       case IMAGPART_EXPR:
        case REALPART_EXPR:
        case VIEW_CONVERT_EXPR:
+         temp.off = 0;
+         break;
+       case IMAGPART_EXPR:
+         /* This is only interesting for its constant offset.  */
+         temp.off = TREE_INT_CST_LOW (TYPE_SIZE_UNIT (TREE_TYPE (ref)));
          break;
        default:
          gcc_unreachable ();
@@ -627,16 +729,12 @@ ao_ref_init_from_vn_reference (ao_ref *ref,
   HOST_WIDE_INT max_size;
   HOST_WIDE_INT size = -1;
   tree size_tree = NULL_TREE;
+  alias_set_type base_alias_set = -1;
 
   /* First get the final access size from just the outermost expression.  */
   op = VEC_index (vn_reference_op_s, ops, 0);
   if (op->opcode == COMPONENT_REF)
-    {
-      if (TREE_CODE (op->op0) == INTEGER_CST)
-       size_tree = op->op0;
-      else
-       size_tree = DECL_SIZE (op->op0);
-    }
+    size_tree = DECL_SIZE (op->op0);
   else if (op->opcode == BIT_FIELD_REF)
     size_tree = op->op0;
   else
@@ -667,13 +765,31 @@ ao_ref_init_from_vn_reference (ao_ref *ref,
        {
        /* These may be in the reference ops, but we cannot do anything
           sensible with them here.  */
-       case CALL_EXPR:
        case ADDR_EXPR:
+         /* Apart from ADDR_EXPR arguments to MEM_REF.  */
+         if (base != NULL_TREE
+             && TREE_CODE (base) == MEM_REF
+             && op->op0
+             && DECL_P (TREE_OPERAND (op->op0, 0)))
+           {
+             vn_reference_op_t pop = VEC_index (vn_reference_op_s, ops, i-1);
+             base = TREE_OPERAND (op->op0, 0);
+             if (pop->off == -1)
+               {
+                 max_size = -1;
+                 offset = 0;
+               }
+             else
+               offset += pop->off * BITS_PER_UNIT;
+             op0_p = NULL;
+             break;
+           }
+         /* Fallthru.  */
+       case CALL_EXPR:
          return false;
 
        /* Record the base objects.  */
        case ALIGN_INDIRECT_REF:
-       case INDIRECT_REF:
          *op0_p = build1 (op->opcode, op->type, NULL_TREE);
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;
@@ -684,11 +800,19 @@ ao_ref_init_from_vn_reference (ao_ref *ref,
          op0_p = &TREE_OPERAND (*op0_p, 0);
          break;
 
+       case MEM_REF:
+         base_alias_set = get_deref_alias_set (op->op0);
+         *op0_p = build2 (MEM_REF, op->type,
+                          NULL_TREE, op->op0);
+         op0_p = &TREE_OPERAND (*op0_p, 0);
+         break;
+
        case VAR_DECL:
        case PARM_DECL:
        case RESULT_DECL:
        case SSA_NAME:
          *op0_p = op->op0;
+         op0_p = NULL;
          break;
 
        /* And now the usual component-reference style ops.  */
@@ -703,11 +827,8 @@ ao_ref_init_from_vn_reference (ao_ref *ref,
               cannot use component_ref_field_offset.  Do the interesting
               parts manually.  */
 
-           /* Our union trick, done for offset zero only.  */
-           if (TREE_CODE (field) == INTEGER_CST)
-             ;
-           else if (op->op1
-                    || !host_integerp (DECL_FIELD_OFFSET (field), 1))
+           if (op->op1
+               || !host_integerp (DECL_FIELD_OFFSET (field), 1))
              max_size = -1;
            else
              {
@@ -768,7 +889,10 @@ ao_ref_init_from_vn_reference (ao_ref *ref,
   ref->size = size;
   ref->max_size = max_size;
   ref->ref_alias_set = set;
-  ref->base_alias_set = -1;
+  if (base_alias_set != -1)
+    ref->base_alias_set = base_alias_set;
+  else
+    ref->base_alias_set = get_alias_set (base);
 
   return true;
 }
@@ -789,6 +913,7 @@ copy_reference_ops_from_call (gimple call,
   temp.opcode = CALL_EXPR;
   temp.op0 = gimple_call_fn (call);
   temp.op1 = gimple_call_chain (call);
+  temp.off = -1;
   VEC_safe_push (vn_reference_op_s, heap, *result, &temp);
 
   /* Copy the call arguments.  As they can be references as well,
@@ -830,62 +955,30 @@ void
 vn_reference_fold_indirect (VEC (vn_reference_op_s, heap) **ops,
                            unsigned int *i_p)
 {
-  VEC(vn_reference_op_s, heap) *mem = NULL;
-  vn_reference_op_t op;
   unsigned int i = *i_p;
-  unsigned int j;
-
-  /* Get ops for the addressed object.  */
-  op = VEC_index (vn_reference_op_s, *ops, i);
-  /* ???  If this is our usual typeof &ARRAY vs. &ARRAY[0] problem, work
-     around it to avoid later ICEs.  */
-  if (TREE_CODE (TREE_TYPE (TREE_OPERAND (op->op0, 0))) == ARRAY_TYPE
-      && TREE_CODE (TREE_TYPE (TREE_TYPE (op->op0))) != ARRAY_TYPE)
-    {
-      vn_reference_op_s aref;
-      tree dom;
-      aref.type = TYPE_MAIN_VARIANT (TREE_TYPE (TREE_TYPE (op->op0)));
-      aref.opcode = ARRAY_REF;
-      aref.op0 = integer_zero_node;
-      if ((dom = TYPE_DOMAIN (TREE_TYPE (TREE_OPERAND (op->op0, 0))))
-         && TYPE_MIN_VALUE (dom))
-       aref.op0 = TYPE_MIN_VALUE (dom);
-      aref.op1 = aref.op0;
-      aref.op2 = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (op->op0)));
-      VEC_safe_push (vn_reference_op_s, heap, mem, &aref);
-    }
-  copy_reference_ops_from_ref (TREE_OPERAND (op->op0, 0), &mem);
-
-  /* Do the replacement - we should have at least one op in mem now.  */
-  if (VEC_length (vn_reference_op_s, mem) == 1)
-    {
-      VEC_replace (vn_reference_op_s, *ops, i - 1,
-                  VEC_index (vn_reference_op_s, mem, 0));
-      VEC_ordered_remove (vn_reference_op_s, *ops, i);
-      i--;
-    }
-  else if (VEC_length (vn_reference_op_s, mem) == 2)
-    {
-      VEC_replace (vn_reference_op_s, *ops, i - 1,
-                  VEC_index (vn_reference_op_s, mem, 0));
-      VEC_replace (vn_reference_op_s, *ops, i,
-                  VEC_index (vn_reference_op_s, mem, 1));
-    }
-  else if (VEC_length (vn_reference_op_s, mem) > 2)
-    {
-      VEC_replace (vn_reference_op_s, *ops, i - 1,
-                  VEC_index (vn_reference_op_s, mem, 0));
-      VEC_replace (vn_reference_op_s, *ops, i,
-                  VEC_index (vn_reference_op_s, mem, 1));
-      /* ???  There is no VEC_splice.  */
-      for (j = 2; VEC_iterate (vn_reference_op_s, mem, j, op); j++)
-       VEC_safe_insert (vn_reference_op_s, heap, *ops, ++i, op);
+  vn_reference_op_t op = VEC_index (vn_reference_op_s, *ops, i);
+  vn_reference_op_t mem_op = VEC_index (vn_reference_op_s, *ops, i - 1);
+  tree addr_base;
+  HOST_WIDE_INT addr_offset;
+
+  /* The only thing we have to do is from &OBJ.foo.bar add the offset
+     from .foo.bar to the preceding MEM_REF offset and replace the
+     address with &OBJ.  */
+  addr_base = get_addr_base_and_unit_offset (TREE_OPERAND (op->op0, 0),
+                                            &addr_offset);
+  gcc_checking_assert (addr_base && TREE_CODE (addr_base) != MEM_REF);
+  if (addr_base != op->op0)
+    {
+      double_int off = tree_to_double_int (mem_op->op0);
+      off = double_int_sext (off, TYPE_PRECISION (TREE_TYPE (mem_op->op0)));
+      off = double_int_add (off, shwi_to_double_int (addr_offset));
+      mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
+      op->op0 = build_fold_addr_expr (addr_base);
+      if (host_integerp (mem_op->op0, 0))
+       mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
+      else
+       mem_op->off = -1;
     }
-  else
-    gcc_unreachable ();
-
-  VEC_free (vn_reference_op_s, heap, mem);
-  *i_p = i;
 }
 
 /* Optimize the reference REF to a constant if possible or return
@@ -978,20 +1071,35 @@ valueize_refs (VEC (vn_reference_op_s, heap) *orig)
             the opcode.  */
          if (TREE_CODE (vro->op0) != SSA_NAME && vro->opcode == SSA_NAME)
            vro->opcode = TREE_CODE (vro->op0);
-         /* If it transforms from an SSA_NAME to an address, fold with
-            a preceding indirect reference.  */
-         if (i > 0 && TREE_CODE (vro->op0) == ADDR_EXPR
-             && VEC_index (vn_reference_op_s,
-                           orig, i - 1)->opcode == INDIRECT_REF)
-           {
-             vn_reference_fold_indirect (&orig, &i);
-             continue;
-           }
        }
       if (vro->op1 && TREE_CODE (vro->op1) == SSA_NAME)
        vro->op1 = SSA_VAL (vro->op1);
       if (vro->op2 && TREE_CODE (vro->op2) == SSA_NAME)
        vro->op2 = SSA_VAL (vro->op2);
+      /* If it transforms from an SSA_NAME to an address, fold with
+        a preceding indirect reference.  */
+      if (i > 0
+         && vro->op0
+         && TREE_CODE (vro->op0) == ADDR_EXPR
+         && VEC_index (vn_reference_op_s,
+                       orig, i - 1)->opcode == MEM_REF)
+       vn_reference_fold_indirect (&orig, &i);
+      /* If it transforms a non-constant ARRAY_REF into a constant
+        one, adjust the constant offset.  */
+      else if (vro->opcode == ARRAY_REF
+              && vro->off == -1
+              && TREE_CODE (vro->op0) == INTEGER_CST
+              && TREE_CODE (vro->op1) == INTEGER_CST
+              && TREE_CODE (vro->op2) == INTEGER_CST)
+       {
+         double_int off = tree_to_double_int (vro->op0);
+         off = double_int_add (off,
+                               double_int_neg
+                                 (tree_to_double_int (vro->op1)));
+         off = double_int_mul (off, tree_to_double_int (vro->op2));
+         if (double_int_fits_in_shwi_p (off))
+           vro->off = off.low;
+       }
     }
 
   return orig;
@@ -1172,7 +1280,7 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
      the copy kills ref.  */
   else if (gimple_assign_single_p (def_stmt)
           && (DECL_P (gimple_assign_rhs1 (def_stmt))
-              || INDIRECT_REF_P (gimple_assign_rhs1 (def_stmt))
+              || TREE_CODE (gimple_assign_rhs1 (def_stmt)) == MEM_REF
               || handled_component_p (gimple_assign_rhs1 (def_stmt))))
     {
       tree base2;
@@ -2092,9 +2200,9 @@ visit_reference_op_load (tree lhs, tree op, gimple stmt)
        result = vn_nary_op_lookup (val, NULL);
       /* If the expression is not yet available, value-number lhs to
         a new SSA_NAME we create.  */
-      if (!result && may_insert)
+      if (!result)
         {
-         result = make_ssa_name (SSA_NAME_VAR (lhs), NULL);
+         result = make_ssa_name (SSA_NAME_VAR (lhs), gimple_build_nop ());
          /* Initialize value-number information properly.  */
          VN_INFO_GET (result)->valnum = result;
          VN_INFO (result)->value_id = get_next_value_id ();
@@ -3266,14 +3374,12 @@ set_hashtable_value_ids (void)
    due to resource constraints.  */
 
 bool
-run_scc_vn (bool may_insert_arg)
+run_scc_vn (void)
 {
   size_t i;
   tree param;
   bool changed = true;
 
-  may_insert = may_insert_arg;
-
   init_scc_vn ();
   current_info = valid_info;
 
@@ -3297,7 +3403,6 @@ run_scc_vn (bool may_insert_arg)
        if (!DFS (name))
          {
            free_scc_vn ();
-           may_insert = false;
            return false;
          }
     }
@@ -3359,7 +3464,6 @@ run_scc_vn (bool may_insert_arg)
        }
     }
 
-  may_insert = false;
   return true;
 }
 
index 8f93b0b25183d18a4645281d5e8cdbd3012171f2..ba7fc5c3e49bffc310c81efda416b363f64779a1 100644 (file)
@@ -72,6 +72,8 @@ typedef const struct vn_phi_s *const_vn_phi_t;
 typedef struct vn_reference_op_struct
 {
   enum tree_code opcode;
+  /* Constant offset this op adds or -1 if it is variable.  */
+  HOST_WIDE_INT off;
   tree type;
   tree op0;
   tree op1;
@@ -167,7 +169,7 @@ typedef struct vn_ssa_aux
 extern vn_ssa_aux_t VN_INFO (tree);
 extern vn_ssa_aux_t VN_INFO_GET (tree);
 tree vn_get_expr_for (tree);
-bool run_scc_vn (bool);
+bool run_scc_vn (void);
 void free_scc_vn (void);
 tree vn_nary_op_lookup (tree, vn_nary_op_t *);
 tree vn_nary_op_lookup_stmt (gimple, vn_nary_op_t *);
index ac31781e198f76b74cfac85e987cf5cd1bd19c3e..9deec26b11d5bbcc461247a5385a64ed6fcddcf6 100644 (file)
@@ -190,8 +190,11 @@ is_hidden_global_store (gimple stmt)
            return true;
 
        }
-      else if (INDIRECT_REF_P (lhs))
+      else if (INDIRECT_REF_P (lhs)
+              || TREE_CODE (lhs) == MEM_REF)
        return ptr_deref_may_alias_global_p (TREE_OPERAND (lhs, 0));
+      else if (CONSTANT_CLASS_P (lhs))
+       return true;
       else
        gcc_unreachable ();
     }
index 05fd6d971f2456e303a866d1d016863622d8dea5..77ff17a6962554a6594f3ab87b90e532b47a845b 100644 (file)
@@ -3107,7 +3107,8 @@ get_constraint_for_component_ref (tree t, VEC(ce_s, heap) **results,
      &0->a.b */
   forzero = t;
   while (handled_component_p (forzero)
-        || INDIRECT_REF_P (forzero))
+        || INDIRECT_REF_P (forzero)
+        || TREE_CODE (forzero) == MEM_REF)
     forzero = TREE_OPERAND (forzero, 0);
 
   if (CONSTANT_CLASS_P (forzero) && integer_zerop (forzero))
@@ -3334,9 +3335,10 @@ get_constraint_for_1 (tree t, VEC (ce_s, heap) **results, bool address_p)
       {
        switch (TREE_CODE (t))
          {
-         case INDIRECT_REF:
+         case MEM_REF:
            {
-             get_constraint_for_1 (TREE_OPERAND (t, 0), results, address_p);
+             get_constraint_for_ptr_offset (TREE_OPERAND (t, 0),
+                                            TREE_OPERAND (t, 1), results);
              do_deref (results);
              return;
            }
@@ -4572,7 +4574,11 @@ find_func_clobbers (gimple origt)
        tem = TREE_OPERAND (tem, 0);
       if ((DECL_P (tem)
           && !auto_var_in_fn_p (tem, cfun->decl))
-         || INDIRECT_REF_P (tem))
+         || INDIRECT_REF_P (tem)
+         || (TREE_CODE (tem) == MEM_REF
+             && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
+                  && auto_var_in_fn_p
+                       (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), cfun->decl))))
        {
          struct constraint_expr lhsc, *rhsp;
          unsigned i;
@@ -4596,7 +4602,11 @@ find_func_clobbers (gimple origt)
        tem = TREE_OPERAND (tem, 0);
       if ((DECL_P (tem)
           && !auto_var_in_fn_p (tem, cfun->decl))
-         || INDIRECT_REF_P (tem))
+         || INDIRECT_REF_P (tem)
+         || (TREE_CODE (tem) == MEM_REF
+             && !(TREE_CODE (TREE_OPERAND (tem, 0)) == ADDR_EXPR
+                  && auto_var_in_fn_p
+                       (TREE_OPERAND (TREE_OPERAND (tem, 0), 0), cfun->decl))))
        {
          struct constraint_expr lhs, *rhsp;
          unsigned i;
index 2f5b8305f744a38ba2c587d827c24ea1f00e2365..a997f7884b0fd59d9770448b6e63f65b7d20fdbd 100644 (file)
@@ -616,6 +616,24 @@ find_replaceable_in_bb (temp_expr_table_p tab, basic_block bb)
                      }
                  }
 
+             /* If the stmt does a memory store and the replacement
+                is a load aliasing it, avoid creating overlapping
+                assignments which we cannot expand correctly.  */
+             if (gimple_vdef (stmt)
+                 && gimple_assign_single_p (stmt))
+               {
+                 gimple def_stmt = SSA_NAME_DEF_STMT (use);
+                 while (is_gimple_assign (def_stmt)
+                        && gimple_assign_rhs_code (def_stmt) == SSA_NAME)
+                   def_stmt
+                     = SSA_NAME_DEF_STMT (gimple_assign_rhs1 (def_stmt));
+                 if (gimple_vuse (def_stmt)
+                     && gimple_assign_single_p (def_stmt)
+                     && refs_may_alias_p (gimple_assign_lhs (stmt),
+                                          gimple_assign_rhs1 (def_stmt)))
+                   same_root_var = true;
+               }
+
              /* Mark expression as replaceable unless stmt is volatile or the
                 def variable has the same root variable as something in the
                 substitution list.  */
index 866ce6eb2b7656d09e430a625daee54c017a9131..2ea2e685b9d91d873d111a2def2bafe23f5f9063 100644 (file)
@@ -1204,6 +1204,12 @@ useless_type_conversion_p (tree outer_type, tree inner_type)
          != TYPE_ADDR_SPACE (TREE_TYPE (inner_type)))
        return false;
 
+      /* Do not lose casts to restrict qualified pointers.  */
+      if ((TYPE_RESTRICT (outer_type)
+          != TYPE_RESTRICT (inner_type))
+         && TYPE_RESTRICT (outer_type))
+       return false;
+
       /* If the outer type is (void *) or a pointer to an incomplete
         record type or a pointer to an unprototyped function,
         then the conversion is not necessary.  */
@@ -1216,12 +1222,6 @@ useless_type_conversion_p (tree outer_type, tree inner_type)
              && useless_type_conversion_p (TREE_TYPE (TREE_TYPE (outer_type)),
                                            TREE_TYPE (TREE_TYPE (inner_type)))))
        return true;
-
-      /* Do not lose casts to restrict qualified pointers.  */
-      if ((TYPE_RESTRICT (outer_type)
-          != TYPE_RESTRICT (inner_type))
-         && TYPE_RESTRICT (outer_type))
-       return false;
     }
 
   /* From now on qualifiers on value types do not matter.  */
@@ -1273,41 +1273,18 @@ useless_type_conversion_p (tree outer_type, tree inner_type)
   else if (POINTER_TYPE_P (inner_type)
           && POINTER_TYPE_P (outer_type))
     {
-      /* Don't lose casts between pointers to volatile and non-volatile
-        qualified types.  Doing so would result in changing the semantics
-        of later accesses.  For function types the volatile qualifier
-        is used to indicate noreturn functions.  */
-      if (TREE_CODE (TREE_TYPE (outer_type)) != FUNCTION_TYPE
-         && TREE_CODE (TREE_TYPE (outer_type)) != METHOD_TYPE
-         && TREE_CODE (TREE_TYPE (inner_type)) != FUNCTION_TYPE
-         && TREE_CODE (TREE_TYPE (inner_type)) != METHOD_TYPE
-         && (TYPE_VOLATILE (TREE_TYPE (outer_type))
-             != TYPE_VOLATILE (TREE_TYPE (inner_type)))
-         && TYPE_VOLATILE (TREE_TYPE (outer_type)))
-       return false;
-
-      /* We require explicit conversions from incomplete target types.  */
-      if (!COMPLETE_TYPE_P (TREE_TYPE (inner_type))
-         && COMPLETE_TYPE_P (TREE_TYPE (outer_type)))
-       return false;
-
-      /* Do not lose casts between pointers that when dereferenced access
-        memory with different alias sets.  */
-      if (get_deref_alias_set (inner_type) != get_deref_alias_set (outer_type))
+      /* Do not lose casts to function pointer types.  */
+      if ((TREE_CODE (TREE_TYPE (outer_type)) == FUNCTION_TYPE
+          || TREE_CODE (TREE_TYPE (outer_type)) == METHOD_TYPE)
+         && !useless_type_conversion_p (TREE_TYPE (outer_type),
+                                        TREE_TYPE (inner_type)))
        return false;
 
       /* We do not care for const qualification of the pointed-to types
         as const qualification has no semantic value to the middle-end.  */
 
-      /* Otherwise pointers/references are equivalent if their pointed
-        to types are effectively the same.  We can strip qualifiers
-        on pointed-to types for further comparison, which is done in
-        the callee.  Note we have to use true compatibility here
-        because addresses are subject to propagation into dereferences
-        and thus might get the original type exposed which is equivalent
-        to a reverse conversion.  */
-      return types_compatible_p (TREE_TYPE (outer_type),
-                                TREE_TYPE (inner_type));
+      /* Otherwise pointers/references are equivalent.  */
+      return true;
     }
 
   /* Recurse for complex types.  */
@@ -1673,8 +1650,9 @@ warn_uninitialized_var (tree *tp, int *walk_subtrees, void *data_)
   /* We do not care about LHS.  */
   if (wi->is_lhs)
     {
-      /* Except for operands of INDIRECT_REF.  */
-      if (!INDIRECT_REF_P (t))
+      /* Except for operands of dereferences.  */
+      if (!INDIRECT_REF_P (t)
+         && TREE_CODE (t) != MEM_REF)
        return NULL_TREE;
       t = TREE_OPERAND (t, 0);
     }
@@ -1822,6 +1800,34 @@ struct gimple_opt_pass pass_early_warn_uninitialized =
  }
 };
 
+
+/* If necessary, rewrite the base of the reference tree *TP from
+   a MEM_REF to a plain or converted symbol.  */
+
+static void
+maybe_rewrite_mem_ref_base (tree *tp)
+{
+  tree sym;
+
+  while (handled_component_p (*tp))
+    tp = &TREE_OPERAND (*tp, 0);
+  if (TREE_CODE (*tp) == MEM_REF
+      && TREE_CODE (TREE_OPERAND (*tp, 0)) == ADDR_EXPR
+      && integer_zerop (TREE_OPERAND (*tp, 1))
+      && (sym = TREE_OPERAND (TREE_OPERAND (*tp, 0), 0))
+      && DECL_P (sym)
+      && !TREE_ADDRESSABLE (sym)
+      && symbol_marked_for_renaming (sym))
+    {
+      if (!useless_type_conversion_p (TREE_TYPE (*tp),
+                                     TREE_TYPE (sym)))
+       *tp = build1 (VIEW_CONVERT_EXPR,
+                       TREE_TYPE (*tp), sym);
+      else
+       *tp = sym;
+    }
+}
+
 /* Compute TREE_ADDRESSABLE and DECL_GIMPLE_REG_P for local variables.  */
 
 void
@@ -1853,17 +1859,50 @@ execute_update_addresses_taken (bool do_optimize)
            {
               tree lhs = gimple_get_lhs (stmt);
 
-              /* We may not rewrite TMR_SYMBOL to SSA.  */
-              if (lhs && TREE_CODE (lhs) == TARGET_MEM_REF
-                  && TMR_SYMBOL (lhs))
-                bitmap_set_bit (not_reg_needs, DECL_UID (TMR_SYMBOL (lhs)));
+              /* A plain decl does not need it set.  */
+              if (lhs && !DECL_P (lhs))
+               {
+                 if (handled_component_p (lhs))
+                   lhs = get_base_address (lhs);
+
+                  if (DECL_P (lhs))
+                    bitmap_set_bit (not_reg_needs, DECL_UID (lhs));
+                 else if (TREE_CODE (lhs) == MEM_REF
+                          && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR)
+                   {
+                     tree decl = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0);
+                     if (DECL_P (decl)
+                         && (!integer_zerop (TREE_OPERAND (lhs, 1))
+                             || (DECL_SIZE (decl)
+                                 != TYPE_SIZE (TREE_TYPE (lhs)))))
+                       bitmap_set_bit (not_reg_needs, DECL_UID (decl));
+                   }
+                }
+           }
+
+         if (gimple_assign_single_p (stmt))
+           {
+             tree rhs = gimple_assign_rhs1 (stmt);
 
               /* A plain decl does not need it set.  */
-              else if (lhs && handled_component_p (lhs))
-                {
-                  var = get_base_address (lhs);
-                  if (DECL_P (var))
-                    bitmap_set_bit (not_reg_needs, DECL_UID (var));
+              if (!DECL_P (rhs))
+               {
+                 tree base = rhs;
+                 while (handled_component_p (base))
+                   base = TREE_OPERAND (base, 0);
+
+                 /* But watch out for MEM_REFs we cannot lower to a
+                    VIEW_CONVERT_EXPR.  */
+                 if (TREE_CODE (base) == MEM_REF
+                     && TREE_CODE (TREE_OPERAND (base, 0)) == ADDR_EXPR)
+                   {
+                     tree decl = TREE_OPERAND (TREE_OPERAND (base, 0), 0);
+                     if (DECL_P (decl)
+                         && (!integer_zerop (TREE_OPERAND (base, 1))
+                             || (DECL_SIZE (decl)
+                                 != TYPE_SIZE (TREE_TYPE (base)))))
+                       bitmap_set_bit (not_reg_needs, DECL_UID (decl));
+                   }
                 }
            }
        }
@@ -1937,14 +1976,73 @@ execute_update_addresses_taken (bool do_optimize)
   if (update_vops)
     {
       FOR_EACH_BB (bb)
-         for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
-           {
-             gimple stmt = gsi_stmt (gsi);
+       for (gsi = gsi_start_bb (bb); !gsi_end_p (gsi); gsi_next (&gsi))
+         {
+           gimple stmt = gsi_stmt (gsi);
 
-             if (gimple_references_memory_p (stmt)
-                 || is_gimple_debug (stmt))
-               update_stmt (stmt);
-           }
+           /* Re-write TARGET_MEM_REFs of symbols we want to
+              rewrite into SSA form.  */
+           if (gimple_assign_single_p (stmt))
+             {
+               tree lhs = gimple_assign_lhs (stmt);
+               tree rhs, *rhsp = gimple_assign_rhs1_ptr (stmt);
+               tree sym;
+
+               /* We shouldn't have any fancy wrapping of
+                  component-refs on the LHS, but look through
+                  VIEW_CONVERT_EXPRs as that is easy.  */
+               while (TREE_CODE (lhs) == VIEW_CONVERT_EXPR)
+                 lhs = TREE_OPERAND (lhs, 0);
+               if (TREE_CODE (lhs) == MEM_REF
+                   && TREE_CODE (TREE_OPERAND (lhs, 0)) == ADDR_EXPR
+                   && integer_zerop (TREE_OPERAND (lhs, 1))
+                   && (sym = TREE_OPERAND (TREE_OPERAND (lhs, 0), 0))
+                   && DECL_P (sym)
+                   && !TREE_ADDRESSABLE (sym)
+                   && symbol_marked_for_renaming (sym))
+                 lhs = sym;
+               else
+                 lhs = gimple_assign_lhs (stmt);
+
+               /* Rewrite the RHS and make sure the resulting assignment
+                  is validly typed.  */
+               maybe_rewrite_mem_ref_base (rhsp);
+               rhs = gimple_assign_rhs1 (stmt);
+               if (gimple_assign_lhs (stmt) != lhs
+                   && !useless_type_conversion_p (TREE_TYPE (lhs),
+                                                  TREE_TYPE (rhs)))
+                 rhs = fold_build1 (VIEW_CONVERT_EXPR,
+                                    TREE_TYPE (lhs), rhs);
+
+               if (gimple_assign_lhs (stmt) != lhs)
+                 gimple_assign_set_lhs (stmt, lhs);
+
+               if (gimple_assign_rhs1 (stmt) != rhs)
+                 {
+                   gimple_stmt_iterator gsi = gsi_for_stmt (stmt);
+                   gimple_assign_set_rhs_from_tree (&gsi, rhs);
+                 }
+             }
+
+           if (gimple_code (stmt) == GIMPLE_ASM)
+             {
+               unsigned i;
+               for (i = 0; i < gimple_asm_noutputs (stmt); ++i)
+                 {
+                   tree link = gimple_asm_output_op (stmt, i);
+                   maybe_rewrite_mem_ref_base (&TREE_VALUE (link));
+                 }
+               for (i = 0; i < gimple_asm_ninputs (stmt); ++i)
+                 {
+                   tree link = gimple_asm_input_op (stmt, i);
+                   maybe_rewrite_mem_ref_base (&TREE_VALUE (link));
+                 }
+             }
+
+           if (gimple_references_memory_p (stmt)
+               || is_gimple_debug (stmt))
+             update_stmt (stmt);
+         }
 
       /* Update SSA form here, we are called as non-pass as well.  */
       update_ssa (TODO_update_ssa);
index 3bfbfc2dc196c95a860e02da214658f6db5f1844..dae14f54a6000be9e9d8813506be8671c3cc1c9d 100644 (file)
@@ -512,7 +512,7 @@ check_all_va_list_escapes (struct stdarg_info *si)
                  enum tree_code rhs_code = gimple_assign_rhs_code (stmt);
 
                  /* x = *ap_temp;  */
-                 if (gimple_assign_rhs_code (stmt) == INDIRECT_REF
+                 if (gimple_assign_rhs_code (stmt) == MEM_REF
                      && TREE_OPERAND (rhs, 0) == use
                      && TYPE_SIZE_UNIT (TREE_TYPE (rhs))
                      && host_integerp (TYPE_SIZE_UNIT (TREE_TYPE (rhs)), 1)
@@ -522,6 +522,7 @@ check_all_va_list_escapes (struct stdarg_info *si)
                      tree access_size = TYPE_SIZE_UNIT (TREE_TYPE (rhs));
 
                      gpr_size = si->offsets[SSA_NAME_VERSION (use)]
+                                + tree_low_cst (TREE_OPERAND (rhs, 1), 0)
                                 + tree_low_cst (access_size, 1);
                      if (gpr_size >= VA_LIST_MAX_GPR_SIZE)
                        cfun->va_list_gpr_size = VA_LIST_MAX_GPR_SIZE;
index 6002b09046339f21ac3d39a21532dc03a0ae7b4f..cbefc1f01c4bb775b4b8dc5966fddff1082e8945 100644 (file)
@@ -2398,12 +2398,9 @@ vect_create_addr_base_for_vector_ref (gimple stmt,
                              data_ref_base, base_offset);
   else
     {
-      if (TREE_CODE (DR_REF (dr)) == INDIRECT_REF)
-        addr_base = unshare_expr (TREE_OPERAND (DR_REF (dr), 0));
-      else
-        addr_base = build1 (ADDR_EXPR,
-                            build_pointer_type (TREE_TYPE (DR_REF (dr))),
-                            unshare_expr (DR_REF (dr)));
+      addr_base = build1 (ADDR_EXPR,
+                         build_pointer_type (TREE_TYPE (DR_REF (dr))),
+                         unshare_expr (DR_REF (dr)));
     }
 
   vect_ptr_type = build_pointer_type (STMT_VINFO_VECTYPE (stmt_info));
index eded879eb38ecc97a63131be6f88f3b2bd28af00..c95fe7d273e4ecd807a56101fbe4e505ed28603e 100644 (file)
@@ -3026,7 +3026,8 @@ vectorizable_store (gimple stmt, gimple_stmt_iterator *gsi, gimple *vec_stmt,
       && TREE_CODE (scalar_dest) != INDIRECT_REF
       && TREE_CODE (scalar_dest) != COMPONENT_REF
       && TREE_CODE (scalar_dest) != IMAGPART_EXPR
-      && TREE_CODE (scalar_dest) != REALPART_EXPR)
+      && TREE_CODE (scalar_dest) != REALPART_EXPR
+      && TREE_CODE (scalar_dest) != MEM_REF)
     return false;
 
   gcc_assert (gimple_assign_single_p (stmt));
@@ -3282,7 +3283,7 @@ vectorizable_store (gimple stmt, gimple_stmt_iterator *gsi, gimple *vec_stmt,
            vec_oprnd = VEC_index (tree, result_chain, i);
 
           if (aligned_access_p (first_dr))
-            data_ref = build_fold_indirect_ref (dataref_ptr);
+           data_ref = build_simple_mem_ref (dataref_ptr);
           else
           {
             int mis = DR_MISALIGNMENT (first_dr);
@@ -3421,7 +3422,8 @@ vectorizable_load (gimple stmt, gimple_stmt_iterator *gsi, gimple *vec_stmt,
       && code != INDIRECT_REF
       && code != COMPONENT_REF
       && code != IMAGPART_EXPR
-      && code != REALPART_EXPR)
+      && code != REALPART_EXPR
+      && code != MEM_REF)
     return false;
 
   if (!STMT_VINFO_DATA_REF (stmt_info))
@@ -3659,7 +3661,7 @@ vectorizable_load (gimple stmt, gimple_stmt_iterator *gsi, gimple *vec_stmt,
            {
            case dr_aligned:
              gcc_assert (aligned_access_p (first_dr));
-             data_ref = build_fold_indirect_ref (dataref_ptr);
+             data_ref = build_simple_mem_ref (dataref_ptr);
              break;
            case dr_unaligned_supported:
              {
index 3da2dc84e9d75aeef2843cf17fee3de0441953a2..4f5db8069f52489b2c62ad0e0c25f640d0a77683 100644 (file)
@@ -987,7 +987,7 @@ vrp_stmt_computes_nonzero (gimple stmt, bool *strict_overflow_p)
       tree base = get_base_address (TREE_OPERAND (expr, 0));
 
       if (base != NULL_TREE
-         && TREE_CODE (base) == INDIRECT_REF
+         && TREE_CODE (base) == MEM_REF
          && TREE_CODE (TREE_OPERAND (base, 0)) == SSA_NAME)
        {
          value_range_t *vr = get_value_range (TREE_OPERAND (base, 0));
@@ -5075,8 +5075,7 @@ check_array_ref (location_t location, tree ref, bool ignore_off_by_one)
   /* Accesses to trailing arrays via pointers may access storage
      beyond the types array bounds.  */
   base = get_base_address (ref);
-  if (base
-      && INDIRECT_REF_P (base))
+  if (base && TREE_CODE (base) == MEM_REF)
     {
       tree cref, next = NULL_TREE;
 
@@ -5175,6 +5174,51 @@ search_for_addr_array (tree t, location_t location)
       t = TREE_OPERAND (t, 0);
     }
   while (handled_component_p (t));
+
+  if (TREE_CODE (t) == MEM_REF
+      && TREE_CODE (TREE_OPERAND (t, 0)) == ADDR_EXPR
+      && !TREE_NO_WARNING (t))
+    {
+      tree tem = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
+      tree low_bound, up_bound, el_sz;
+      double_int idx;
+      if (TREE_CODE (TREE_TYPE (tem)) != ARRAY_TYPE
+         || TREE_CODE (TREE_TYPE (TREE_TYPE (tem))) == ARRAY_TYPE
+         || !TYPE_DOMAIN (TREE_TYPE (tem)))
+       return;
+
+      low_bound = TYPE_MIN_VALUE (TYPE_DOMAIN (TREE_TYPE (tem)));
+      up_bound = TYPE_MAX_VALUE (TYPE_DOMAIN (TREE_TYPE (tem)));
+      el_sz = TYPE_SIZE_UNIT (TREE_TYPE (TREE_TYPE (tem)));
+      if (!low_bound
+         || TREE_CODE (low_bound) != INTEGER_CST
+         || !up_bound
+         || TREE_CODE (up_bound) != INTEGER_CST
+         || !el_sz
+         || TREE_CODE (el_sz) != INTEGER_CST)
+       return;
+
+      idx = mem_ref_offset (t);
+      idx = double_int_sdiv (idx, tree_to_double_int (el_sz), TRUNC_DIV_EXPR);
+      if (double_int_scmp (idx, double_int_zero) < 0)
+       {
+         warning_at (location, OPT_Warray_bounds,
+                     "array subscript is below array bounds");
+         TREE_NO_WARNING (t) = 1;
+       }
+      else if (double_int_scmp (idx,
+                               double_int_add
+                                 (double_int_add
+                                   (tree_to_double_int (up_bound),
+                                    double_int_neg
+                                      (tree_to_double_int (low_bound))),
+                                   double_int_one)) > 0)
+       {
+         warning_at (location, OPT_Warray_bounds,
+                     "array subscript is above array bounds");
+         TREE_NO_WARNING (t) = 1;
+       }
+    }
 }
 
 /* walk_tree() callback that checks if *TP is
@@ -5203,7 +5247,7 @@ check_array_bounds (tree *tp, int *walk_subtree, void *data)
   if (TREE_CODE (t) == ARRAY_REF)
     check_array_ref (location, t, false /*ignore_off_by_one*/);
 
-  if (TREE_CODE (t) == INDIRECT_REF
+  if (TREE_CODE (t) == MEM_REF
       || (TREE_CODE (t) == RETURN_EXPR && TREE_OPERAND (t, 0)))
     search_for_addr_array (TREE_OPERAND (t, 0), location);
 
index de83e64cea1a7b123484ba090a2641efda70e32c..17accb2f912e91bcc6514ab28b39757019738e39 100644 (file)
@@ -3564,7 +3564,8 @@ do { tree _node = (NODE); \
      address is constant too.  If it's a decl, its address is constant if the
      decl is static.  Everything else is not constant and, furthermore,
      taking the address of a volatile variable is not volatile.  */
-  if (TREE_CODE (node) == INDIRECT_REF)
+  if (TREE_CODE (node) == INDIRECT_REF
+      || TREE_CODE (node) == MEM_REF)
     UPDATE_FLAGS (TREE_OPERAND (node, 0));
   else if (CONSTANT_CLASS_P (node))
     ;
@@ -3878,6 +3879,42 @@ build6_stat (enum tree_code code, tree tt, tree arg0, tree arg1,
   return t;
 }
 
+/* Build a simple MEM_REF tree with the semantics of a plain INDIRECT_REF
+   on the pointer PTR.  */
+
+tree
+build_simple_mem_ref_loc (location_t loc, tree ptr)
+{
+  HOST_WIDE_INT offset = 0;
+  tree ptype = TREE_TYPE (ptr);
+  tree tem;
+  /* For convenience allow addresses that collapse to a simple base
+     and offset.  */
+  if (TREE_CODE (ptr) == ADDR_EXPR
+      && (handled_component_p (TREE_OPERAND (ptr, 0))
+         || TREE_CODE (TREE_OPERAND (ptr, 0)) == MEM_REF))
+    {
+      ptr = get_addr_base_and_unit_offset (TREE_OPERAND (ptr, 0), &offset);
+      gcc_assert (ptr);
+      ptr = build_fold_addr_expr (ptr);
+      gcc_assert (is_gimple_reg (ptr) || is_gimple_min_invariant (ptr));
+    }
+  tem = build2 (MEM_REF, TREE_TYPE (ptype),
+               ptr, build_int_cst (ptype, offset));
+  SET_EXPR_LOCATION (tem, loc);
+  return tem;
+}
+
+/* Return the constant offset of a MEM_REF tree T.  */
+
+double_int
+mem_ref_offset (const_tree t)
+{
+  tree toff = TREE_OPERAND (t, 1);
+  return double_int_sext (tree_to_double_int (toff),
+                         TYPE_PRECISION (TREE_TYPE (toff)));
+}
+
 /* Similar except don't specify the TREE_TYPE
    and leave the TREE_SIDE_EFFECTS as 0.
    It is permissible for arguments to be null,
index e09debf0cddce8375ecc551ad0949ffc286383bc..9a7b1ec8199134c94cc6f2a5b2db581c1e5dda3e 100644 (file)
@@ -970,6 +970,16 @@ DEFTREECODE (REALIGN_LOAD_EXPR, "realign_load", tcc_expression, 3)
 
 DEFTREECODE (TARGET_MEM_REF, "target_mem_ref", tcc_reference, 6)
 
+/* Memory addressing.  Operands are a pointer and a tree constant integer
+   byte offset of the pointer type that when dereferenced yields the
+   type of the base object the pointer points into and which is used for
+   TBAA purposes.
+   The type of the MEM_REF is the type the bytes at the memory location
+   are interpreted as.
+   MEM_REF <p, c> is equivalent to ((typeof(c))p)->x... where x... is a
+   chain of component references offsetting p by c.  */
+DEFTREECODE (MEM_REF, "mem_ref", tcc_reference, 2)
+
 /* The ordering of the codes between OMP_PARALLEL and OMP_CRITICAL is
    exposed to TREE_RANGE_CHECK.  */
 /* OpenMP - #pragma omp parallel [clause1 ... clauseN]
index 5253bf9a50a24b4202555d3c4d1b9e4489e9b4d3..75b3dcfccf145c348b57b4786a4ada787b9c7888 100644 (file)
@@ -4963,6 +4963,10 @@ extern tree build_fold_indirect_ref_loc (location_t, tree);
 #define fold_indirect_ref(T)\
         fold_indirect_ref_loc (UNKNOWN_LOCATION, T)
 extern tree fold_indirect_ref_loc (location_t, tree);
+extern tree build_simple_mem_ref_loc (location_t, tree);
+#define build_simple_mem_ref(T)\
+       build_simple_mem_ref_loc (UNKNOWN_LOCATION, T)
+extern double_int mem_ref_offset (const_tree);
 extern tree constant_boolean_node (int, tree);
 extern tree div_if_zero_remainder (enum tree_code, const_tree, const_tree);