+2007-06-15 Andrew Pinski <andrew_pinski@playstation.sony.com>
+ Zdenek Dvorak <dvorakz@suse.cz>
+ Richard Guenther <rguenther@suse.de>
+ Kaz Kojima <kkojima@gcc.gnu.org>
+
+ * tree-vrp.c (compare_values_warnv): Convert val2 to
+ the type of val1.
+ (extract_range_from_assert): Create
+ POINTER_PLUS_EXPR for pointer types.
+ (extract_range_from_binary_expr): Handle
+ only POINTER_PLUS_EXPR, MIN_EXPR, and MAX_EXPR
+ for pointer types.
+ * doc/c-tree.texi (POINTER_PLUS_EXPR): Document.
+ * tree-ssa-loop-niter.c (split_to_var_and_offset): Handle
+ POINTER_PLUS_EXPR as PLUS_EXPR.
+ (number_of_iterations_lt_to_ne):
+ For pointer types, use sizetype when
+ creating MINUS_EXPR/PLUS_EXPRs.
+ (assert_loop_rolls_lt): For pointer types, use sizetype when
+ creating MINUS_EXPR/PLUS_EXPRs.
+ (number_of_iterations_le): Likewise.
+ (expand_simple_operations): POINTER_PLUS_EXPR are simple also.
+ (derive_constant_upper_bound): Handle POINTER_PLUS_EXPR just
+ like PLUS_EXPR and MINUS_EXPR.
+ * tree-pretty-print.c (dump_generic_node): Handle
+ POINTER_PLUS_EXPR.
+ (op_prio): Likewise.
+ (op_symbol_1): Likewise.
+ * optabs.c (optab_for_tree_code): Likewise.
+ * tree-ssa-loop-manip.c (create_iv): Handle pointer base
+ specially.
+ * tree-tailcall.c (process_assignment): Mention
+ POINTER_PLUS_EXPR in a TODO comment.
+ * tree.c (build2_stat): Assert when trying to use PLUS_EXPR or
+ MINUS_EXPR with a pointer. Also assert for POINTER_PLUS_EXPR
+ not used with a pointer and an integer type.
+ * tree-scalar-evolution.c (add_to_evolution_1): Convert the
+ increment using chrec_convert_rhs instead of chrec_convert.
+ (follow_ssa_edge_in_rhs): Handle POINTER_PLUS_EXPR like
+ PLUS_EXPR except for the right hand side's type will be
+ sizetype.
+ (interpret_rhs_modify_stmt): Handle POINTER_PLUS_EXPR.
+ (fold_used_pointer_cast): Kill.
+ (pointer_offset_p): Kill.
+ (fold_used_pointer): Kill.
+ (pointer_used_p): Kill.
+ (analyze_scalar_evolution_1 <case GIMPLE_MODIFY_STMT>): Don't
+ call fold_used_pointer.
+ (instantiate_parameters_1): Convert the increment
+ using chrec_convert_rhs instead of chrec_convert.
+ Handle POINTER_PLUS_EXPR as PLUS_EXPR.
+ * builtins.c (get_pointer_alignment): Handle POINTER_PLUS_EXPR
+ instead of PLUS_EXPR.
+ (expand_builtin_strcat): Create a POINTER_PLUS_EXPR instead of
+ PLUS_EXPR for pointers.
+ (std_gimplify_va_arg_expr): Likewise.
+ (fold_builtin_memory_op): Likewise.
+ (fold_builtin_strstr): Likewise.
+ (fold_builtin_strchr): Likewise.
+ (fold_builtin_strrchr): Likewise.
+ (fold_builtin_strpbrk): Likewise.
+ (expand_builtin_memory_chk): Likewise.
+ (fold_builtin_memory_chk): Likewise.
+ (std_expand_builtin_va_start): Use
+ sizetype for the call to make_tree and then convert
+ to the pointer type.
+ (fold_builtin_memchr): Use POINTER_PLUS_EXPR
+ instead of PLUS_EXPR for adding to a pointer.
+ (std_gimplify_va_arg_expr): Use fold_build2 for
+ the creating of POINTER_PLUS_EXPR. For the BIT_AND_EXPR, cast
+ the operands to sizetype first and then cast the BIT_AND_EXPR
+ back to the pointer type.
+ * fold-const.c (build_range_check): Handle pointer types
+ specially.
+ (extract_array_ref): Look for POINTER_PLUS_EXPR instead
+ of PLUS_EXPR's. Make sure the offset is converted to
+ sizetype.
+ (try_move_mult_to_index): Strip the NOPs from the offset.
+ Remove code argument and replace all uses with PLUS_EXPR.
+ (fold_to_nonsharp_ineq_using_bound): Handle pointer types
+ specially. Don't use a pointer type for MINUS_EXPR.
+ (fold_unary): Handle for (T1)(X op Y),
+ only p+ as that is the only one that can be handled for
+ binary operators now.
+ (fold_binary <case POINTER_PLUS_EXPR>): Add folding of
+ POINTER_PLUS_EXPR.
+ <case PLUS_EXPR>: Add folding of PTR+INT into
+ PTR p+ INT.
+ Don't call try_move_mult_to_index.
+ <case MINUS_EXPR>: Fold (PTR0 p+ A) - (PTR1 p+ B)
+ into (PTR0 - PTR1) + (A - B). Fold (PTR0 p+ A) - PTR1 into
+ (PTR0 - PTR1) + A iff (PTR0 - PTR1) simplifies.
+ Don't call try_move_mult_to_index.
+ (tree_expr_nonnegative_warnv_p): Handle POINTER_PLUS_EXPR.
+ (tree_expr_nonzero_p): Likewise.
+ (fold_indirect_ref_1): Look at POINTER_PLUS_EXPR instead
+ of PLUS_EXPR for the complex expression folding.
+ * tree-chrec.c (chrec_fold_plus_poly_poly): If the
+ first chrec is a pointer type, then the second should
+ be sizetype and not the first's type.
+ For POINTER_PLUS_EXPR, use a different right hand side type.
+ Handle POINTER_PLUS_EXPR like PLUS_EXPR.
+ (chrec_fold_plus_1): For POINTER_PLUS_EXPR, use a
+ different right hand side type.
+ Handle POINTER_PLUS_EXPR like PLUS_EXPR.
+ (chrec_fold_plus): For pointer types, use POINTER_PLUS_EXPR
+ instead of PLUS_EXPR.
+ When either operand is zero, convert the other operand.
+ (chrec_apply): Use chrec_convert_rhs
+ on the argument x instead of chrec_convert.
+ (reset_evolution_in_loop): For pointer types, the new_evol
+ should be sizetype.
+ (convert_affine_scev): For POINTER_PLUS_EXPR, use a
+ different right hand side type.
+ Handle POINTER_PLUS_EXPR like PLUS_EXPR.
+ (chrec_convert_rhs): New function.
+ (chrec_convert_aggressive): For POINTER_PLUS_EXPR, use a
+ different right hand side type.
+ Handle POINTER_PLUS_EXPR like PLUS_EXPR.
+ * tree-chrec.h (chrec_convert_rhs): New prototype.
+ (build_polynomial_chrec): For pointer types, the right hand
+ side should be sizetype and not the same as the left hand side.
+ * tree-ssa-ccp.c (maybe_fold_stmt_indirect): Look for
+ POINTER_PLUS_EXPR instead of PLUS_EXPR's.
+ Remove subtraction case as it is always addition now.
+ Make sure the offset is converted to sizetype.
+ (fold_stmt_r): Don't handle PLUS_EXPR/MINUS_EXPR specially.
+ Handle POINTER_PLUS_EXPR like PLUS_EXPR was handled before.
+ * tree-ssa-loop-ivopts.c (determine_base_object): Abort for
+ PLUS_EXPR in pointer type.
+ Handle POINTER_PLUS_EXPR.
+ (tree_to_aff_combination): Likewise.
+ (force_expr_to_var_cost): Likewise.
+ (force_expr_to_var_cost): Likewise. Create a POINTER_PLUS_EXPR
+ instead of PLUS_EXPR for pointers.
+ * c-format.c (check_format_arg): Handle POINTER_PLUS_EXPR
+ instead of PLUS_EXPR of pointer types.
+ * tree-stdarg.c (va_list_counter_bump): Handle POINTER_PLUS_EXPR
+ as PLUS_EXPR.
+ (check_va_list_escapes): Likewise.
+ (check_all_va_list_escapes): Likewise.
+ * dwarf2out.c (loc_descriptor_from_tree_1):
+ Handle POINTER_PLUS_EXPR as a PLUS_EXPR.
+ * expr.c (expand_expr_real_1): Handle POINTER_PLUS_EXPR.
+ (string_constant): Likewise.
+ * tree-ssa-address.c (tree_mem_ref_addr): When adding
+ the offset to the base, use POINTER_PLUS_EXPR.
+ (add_to_parts): Convert the index to sizetype.
+ (create_mem_ref): Create a POINTER_PLUS_EXPR for the one case.
+ * matrix-reorg.c (collect_data_for_malloc_call): Stmt
+ will now only be either INDIRECT_REF or POINTER_PLUS_EXPR.
+ Offset only holds something for PLUS_EXPR.
+ (ssa_accessed_in_tree): Handle POINTER_PLUS_EXPR just as
+ a PLUS_EXPR.
+ (analyze_transpose): POINTER_PLUS_EXPR will only show up now
+ and not PLUS_EXPR.
+ (analyze_accesses_for_modify_stmt): Likewise.
+ Remove comment about the type being integral type as it is
+ wrong now.
+ (can_calculate_expr_before_stmt): Handle POINTER_PLUS_EXPR as
+ PLUS_EXPR.
+ (transform_access_sites): POINTER_PLUS_EXPR will only show up now
+ and not PLUS_EXPR.
+ Correct the type in which the arithmetic is done (is now
+ sizetype).
+ Reindent one loop.
+ * tree-data-ref.c (split_constant_offset): Handle
+ POINTER_PLUS_EXPR.
+ * tree-affine.c (tree_to_aff_combination): Likewise.
+ * c-typeck.c (build_unary_op): For pointers create the increment
+ as a sizetype. Create a POINTER_PLUS_EXPR instead of PLUS_EXPR
+ for pointers.
+ * gimplify.c (gimplify_self_mod_expr): Create a
+ POINTER_PLUS_EXPR instead of PLUS_EXPR for pointers.
+ (gimplify_omp_atomic_fetch_op): Handle POINTER_PLUS_EXPR.
+ * tree.def (POINTER_PLUS_EXPR): New tree code.
+ * tree-predcom.c (ref_at_iteration): If we have a pointer
+ type do the multiplication in sizetype.
+ * tree-mudflap.c (mf_xform_derefs_1): Create a
+ POINTER_PLUS_EXPR instead of PLUS_EXPR for pointers.
+ * tree-ssa-forwprop.c
+ (forward_propagate_addr_into_variable_array_index):
+ Don't expect there to be a cast for the index as that
+ does not exist anymore.
+ (forward_propagate_addr_expr_1): Check for POINTER_PLUS_EXPR
+ instead of PLUS_EXPR.
+ Don't check for the first operand of the POINTER_PLUS_EXPR
+ was the index as it cannot be.
+ Call forward_propagate_addr_into_variable_array_index with
+ the SSA_NAME instead of the statement.
+ * varasm.c (const_hash_1): Handle POINTER_PLUS_EXPR.
+ (compare_constant): Likewise.
+ (copy_constant): Likewise.
+ (compute_reloc_for_constant): Likewise.
+ (output_addressed_constants): Likewise.
+ (initializer_constant_valid_p): Likewise.
+ * tree-ssa.c (tree_ssa_useless_type_conversion_1):
+ Convert the MIN/MAX of the inner type to the outer
+ type before comparing them.
+ * tree-ssa-loop-prefetch.c (idx_analyze_ref): Handle
+ POINTER_PLUS_EXPR instead of PLUS_EXPR.
+ (issue_prefetch_ref): Create a POINTER_PLUS_EXPR instead
+ of PLUS_EXPR for pointers.
+ * tree-inline.c (estimate_num_insns_1): Handle
+ POINTER_PLUS_EXPR.
+ * tree-vect-transform.c (vect_create_addr_base_for_vector_ref):
+ Create a POINTER_PLUS_EXPR instead of PLUS_EXPR for pointers.
+ (bump_vector_ptr): Create a POINTER_PLUS_EXPR
+ instead of PLUS_EXPR for the pointer increment statement.
+ (vect_update_ivs_after_vectorizer): For pointer types, create
+ POINTER_PLUS_EXPR instead of PLUS_EXPR and also create
+ MULT_EXPR in sizetype.
+ (vect_gen_niters_for_prolog_loop): Add a cast when creating
+ byte_misalign.
+ * tree-object-size.c (plus_expr_object_size): Handle
+ POINTER_PLUS_EXPR instead of PLUS_EXPR. Removing all the extra
+ code which is trying to figure out which side is a pointer and
+ is the index.
+ (check_for_plus_in_loops_1): Likewise.
+ (check_for_plus_in_loops): Likewise.
+ * c-common.c (pointer_int_sum): Create a
+ POINTER_PLUS_EXPR instead of PLUS_EXPR for pointers.
+ * tree-ssa-structalias.c (handle_ptr_arith): Handle
+ only POINTER_PLUS_EXPR. Removing all the extra
+ code which is trying to figure out which side is a pointer and
+ is the index.
+ * tree-cfg.c (verify_expr): Add extra checking for pointers and
+ PLUS_EXPR and MINUS_EXPR.
+ Also add checking to make sure the operands of POINTER_PLUS_EXPR
+ are correct.
+ * config/frv/frv.c (frv_expand_builtin_va_start): Use sizetype
+ with make_tree, instead of a pointer type.
+ * config/s390/s390.c (s390_va_start): Use POINTER_PLUS_EXPR
+ for pointers instead of PLUS_EXPR.
+ (s390_gimplify_va_arg): Likewise.
+ * config/spu/spu.c (spu_va_start): Create POINTER_PLUS_EXPR
+ instead of PLUS_EXPR when doing addition on pointer
+ types. Use sizetype for the second operand.
+ (spu_gimplify_va_arg_expr): Likewise.
+ * config/sparc/sparc.c (sparc_gimplify_va_arg): Use
+ POINTER_PLUS_EXPR instead of PLUS_EXPR when the operand was
+ a pointer. Don't create a BIT_AND_EXPR for pointer types.
+ * config/i386/i386.c (ix86_va_start): Use POINTER_PLUS_EXPR
+ for the pointer addition and also use size_int/sizetype
+ for the offset.
+ (ix86_gimplify_va_arg): Likewise.
+ Perform BIT_AND_EXPR on sizetype arguments.
+ * config/sh/sh.c (sh_va_start): Call make_tree with sizetype
+ and convert its result to a pointer type. Use POINTER_PLUS_EXPR
+ for the pointer additions and also use size_int for the offsets.
+ (sh_gimplify_va_arg_expr): Use POINTER_PLUS_EXPR for the pointer
+ additions and also use size_int for the offsets. Perform
+ BIT_AND_EXPR on sizetype arguments.
+ * config/ia64/ia64.c (ia64_gimplify_va_arg): Use
+ POINTER_PLUS_EXPR for pointers and create the
+ BIT_AND_EXPR in sizetype.
+ * config/rs6000/rs6000.c (rs6000_va_start): Use POINTER_PLUS_EXPR
+ instead of PLUS_EXPR for pointer addition.
+ (rs6000_va_start): Likewise.
+ Also use sizetype for the offset.
+ * config/pa/pa.c (reloc_needed): Handle POINTER_PLUS_EXPR
+ as PLUS_EXPR/MINUS_EXPR.
+ (hppa_gimplify_va_arg_expr): Don't create MINUS_EXPR or
+ PLUS_EXPR for pointers, instead use POINTER_PLUS_EXPR.
+ Don't use BIT_AND_EXPR on a pointer type, convert the
+ expression to sizetype first.
+ * config/mips/mips.c (mips_va_start): Use POINTER_PLUS_EXPR
+ for pointers.
+ (mips_gimplify_va_arg_expr): Likewise.
+ Don't create BIT_AND_EXPR in a pointer type.
+
2007-06-15 Eric Christopher <echristo@apple.com>
* config.gcc (i?86-*-darwin*): Add t-crtfm and t-crtpc.
--- /dev/null
+2007-06-14 Andrew Pinski <andrew_pinski@playstation.sony.com>
+
+ Merge mainline, revision 125733
+
+2007-06-14 Andrew Pinski <andrew_pinski@playstation.sony.com>
+
+ * tree-vrp.c (compare_values_warnv): Fix spelling/grammar
+ mistakes.
+ (extract_range_from_binary_expr): Likewise.
+ * doc/c-tree.texi (POINTER_PLUS_EXPR): Likewise.
+ * tree-ssa-loop-niter.c (assert_loop_rolls_lt): Add vertical
+ whitespace.
+ * tree-pretty-print.c (op_symbol_code <case POINTER_PLUS_EXPR>):
+ Change print out to "+".
+ * tree-scalar-evolution.c (instantiate_parameters_1):
+ Use chrec_type instead of TREE_TYPE.
+ * builtins.c (expand_builtin_strcat): Fix vertical whitespace.
+ (std_expand_builtin_va_start): Fix whitespace.
+ (fold_builtin_strstr): Use size_int instead of build_int_cst (sizetype).
+ (fold_builtin_strchr): Likewise.
+ (fold_builtin_strrchr): Likewise.
+ (fold_builtin_strpbrk): Likewise.
+ * fold-const.c (try_move_mult_to_index): Fix spelling/grammar
+ mistakes.
+ (fold_to_nonsharp_ineq_using_bound): Merge the two ifs at the end.
+ (fold_binary): Fix spelling/grammar mistakes.
+ * tree-ssa-ccp.c (maybe_fold_stmt_addition): Assert that only
+ a POINTER_PLUS_EXPR is passed in.
+ * tree-ssa-loop-ivopts.c (determine_base_object):
+ Fix spelling/grammar mistakes.
+ * expr.c (expand_expr_real_1): Likewise.
+ * tree-data-ref.c (split_constant_offset): Likewise.
+ * c-typeck.c (build_unary_op): Use fold_convert instead of convert
+ for converting to sizetype.
+ * tree.def (POINTER_PLUS_EXPR): Fix comment.
+ * tree-ssa-forwprop.c (forward_propagate_addr_expr_1):
+ Fix spelling/grammar mistakes.
+ (phiprop_insert_phi): Likewise.
+ * c-common.c (pointer_int_sum): Remove FIXME about
+ POINTER_MINUS_EXPR.
+
+2007-06-13 Andrew Pinski <andrew_pinski@playstation.sony.com>
+
+ * expr.c (expand_expr_real_1 <case POINTER_PLUS_EXPR>): Remove assert
+ for checking the modes of the operands are the same.
+
+2007-06-12 Andrew Pinski <andrew_pinski@playstation.sony.com>
+
+ * config/sparc/sparc.c (sparc_gimplify_va_arg): Use POINTER_PLUS_EXPR
+ instead of PLUS_EXPR when the operand was a pointer. Don't create a
+ BIT_AND_EXPR for pointer types.
+
+2007-06-12 Andrew Pinski <andrew_pinski@playstation.sony.com>
+
+ * config/mips/mips.c (mips_va_start): Use POINTER_PLUS_EXPR
+ for pointers.
+ (mips_gimplify_va_arg_expr): Likewise.
+ Don't create BIT_AND_EXPR in a pointer type.
+
+2007-06-12 Andrew Pinski <andrew_pinski@playstation.sony.com>
+
+ Merge mainline, revision 125658
+
+2007-06-11 Andrew Pinski <andrew_pinski@playstation.sony.com>
+
+ Merge mainline, revision 125611
+
+2007-06-07 Andrew Pinski <andrew_pinski@playstation.sony.com>
+
+ * matrix-reorg.c (collect_data_for_malloc_call): Stmt
+ will now only be either INDIRECT_REF or POINTER_PLUS_EXPR.
+ Offset only holds something for PLUS_EXPR.
+ (ssa_accessed_in_tree): Handle POINTER_PLUS_EXPR just as
+ a PLUS_EXPR.
+ (analyze_transpose): POINTER_PLUS_EXPR will only show up now
+ and not PLUS_EXPR.
+ (analyze_accesses_for_modify_stmt): Likewise.
+ Remove comment about the type being integral type as it is
+ wrong now.
+ (analyze_matrix_accesses): Handle POINTER_PLUS_EXPR as
+ PLUS_EXPR.
+ (transform_access_sites): POINTER_PLUS_EXPR will only show up now
+ and not PLUS_EXPR.
+ Correct the type in which the arithmetic is done (is now
+ sizetype).
+ Reindent one loop.
+
+2007-06-07 Andrew Pinski <andrew_pinski@playstation.sony.com>
+
+ * config/ia64/ia64.c (ia64_gimplify_va_arg): Use
+ POINTER_PLUS_EXPR for pointers and create the
+ BIT_AND_EXPR in sizetype.
+ * config/s390/s390.c (s390_va_start): Use POINTER_PLUS_EXPR
+ for pointers instead of PLUS_EXPR.
+ (s390_gimplify_va_arg): Likewise.
+
+2007-06-07 Andrew Pinski <andrew_pinski@playstation.sony.com>
+
+ * config/frv/frv.c (frv_expand_builtin_va_start): Use sizetype
+ with make_tree, instead of a pointer type.
+ * config/pa/pa.c (reloc_needed): Handle POINTER_PLUS_EXPR
+ as PLUS_EXPR/MINUS_EXPR.
+ (hppa_gimplify_va_arg_expr): Don't create MINUS_EXPR or
+ PLUS_EXPR for pointers, instead use POINTER_PLUS_EXPR.
+ Don't use BIT_AND_EXPR on a pointer type, convert the
+ expression to sizetype first.
+
+2007-06-06 Andrew Pinski <andrew_pinski@playstation.sony.com>
+
+ * tree-ssa-forwprop.c
+ (forward_propagate_addr_into_variable_array_index):
+ Don't expect a statement for the size 1 case.
+ Use the offset variable for the size 1 case.
+ Look through use-def chains to find the multiply
+ for the non size 1 case.
+ (forward_propagate_addr_expr_1): Call
+ forward_propagate_addr_into_variable_array_index with
+ the SSA_NAME instead of the statement.
+
+2007-06-06 Andrew Pinski <andrew_pinski@playstation.sony.com>
+
+ PR tree-opt/32225
+ * tree-scalar-evolution.c (follow_ssa_edge_in_rhs <case
+ POINTER_PLUS_EXPR>): Do not change type_rhs.
+ (follow_ssa_edge_in_rhs <case POINTER_PLUS_EXPR, case PLUS_EXPR>):
+ Use the code of the original expression instead of just PLUS_EXPR.
+ Also use type_rhs where TREE_TYPE (rhs) was used (reverting back
+ to the trunk).
+
+2007-06-03 Andrew Pinski <andrew_pinski@playstation.sony.com>
+
+ * tree-predcom.c (ref_at_iteration): If we have a pointer
+ type do the multiplication in sizetype.
+
+2007-06-01 Andrew Pinski <andrew_pinski@playstation.sony.com>
+
+ Merge mainline, revision 125285
+
+2007-05-31 Andrew Pinski <andrew_pinski@playstation.sony.com>
+
+ * tree-vrp.c (extract_range_from_binary_expr): Handle
+ MIN_EXPR/MAX_EXPR for pointer types.
+
+ PR tree-opt/32167
+ * tree-chrec.c (chrec_fold_plus): When either
+ operand is zero, convert the other operand.
+
+2007-05-30 Andrew Pinski <andrew_pinski@playstation.sony.com>
+
+ * builtins.c (std_expand_builtin_va_start): Use
+ sizetype for the call to make_tree and then convert
+ to the pointer type.
+
+2007-05-30 Andrew Pinski <andrew_pinski@playstation.sony.com>
+
+ PR tree-opt/32145
+ * tree-vrp.c (extract_range_from_assert): Create
+ POINTER_PLUS_EXPR for pointer types.
+
+ PR tree-opt/32144
+ * tree-chrec.c (chrec_fold_plus_poly_poly): If the
+ first chrec is a pointer type, then the second should
+ be sizetype and not the first's type.
+
+2007-05-28 Andrew Pinski <andrew_pinski@playstation.sony.com>
+
+ * fold-const.c (try_move_mult_to_index):
+ Say why we strip the nops.
+ (fold_unary <case NOP_EXPR>): Remove
+ TODO as we cannot get that case.
+ * tree-chrec.c (chrec_fold_plus):
+ Clean up the code to choose which
+ tree code is used.
+ (chrec_convert_rhs): Add comment on
+ why the increment is sizetype for
+ pointers.
+ * tree-mudflap.c (mf_xform_derefs_1):
+ Use size_int instead of build_int_cst.
+ * tree-ssa-loop-prefetch.c (issue_prefetch_ref): Likewise.
+
+2007-05-21 Andrew Pinski <andrew_pinski@playstation.sony.com>
+
+ PR tree-opt/32015
+ * tree.c (build2_stat): Make sure that
+ MULT_EXPR is not used with pointers.
+ * tree-chrec.c (chrec_apply): Use chrec_convert_rhs
+ on the argument x instead of chrec_convert.
+
+2007-05-20 Kaz Kojima <kkojima@gcc.gnu.org>
+
+ * config/sh/sh.c (sh_va_start): Call make_tree with sizetype
+ and convert its result to a pointer type. Use POINTER_PLUS_EXPR
+ for the pointer additions and also use size_int for the offsets.
+ (sh_gimplify_va_arg_expr): Use POINTER_PLUS_EXPR for the pointer
+ additions and also use size_int for the offsets. Perform
+ BIT_AND_EXPR on sizetype arguments.
+
+2007-05-15 Andrew Pinski <andrew_pinski@playstation.sony.com>
+
+ * tree-ssa-forwprop.c (forward_propagate_addr_into_variable_array_index):
+ Don't expect there to be a cast for the index as that
+ does not exist anymore.
+ (forward_propagate_addr_expr_1): Check for
+ POINTER_PLUS_EXPR instead of PLUS_EXPR.
+ Don't check for the first operand of the
+ POINTER_PLUS_EXPR was the index as it
+ cannot be.
+
+2007-05-15 Richard Guenther <rguenther@suse.de>
+
+ * config/i386/i386.c (ix86_gimplify_va_arg): Use POINTER_PLUS_EXPR,
+ perform BIT_AND_EXPR on sizetype arguments.
+
+2007-05-15 Andrew Pinski <andrew_pinski@playstation.sony.com>
+
+ * config/rs6000/rs6000.c (rs6000_va_start): Use POINTER_PLUS_EXPR
+ instead of PLUS_EXPR for pointer addition.
+ (rs6000_va_start): Likewise.
+ Also use sizetype for the offset.
+ * tree-stdarg.c (va_list_counter_bump): Check for PLUS_EXPR
+ and POINTER_PLUS_EXPR.
+ (check_va_list_escapes): Likewise.
+
+2007-05-14 Andrew Pinski <andrew_pinski@playstation.sony.com>
+
+ * config/i386/i386.c (ix86_va_start): Use POINTER_PLUS_EXPR
+ for the pointer addition and also use size_int/sizetype
+ for the offset.
+ (ix86_gimplify_va_arg): Likewise.
+
+2007-05-14 Andrew Pinski <andrew_pinski@playstation.sony.com>
+
+ Merge mainline, revision 124657
+ * tree-data-ref.c (split_constant_offset): Handle
+ POINTER_PLUS_EXPR
+ exactly the same as PLUS_EXPR/MINUS_EXPR except for the offset
+ needs to be calculated using PLUS_EXPR instead of
+ POINTER_PLUS_EXPR.
+ * builtins.c (fold_builtin_memchr): Use POINTER_PLUS_EXPR
+ instead of PLUS_EXPR for adding to a pointer.
+
+2006-05-15 Zdenek Dvorak <dvorakz@suse.cz>
+
+ * tree-ssa-loop-ivopts.c (determine_base_object): Abort for PLUS_EXPR
+ in pointer type.
+
+2007-05-12 Andrew Pinski <andrew_pinski@playstation.sony.com>
+
+ * tree-ssa-address.c (tree_mem_ref_addr): When adding
+ the offset to the base, use POINTER_PLUS_EXPR.
+
+2007-05-12 Andrew Pinski <andrew_pinski@playstation.sony.com>
+
+ * expr.c (expand_expr_addr_expr_1): Call expand_expr
+ for the offset with the modifier as EXPAND_INITIALIZER
+ if the modifier is EXPAND_INITIALIZER.
+ (expand_expr_real_1 <case INTEGER_CST>): Don't force to
+ a register if we had an overflow.
+
+2007-05-10 Andrew Pinski <andrew_pinski@playstation.sony.com>
+
+ Merge mainline, revision 124602.
+
+ * fold-const.c (extract_array_ref): Make sure the offset
+ is converted to sizetype.
+ (try_move_mult_to_index): Strip the NOPs from the offset.
+ (fold_binary <case POINTER_PLUS_EXPR>): Convert the second
+ operand to sizetype before calling try_move_mult_to_index.
+ * tree-ssa-loop-niter.c (number_of_iterations_lt_to_ne):
+ For pointer types, use sizetype when
+ creating MINUS_EXPR/PLUS_EXPRs.
+ * tree-ssa-ccp.c (maybe_fold_stmt_indirect): Make sure
+ the offset is converted to sizetype.
+
+2007-05-11 Andrew Pinski <andrew_pinski@playstation.sony.com>
+
+ * config/spu/spu.c (spu_va_start): Create POINTER_PLUS_EXPR
+ instead of PLUS_EXPR when doing addition on pointer
+ types. Use sizetype for the second operand.
+ (spu_gimplify_va_arg_expr): Likewise.
+ * tree-ssa.c (tree_ssa_useless_type_conversion_1):
+ Convert the MIN/MAX of the inner type to the outer
+ type before comparing them.
+
+2007-05-09 Andrew Pinski <andrew_pinski@playstation.sony.com>
+ Zdenek Dvorak <dvorakz@suse.cz>
+
+ * fold-const.c (fold_to_nonsharp_ineq_using_bound): Don't
+ use a pointer type for MINUS_EXPR.
+ (fold_binary <case MINUS_EXPR>): Fold (PTR0 p+ A) - (PTR1 p+ B)
+ into (PTR0 - PTR1) + (A - B). Fold (PTR0 p+ A) - PTR1 into
+ (PTR0 - PTR1) + A iff (PTR0 - PTR1) simplifies.
+ * tree-chrec.c (chrec_fold_plus_poly_poly): For
+ POINTER_PLUS_EXPR, use a different right hand side type.
+ Handle POINTER_PLUS_EXPR like PLUS_EXPR.
+ (chrec_fold_plus_1): Likewise.
+ (convert_affine_scev): Likewise.
+ (chrec_convert_aggressive): Likewise.
+ (chrec_fold_plus): For pointer types, use POINTER_PLUS_EXPR
+ instead of PLUS_EXPR.
+ (reset_evolution_in_loop): For pointer types, the new_evol
+ should be sizetype.
+ (chrec_convert_rhs): New function.
+ * tree-chrec.h (chrec_convert_rhs): New prototype.
+ (build_polynomial_chrec): For pointer types, the right hand
+ side should be sizetype and not the same as the left hand side.
+ * tree-scalar-evolution.c (add_to_evolution_1): Convert the
+ increment using chrec_convert_rhs instead of chrec_convert.
+ (follow_ssa_edge_in_rhs): Handle POINTER_PLUS_EXPR like
+ PLUS_EXPR except for the right hand side's type will be
+ sizetype.
+ (interpret_rhs_modify_stmt): Handle POINTER_PLUS_EXPR.
+ (fold_used_pointer_cast): Kill.
+ (pointer_offset_p): Kill.
+ (fold_used_pointer): Kill.
+ (pointer_used_p): Kill.
+ (analyze_scalar_evolution_1 <case GIMPLE_MODIFY_STMT>): Don't
+ call fold_used_pointer.
+ (instantiate_parameters_1): Convert the increment
+ using chrec_convert_rhs instead of chrec_convert.
+ Handle POINTER_PLUS_EXPR as PLUS_EXPR.
+ * tree-ssa-loop-niter.c (split_to_var_and_offset): Handle
+ POINTER_PLUS_EXPR as PLUS_EXPR.
+ (assert_loop_rolls_lt): For pointer types, use sizetype when
+ creating MINUS_EXPR/PLUS_EXPRs.
+ (number_of_iterations_le): Likewise.
+ (expand_simple_operations): POINTER_PLUS_EXPR are simple also.
+ (derive_constant_upper_bound): Handle POINTER_PLUS_EXPR just
+ like PLUS_EXPR and MINUS_EXPR.
+ * tree-data-ref.c (analyze_offset_expr): Likewise.
+ (address_analysis): Handle POINTER_PLUS_EXPR as PLUS_EXPR.
+ (analyze_offset): Handle POINTER_PLUS_EXPR also.
+ (create_data_ref): Convert the increment
+ using chrec_convert_rhs instead of chrec_convert.
+ * tree-vect-transform.c (vect_update_ivs_after_vectorizer):
+ For pointer types, create POINTER_PLUS_EXPR instead of
+ PLUS_EXPR and also create MULT_EXPR in sizetype.
+
+2007-05-07 Andrew Pinski <andrew_pinski@playstation.sony.com>
+
+ * builtins.c (std_gimplify_va_arg_expr): Use fold_build2 for
+ the creating of POINTER_PLUS_EXPR. For the BIT_AND_EXPR, cast
+ the operands to sizetype first and then cast the BIT_AND_EXPR
+ back to the pointer type.
+ * tree-ssa-address.c (create_mem_ref): Create a
+ POINTER_PLUS_EXPR for one case.
+ * tree.c (const_hash_1): Handle POINTER_PLUS_EXPR same as
+ PLUS_EXPR.
+ (compare_constant): Likewise.
+ (copy_constant): Likewise.
+ (compute_reloc_for_constant): Likewise.
+ (output_addressed_constants): Likewise.
+
+2007-05-07 Andrew Pinski <andrew_pinski@playstation.sony.com>
+
+ * tree.def (POINTER_PLUS_EXPR): The second operand
+ is of type sizetype and not ssizetype.
+ * doc/c-tree.texi (POINTER_PLUS_EXPR): Document.
+
+2007-05-06 Andrew Pinski <andrew_pinski@playstation.sony.com>
+
+ * tree-mudflap.c (mf_xform_derefs_1 <case BIT_FIELD_REF>):
+ Add a conversion of ofs to sizetype.
+ (mf_decl_eligible_p): Reformat for length issues.
+ (mf_xform_derefs_1): Likewise.
+
+2007-05-06 Andrew Pinski <andrew_pinski@playstation.sony.com>
+
+ * tree.c (build2_stat): Check to make sure the
+ second operand is compatible with sizetype.
+
+2007-05-06 Andrew Pinski <andrew_pinski@playstation.sony.com>
+
+ * tree-cfg.c (verify_expr): Change error
+ message about sizetype to be correct.
+
+2007-05-06 Andrew Pinski <andrew_pinski@playstation.sony.com>
+
+ Merge mainline, revision 124478.
+ * fold-const.c (fold_unary): Handle for (T1)(X op Y),
+ only p+ as that is the only one that can be handled for
+ binary operators now. Add a TODO for non pointer type
+ op's.
+ * gimplifier.c (gimplify_expr): Don't special case
+ PLUS_EXPR. Special case POINTER_PLUS_EXPR instead,
+ remove check for pointer type as it will always be
+ a pointer type now.
+
+2007-05-04 Andrew Pinski <andrew_pinski@playstation.sony.com>
+
+ * tree-vrp.c (extract_range_from_binary_expr): Handle
+ POINTER_PLUS_EXPRs. Assert POINTER_PLUS_EXPR is
+ the only expression for pointer types.
+ * tree-vect-transform.c (vect_gen_niters_for_prolog_loop):
+ Add a cast when creating byte_misalign.
+
+2007-05-04 Andrew Pinski <andrew_pinski@playstation.sony.com>
+
+ * fold-const.c (fold_binary <case POINTER_PLUS_EXPR>);
+ Add comment on why you get INT +p INT.
+ (fold_binary <case PLUS_EXPR>): Add folding of PTR+INT into
+ PTR p+ INT.
+ * dwarf2out.c (loc_descriptor_from_tree_1):
+ Handle POINTER_PLUS_EXPR as a PLUS_EXPR.
+
+2007-05-04 Andrew Pinski <andrew_pinski@playstation.sony.com>
+
+ * tree-vrp.c (compare_values_warnv): Convert val2 to
+ the type of val1.
+ * fold-const.c (extract_array_ref): Look for
+ POINTER_PLUS_EXPR instead of PLUS_EXPR's.
+ * tree-ssa-ccp.c (maybe_fold_stmt_indirect): Likewise.
+
+2007-05-02 Andrew Pinski <andrew_pinski@playstation.sony.com>
+
+ * c-format.c (check_format_arg): Handle POINTER_PLUS_EXPR
+ instead of PLUS_EXPR of pointer types.
+
+2007-05-02 Andrew Pinski <andrew_pinski@playstation.sony.com>
+
+ * fold-const.c (try_move_mult_to_index): Remove code argument
+ and replace all uses with PLUS_EXPR.
+ (fold_binary <case POINTER_PLUS_EXR>): Remove code argument.
+ (fold_binary <case PLUS_EXPR>): Don't call try_move_mult_to_index.
+ (fold_binary <case MINUS_EXPR>): Likewise.
+ * tree-ssa-ccp.c (maybe_fold_stmt_indirect): Remove subtraction
+ case as it is always addition now.
+ (fold_stmt_r): Don't handle PLUS_EXPR/MINUS_EXPR specially.
+ Handle POINTER_PLUS_EXPR like PLUS_EXPR was handled before.
+
+2007-05-01 Andrew Pinski <andrew_pinski@playstation.sony.com>
+
+ Merge mainline, revision 124343.
+ * tree-vect-transform.c (bump_vector_ptr): Create a POINTER_PLUS_EXPR
+ instead of PLUS_EXPR for the pointer increment statement.
+ * expr.c (expand_expr_real_1): Add FIXME/assert for the unhandled case
+ where the modes of the two operands are different.
+
+2007-02-25 Andrew Pinski <andrew_pinski@playstation.sony.com>
+
+ Merge mainline, revision 122323.
+
+2006-12-14 Zdenek Dvorak <dvorakz@suse.cz>
+
+ Merge mainline, revision 119860.
+
+2006-11-23 Andrew Pinski <andrew_pinski@playstation.sony.com>
+
+ * tree.def (POINTER_PLUS_EXPR): New tree code.
+ * tree-pretty-print.c (dump_generic_node): Handle
+ POINTER_PLUS_EXPR.
+ (op_prio): Likewise.
+ (op_symbol_1): Likewise.
+ * optabs.c (optab_for_tree_code): Likewise.
+ * tree-ssa-loop-manip.c (create_iv): Handle pointer base
+ specially.
+ * tree-tailcall.c (process_assignment): Mention
+ POINTER_PLUS_EXPR in a TODO comment.
+ * tree.c (build2_stat): Assert when trying to use PLUS_EXPR or
+ MINUS_EXPR with a pointer. Also assert for POINTER_PLUS_EXPR
+ not used with a pointer and an integer type.
+ * tree-scalar-evolution.c (fold_used_pointer): Mention
+ POINTER_PLUS_EXPR is what this needs to handle.
+ * builtins.c (get_pointer_alignment): Handle POINTER_PLUS_EXPR
+ instead of PLUS_EXPR.
+ (expand_builtin_strcat): Create a POINTER_PLUS_EXPR instead of
+ PLUS_EXPR for pointers.
+ (std_gimplify_va_arg_expr): Likewise.
+ (fold_builtin_memory_op): Likewise.
+ (fold_builtin_strstr): Likewise.
+ (fold_builtin_strchr): Likewise.
+ (fold_builtin_strrchr): Likewise.
+ (fold_builtin_strpbrk): Likewise.
+ (expand_builtin_memory_chk): Likewise.
+ (fold_builtin_memory_chk): Likewise.
+ * fold-const.c (build_range_check): Handle pointer types
+ specially.
+ (fold_to_nonsharp_ineq_using_bound): Likewise.
+ (fold_binary): Handle simple POINTER_PLUS_EXPR cases.
+ (tree_expr_nonnegative_p): Handle POINTER_PLUS_EXPR.
+ (tree_expr_nonzero_p): Likewise.
+ (fold_indirect_ref_1): Look at POINTER_PLUS_EXPR instead
+ of PLUS_EXPR for the complex expression folding.
+ * tree-ssa-loop-ivopts.c (determine_base_object): Handle
+ POINTER_PLUS_EXPR.
+ (tree_to_aff_combination): Likewise.
+ (force_expr_to_var_cost): Likewise.
+ (force_expr_to_var_cost): Likewise. Create a POINTER_PLUS_EXPR
+ instead of PLUS_EXPR for pointers.
+ * c-format.c (check_format_arg): Mention this should be handling
+ POINTER_PLUS_EXPR.
+ * tree-stdarg.c (va_list_counter_bump): Handle POINTER_PLUS_EXPR
+ instead of PLUS_EXPR.
+ (check_va_list_escapes): Likewise.
+ (check_all_va_list_escapes): Likewise.
+ * expr.c (expand_expr_real_1): Handle POINTER_PLUS_EXPR.
+ (string_constant): Likewise.
+ * tree-ssa-address.c (add_to_parts): Create a POINTER_PLUS_EXPR
+ instead of PLUS_EXPR for pointers.
+ (most_expensive_mult_to_index): Likewise.
+ (addr_to_parts): Use the correct type for the index.
+ * c-typeck.c (build_unary_op): For pointers create the increment
+ as a sizetype. Create a POINTER_PLUS_EXPR instead of PLUS_EXPR
+ for pointers.
+ * gimplify.c (gimplify_self_mod_expr): Create a
+ POINTER_PLUS_EXPR instead of PLUS_EXPR for pointers.
+ (gimplify_omp_atomic_fetch_op): Handle POINTER_PLUS_EXPR.
+ * tree-mudflap.c (mf_xform_derefs_1): Create a
+ POINTER_PLUS_EXPR instead of PLUS_EXPR for pointers.
+ * tree-ssa-forwprop.c (forward_propagate_addr_expr_1): Add a
+ note this should be handling POINTER_PLUS_EXPR.
+ * varasm.c (initializer_constant_valid_p): Handle
+ POINTER_PLUS_EXPR.
+ * tree-ssa-loop-prefetch.c (idx_analyze_ref): Handle
+ POINTER_PLUS_EXPR instead of PLUS_EXPR.
+ (issue_prefetch_ref): Create a POINTER_PLUS_EXPR instead
+ of PLUS_EXPR for pointers.
+ * tree-vect-transform.c (vect_create_addr_base_for_vector_ref):
+ Likewise.
+ * tree-inline.c (estimate_num_insns_1): Handle
+ POINTER_PLUS_EXPR.
+ * tree-object-size.c (plus_expr_object_size): Handle
+ POINTER_PLUS_EXPR instead of PLUS_EXPR. Removing all the extra
+ code which is trying to figure out which side is a pointer and
+ is the index.
+ (check_for_plus_in_loops_1): Likewise.
+ (check_for_plus_in_loops): Likewise.
+ * c-common.c (pointer_int_sum): Create a
+ POINTER_PLUS_EXPR instead of PLUS_EXPR for pointers.
+ * tree-ssa-structalias.c (handle_ptr_arith): Handle
+ only POINTER_PLUS_EXPR. Removing all the extra
+ code which is trying to figure out which side is a pointer and
+ is the index.
+ * tree-cfg.c (verify_expr): Add extra checking for pointers and
+ PLUS_EXPR and MINUS_EXPR.
+ Also add checking to make sure the operands of POINTER_PLUS_EXPR
+ are correct.
+
+
+2007-06-15 Andrew Pinski <andrew_pinski@playstation.sony.com>
+
+ * trans.c (Attribute_to_gnu): When subtracting an
+ offset from a pointer, use POINTER_PLUS_EXPR with
+ NEGATE_EXPR instead of MINUS_EXPR.
+ (gnat_to_gnu): Likewise.
+ * utils.c (convert): When converting between
+ thin pointers, use POINTER_PLUS_EXPR and sizetype
+ for the offset.
+ * utils2.c (known_alignment): POINTER_PLUS_EXPR
+ has the same semantics as PLUS_EXPR for alignment.
+ (build_binary_op): Add support for the semantics of
+ POINTER_PLUS_EXPR's operands.
+ When adding an offset to a pointer, use POINTER_PLUS_EXPR.
+
2007-06-11 Rafael Avila de Espindola <espindola@google.com>
* trans.c (Attribute_to_gnu): Use signed_or_unsigned_type_for instead
--- /dev/null
+2007-06-14 Andrew Pinski <andrew_pinski@playstation.sony.com>
+
+ * trans.c (Attribute_to_gnu): Use fold_build1 instead
+ of build1 for NEGATE_EXPR.
+ (gnat_to_gnu): Likewise.
+
+2007-05-12 Andrew Pinski <andrew_pinski@playstation.sony.com>
+
+ * trans.c (Attribute_to_gnu): When subtracting an
+ offset from a pointer, use POINTER_PLUS_EXPR with
+ NEGATE_EXPR instead of MINUS_EXPR.
+ (gnat_to_gnu): Likewise.
+ * utils.c (convert): When converting between
+ thin pointers, use POINTER_PLUS_EXPR and sizetype
+ for the offset.
+ * utils2.c (known_alignment): POINTER_PLUS_EXPR
+ has the same semantics as PLUS_EXPR for alignment.
+ (build_binary_op): Add support for the semantics of
+ POINTER_PLUS_EXPR's operands.
+ When adding an offset to a pointer, use POINTER_PLUS_EXPR.
+
tree gnu_char_ptr_type = build_pointer_type (char_type_node);
tree gnu_pos = byte_position (TYPE_FIELDS (gnu_obj_type));
tree gnu_byte_offset
- = convert (gnu_char_ptr_type,
+ = convert (sizetype,
size_diffop (size_zero_node, gnu_pos));
+ gnu_byte_offset = fold_build1 (NEGATE_EXPR, sizetype, gnu_byte_offset);
gnu_ptr = convert (gnu_char_ptr_type, gnu_ptr);
- gnu_ptr = build_binary_op (MINUS_EXPR, gnu_char_ptr_type,
+ gnu_ptr = build_binary_op (POINTER_PLUS_EXPR, gnu_char_ptr_type,
gnu_ptr, gnu_byte_offset);
}
tree gnu_char_ptr_type = build_pointer_type (char_type_node);
tree gnu_pos = byte_position (TYPE_FIELDS (gnu_obj_type));
tree gnu_byte_offset
- = convert (gnu_char_ptr_type,
+ = convert (sizetype,
size_diffop (size_zero_node, gnu_pos));
+ gnu_byte_offset = fold_build1 (NEGATE_EXPR, sizetype, gnu_byte_offset);
gnu_ptr = convert (gnu_char_ptr_type, gnu_ptr);
- gnu_ptr = build_binary_op (MINUS_EXPR, gnu_char_ptr_type,
+ gnu_ptr = build_binary_op (POINTER_PLUS_EXPR, gnu_char_ptr_type,
gnu_ptr, gnu_byte_offset);
}
if (integer_zerop (byte_diff))
return expr;
- return build_binary_op (PLUS_EXPR, type, expr,
- fold (convert_to_pointer (type, byte_diff)));
+ return build_binary_op (POINTER_PLUS_EXPR, type, expr,
+ fold (convert (sizetype, byte_diff)));
}
/* If converting to a thin pointer, handle specially. */
break;
case PLUS_EXPR:
+ case POINTER_PLUS_EXPR:
case MINUS_EXPR:
/* If two address are added, the alignment of the result is the
minimum of the two alignments. */
modulus = NULL_TREE;
goto common;
+ case POINTER_PLUS_EXPR:
+ gcc_assert (operation_type == left_base_type
+ && sizetype == right_base_type);
+ left_operand = convert (operation_type, left_operand);
+ right_operand = convert (sizetype, right_operand);
+ break;
+
default:
common:
/* The result type should be the same as the base types of the
type, if any. */
inner = build_unary_op (ADDR_EXPR, NULL_TREE, inner);
inner = convert (ptr_void_type_node, inner);
- offset = convert (ptr_void_type_node, offset);
- result = build_binary_op (PLUS_EXPR, ptr_void_type_node,
+ result = build_binary_op (POINTER_PLUS_EXPR, ptr_void_type_node,
inner, offset);
result = convert (build_pointer_type (TREE_TYPE (operand)),
result);
align = MIN (inner, max_align);
break;
- case PLUS_EXPR:
+ case POINTER_PLUS_EXPR:
/* If sum of pointer + int, restrict our maximum alignment to that
imposed by the integer. If not, we can't do any better than
ALIGN. */
/* Create strlen (dst). */
newdst = build_call_expr (strlen_fn, 1, dst);
- /* Create (dst + (cast) strlen (dst)). */
- newdst = fold_convert (TREE_TYPE (dst), newdst);
- newdst = fold_build2 (PLUS_EXPR, TREE_TYPE (dst), dst, newdst);
+ /* Create (dst p+ strlen (dst)). */
+ newdst = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dst), dst, newdst);
newdst = builtin_save_expr (newdst);
if (!expand_builtin_strcpy_args (fndecl, newdst, newsrc, target, mode))
std_expand_builtin_va_start (tree valist, rtx nextarg)
{
tree t;
+ t = make_tree (sizetype, nextarg);
+ t = fold_convert (ptr_type_node, t);
- t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist,
- make_tree (ptr_type_node, nextarg));
+ t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
TREE_SIDE_EFFECTS (t) = 1;
expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
if (boundary > align
&& !integer_zerop (TYPE_SIZE (type)))
{
- t = fold_convert (TREE_TYPE (valist), size_int (boundary - 1));
t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
- build2 (PLUS_EXPR, TREE_TYPE (valist), valist_tmp, t));
+ fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist),
+ valist_tmp, size_int (boundary - 1)));
gimplify_and_add (t, pre_p);
- t = fold_convert (TREE_TYPE (valist), size_int (-boundary));
+ t = fold_convert (sizetype, valist_tmp);
t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist_tmp,
- build2 (BIT_AND_EXPR, TREE_TYPE (valist), valist_tmp, t));
+ fold_convert (TREE_TYPE (valist),
+ fold_build2 (BIT_AND_EXPR, sizetype, t,
+ size_int (-boundary))));
gimplify_and_add (t, pre_p);
}
else
t = fold_build2 (GT_EXPR, sizetype, rounded_size, size_int (align));
t = fold_build3 (COND_EXPR, sizetype, t, size_zero_node,
size_binop (MINUS_EXPR, rounded_size, type_size));
- t = fold_convert (TREE_TYPE (addr), t);
- addr = fold_build2 (PLUS_EXPR, TREE_TYPE (addr), addr, t);
+ addr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (addr), addr, t);
}
/* Compute new value for AP. */
- t = fold_convert (TREE_TYPE (valist), rounded_size);
- t = build2 (PLUS_EXPR, TREE_TYPE (valist), valist_tmp, t);
+ t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist_tmp, rounded_size);
t = build2 (MODIFY_EXPR, TREE_TYPE (valist), valist, t);
gimplify_and_add (t, pre_p);
len = fold_build2 (MINUS_EXPR, TREE_TYPE (len), len,
ssize_int (1));
- len = fold_convert (TREE_TYPE (dest), len);
- dest = fold_build2 (PLUS_EXPR, TREE_TYPE (dest), dest, len);
+ dest = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
dest = fold_convert (type, dest);
if (expr)
dest = omit_one_operand (type, dest, expr);
if (r == NULL)
return build_int_cst (TREE_TYPE (arg1), 0);
- tem = fold_build2 (PLUS_EXPR, TREE_TYPE (arg1), arg1,
- build_int_cst (TREE_TYPE (arg1), r - p1));
+ tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (arg1), arg1,
+ size_int (r - p1));
return fold_convert (type, tem);
}
return NULL_TREE;
return build_int_cst (TREE_TYPE (s1), 0);
/* Return an offset into the constant string argument. */
- tem = fold_build2 (PLUS_EXPR, TREE_TYPE (s1),
- s1, build_int_cst (TREE_TYPE (s1), r - p1));
+ tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
+ s1, size_int (r - p1));
return fold_convert (type, tem);
}
return build_int_cst (TREE_TYPE (s1), 0);
/* Return an offset into the constant string argument. */
- tem = fold_build2 (PLUS_EXPR, TREE_TYPE (s1),
- s1, build_int_cst (TREE_TYPE (s1), r - p1));
+ tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
+ s1, size_int (r - p1));
return fold_convert (type, tem);
}
return NULL_TREE;
return build_int_cst (TREE_TYPE (s1), 0);
/* Return an offset into the constant string argument. */
- tem = fold_build2 (PLUS_EXPR, TREE_TYPE (s1),
- s1, build_int_cst (TREE_TYPE (s1), r - p1));
+ tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
+ s1, size_int (r - p1));
return fold_convert (type, tem);
}
return build_int_cst (TREE_TYPE (s1), 0);
/* Return an offset into the constant string argument. */
- tem = fold_build2 (PLUS_EXPR, TREE_TYPE (s1),
- s1, build_int_cst (TREE_TYPE (s1), r - p1));
+ tem = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (s1),
+ s1, size_int (r - p1));
return fold_convert (type, tem);
}
return expand_expr (dest, target, mode, EXPAND_NORMAL);
}
- len = fold_convert (TREE_TYPE (dest), len);
- expr = fold_build2 (PLUS_EXPR, TREE_TYPE (dest), dest, len);
+ expr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
return expand_expr (expr, target, mode, EXPAND_NORMAL);
}
return omit_one_operand (TREE_TYPE (TREE_TYPE (fndecl)), dest, len);
else
{
- tree temp = fold_convert (TREE_TYPE (dest), len);
- temp = fold_build2 (PLUS_EXPR, TREE_TYPE (dest), dest, temp);
+ tree temp = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (dest), dest, len);
return fold_convert (TREE_TYPE (TREE_TYPE (fndecl)), temp);
}
}
Do this multiplication as signed, then convert to the appropriate
pointer type (actually unsigned integral). */
- intop = convert (result_type,
- build_binary_op (MULT_EXPR, intop,
- convert (TREE_TYPE (intop), size_exp), 1));
+ intop = build_binary_op (MULT_EXPR, intop,
+ convert (TREE_TYPE (intop), size_exp), 1);
+
+ if (resultcode == MINUS_EXPR)
+ intop = fold_build1 (NEGATE_EXPR, TREE_TYPE (intop), intop);
+
+ intop = convert (sizetype, intop);
/* Create the sum or difference. */
- ret = fold_build2 (resultcode, result_type, ptrop, intop);
+ ret = fold_build2 (POINTER_PLUS_EXPR, result_type, ptrop, intop);
fold_undefer_and_ignore_overflow_warnings ();
}
offset = 0;
- if (TREE_CODE (format_tree) == PLUS_EXPR)
+ if (TREE_CODE (format_tree) == POINTER_PLUS_EXPR)
{
tree arg0, arg1;
STRIP_NOPS (arg1);
if (TREE_CODE (arg1) == INTEGER_CST)
format_tree = arg0;
- else if (TREE_CODE (arg0) == INTEGER_CST)
- {
- format_tree = arg1;
- arg1 = arg0;
- }
else
{
res->number_non_literal++;
}
inc = c_size_in_bytes (TREE_TYPE (result_type));
+ inc = fold_convert (sizetype, inc);
}
else
- inc = integer_one_node;
-
- inc = convert (argtype, inc);
+ {
+ inc = integer_one_node;
+ inc = convert (argtype, inc);
+ }
/* Complain about anything else that is not a true lvalue. */
if (!lvalue_or_else (arg, ((code == PREINCREMENT_EXPR
if (val && TREE_CODE (val) == INDIRECT_REF
&& TREE_CONSTANT (TREE_OPERAND (val, 0)))
{
- tree op0 = fold_convert (argtype, fold_offsetof (arg, val)), op1;
+ tree op0 = fold_convert (sizetype, fold_offsetof (arg, val)), op1;
op1 = fold_convert (argtype, TREE_OPERAND (val, 0));
- return fold_build2 (PLUS_EXPR, argtype, op0, op1);
+ return fold_build2 (POINTER_PLUS_EXPR, argtype, op1, op0);
}
val = build1 (ADDR_EXPR, argtype, arg);
}
t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (valist), valist,
- make_tree (ptr_type_node, nextarg));
+ fold_convert (TREE_TYPE (valist),
+ make_tree (sizetype, nextarg)));
TREE_SIDE_EFFECTS (t) = 1;
expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
type = TREE_TYPE (ovf);
t = make_tree (type, virtual_incoming_args_rtx);
if (words != 0)
- t = build2 (PLUS_EXPR, type, t,
- build_int_cst (type, words * UNITS_PER_WORD));
+ t = build2 (POINTER_PLUS_EXPR, type, t,
+ size_int (words * UNITS_PER_WORD));
t = build2 (GIMPLE_MODIFY_STMT, type, ovf, t);
TREE_SIDE_EFFECTS (t) = 1;
expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
if (needed_intregs)
{
/* int_addr = gpr + sav; */
- t = fold_convert (ptr_type_node, fold_convert (size_type_node, gpr));
- t = build2 (PLUS_EXPR, ptr_type_node, sav, t);
+ t = fold_convert (sizetype, gpr);
+ t = build2 (POINTER_PLUS_EXPR, ptr_type_node, sav, t);
t = build2 (GIMPLE_MODIFY_STMT, void_type_node, int_addr, t);
gimplify_and_add (t, pre_p);
}
if (needed_sseregs)
{
/* sse_addr = fpr + sav; */
- t = fold_convert (ptr_type_node, fold_convert (size_type_node, fpr));
- t = build2 (PLUS_EXPR, ptr_type_node, sav, t);
+ t = fold_convert (sizetype, fpr);
+ t = build2 (POINTER_PLUS_EXPR, ptr_type_node, sav, t);
t = build2 (GIMPLE_MODIFY_STMT, void_type_node, sse_addr, t);
gimplify_and_add (t, pre_p);
}
src_offset = REGNO (reg) * 8;
}
src_addr = fold_convert (addr_type, src_addr);
- src_addr = fold_build2 (PLUS_EXPR, addr_type, src_addr,
+ src_addr = fold_build2 (POINTER_PLUS_EXPR, addr_type, src_addr,
build_int_cst (addr_type, src_offset));
src = build_va_arg_indirect_ref (src_addr);
dest_addr = fold_convert (addr_type, addr);
- dest_addr = fold_build2 (PLUS_EXPR, addr_type, dest_addr,
+ dest_addr = fold_build2 (POINTER_PLUS_EXPR, addr_type, dest_addr,
build_int_cst (addr_type, INTVAL (XEXP (slot, 1))));
dest = build_va_arg_indirect_ref (dest_addr);
if (FUNCTION_ARG_BOUNDARY (VOIDmode, type) <= 64
|| integer_zerop (TYPE_SIZE (type)))
t = ovf;
- else
+ else
{
HOST_WIDE_INT align = FUNCTION_ARG_BOUNDARY (VOIDmode, type) / 8;
- t = build2 (PLUS_EXPR, TREE_TYPE (ovf), ovf,
- build_int_cst (TREE_TYPE (ovf), align - 1));
+ t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (ovf), ovf,
+ size_int (align - 1));
+ t = fold_convert (sizetype, t);
t = build2 (BIT_AND_EXPR, TREE_TYPE (t), t,
- build_int_cst (TREE_TYPE (t), -align));
+ size_int (-align));
+ t = fold_convert (TREE_TYPE (ovf), t);
}
gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue);
t2 = build2 (GIMPLE_MODIFY_STMT, void_type_node, addr, t);
gimplify_and_add (t2, pre_p);
- t = build2 (PLUS_EXPR, TREE_TYPE (t), t,
- build_int_cst (TREE_TYPE (t), rsize * UNITS_PER_WORD));
+ t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t,
+ size_int (rsize * UNITS_PER_WORD));
t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (ovf), ovf, t);
gimplify_and_add (t, pre_p);
if ((TREE_CODE (type) == REAL_TYPE || TREE_CODE (type) == INTEGER_TYPE)
? int_size_in_bytes (type) > 8 : TYPE_ALIGN (type) > 8 * BITS_PER_UNIT)
{
- tree t = build2 (PLUS_EXPR, TREE_TYPE (valist), valist,
- build_int_cst (NULL_TREE, 2 * UNITS_PER_WORD - 1));
+ tree t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (valist), valist,
+ size_int (2 * UNITS_PER_WORD - 1));
+ t = fold_convert (sizetype, t);
t = build2 (BIT_AND_EXPR, TREE_TYPE (t), t,
- build_int_cst (NULL_TREE, -2 * UNITS_PER_WORD));
+ size_int (-2 * UNITS_PER_WORD));
+ t = fold_convert (TREE_TYPE (valist), t);
t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (valist), valist, t);
gimplify_and_add (t, pre_p);
}
words used by named arguments. */
t = make_tree (TREE_TYPE (ovfl), virtual_incoming_args_rtx);
if (cum->stack_words > 0)
- t = build2 (PLUS_EXPR, TREE_TYPE (ovfl), t,
- build_int_cst (NULL_TREE,
- cum->stack_words * UNITS_PER_WORD));
+ t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (ovfl), t,
+ size_int (cum->stack_words * UNITS_PER_WORD));
t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (ovfl), ovfl, t);
expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
fpr_offset = gpr_save_area_size + UNITS_PER_FPVALUE - 1;
fpr_offset &= ~(UNITS_PER_FPVALUE - 1);
if (fpr_offset)
- t = build2 (PLUS_EXPR, TREE_TYPE (ftop), t,
- build_int_cst (NULL_TREE, -fpr_offset));
+ t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (ftop), t,
+ size_int (-fpr_offset));
t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (ftop), ftop, t);
expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
t = fold_convert (TREE_TYPE (off), build_int_cst (NULL_TREE, rsize));
t = build2 (POSTDECREMENT_EXPR, TREE_TYPE (off), off, t);
t = fold_convert (sizetype, t);
- t = fold_convert (TREE_TYPE (top), t);
+ t = fold_build1 (NEGATE_EXPR, sizetype, t);
/* [4] Emit code for: addr_rtx = top - off. On big endian machines,
the argument has RSIZE - SIZE bytes of leading padding. */
- t = build2 (MINUS_EXPR, TREE_TYPE (top), top, t);
+ t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (top), top, t);
if (BYTES_BIG_ENDIAN && rsize > size)
{
- u = fold_convert (TREE_TYPE (t), build_int_cst (NULL_TREE,
- rsize - size));
- t = build2 (PLUS_EXPR, TREE_TYPE (t), t, u);
+ u = size_int (rsize - size);
+ t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, u);
}
COND_EXPR_THEN (addr) = t;
if (osize > UNITS_PER_WORD)
{
/* [9] Emit: ovfl += ((intptr_t) ovfl + osize - 1) & -osize. */
- u = fold_convert (TREE_TYPE (ovfl),
- build_int_cst (NULL_TREE, osize - 1));
- t = build2 (PLUS_EXPR, TREE_TYPE (ovfl), ovfl, u);
- u = fold_convert (TREE_TYPE (ovfl),
- build_int_cst (NULL_TREE, -osize));
- t = build2 (BIT_AND_EXPR, TREE_TYPE (ovfl), t, u);
+ u = size_int (osize - 1);
+ t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (ovfl), ovfl, u);
+ t = fold_convert (sizetype, t);
+ u = size_int (-osize);
+ t = build2 (BIT_AND_EXPR, sizetype, t, u);
+ t = fold_convert (TREE_TYPE (ovfl), t);
align = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (ovfl), ovfl, t);
}
else
t = build2 (POSTINCREMENT_EXPR, TREE_TYPE (ovfl), ovfl, u);
if (BYTES_BIG_ENDIAN && osize > size)
{
- u = fold_convert (TREE_TYPE (t),
- build_int_cst (NULL_TREE, osize - size));
- t = build2 (PLUS_EXPR, TREE_TYPE (t), t, u);
+ u = size_int (osize - size);
+ t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, u);
}
/* String [9] and [10,11] together. */
case ADDR_EXPR:
return 1;
+ case POINTER_PLUS_EXPR:
case PLUS_EXPR:
case MINUS_EXPR:
reloc = reloc_needed (TREE_OPERAND (exp, 0));
/* Args grow down. Not handled by generic routines. */
- u = fold_convert (valist_type, size_in_bytes (type));
- t = build2 (MINUS_EXPR, valist_type, valist, u);
+ u = fold_convert (sizetype, size_in_bytes (type));
+ u = fold_build1 (NEGATE_EXPR, sizetype, u);
+ t = build2 (POINTER_PLUS_EXPR, valist_type, valist, u);
/* Copied from va-pa.h, but we probably don't need to align to
word size, since we generate and preserve that invariant. */
- u = build_int_cst (valist_type, (size > 4 ? -8 : -4));
- t = build2 (BIT_AND_EXPR, valist_type, t, u);
+ u = size_int (size > 4 ? -8 : -4);
+ t = fold_convert (sizetype, t);
+ t = build2 (BIT_AND_EXPR, sizetype, t, u);
+ t = fold_convert (valist_type, t);
t = build2 (MODIFY_EXPR, valist_type, valist, t);
ofs = (8 - size) % 4;
if (ofs != 0)
{
- u = fold_convert (valist_type, size_int (ofs));
- t = build2 (PLUS_EXPR, valist_type, t, u);
+ u = size_int (ofs);
+ t = build2 (POINTER_PLUS_EXPR, valist_type, t, u);
}
t = fold_convert (ptr, t);
/* Find the overflow area. */
t = make_tree (TREE_TYPE (ovf), virtual_incoming_args_rtx);
if (words != 0)
- t = build2 (PLUS_EXPR, TREE_TYPE (ovf), t,
- build_int_cst (NULL_TREE, words * UNITS_PER_WORD));
+ t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (ovf), t,
+ size_int (words * UNITS_PER_WORD));
t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (ovf), ovf, t);
TREE_SIDE_EFFECTS (t) = 1;
expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
/* Find the register save area. */
t = make_tree (TREE_TYPE (sav), virtual_stack_vars_rtx);
if (cfun->machine->varargs_save_offset)
- t = build2 (PLUS_EXPR, TREE_TYPE (sav), t,
- build_int_cst (NULL_TREE, cfun->machine->varargs_save_offset));
+ t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (sav), t,
+ size_int (cfun->machine->varargs_save_offset));
t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (sav), sav, t);
TREE_SIDE_EFFECTS (t) = 1;
expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
t = sav;
if (sav_ofs)
- t = build2 (PLUS_EXPR, ptr_type_node, sav, size_int (sav_ofs));
+ t = build2 (POINTER_PLUS_EXPR, ptr_type_node, sav, size_int (sav_ofs));
u = build2 (POSTINCREMENT_EXPR, TREE_TYPE (reg), reg, size_int (n_reg));
- u = build1 (CONVERT_EXPR, integer_type_node, u);
- u = build2 (MULT_EXPR, integer_type_node, u, size_int (sav_scale));
- t = build2 (PLUS_EXPR, ptr_type_node, t, u);
+ u = fold_convert (sizetype, u);
+ u = build2 (MULT_EXPR, sizetype, u, size_int (sav_scale));
+ t = build2 (POINTER_PLUS_EXPR, ptr_type_node, t, u);
t = build2 (GIMPLE_MODIFY_STMT, void_type_node, addr, t);
gimplify_and_add (t, pre_p);
t = ovf;
if (align != 1)
{
- t = build2 (PLUS_EXPR, TREE_TYPE (t), t, size_int (align - 1));
+ t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, size_int (align - 1));
+ t = fold_convert (sizetype, t);
t = build2 (BIT_AND_EXPR, TREE_TYPE (t), t,
- build_int_cst (NULL_TREE, -align));
+ size_int (-align));
+ t = fold_convert (TREE_TYPE (ovf), t);
}
gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue);
u = build2 (GIMPLE_MODIFY_STMT, void_type_node, addr, t);
gimplify_and_add (u, pre_p);
- t = build2 (PLUS_EXPR, TREE_TYPE (t), t, size_int (size));
+ t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (t), t, size_int (size));
t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (ovf), ovf, t);
gimplify_and_add (t, pre_p);
fprintf (stderr, "va_start: n_gpr = %d, n_fpr = %d off %d\n",
(int)n_gpr, (int)n_fpr, off);
- t = build2 (PLUS_EXPR, TREE_TYPE (ovf), t, build_int_cst (NULL_TREE, off));
+ t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (ovf), t, size_int (off));
t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (ovf), ovf, t);
TREE_SIDE_EFFECTS (t) = 1;
|| (cfun->va_list_fpr_size && n_fpr < FP_ARG_NUM_REG))
{
t = make_tree (TREE_TYPE (sav), return_address_pointer_rtx);
- t = build2 (PLUS_EXPR, TREE_TYPE (sav), t,
- build_int_cst (NULL_TREE, -RETURN_REGNUM * UNITS_PER_WORD));
+ t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (sav), t,
+ size_int (-RETURN_REGNUM * UNITS_PER_WORD));
t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (sav), sav, t);
TREE_SIDE_EFFECTS (t) = 1;
t = build3 (COND_EXPR, void_type_node, t, u, NULL_TREE);
gimplify_and_add (t, pre_p);
- t = build2 (PLUS_EXPR, ptr_type_node, sav,
- fold_convert (ptr_type_node, size_int (sav_ofs)));
+ t = build2 (POINTER_PLUS_EXPR, ptr_type_node, sav,
+ size_int (sav_ofs));
u = build2 (MULT_EXPR, TREE_TYPE (reg), reg,
fold_convert (TREE_TYPE (reg), size_int (sav_scale)));
- t = build2 (PLUS_EXPR, ptr_type_node, t, fold_convert (ptr_type_node, u));
+ t = build2 (POINTER_PLUS_EXPR, ptr_type_node, t, fold_convert (sizetype, u));
t = build2 (GIMPLE_MODIFY_STMT, void_type_node, addr, t);
gimplify_and_add (t, pre_p);
t = ovf;
if (size < UNITS_PER_WORD)
- t = build2 (PLUS_EXPR, ptr_type_node, t,
- fold_convert (ptr_type_node, size_int (UNITS_PER_WORD - size)));
+ t = build2 (POINTER_PLUS_EXPR, ptr_type_node, t,
+ size_int (UNITS_PER_WORD - size));
gimplify_expr (&t, pre_p, NULL, is_gimple_val, fb_rvalue);
u = build2 (GIMPLE_MODIFY_STMT, void_type_node, addr, t);
gimplify_and_add (u, pre_p);
- t = build2 (PLUS_EXPR, ptr_type_node, t,
- fold_convert (ptr_type_node, size_int (size)));
+ t = build2 (POINTER_PLUS_EXPR, ptr_type_node, t,
+ size_int (size));
t = build2 (GIMPLE_MODIFY_STMT, ptr_type_node, ovf, t);
gimplify_and_add (t, pre_p);
valist, f_next_stack, NULL_TREE);
/* Call __builtin_saveregs. */
- u = make_tree (ptr_type_node, expand_builtin_saveregs ());
+ u = make_tree (sizetype, expand_builtin_saveregs ());
+ u = fold_convert (ptr_type_node, u);
t = build2 (GIMPLE_MODIFY_STMT, ptr_type_node, next_fp, u);
TREE_SIDE_EFFECTS (t) = 1;
expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
nfp = 8 - nfp;
else
nfp = 0;
- u = fold_build2 (PLUS_EXPR, ptr_type_node, u,
- build_int_cst (NULL_TREE, UNITS_PER_WORD * nfp));
+ u = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node, u,
+ size_int (UNITS_PER_WORD * nfp));
t = build2 (GIMPLE_MODIFY_STMT, ptr_type_node, next_fp_limit, u);
TREE_SIDE_EFFECTS (t) = 1;
expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
nint = 4 - nint;
else
nint = 0;
- u = fold_build2 (PLUS_EXPR, ptr_type_node, u,
- build_int_cst (NULL_TREE, UNITS_PER_WORD * nint));
+ u = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node, u,
+ size_int (UNITS_PER_WORD * nint));
t = build2 (GIMPLE_MODIFY_STMT, ptr_type_node, next_o_limit, u);
TREE_SIDE_EFFECTS (t) = 1;
expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
gimplify_and_add (tmp, pre_p);
tmp = next_fp_limit;
if (size > 4 && !is_double)
- tmp = build2 (PLUS_EXPR, TREE_TYPE (tmp), tmp,
- fold_convert (TREE_TYPE (tmp), size_int (4 - size)));
+ tmp = build2 (POINTER_PLUS_EXPR, TREE_TYPE (tmp), tmp,
+ size_int (4 - size));
tmp = build2 (GE_EXPR, boolean_type_node, next_fp_tmp, tmp);
cmp = build3 (COND_EXPR, void_type_node, tmp,
build1 (GOTO_EXPR, void_type_node, lab_false),
if (TYPE_ALIGN (eff_type) > BITS_PER_WORD
|| (is_double || size == 16))
{
- tmp = fold_convert (ptr_type_node, size_int (UNITS_PER_WORD));
- tmp = build2 (BIT_AND_EXPR, ptr_type_node, next_fp_tmp, tmp);
- tmp = build2 (PLUS_EXPR, ptr_type_node, next_fp_tmp, tmp);
+ tmp = fold_convert (sizetype, next_fp_tmp);
+ tmp = build2 (BIT_AND_EXPR, sizetype, tmp,
+ size_int (UNITS_PER_WORD));
+ tmp = build2 (POINTER_PLUS_EXPR, ptr_type_node,
+ next_fp_tmp, tmp);
tmp = build2 (GIMPLE_MODIFY_STMT, ptr_type_node,
next_fp_tmp, tmp);
gimplify_and_add (tmp, pre_p);
}
else
{
- tmp = fold_convert (ptr_type_node, size_int (rsize));
- tmp = build2 (PLUS_EXPR, ptr_type_node, next_o, tmp);
+ tmp = build2 (POINTER_PLUS_EXPR, ptr_type_node, next_o,
+ size_int (rsize));
tmp = build2 (GT_EXPR, boolean_type_node, tmp, next_o_limit);
tmp = build3 (COND_EXPR, void_type_node, tmp,
build1 (GOTO_EXPR, void_type_node, lab_false),
incr = valist;
if (align)
{
- incr = fold_build2 (PLUS_EXPR, ptr_type_node, incr,
- ssize_int (align - 1));
- incr = fold_build2 (BIT_AND_EXPR, ptr_type_node, incr,
- ssize_int (-align));
+ incr = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node, incr,
+ size_int (align - 1));
+ incr = fold_convert (sizetype, incr);
+ incr = fold_build2 (BIT_AND_EXPR, sizetype, incr,
+ size_int (-align));
+ incr = fold_convert (ptr_type_node, incr);
}
gimplify_expr (&incr, pre_p, post_p, is_gimple_val, fb_rvalue);
addr = incr;
if (BYTES_BIG_ENDIAN && size < rsize)
- addr = fold_build2 (PLUS_EXPR, ptr_type_node, incr,
- ssize_int (rsize - size));
+ addr = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node, incr,
+ size_int (rsize - size));
if (indirect)
{
else
addr = fold_convert (ptrtype, addr);
- incr = fold_build2 (PLUS_EXPR, ptr_type_node, incr, ssize_int (rsize));
+ incr = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node, incr, size_int (rsize));
incr = build2 (GIMPLE_MODIFY_STMT, ptr_type_node, valist, incr);
gimplify_and_add (incr, post_p);
/* Find the __args area. */
t = make_tree (TREE_TYPE (args), nextarg);
if (current_function_pretend_args_size > 0)
- t = build2 (PLUS_EXPR, TREE_TYPE (args), t,
- build_int_cst (integer_type_node, -STACK_POINTER_OFFSET));
+ t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (args), t,
+ size_int (-STACK_POINTER_OFFSET));
t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (args), args, t);
TREE_SIDE_EFFECTS (t) = 1;
expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
/* Find the __skip area. */
t = make_tree (TREE_TYPE (skip), virtual_incoming_args_rtx);
- t = build2 (PLUS_EXPR, TREE_TYPE (skip), t,
- build_int_cst (integer_type_node,
- (current_function_pretend_args_size
- - STACK_POINTER_OFFSET)));
+ t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (skip), t,
+ size_int (current_function_pretend_args_size
+ - STACK_POINTER_OFFSET));
t = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (skip), skip, t);
TREE_SIDE_EFFECTS (t) = 1;
expand_expr (t, const0_rtx, VOIDmode, EXPAND_NORMAL);
/* build conditional expression to calculate addr. The expression
will be gimplified later. */
- paddedsize = fold_convert (ptr_type_node, size_int (rsize));
- tmp = build2 (PLUS_EXPR, ptr_type_node, args, paddedsize);
+ paddedsize = size_int (rsize);
+ tmp = build2 (POINTER_PLUS_EXPR, ptr_type_node, args, paddedsize);
tmp = build2 (TRUTH_AND_EXPR, boolean_type_node,
build2 (GT_EXPR, boolean_type_node, tmp, skip),
build2 (LE_EXPR, boolean_type_node, args, skip));
tmp = build3 (COND_EXPR, ptr_type_node, tmp,
- build2 (PLUS_EXPR, ptr_type_node, skip,
- fold_convert (ptr_type_node, size_int (32))), args);
+ build2 (POINTER_PLUS_EXPR, ptr_type_node, skip,
+ size_int (32)), args);
tmp = build2 (GIMPLE_MODIFY_STMT, ptr_type_node, addr, tmp);
gimplify_and_add (tmp, pre_p);
/* update VALIST.__args */
- tmp = build2 (PLUS_EXPR, ptr_type_node, addr, paddedsize);
+ tmp = build2 (POINTER_PLUS_EXPR, ptr_type_node, addr, paddedsize);
tmp = build2 (GIMPLE_MODIFY_STMT, TREE_TYPE (args), args, tmp);
gimplify_and_add (tmp, pre_p);
+2007-06-15 Andrew Pinski <andrew_pinski@playstation.sony.com>
+
+ * typeck.c (build_binary_op): For templates build the
+ expression in pieces to avoid the assert in build2_stat.
+ (get_member_function_from_ptrfunc):
+ Change over to using POINTER_PLUS_EXPR and convert
+ the second operand to sizetype.
+ * typeck2.c (build_m_component_ref): Likewise.
+ * init.c (expand_virtual_init): Create a POINTER_PLUS_EXPR
+ instead of PLUS_EXPR for pointers.
+ (build_new_1): Likewise.
+ (build_vec_delete_1): Likewise.
+ (build_vec_delete): Likewise.
+ * class.c (build_base_path): Likewise.
+ (build_base_path): Likewise.
+ (convert_to_base_statically): Likewise.
+ (fixed_type_or_null): Handle POINTER_PLUS_EXPR.
+ (get_vtbl_decl_for_binfo): Handle POINTER_PLUS_EXPR
+ instead of PLUS_EXPR.
+ (dfs_accumulate_vtbl_inits): Create a POINTER_PLUS_EXPR
+ instead of PLUS_EXPR for pointers.
+ * call.c (build_special_member_call): Likewise.
+ * rtti.c (build_headof): Likewise.
+ Use sizetype instead of ptrdiff_type_node.
+ (tinfo_base_init): Create a POINTER_PLUS_EXPR
+ instead of PLUS_EXPR for pointers.
+ * except.c (expand_start_catch_block): Do a
+ NEGATE_EXPR and then a POINTER_PLUS_EXPR instead
+ of a MINUS_EXPR.
+ * cp-gimplify.c (cxx_omp_clause_apply_fn): Convert
+ PLUS_EXPR on pointer types over to use
+ POINTER_PLUS_EXPR and remove the conversion
+ to the pointer types.
+ * method.c (thunk_adjust): Use POINTER_PLUS_EXPR for
+ adding to a pointer type. Use size_int instead of
+ ssize_int. Convert the index to sizetype before
+ adding it to the pointer.
+
2007-06-15 Mark Mitchell <mark@codesourcery.com>
* cp-tree.h (DECL_VAR_MARKED_P): Remove.
(copy_fn_p): Don't consider constructors taking rvalue references
to be copy constructors.
(move_fn_p): New.
- * call.c (conversion): New "rvaluedness_matches_p" member.
+ * call.c (conversion): New "rvaluedness_matches_p" member.
(convert_class_to_reference): Require reference type as first
parameter instead of base type.
(reference_binding): Add logic to handle rvalue references.
2007-05-28 Andrew Pinski <Andrew_pinski@playstation.sony.com>
- PR c++/31339
+ PR c++/31339
* typeck.c (build_unary_op <case PREINCREMENT_EXPR,
case POSTINCREMENT_EXPR, case PREDECREMENT_EXPR,
case POSTDECREMENT_EXPR>): Return the error_mark_node
PR C++/30158
* semantics.c (finish_stmt_expr_expr): Set TREE_TYPE of the
statement expression if we had an error mark node.
-
+
2007-02-15 Sandra Loosemore <sandra@codesourcery.com>
Brooks Moses <brooks.moses@codesourcery.com>
Lee Millward <lee.millward@codesourcery.com>
--- /dev/null
+2007-06-14 Andrew Pinski <andrew_pinski@playstation.sony.com>
+
+ * init.c (build_new_1): Use fold_build1 instead
+ of build1 for NEGATE_EXPR.
+ (build_vec_delete_1): Likewise.
+ * class.c (build_base_path): Likewise.
+ * except.c (expand_start_catch_block): Likewise.
+
+2007-05-28 Andrew Pinski <andrew_pinski@playstation.sony.com>
+
+ * typeck.c (build_binary_op): Add a comment on why creating
+ the tree in pieces while processing templates.
+
+2007-05-12 Andrew Pinski <andrew_pinski@playstation.sony.com>
+
+ * except.c (expand_start_catch_block): Do a
+ NEGATE_EXPR and then a POINTER_PLUS_EXPR instead
+ of a MINUS_EXPR.
+
+2007-05-06 Andrew Pinski <andrew_pinski@playstation.sony.com>
+
+ * cp-gimplify.c (cxx_omp_clause_apply_fn): Convert
+ PLUS_EXPR on pointer types over to use
+ POINTER_PLUS_EXPR and remove the conversion
+ to the pointer types.
+
+2007-05-06 Andrew Pinski <andrew_pinski@playstation.sony.com>
+
+ * typeck.c (build_unary_op): Remove code that used to
+ handle non-lvalue increments/decrements, as we now
+ always error out.
+
+2007-05-06 Andrew Pinski <andrew_pinski@playstation.sony.com>
+
+ * typeck.c (get_member_function_from_ptrfunc):
+ Change over to using POINTER_PLUS_EXPR and convert
+ the second operand to sizetype.
+ * typeck2.c (build_m_component_ref): Likewise.
+ * rtti.c (build_headof): Use sizetype instead of
+ ptrdiff_type_node.
+
+2007-05-06 Andrew Pinski <andrew_pinski@playstation.sony.com>
+
+ * method.c (thunk_adjust): Use POINTER_PLUS_EXPR for
+ adding to a pointer type. Use size_int instead of
+ ssize_int. Convert the index to sizetype before
+ adding it to the pointer.
+
+2006-11-23 Andrew Pinski <andrew_pinski@playstation.sony.com>
+
+ * typeck.c (build_binary_op): For templates build the
+ expression in pieces to avoid the assert in build2_stat.
+ * init.c (expand_virtual_init): Create a POINTER_PLUS_EXPR
+ instead of PLUS_EXPR for pointers.
+ (build_new_1): Likewise.
+ (build_vec_delete_1): Likewise.
+ (build_vec_delete): Likewise.
+ * class.c (build_base_path): Likewise.
+ (build_base_path): Likewise.
+ (convert_to_base_statically): Likewise.
+ (fixed_type_or_null): Handle POINTER_PLUS_EXPR.
+ (get_vtbl_decl_for_binfo): Handle POINTER_PLUS_EXPR
+ instead of PLUS_EXPR.
+ (dfs_accumulate_vtbl_inits): Create a POINTER_PLUS_EXPR
+ instead of PLUS_EXPR for pointers.
+ * call.c (build_special_member_call): Likewise.
+ * rtti.c (build_headof): Likewise.
+ (tinfo_base_init): Likewise.
current_vtt_parm,
vtt);
gcc_assert (BINFO_SUBVTT_INDEX (binfo));
- sub_vtt = build2 (PLUS_EXPR, TREE_TYPE (vtt), vtt,
+ sub_vtt = build2 (POINTER_PLUS_EXPR, TREE_TYPE (vtt), vtt,
BINFO_SUBVTT_INDEX (binfo));
args = tree_cons (NULL_TREE, sub_vtt, args);
v_offset = build_vfield_ref (build_indirect_ref (expr, NULL),
TREE_TYPE (TREE_TYPE (expr)));
- v_offset = build2 (PLUS_EXPR, TREE_TYPE (v_offset),
- v_offset, BINFO_VPTR_FIELD (v_binfo));
+ v_offset = build2 (POINTER_PLUS_EXPR, TREE_TYPE (v_offset),
+ v_offset, fold_convert (sizetype, BINFO_VPTR_FIELD (v_binfo)));
v_offset = build1 (NOP_EXPR,
build_pointer_type (ptrdiff_type_node),
v_offset);
expr = build1 (NOP_EXPR, ptr_target_type, expr);
if (!integer_zerop (offset))
- expr = build2 (code, ptr_target_type, expr, offset);
+ {
+ offset = fold_convert (sizetype, offset);
+ if (code == MINUS_EXPR)
+ offset = fold_build1 (NEGATE_EXPR, sizetype, offset);
+ expr = build2 (POINTER_PLUS_EXPR, ptr_target_type, expr, offset);
+ }
else
null_test = NULL;
gcc_assert (!processing_template_decl);
expr = build_unary_op (ADDR_EXPR, expr, /*noconvert=*/1);
if (!integer_zerop (BINFO_OFFSET (base)))
- expr = fold_build2 (PLUS_EXPR, pointer_type, expr,
- fold_convert (pointer_type, BINFO_OFFSET (base)));
+ expr = fold_build2 (POINTER_PLUS_EXPR, pointer_type, expr,
+ fold_convert (sizetype, BINFO_OFFSET (base)));
expr = fold_convert (build_pointer_type (BINFO_TYPE (base)), expr);
expr = build_fold_indirect_ref (expr);
}
}
return RECUR (TREE_OPERAND (instance, 0));
+ case POINTER_PLUS_EXPR:
case PLUS_EXPR:
case MINUS_EXPR:
if (TREE_CODE (TREE_OPERAND (instance, 0)) == ADDR_EXPR)
tree decl;
decl = BINFO_VTABLE (binfo);
- if (decl && TREE_CODE (decl) == PLUS_EXPR)
+ if (decl && TREE_CODE (decl) == POINTER_PLUS_EXPR)
{
gcc_assert (TREE_CODE (TREE_OPERAND (decl, 0)) == ADDR_EXPR);
decl = TREE_OPERAND (TREE_OPERAND (decl, 0), 0);
index = size_binop (MULT_EXPR,
TYPE_SIZE_UNIT (vtable_entry_type),
index);
- vtbl = build2 (PLUS_EXPR, TREE_TYPE (vtbl), vtbl, index);
+ vtbl = build2 (POINTER_PLUS_EXPR, TREE_TYPE (vtbl), vtbl, index);
}
if (ctor_vtbl_p)
start2 = build_fold_addr_expr (start2);
end1 = TYPE_SIZE_UNIT (TREE_TYPE (arg1));
- end1 = fold_convert (TREE_TYPE (start1), end1);
- end1 = build2 (PLUS_EXPR, TREE_TYPE (start1), start1, end1);
+ end1 = build2 (POINTER_PLUS_EXPR, TREE_TYPE (start1), start1, end1);
p1 = create_tmp_var (TREE_TYPE (start1), NULL);
t = build2 (GIMPLE_MODIFY_STMT, void_type_node, p1, start1);
t = build_call_a (fn, i, argarray);
append_to_statement_list (t, &ret);
- t = fold_convert (TREE_TYPE (p1), TYPE_SIZE_UNIT (inner_type));
- t = build2 (PLUS_EXPR, TREE_TYPE (p1), p1, t);
+ t = TYPE_SIZE_UNIT (inner_type);
+ t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (p1), p1, t);
t = build2 (GIMPLE_MODIFY_STMT, void_type_node, p1, t);
append_to_statement_list (t, &ret);
if (arg2)
{
- t = fold_convert (TREE_TYPE (p2), TYPE_SIZE_UNIT (inner_type));
- t = build2 (PLUS_EXPR, TREE_TYPE (p2), p2, t);
+ t = TYPE_SIZE_UNIT (inner_type);
+ t = build2 (POINTER_PLUS_EXPR, TREE_TYPE (p2), p2, t);
t = build2 (GIMPLE_MODIFY_STMT, void_type_node, p2, t);
append_to_statement_list (t, &ret);
}
generic exception header. */
exp = build_exc_ptr ();
exp = build1 (NOP_EXPR, build_pointer_type (type), exp);
- exp = build2 (MINUS_EXPR, TREE_TYPE (exp), exp,
- TYPE_SIZE_UNIT (TREE_TYPE (exp)));
+ exp = build2 (POINTER_PLUS_EXPR, TREE_TYPE (exp), exp,
+ fold_build1 (NEGATE_EXPR, sizetype,
+ TYPE_SIZE_UNIT (TREE_TYPE (exp))));
exp = build_indirect_ref (exp, NULL);
initialize_handler_parm (decl, exp);
return type;
/* Compute the value to use, when there's a VTT. */
vtt_parm = current_vtt_parm;
- vtbl2 = build2 (PLUS_EXPR,
+ vtbl2 = build2 (POINTER_PLUS_EXPR,
TREE_TYPE (vtt_parm),
vtt_parm,
vtt_index);
tree cookie_ptr;
/* Adjust so we're pointing to the start of the object. */
- data_addr = get_target_expr (build2 (PLUS_EXPR, full_pointer_type,
+ data_addr = get_target_expr (build2 (POINTER_PLUS_EXPR, full_pointer_type,
alloc_node, cookie_size));
/* Store the number of bytes allocated so that we can know how
many elements to destroy later. We use the last sizeof
(size_t) bytes to store the number of elements. */
- cookie_ptr = build2 (MINUS_EXPR, build_pointer_type (sizetype),
- data_addr, size_in_bytes (sizetype));
+ cookie_ptr = fold_build1 (NEGATE_EXPR, sizetype, size_in_bytes (sizetype));
+ cookie_ptr = build2 (POINTER_PLUS_EXPR, build_pointer_type (sizetype),
+ data_addr, cookie_ptr);
cookie = build_indirect_ref (cookie_ptr, NULL);
cookie_expr = build2 (MODIFY_EXPR, sizetype, cookie, nelts);
executing any other code in the loop.
This is also the containing expression returned by this function. */
tree controller = NULL_TREE;
+ tree tmp;
/* We should only have 1-D arrays here. */
gcc_assert (TREE_CODE (type) != ARRAY_TYPE);
tbase = create_temporary_var (ptype);
tbase_init = build_modify_expr (tbase, NOP_EXPR,
- fold_build2 (PLUS_EXPR, ptype,
+ fold_build2 (POINTER_PLUS_EXPR, ptype,
base,
virtual_size));
DECL_REGISTER (tbase) = 1;
body = build1 (EXIT_EXPR, void_type_node,
build2 (EQ_EXPR, boolean_type_node, tbase,
fold_convert (ptype, base)));
+ tmp = fold_build1 (NEGATE_EXPR, sizetype, size_exp);
body = build_compound_expr
(body, build_modify_expr (tbase, NOP_EXPR,
- build2 (MINUS_EXPR, ptype, tbase, size_exp)));
+ build2 (POINTER_PLUS_EXPR, ptype, tbase, tmp)));
body = build_compound_expr
(body, build_delete (ptype, tbase, sfk_complete_destructor,
LOOKUP_NORMAL|LOOKUP_DESTRUCTOR, 1));
base = TARGET_EXPR_SLOT (base_init);
}
type = strip_array_types (TREE_TYPE (type));
- cookie_addr = build2 (MINUS_EXPR,
+ cookie_addr = fold_build1 (NEGATE_EXPR, sizetype, TYPE_SIZE_UNIT (sizetype));
+ cookie_addr = build2 (POINTER_PLUS_EXPR,
build_pointer_type (sizetype),
base,
- TYPE_SIZE_UNIT (sizetype));
+ cookie_addr);
maxindex = build_indirect_ref (cookie_addr, NULL);
}
else if (TREE_CODE (type) == ARRAY_TYPE)
{
if (this_adjusting)
/* Adjust the pointer by the constant. */
- ptr = fold_build2 (PLUS_EXPR, TREE_TYPE (ptr), ptr,
- ssize_int (fixed_offset));
+ ptr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (ptr), ptr,
+ size_int (fixed_offset));
/* If there's a virtual offset, look up that value in the vtable and
adjust the pointer again. */
/* Form the vtable address. */
vtable = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (vtable)), vtable);
/* Find the entry with the vcall offset. */
- vtable = build2 (PLUS_EXPR, TREE_TYPE (vtable), vtable, virtual_offset);
+ vtable = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (vtable), vtable,
+ fold_convert (sizetype, virtual_offset));
/* Get the offset itself. */
vtable = build1 (INDIRECT_REF, TREE_TYPE (TREE_TYPE (vtable)), vtable);
/* Adjust the `this' pointer. */
- ptr = fold_build2 (PLUS_EXPR, TREE_TYPE (ptr), ptr, vtable);
+ ptr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (ptr), ptr,
+ fold_convert (sizetype, vtable));
}
if (!this_adjusting)
/* Adjust the pointer by the constant. */
- ptr = fold_build2 (PLUS_EXPR, TREE_TYPE (ptr), ptr,
- ssize_int (fixed_offset));
+ ptr = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (ptr), ptr,
+ size_int (fixed_offset));
return ptr;
}
type = build_qualified_type (ptr_type_node,
cp_type_quals (TREE_TYPE (exp)));
- return build2 (PLUS_EXPR, type, exp,
- convert_to_integer (ptrdiff_type_node, offset));
+ return build2 (POINTER_PLUS_EXPR, type, exp,
+ convert_to_integer (sizetype, offset));
}
/* Get a bad_cast node for the program to throw...
/* We need to point into the middle of the vtable. */
vtable_ptr = build2
- (PLUS_EXPR, TREE_TYPE (vtable_ptr), vtable_ptr,
+ (POINTER_PLUS_EXPR, TREE_TYPE (vtable_ptr), vtable_ptr,
size_binop (MULT_EXPR,
size_int (2 * TARGET_VTABLE_DATA_ENTRY_DISTANCE),
TYPE_SIZE_UNIT (vtable_entry_type)));
return error_mark_node;
}
/* ...and then the delta in the PMF. */
- instance_ptr = build2 (PLUS_EXPR, TREE_TYPE (instance_ptr),
- instance_ptr, delta);
+ instance_ptr = build2 (POINTER_PLUS_EXPR, TREE_TYPE (instance_ptr),
+ instance_ptr, fold_convert (sizetype, delta));
/* Hand back the adjusted 'this' argument to our caller. */
*instance_ptrptr = instance_ptr;
vtbl = build_indirect_ref (vtbl, NULL);
/* Finally, extract the function pointer from the vtable. */
- e2 = fold_build2 (PLUS_EXPR, TREE_TYPE (vtbl), vtbl, idx);
+ e2 = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (vtbl), vtbl,
+ fold_convert (sizetype, idx));
e2 = build_indirect_ref (e2, NULL);
TREE_CONSTANT (e2) = 1;
TREE_INVARIANT (e2) = 1;
/* If we're in a template, the only thing we need to know is the
RESULT_TYPE. */
if (processing_template_decl)
- return build2 (resultcode,
- build_type ? build_type : result_type,
- op0, op1);
+ {
+ /* Since the middle-end checks the type when doing a build2, we
+ need to build the tree in pieces. This built tree will never
+ get out of the front-end as we replace it when instantiating
+ the template. */
+ tree tmp = build2 (resultcode,
+ build_type ? build_type : result_type,
+ NULL_TREE, op1);
+ TREE_OPERAND (tmp, 0) = op0;
+ return tmp;
+ }
if (arithmetic_types_p)
{
/* Build an expression for "object + offset" where offset is the
value stored in the pointer-to-data-member. */
- datum = build2 (PLUS_EXPR, build_pointer_type (type),
- datum, build_nop (ptrdiff_type_node, component));
+ datum = build2 (POINTER_PLUS_EXPR, build_pointer_type (type),
+ datum, build_nop (sizetype, component));
return build_indirect_ref (datum, 0);
}
else
@tindex TRUTH_AND_EXPR
@tindex TRUTH_OR_EXPR
@tindex TRUTH_XOR_EXPR
+@tindex POINTER_PLUS_EXPR
@tindex PLUS_EXPR
@tindex MINUS_EXPR
@tindex MULT_EXPR
not matter. The type of the operands and that of the result are
always of @code{BOOLEAN_TYPE} or @code{INTEGER_TYPE}.
+@itemx POINTER_PLUS_EXPR
+This node represents pointer arithmetic. The first operand is always
+a pointer/reference type. The second operand is always an unsigned
+integer type compatible with sizetype. This is the only binary
+arithmetic operator that can operate on pointer types.
+
@itemx PLUS_EXPR
@itemx MINUS_EXPR
@itemx MULT_EXPR
op = (TYPE_UNSIGNED (TREE_TYPE (loc)) ? DW_OP_shr : DW_OP_shra);
goto do_binop;
+ case POINTER_PLUS_EXPR:
case PLUS_EXPR:
if (TREE_CODE (TREE_OPERAND (loc, 1)) == INTEGER_CST
&& host_integerp (TREE_OPERAND (loc, 1), 0))
return op0;
+ case POINTER_PLUS_EXPR:
+ /* Even though the sizetype mode and the pointer's mode can be
+ different, expand is able to handle this correctly and get the
+ correct result out of the PLUS_EXPR code. */
case PLUS_EXPR:
+
/* Check if this is a case for multiplication and addition. */
if (TREE_CODE (type) == INTEGER_TYPE
&& TREE_CODE (TREE_OPERAND (exp, 0)) == MULT_EXPR)
else
return 0;
}
- else if (TREE_CODE (arg) == PLUS_EXPR)
+ else if (TREE_CODE (arg) == PLUS_EXPR || TREE_CODE (arg) == POINTER_PLUS_EXPR)
{
tree arg0 = TREE_OPERAND (arg, 0);
tree arg1 = TREE_OPERAND (arg, 1);
value = const_binop (MINUS_EXPR, high, low, 0);
+
+ if (POINTER_TYPE_P (etype))
+ {
+ if (value != 0 && !TREE_OVERFLOW (value))
+ {
+ low = fold_convert (sizetype, low);
+ low = fold_build1 (NEGATE_EXPR, sizetype, low);
+ return build_range_check (type,
+ fold_build2 (POINTER_PLUS_EXPR, etype, exp, low),
+ 1, build_int_cst (etype, 0), value);
+ }
+ return 0;
+ }
+
if (value != 0 && !TREE_OVERFLOW (value))
return build_range_check (type,
fold_build2 (MINUS_EXPR, etype, exp, low),
offset is set to NULL_TREE. Base will be canonicalized to
something you can get the element type from using
TREE_TYPE (TREE_TYPE (base)). Offset will be the offset
- in bytes to the base. */
+ in bytes to the base in sizetype. */
static bool
extract_array_ref (tree expr, tree *base, tree *offset)
/* One canonical form is a PLUS_EXPR with the first
argument being an ADDR_EXPR with a possible NOP_EXPR
attached. */
- if (TREE_CODE (expr) == PLUS_EXPR)
+ if (TREE_CODE (expr) == POINTER_PLUS_EXPR)
{
tree op0 = TREE_OPERAND (expr, 0);
tree inner_base, dummy1;
/* Strip NOP_EXPRs here because the C frontends and/or
- folders present us (int *)&x.a + 4B possibly. */
+ folders present us (int *)&x.a p+ 4 possibly. */
STRIP_NOPS (op0);
if (extract_array_ref (op0, &inner_base, &dummy1))
{
*base = inner_base;
- if (dummy1 == NULL_TREE)
- *offset = TREE_OPERAND (expr, 1);
- else
- *offset = fold_build2 (PLUS_EXPR, TREE_TYPE (expr),
- dummy1, TREE_OPERAND (expr, 1));
+ *offset = fold_convert (sizetype, TREE_OPERAND (expr, 1));
+ if (dummy1 != NULL_TREE)
+ *offset = fold_build2 (PLUS_EXPR, sizetype,
+ dummy1, *offset);
return true;
}
}
*base = TREE_OPERAND (op0, 0);
*offset = fold_build2 (MULT_EXPR, TREE_TYPE (idx), idx,
array_ref_element_size (op0));
+ *offset = fold_convert (sizetype, *offset);
}
else
{
return fold_build2 (code, type, arg0_inner, arg1);
}
-/* Tries to replace &a[idx] CODE s * delta with &a[idx CODE delta], if s is
+/* Tries to replace &a[idx] p+ s * delta with &a[idx + delta], if s is
step of the array. Reconstructs s and delta in the case of s * delta
being an integer constant (and thus already folded).
ADDR is the address. MULT is the multiplicative expression.
NULL_TREE is returned. */
static tree
-try_move_mult_to_index (enum tree_code code, tree addr, tree op1)
+try_move_mult_to_index (tree addr, tree op1)
{
tree s, delta, step;
tree ref = TREE_OPERAND (addr, 0), pref;
tree itype;
bool mdim = false;
+ /* Strip the nops that might be added when converting op1 to sizetype. */
+ STRIP_NOPS (op1);
+
/* Canonicalize op1 into a possibly non-constant delta
and an INTEGER_CST s. */
if (TREE_CODE (op1) == MULT_EXPR)
|| TREE_CODE (TYPE_MAX_VALUE (itype)) != INTEGER_CST)
continue;
- tmp = fold_binary (code, itype,
+ tmp = fold_binary (PLUS_EXPR, itype,
fold_convert (itype,
TREE_OPERAND (ref, 1)),
fold_convert (itype, delta));
pos = TREE_OPERAND (pos, 0);
}
- TREE_OPERAND (pos, 1) = fold_build2 (code, itype,
+ TREE_OPERAND (pos, 1) = fold_build2 (PLUS_EXPR, itype,
fold_convert (itype,
TREE_OPERAND (pos, 1)),
fold_convert (itype, delta));
if (TREE_TYPE (a1) != typea)
return NULL_TREE;
- diff = fold_build2 (MINUS_EXPR, typea, a1, a);
- if (!integer_onep (diff))
- return NULL_TREE;
+ if (POINTER_TYPE_P (typea))
+ {
+ /* Convert the pointers to integers before taking the difference. */
+ tree ta = fold_convert (ssizetype, a);
+ tree ta1 = fold_convert (ssizetype, a1);
+ diff = fold_binary (MINUS_EXPR, ssizetype, ta1, ta);
+ }
+ else
+ diff = fold_binary (MINUS_EXPR, typea, a1, a);
+
+ if (!diff || !integer_onep (diff))
+ return NULL_TREE;
return fold_build2 (GE_EXPR, type, a, y);
}
}
}
- /* Convert (T1)(X op Y) into ((T1)X op (T1)Y), for pointer type,
+ /* Convert (T1)(X p+ Y) into ((T1)X p+ Y), for pointer type,
when one of the new casts will fold away. Conservatively we assume
- that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
- if (POINTER_TYPE_P (type) && POINTER_TYPE_P (TREE_TYPE (arg0))
- && BINARY_CLASS_P (arg0)
+ that this happens when X or Y is NOP_EXPR or Y is INTEGER_CST. */
+ if (POINTER_TYPE_P (type)
+ && TREE_CODE (arg0) == POINTER_PLUS_EXPR
&& (TREE_CODE (TREE_OPERAND (arg0, 1)) == INTEGER_CST
|| TREE_CODE (TREE_OPERAND (arg0, 0)) == NOP_EXPR
|| TREE_CODE (TREE_OPERAND (arg0, 1)) == NOP_EXPR))
tree arg01 = TREE_OPERAND (arg0, 1);
return fold_build2 (TREE_CODE (arg0), type, fold_convert (type, arg00),
- fold_convert (type, arg01));
+ fold_convert (sizetype, arg01));
}
/* Convert (T1)(~(T2)X) into ~(T1)X if T1 and T2 are integral types
switch (code)
{
+ case POINTER_PLUS_EXPR:
+ /* 0 +p index -> (type)index */
+ if (integer_zerop (arg0))
+ return non_lvalue (fold_convert (type, arg1));
+
+ /* PTR +p 0 -> PTR */
+ if (integer_zerop (arg1))
+ return non_lvalue (fold_convert (type, arg0));
+
+ /* INT +p INT -> (PTR)(INT + INT). Stripping types allows for this. */
+ if (INTEGRAL_TYPE_P (TREE_TYPE (arg1))
+ && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
+ return fold_convert (type, fold_build2 (PLUS_EXPR, sizetype,
+ fold_convert (sizetype, arg1),
+ fold_convert (sizetype, arg0)));
+
+ /* index +p PTR -> PTR +p index */
+ if (POINTER_TYPE_P (TREE_TYPE (arg1))
+ && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
+ return fold_build2 (POINTER_PLUS_EXPR, type,
+ fold_convert (type, arg1), fold_convert (sizetype, arg0));
+
+ /* (PTR +p B) +p A -> PTR +p (B + A) */
+ if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
+ {
+ tree inner;
+ tree arg01 = fold_convert (sizetype, TREE_OPERAND (arg0, 1));
+ tree arg00 = TREE_OPERAND (arg0, 0);
+ inner = fold_build2 (PLUS_EXPR, sizetype, arg01, fold_convert (sizetype, arg1));
+ return fold_build2 (POINTER_PLUS_EXPR, type, arg00, inner);
+ }
+
+ /* PTR_CST +p CST -> CST1 */
+ if (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST)
+ return fold_build2 (PLUS_EXPR, type, arg0, fold_convert (type, arg1));
+
+ /* Try replacing &a[i1] +p c * i2 with &a[i1 + i2], if c is the step
+ of the array. The loop optimizer sometimes produces this type of
+ expression. */
+ if (TREE_CODE (arg0) == ADDR_EXPR)
+ {
+ tem = try_move_mult_to_index (arg0, fold_convert (sizetype, arg1));
+ if (tem)
+ return fold_convert (type, tem);
+ }
+
+ return NULL_TREE;
case PLUS_EXPR:
+ /* PTR + INT -> (INT)(PTR p+ INT) */
+ if (POINTER_TYPE_P (TREE_TYPE (arg0))
+ && INTEGRAL_TYPE_P (TREE_TYPE (arg1)))
+ return fold_convert (type, fold_build2 (POINTER_PLUS_EXPR,
+ TREE_TYPE (arg0),
+ arg0,
+ fold_convert (sizetype, arg1)));
+ /* INT + PTR -> (INT)(PTR p+ INT) */
+ if (POINTER_TYPE_P (TREE_TYPE (arg1))
+ && INTEGRAL_TYPE_P (TREE_TYPE (arg0)))
+ return fold_convert (type, fold_build2 (POINTER_PLUS_EXPR,
+ TREE_TYPE (arg1),
+ arg1,
+ fold_convert (sizetype, arg0)));
/* A + (-B) -> A - B */
if (TREE_CODE (arg1) == NEGATE_EXPR)
return fold_build2 (MINUS_EXPR, type,
fold_convert (type,
parg1)));
}
-
- /* Try replacing &a[i1] + c * i2 with &a[i1 + i2], if c is step
- of the array. Loop optimizer sometimes produce this type of
- expressions. */
- if (TREE_CODE (arg0) == ADDR_EXPR)
- {
- tem = try_move_mult_to_index (PLUS_EXPR, arg0, arg1);
- if (tem)
- return fold_convert (type, tem);
- }
- else if (TREE_CODE (arg1) == ADDR_EXPR)
- {
- tem = try_move_mult_to_index (PLUS_EXPR, arg1, arg0);
- if (tem)
- return fold_convert (type, tem);
- }
}
else
{
return NULL_TREE;
case MINUS_EXPR:
+ /* Pointer simplifications for subtraction, simple reassociations. */
+ if (POINTER_TYPE_P (TREE_TYPE (arg1)) && POINTER_TYPE_P (TREE_TYPE (arg0)))
+ {
+ /* (PTR0 p+ A) - (PTR1 p+ B) -> (PTR0 - PTR1) + (A - B) */
+ if (TREE_CODE (arg0) == POINTER_PLUS_EXPR
+ && TREE_CODE (arg1) == POINTER_PLUS_EXPR)
+ {
+ tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
+ tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
+ tree arg10 = fold_convert (type, TREE_OPERAND (arg1, 0));
+ tree arg11 = fold_convert (type, TREE_OPERAND (arg1, 1));
+ return fold_build2 (PLUS_EXPR, type,
+ fold_build2 (MINUS_EXPR, type, arg00, arg10),
+ fold_build2 (MINUS_EXPR, type, arg01, arg11));
+ }
+ /* (PTR0 p+ A) - PTR1 -> (PTR0 - PTR1) + A, assuming PTR0 - PTR1 simplifies. */
+ else if (TREE_CODE (arg0) == POINTER_PLUS_EXPR)
+ {
+ tree arg00 = fold_convert (type, TREE_OPERAND (arg0, 0));
+ tree arg01 = fold_convert (type, TREE_OPERAND (arg0, 1));
+ tree tmp = fold_binary (MINUS_EXPR, type, arg00, fold_convert (type, arg1));
+ if (tmp)
+ return fold_build2 (PLUS_EXPR, type, tmp, arg01);
+ }
+ }
/* A - (-B) -> A + B */
if (TREE_CODE (arg1) == NEGATE_EXPR)
return fold_build2 (PLUS_EXPR, type, arg0, TREE_OPERAND (arg1, 0));
}
}
- /* Try replacing &a[i1] - c * i2 with &a[i1 - i2], if c is step
- of the array. Loop optimizer sometimes produce this type of
- expressions. */
- if (TREE_CODE (arg0) == ADDR_EXPR)
- {
- tem = try_move_mult_to_index (MINUS_EXPR, arg0, arg1);
- if (tem)
- return fold_convert (type, tem);
- }
-
if (flag_unsafe_math_optimizations
&& (TREE_CODE (arg0) == RDIV_EXPR || TREE_CODE (arg0) == MULT_EXPR)
&& (TREE_CODE (arg1) == RDIV_EXPR || TREE_CODE (arg1) == MULT_EXPR)
case REAL_CST:
return ! REAL_VALUE_NEGATIVE (TREE_REAL_CST (t));
+ case POINTER_PLUS_EXPR:
case PLUS_EXPR:
if (FLOAT_TYPE_P (TREE_TYPE (t)))
return (tree_expr_nonnegative_warnv_p (TREE_OPERAND (t, 0),
case INTEGER_CST:
return !integer_zerop (t);
+ case POINTER_PLUS_EXPR:
case PLUS_EXPR:
if (TYPE_OVERFLOW_UNDEFINED (type))
{
}
/* ((foo*)&complexfoo)[1] => __imag__ complexfoo */
- if (TREE_CODE (sub) == PLUS_EXPR
+ if (TREE_CODE (sub) == POINTER_PLUS_EXPR
&& TREE_CODE (TREE_OPERAND (sub, 1)) == INTEGER_CST)
{
tree op00 = TREE_OPERAND (sub, 0);
+2007-06-15 Andrew Pinski <andrew_pinski@playstation.sony.com>
+
+ * trans-intrinsic.c (gfc_conv_intrinsic_repeat): Use
+ POINTER_PLUS_EXPR instead of PLUS_EXPR for pointer addition.
+ * trans-expr.c (gfc_trans_string_copy): Create
+ POINTER_PLUS_EXPR instead of a PLUS_EXPR
+ for pointer types.
+
2007-06-14 Paul Thomas <pault@gcc.gnu.org>
PR fortran/32302
--- /dev/null
+2007-05-15 Andrew Pinski <andrew_pinski@playstation.sony.com>
+
+ * trans-intrinsic.c (gfc_conv_intrinsic_repeat): Use
+ POINTER_PLUS_EXPR instead of PLUS_EXPR for pointer addition.
+
+2007-05-07 Andrew Pinski <andrew_pinski@playstation.sony.com>
+
+ * trans-expr.c (gfc_trans_string_copy): Create
+ POINTER_PLUS_EXPR instead of a PLUS_EXPR
+ for pointer types.
+
tmp3 = build_call_expr (built_in_decls[BUILT_IN_MEMMOVE],
3, dest, src, slen);
- tmp4 = fold_build2 (PLUS_EXPR, pchar_type_node, dest,
- fold_convert (pchar_type_node, slen));
+ tmp4 = fold_build2 (POINTER_PLUS_EXPR, pchar_type_node, dest,
+ fold_convert (sizetype, slen));
tmp4 = build_call_expr (built_in_decls[BUILT_IN_MEMSET], 3,
tmp4,
build_int_cst (gfc_get_int_type (gfc_c_int_kind),
/* Call memmove (dest + (i*slen), src, slen). */
tmp = fold_build2 (MULT_EXPR, gfc_charlen_type_node, slen,
fold_convert (gfc_charlen_type_node, count));
- tmp = fold_build2 (PLUS_EXPR, pchar_type_node, dest,
- fold_convert (pchar_type_node, tmp));
+ tmp = fold_build2 (POINTER_PLUS_EXPR, pchar_type_node, dest,
+ fold_convert (sizetype, tmp));
tmp = build_call_expr (built_in_decls[BUILT_IN_MEMMOVE], 3,
tmp, src, slen);
gfc_add_expr_to_block (&body, tmp);
return ret;
}
+ /* For POINTERs increment, use POINTER_PLUS_EXPR. */
+ if (POINTER_TYPE_P (TREE_TYPE (lhs)))
+ {
+ rhs = fold_convert (sizetype, rhs);
+ if (arith_code == MINUS_EXPR)
+ rhs = fold_build1 (NEGATE_EXPR, TREE_TYPE (rhs), rhs);
+ arith_code = POINTER_PLUS_EXPR;
+ }
+
t1 = build2 (arith_code, TREE_TYPE (*expr_p), lhs, rhs);
t1 = build_gimple_modify_stmt (lvalue, t1);
/* Check for one of the supported fetch-op operations. */
switch (TREE_CODE (rhs))
{
+ case POINTER_PLUS_EXPR:
case PLUS_EXPR:
base = BUILT_IN_FETCH_AND_ADD_N;
optab = sync_add_optab;
ret = GS_ALL_DONE;
break;
- case PLUS_EXPR:
+ case POINTER_PLUS_EXPR:
/* Convert ((type *)A)+offset into &A->field_of_type_and_offset.
The second is gimple immediate saving a need for extra statement.
*/
- if (POINTER_TYPE_P (TREE_TYPE (*expr_p))
- && TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST
+ if (TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST
&& (tmp = maybe_fold_offset_to_reference
(TREE_OPERAND (*expr_p, 0), TREE_OPERAND (*expr_p, 1),
TREE_TYPE (TREE_TYPE (*expr_p)))))
break;
}
/* Convert (void *)&a + 4 into (void *)&a[1]. */
- if (POINTER_TYPE_P (TREE_TYPE (*expr_p))
- && TREE_CODE (TREE_OPERAND (*expr_p, 0)) == NOP_EXPR
+ if (TREE_CODE (TREE_OPERAND (*expr_p, 0)) == NOP_EXPR
&& TREE_CODE (TREE_OPERAND (*expr_p, 1)) == INTEGER_CST
&& POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (TREE_OPERAND (*expr_p,
0),0)))
+2007-06-15 Andrew Pinski <andrew_pinski@playstation.sony.com>
+
+ * class.c (make_class_data): Build the index in sizetype.
+ Use POINTER_PLUS_EXPR instead of PLUS_EXPR when
+ adding to a pointer type.
+ (build_symbol_entry): Likewise.
+ * expr.c (build_java_arrayaccess): Likewise.
+ (build_field_ref): Likewise.
+ (build_known_method_ref): Likewise.
+ (build_invokevirtual): Likewise.
+ * except.c (build_exception_object_ref): Do a
+ NEGATE_EXPR and then a POINTER_PLUS_EXPR instead
+ of a MINUS_EXPR.
+
2007-06-11 Rafael Avila de Espindola <espindola@google.com>
* typeck.c (java_signed_type): Remove.
--- /dev/null
+2007-06-14 Andrew Pinski <andrew_pinski@playstation.sony.com>
+
+ * except.c (build_exception_object_ref):
+ Use fold_build1 instead of build1 for NEGATE_EXPR.
+
+2007-05-12 Andrew Pinski <andrew_pinski@playstation.sony.com>
+
+ * class.c (make_class_data): Build the index in sizetype.
+ Use POINTER_PLUS_EXPR instead of PLUS_EXPR when
+ adding to a pointer type.
+ (build_symbol_entry): Likewise.
+ * expr.c (build_java_arrayaccess): Likewise.
+ (build_field_ref): Likewise.
+ (build_known_method_ref): Likewise.
+ (build_invokevirtual): Likewise.
+ * except.c (build_exception_object_ref): Do a
+ NEGATE_EXPR and then a POINTER_PLUS_EXPR instead
+ of a MINUS_EXPR.
+
tree id_class = get_identifier("java.lang.Class");
/** Offset from start of virtual function table declaration
to where objects actually point at, following new g++ ABI. */
- tree dtable_start_offset = build_int_cst (NULL_TREE,
- 2 * POINTER_SIZE / BITS_PER_UNIT);
+ tree dtable_start_offset = size_int (2 * POINTER_SIZE / BITS_PER_UNIT);
VEC(int, heap) *field_indexes;
tree first_real_field;
PUSH_FIELD_VALUE (temp, "vtable",
(flag_indirect_classes
? null_pointer_node
- : build2 (PLUS_EXPR, dtable_ptr_type,
+ : build2 (POINTER_PLUS_EXPR, dtable_ptr_type,
build1 (ADDR_EXPR, dtable_ptr_type,
class_dtable_decl),
dtable_start_offset)));
else
PUSH_FIELD_VALUE (cons, "vtable",
dtable_decl == NULL_TREE ? null_pointer_node
- : build2 (PLUS_EXPR, dtable_ptr_type,
+ : build2 (POINTER_PLUS_EXPR, dtable_ptr_type,
build1 (ADDR_EXPR, dtable_ptr_type,
dtable_decl),
dtable_start_offset));
system that this is a "special" symbol, i.e. one that should
bypass access controls. */
if (special != NULL_TREE)
- signature = build2 (PLUS_EXPR, TREE_TYPE (signature), signature, special);
+ signature = build2 (POINTER_PLUS_EXPR, TREE_TYPE (signature), signature,
+ fold_convert (sizetype, special));
START_RECORD_CONSTRUCTOR (sym, symbol_type);
PUSH_FIELD_VALUE (sym, "clname", clname);
/* Java only passes object via pointer and doesn't require adjusting.
The java object is immediately before the generic exception header. */
obj = build0 (EXC_PTR_EXPR, build_pointer_type (type));
- obj = build2 (MINUS_EXPR, TREE_TYPE (obj), obj,
- TYPE_SIZE_UNIT (TREE_TYPE (obj)));
+ obj = build2 (POINTER_PLUS_EXPR, TREE_TYPE (obj), obj,
+ fold_build1 (NEGATE_EXPR, sizetype,
+ TYPE_SIZE_UNIT (TREE_TYPE (obj))));
obj = build1 (INDIRECT_REF, type, obj);
return obj;
/* Multiply the index by the size of an element to obtain a byte
offset. Convert the result to a pointer to the element type. */
- index = fold_convert (TREE_TYPE (node),
- build2 (MULT_EXPR, sizetype,
- fold_convert (sizetype, index),
- size_exp));
+ index = build2 (MULT_EXPR, sizetype,
+ fold_convert (sizetype, index),
+ size_exp);
/* Sum the byte offset and the address of the data field. */
- node = fold_build2 (PLUS_EXPR, TREE_TYPE (node), node, index);
+ node = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (node), node, index);
/* Finally, return
field_offset = fold (convert (sizetype, field_offset));
self_value = java_check_reference (self_value, check);
address
- = fold_build2 (PLUS_EXPR,
+ = fold_build2 (POINTER_PLUS_EXPR,
build_pointer_type (TREE_TYPE (field_decl)),
self_value, field_offset);
return fold_build1 (INDIRECT_REF, TREE_TYPE (field_decl), address);
method_index++;
}
method_index *= int_size_in_bytes (method_type_node);
- ref = fold_build2 (PLUS_EXPR, method_ptr_type_node,
- ref, build_int_cst (NULL_TREE, method_index));
+ ref = fold_build2 (POINTER_PLUS_EXPR, method_ptr_type_node,
+ ref, size_int (method_index));
ref = build1 (INDIRECT_REF, method_type_node, ref);
func = build3 (COMPONENT_REF, nativecode_ptr_type_node,
ref, lookup_field (&method_type_node, ncode_ident),
size_int (TARGET_VTABLE_USES_DESCRIPTORS));
}
- func = fold_build2 (PLUS_EXPR, nativecode_ptr_ptr_type_node, dtable,
- convert (nativecode_ptr_ptr_type_node, method_index));
+ func = fold_build2 (POINTER_PLUS_EXPR, nativecode_ptr_ptr_type_node, dtable,
+ convert (sizetype, method_index));
if (TARGET_VTABLE_USES_DESCRIPTORS)
func = build1 (NOP_EXPR, nativecode_ptr_type_node, func);
initial address and index of each dimension. */
struct access_site_info
{
- /* The statement (INDIRECT_REF or PLUS_EXPR). */
+ /* The statement (INDIRECT_REF or POINTER_PLUS_EXPR). */
tree stmt;
- /* In case of PLUS_EXPR, what is the offset. */
+ /* In case of POINTER_PLUS_EXPR, what is the offset. */
tree offset;
/* The index which created the offset. */
/* Find if the SSA variable is accessed inside the
tree and record the tree containing it.
The only relevant uses are the case of SSA_NAME, or SSA inside
- INDIRECT_REF, CALL_EXPR, PLUS_EXPR, MULT_EXPR. */
+ INDIRECT_REF, CALL_EXPR, PLUS_EXPR, POINTER_PLUS_EXPR, MULT_EXPR. */
static void
ssa_accessed_in_tree (tree t, struct ssa_acc_in_tree *a)
{
}
}
break;
+ case POINTER_PLUS_EXPR:
case PLUS_EXPR:
case MULT_EXPR:
op1 = TREE_OPERAND (t, 0);
for (i = 0; VEC_iterate (access_site_info_p, mi->access_l, i, acc_info);
i++)
{
- if (TREE_CODE (GIMPLE_STMT_OPERAND (acc_info->stmt, 1)) == PLUS_EXPR
+ if (TREE_CODE (GIMPLE_STMT_OPERAND (acc_info->stmt, 1)) == POINTER_PLUS_EXPR
&& acc_info->level < min_escape_l)
{
loop = loop_containing_stmt (acc_info->stmt);
return current_indirect_level;
}
if (rhs_acc.t_code != INDIRECT_REF
- && rhs_acc.t_code != PLUS_EXPR && rhs_acc.t_code != SSA_NAME)
+ && rhs_acc.t_code != POINTER_PLUS_EXPR && rhs_acc.t_code != SSA_NAME)
{
mark_min_matrix_escape_level (mi, current_indirect_level, use_stmt);
return current_indirect_level;
current_indirect_level, true);
current_indirect_level += 1;
}
- else if (rhs_acc.t_code == PLUS_EXPR)
+ else if (rhs_acc.t_code == POINTER_PLUS_EXPR)
{
- /* ??? maybe we should check
- the type of the PLUS_EXP and make sure it's
- integral type. */
gcc_assert (rhs_acc.second_op);
if (last_op)
/* Currently we support only one PLUS expression on the
/* We are placing it in an SSA, follow that SSA. */
analyze_matrix_accesses (mi, lhs,
current_indirect_level,
- rhs_acc.t_code == PLUS_EXPR,
+ rhs_acc.t_code == POINTER_PLUS_EXPR,
visited, record_accesses);
}
}
/* Now go over the uses of the SSA_NAME and check how it is used in
each one of them. We are mainly looking for the pattern INDIRECT_REF,
- then a PLUS_EXPR, then INDIRECT_REF etc. while in between there could
+ then a POINTER_PLUS_EXPR, then INDIRECT_REF etc. while in between there could
be any number of copies and casts. */
gcc_assert (TREE_CODE (ssa_var) == SSA_NAME);
case PARM_DECL:
case INTEGER_CST:
return expr;
+ case POINTER_PLUS_EXPR:
case PLUS_EXPR:
case MINUS_EXPR:
case MULT_EXPR:
GIMPLE_STMT_OPERAND (orig, 0));
GIMPLE_STMT_OPERAND (acc_info->stmt, 1) = orig;
}
- else if (TREE_CODE (orig) == PLUS_EXPR
+ else if (TREE_CODE (orig) == POINTER_PLUS_EXPR
&& acc_info->level < (min_escape_l))
{
imm_use_iterator imm_iter;
tree new_offset;
tree d_type_size, d_type_size_k;
- d_type_size =
- build_int_cst (type,
- mi->dimension_type_size[min_escape_l]);
- d_type_size_k =
- build_int_cst (type, mi->dimension_type_size[k + 1]);
+ d_type_size = size_int (mi->dimension_type_size[min_escape_l]);
+ d_type_size_k = size_int (mi->dimension_type_size[k + 1]);
new_offset =
compute_offset (mi->dimension_type_size[min_escape_l],
{
d_size = mi->dimension_size[mi->dim_map[k] + 1];
num_elements =
- fold_build2 (MULT_EXPR, type, acc_info->index, d_size);
+ fold_build2 (MULT_EXPR, sizetype, fold_convert (sizetype, acc_info->index),
+ fold_convert (sizetype, d_size));
tmp1 = force_gimple_operand (num_elements, &stmts, true, NULL);
add_referenced_var (d_size);
if (stmts)
FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, offset)
FOR_EACH_IMM_USE_ON_STMT (use_p, imm_iter)
- if (use_stmt == acc_info->stmt)
- SET_USE (use_p, tmp1);
+ if (use_stmt == acc_info->stmt)
+ SET_USE (use_p, tmp1);
}
else
{
trapv = INTEGRAL_TYPE_P (type) && TYPE_OVERFLOW_TRAPS (type);
switch (code)
{
+ case POINTER_PLUS_EXPR:
case PLUS_EXPR:
return trapv ? addv_optab : add_optab;
+2007-06-15 Andrew Pinski <andrew_pinski@playstation.sony.com>
+
+ PR tree-opt/32225
+ * gcc.c-torture/compile/20070605-1.c: New test.
+
+ * gcc.c-torture/compile/20070603-1.c: New testcase.
+ * gcc.c-torture/compile/20070603-2.c: New testcase.
+
+ * gcc.c-torture/compile/20070531-1.c: New test.
+
+ PR tree-opt/32167
+ * gcc.c-torture/compile/20070531-2.c: New test.
+
+ PR tree-opt/32144
+ * gcc.c-torture/compile/20070529-1.c: New test.
+
+ PR tree-opt/32145
+ * gcc.c-torture/compile/20070529-2.c: New test.
+
+ PR tree-opt/32015
+ * gcc.c-torture/compile/20070520-1.c: New test.
+
+ * g++.dg/ext/java-1.C: New test.
+
+ * gcc.dg/vect/vect-106.c: We are now able to vectorize two
+ loops instead of one. Remove the "can't determine dependence"
+ check.
+ * gcc.dg/tree-ssa/20030815-1.c: Remove testcase which is no longer
+ needed as the cast is gone in the first place.
+ * gcc.dg/max-1.c: Change local variable a to be a global one.
+ * gcc.dg/tree-ssa/ssa-pre-8.c: Update testcase since we don't
+ have a cast which is PREd.
+
2007-06-15 Mark Mitchell <mark@codesourcery.com>
* g++.dg/lookup/anon6.C: New test.
--- /dev/null
+2007-06-14 Andrew Pinski <andrew_pinski@playstation.sony.com>
+
+	* gcc.dg/max-1.c: Fix spelling/grammar mistakes.
+ * gcc.dg/vect/vect-106.c: Likewise.
+
+2007-06-06 Andrew Pinski <andrew_pinski@playstation.sony.com>
+
+ PR tree-opt/32225
+ * gcc.c-torture/compile/20070605-1.c: New test.
+
+2007-06-03 Andrew Pinski <andrew_pinski@playstation.sony.com>
+
+ * gcc.c-torture/compile/20070603-1.c: New testcase.
+ * gcc.c-torture/compile/20070603-2.c: New testcase.
+
+2007-05-31 Andrew Pinski <andrew_pinski@playstation.sony.com>
+
+ * gcc.c-torture/compile/20070531-1.c: New test.
+
+ PR tree-opt/32167
+ * gcc.c-torture/compile/20070531-2.c: New test.
+
+2007-05-28 Andrew Pinski <andrew_pinski@playstation.sony.com>
+
+ PR tree-opt/32144
+ * gcc.c-torture/compile/20070529-1.c: New test.
+
+ PR tree-opt/32145
+ * gcc.c-torture/compile/20070529-2.c: New test.
+
+2007-05-21 Andrew Pinski <andrew_pinski@playstation.sony.com>
+
+ PR tree-opt/32015
+ * gcc.c-torture/compile/20070520-1.c: New test.
+
+2007-05-12 Andrew Pinski <andrew_pinski@playstation.sony.com>
+
+ * g++.dg/ext/java-1.C: New test.
+
+2007-05-09 Andrew Pinski <andrew_pinski@playstation.sony.com>
+
+ * gcc.dg/vect/vect-106.c: We are now able to vectorize two
+ loops instead of one. Remove the "can't determine dependence"
+ check.
+
+2007-05-04 Andrew Pinski <andrew_pinski@playstation.sony.com>
+
+ * gcc.dg/tree-ssa/20030815-1.c: Remove testcase which is no longer
+ needed as the cast is gone in the first place.
+
+2007-05-04 Andrew Pinski <andrew_pinski@playstation.sony.com>
+
+ * gcc.dg/max-1.c: Change local variable a to be a global one.
+
+2006-11-23 Andrew Pinski <andrew_pinski@playstation.sony.com>
+
+ * gcc.dg/tree-ssa/ssa-pre-8.c: Update testcase since we don't
+ have a cast which is PREd.
--- /dev/null
+// { dg-do compile }
+// { dg-options "" }
+// Test extern "java" and some throwing of the objects.
+
+extern "Java"
+ namespace java
+ {
+ namespace lang
+ {
+ class Throwable;
+ class Class;
+ }
+}
+typedef class java::lang::Throwable* jthrowable;
+typedef class java::lang::Class* jclass;
+class java::lang::Throwable {
+public:
+ static jclass class$;
+};
+int
+_Jv_FindClassFromSignature ( )
+ try
+ {
+ }
+ catch (java::lang::Throwable *ncdfe) {}
+
--- /dev/null
+typedef unsigned char uint8_t;
+extern uint8_t ff_cropTbl[256 + 2 * 1024];
+
+void ff_pred8x8_plane_c(uint8_t *src, int stride){
+ int j, k;
+ int a;
+ uint8_t *cm = ff_cropTbl + 1024;
+ const uint8_t * const src0 = src+3-stride;
+ const uint8_t *src1 = src+4*stride-1;
+ const uint8_t *src2 = src1-2*stride;
+ int H = src0[1] - src0[-1];
+ int V = src1[0] - src2[ 0];
+ for(k=2; k<=4; ++k) {
+ src1 += stride; src2 -= stride;
+ H += k*(src0[k] - src0[-k]);
+ V += k*(src1[0] - src2[ 0]);
+ }
+ H = ( 17*H+16 ) >> 5;
+ V = ( 17*V+16 ) >> 5;
+
+ a = 16*(src1[0] + src2[8]+1) - 3*(V+H);
+ for(j=8; j>0; --j) {
+ int b = a;
+ a += V;
+ src[0] = cm[ (b ) >> 5 ];
+ src[1] = cm[ (b+ H) >> 5 ];
+ src[2] = cm[ (b+2*H) >> 5 ];
+ src[3] = cm[ (b+3*H) >> 5 ];
+ src[4] = cm[ (b+4*H) >> 5 ];
+ src[5] = cm[ (b+5*H) >> 5 ];
+ src[6] = cm[ (b+6*H) >> 5 ];
+ src[7] = cm[ (b+7*H) >> 5 ];
+ src += stride;
+ }
+}
--- /dev/null
+/* ICE in chrec_fold_plus_poly_poly. */
+
+typedef unsigned short __u16;
+typedef unsigned int u32;
+typedef __u16 __be16;
+struct hfs_extent {
+ __be16 count;
+};
+int hfs_free_fork( int type)
+{
+ u32 total_blocks, blocks, start;
+ struct hfs_extent *extent;
+ int res, i;
+ for (i = 0; i < 3; extent++, i++)
+ blocks += __fswab16((( __u16)(__be16)(extent[i].count)));
+}
--- /dev/null
+void xfs_dir2_grow_inode(void)
+{
+ int map;
+ int *mapp;
+ int nmap;
+ mapp = ↦
+ if (nmap == 0 )
+ mapp = ((void *)0);
+ if (mapp != &map)
+ kmem_free(mapp);
+}
--- /dev/null
+/* MIN_EXPR/MAX_EXPR caused an ICE in VRP. */
+int *f(int *a, int *b)
+{
+ *a = 1;
+ *b = 2;
+ int *c = a < b ? a : b;
+ if (c)
+ return c;
+ else
+ return a;
+}
--- /dev/null
+int f(void)
+{
+ int *a = 0;
+ for(a = 0; a < (int*)32767;a++)
+ ;
+}
--- /dev/null
+
+int f(_Complex double *a, unsigned int n)
+{
+ unsigned int i;
+ for(i = 0; i< n; i++)
+ {
+ a[i] = __real__ a[i+1] + __real__ a[i];
+ }
+}
--- /dev/null
+typedef _Complex double ar[];
+int f(ar *a, unsigned int n)
+{
+ unsigned int i;
+ for(i = 0; i< n; i++)
+ {
+ (*a)[i*4] = __real__ (*a)[(i+1)*4] + __real__ (*a)[i*4];
+ }
+}
--- /dev/null
+quantize_fs_dither (unsigned width, short *errorptr, int dir)
+{
+ short bpreverr;
+ unsigned col;
+ for (col = width; col > 0; col--)
+ errorptr += dir;
+ errorptr[0] = (short) bpreverr;
+}
+
+
fff[i] = a;
}
+/* The variable a cannot be a local variable as we get better aliasing
+ now and decide that the store to a is dead. The better aliasing comes
+ from better representation of pointer arithmetic. */
+long a = 10;
int main(void)
{
int i;
- long a = 10;
f((long)(&a)-1,0);
for(i = 0;i<10;i++)
if (fff[i]!=10)
+++ /dev/null
-/* { dg-do compile } */
-/* { dg-options "-O1 -fdump-tree-dom3" } */
-
-extern void abort (void);
-typedef unsigned int size_t;
-struct rtx_def;
-typedef struct rtx_def *rtx;
-typedef union varray_data_tag
-{
- struct reg_info_def *reg[1];
-} varray_data;
-struct varray_head_tag
-{
- size_t num_elements;
- varray_data data;
-};
-typedef struct varray_head_tag *varray_type;
-typedef struct reg_info_def
-{
-} reg_info;
-extern varray_type reg_n_info;
-static rtx *reg_base_value;
-static rtx *new_reg_base_value;
-
-rtx
-blah (unsigned int regno)
-{
- if (new_reg_base_value[regno] && ((*(
- {
- if (regno >=
- reg_n_info->
- num_elements)
- abort ();
- ®_n_info->data.reg[regno];}
- ))))
- return reg_base_value[regno];
-}
-
-/* If we have more than 1 cast to a struct rtx_def * *, then we failed to
- eliminate some useless typecasting. The first type cast is needed
- to convert the unsigned int regno parameter into a struct rtx_def **. */
-/* { dg-final { scan-tree-dump-times "\\(struct rtx_def \\* \\*\\)" 1 "dom3"} } */
-/* { dg-final { cleanup-tree-dump "dom3" } } */
}
return 0;
}
-/* We should eliminate two address calculations, one cast, and one load. */
-/* { dg-final { scan-tree-dump-times "Eliminated: 4" 1 "fre"} } */
+/* We should eliminate two address calculations, and one load. */
+/* We used to eliminate a cast but that was before POINTER_PLUS_EXPR
+ was added. */
+/* { dg-final { scan-tree-dump-times "Eliminated: 3" 1 "fre"} } */
/* { dg-final { cleanup-tree-dump "fre" } } */
p1 = p; q1 = q;
- /* Not vectorizable: because of the redundant cast (caused by ponter
- arithmetics), alias analysis fails to distinguish between
- the pointers. */
+  /* Vectorizable: before POINTER_PLUS_EXPR was added, a redundant cast
+     (caused by pointer arithmetic) made alias analysis fail to distinguish
+     between the pointers.  */
for (i = 0; i < N; i++)
{
*(q + i) = a[i];
return main1 ();
}
-/* { dg-final { scan-tree-dump-times "vectorized 1 loops" 1 "vect" } } */
-/* { dg-final { scan-tree-dump-times "can't determine dependence" 1 "vect" } } */
+/* { dg-final { scan-tree-dump-times "vectorized 2 loops" 1 "vect" } } */
/* { dg-final { cleanup-tree-dump "vect" } } */
aff_combination_const (comb, type, tree_to_double_int (expr));
return;
+ case POINTER_PLUS_EXPR:
+ tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
+ tree_to_aff_combination (TREE_OPERAND (expr, 1), sizetype, &tmp);
+ aff_combination_convert (&tmp, type);
+ aff_combination_add (comb, &tmp);
+ return;
+
case PLUS_EXPR:
case MINUS_EXPR:
tree_to_aff_combination (TREE_OPERAND (expr, 0), type, comb);
}
*walk_subtrees = 0;
break;
+ case PLUS_EXPR:
+ case MINUS_EXPR:
+      /* PLUS_EXPR and MINUS_EXPR don't work on pointers; pointer
+	 arithmetic should be done using POINTER_PLUS_EXPR.  */
+ if (POINTER_TYPE_P (TREE_TYPE (t)))
+ {
+ error ("invalid operand to plus/minus, type is a pointer");
+ return t;
+ }
+ CHECK_OP (0, "invalid operand to binary operator");
+ CHECK_OP (1, "invalid operand to binary operator");
+ break;
+ case POINTER_PLUS_EXPR:
+ /* Check to make sure the first operand is a pointer or reference type. */
+ if (!POINTER_TYPE_P (TREE_TYPE (TREE_OPERAND (t, 0))))
+ {
+ error ("invalid operand to pointer plus, first operand is not a pointer");
+ return t;
+ }
+ /* Check to make sure the second operand is an integer with type of
+ sizetype. */
+ if (!tree_ssa_useless_type_conversion_1 (sizetype,
+ TREE_TYPE (TREE_OPERAND (t, 1))))
+ {
+ error ("invalid operand to pointer plus, second operand is not an "
+ "integer with type of sizetype.");
+ return t;
+ }
+ /* FALLTHROUGH */
case LT_EXPR:
case LE_EXPR:
case GT_EXPR:
case UNGE_EXPR:
case UNEQ_EXPR:
case LTGT_EXPR:
- case PLUS_EXPR:
- case MINUS_EXPR:
case MULT_EXPR:
case TRUNC_DIV_EXPR:
case CEIL_DIV_EXPR:
tree left, right;
struct loop *loop0 = get_chrec_loop (poly0);
struct loop *loop1 = get_chrec_loop (poly1);
+ tree rtype = code == POINTER_PLUS_EXPR ? sizetype : type;
gcc_assert (poly0);
gcc_assert (poly1);
gcc_assert (TREE_CODE (poly0) == POLYNOMIAL_CHREC);
gcc_assert (TREE_CODE (poly1) == POLYNOMIAL_CHREC);
- gcc_assert (chrec_type (poly0) == chrec_type (poly1));
+ if (POINTER_TYPE_P (chrec_type (poly0)))
+ gcc_assert (chrec_type (poly1) == sizetype);
+ else
+ gcc_assert (chrec_type (poly0) == chrec_type (poly1));
gcc_assert (type == chrec_type (poly0));
/*
{a, +, b}_x + {c, +, d}_x -> {a+c, +, b+d}_x. */
if (flow_loop_nested_p (loop0, loop1))
{
- if (code == PLUS_EXPR)
+ if (code == PLUS_EXPR || code == POINTER_PLUS_EXPR)
return build_polynomial_chrec
(CHREC_VARIABLE (poly1),
chrec_fold_plus (type, poly0, CHREC_LEFT (poly1)),
if (flow_loop_nested_p (loop1, loop0))
{
- if (code == PLUS_EXPR)
+ if (code == PLUS_EXPR || code == POINTER_PLUS_EXPR)
return build_polynomial_chrec
(CHREC_VARIABLE (poly0),
chrec_fold_plus (type, CHREC_LEFT (poly0), poly1),
do not belong to the same loop nest. */
gcc_assert (loop0 == loop1);
- if (code == PLUS_EXPR)
+ if (code == PLUS_EXPR || code == POINTER_PLUS_EXPR)
{
left = chrec_fold_plus
(type, CHREC_LEFT (poly0), CHREC_LEFT (poly1));
right = chrec_fold_plus
- (type, CHREC_RIGHT (poly0), CHREC_RIGHT (poly1));
+ (rtype, CHREC_RIGHT (poly0), CHREC_RIGHT (poly1));
}
else
{
chrec_fold_plus_1 (enum tree_code code, tree type,
tree op0, tree op1)
{
+ tree op1_type = code == POINTER_PLUS_EXPR ? sizetype : type;
+
if (automatically_generated_chrec_p (op0)
|| automatically_generated_chrec_p (op1))
return chrec_fold_automatically_generated_operands (op0, op1);
return chrec_fold_plus_poly_poly (code, type, op0, op1);
default:
- if (code == PLUS_EXPR)
+ if (code == PLUS_EXPR || code == POINTER_PLUS_EXPR)
return build_polynomial_chrec
(CHREC_VARIABLE (op0),
chrec_fold_plus (type, CHREC_LEFT (op0), op1),
switch (TREE_CODE (op1))
{
case POLYNOMIAL_CHREC:
- if (code == PLUS_EXPR)
+ if (code == PLUS_EXPR || code == POINTER_PLUS_EXPR)
return build_polynomial_chrec
(CHREC_VARIABLE (op1),
chrec_fold_plus (type, op0, CHREC_LEFT (op1)),
else if (size < PARAM_VALUE (PARAM_SCEV_MAX_EXPR_SIZE))
return fold_build2 (code, type,
fold_convert (type, op0),
- fold_convert (type, op1));
+ fold_convert (op1_type, op1));
else
return chrec_dont_know;
}
tree op0,
tree op1)
{
+ enum tree_code code;
if (automatically_generated_chrec_p (op0)
|| automatically_generated_chrec_p (op1))
return chrec_fold_automatically_generated_operands (op0, op1);
if (integer_zerop (op0))
- return op1;
+ return chrec_convert (type, op1, NULL_TREE);
if (integer_zerop (op1))
- return op0;
+ return chrec_convert (type, op0, NULL_TREE);
+
+ if (POINTER_TYPE_P (type))
+ code = POINTER_PLUS_EXPR;
+ else
+ code = PLUS_EXPR;
- return chrec_fold_plus_1 (PLUS_EXPR, type, op0, op1);
+ return chrec_fold_plus_1 (code, type, op0, op1);
}
/* Fold the subtraction of two chrecs. */
if (evolution_function_is_affine_p (chrec))
{
/* "{a, +, b} (x)" -> "a + b*x". */
- x = chrec_convert (type, x, NULL_TREE);
- res = chrec_fold_multiply (type, CHREC_RIGHT (chrec), x);
+ x = chrec_convert_rhs (type, x, NULL_TREE);
+ res = chrec_fold_multiply (TREE_TYPE (x), CHREC_RIGHT (chrec), x);
if (!integer_zerop (CHREC_LEFT (chrec)))
res = chrec_fold_plus (type, CHREC_LEFT (chrec), res);
}
{
struct loop *loop = get_loop (loop_num);
- gcc_assert (chrec_type (chrec) == chrec_type (new_evol));
+ if (POINTER_TYPE_P (chrec_type (chrec)))
+ gcc_assert (sizetype == chrec_type (new_evol));
+ else
+ gcc_assert (chrec_type (chrec) == chrec_type (new_evol));
if (TREE_CODE (chrec) == POLYNOMIAL_CHREC
&& flow_loop_nested_p (loop, get_chrec_loop (chrec)))
bool enforce_overflow_semantics;
bool must_check_src_overflow, must_check_rslt_overflow;
tree new_base, new_step;
+ tree step_type = POINTER_TYPE_P (type) ? sizetype : type;
/* If we cannot perform arithmetic in TYPE, avoid creating an scev. */
if (avoid_arithmetics_in_type_p (type))
[100, +, 255] with values 100, 355, ...; the sign-extension is
performed by default when CT is signed. */
new_step = *step;
- if (TYPE_PRECISION (type) > TYPE_PRECISION (ct) && TYPE_UNSIGNED (ct))
+ if (TYPE_PRECISION (step_type) > TYPE_PRECISION (ct) && TYPE_UNSIGNED (ct))
new_step = chrec_convert_1 (signed_type_for (ct), new_step, at_stmt,
use_overflow_semantics);
- new_step = chrec_convert_1 (type, new_step, at_stmt, use_overflow_semantics);
+ new_step = chrec_convert_1 (step_type, new_step, at_stmt, use_overflow_semantics);
if (automatically_generated_chrec_p (new_base)
|| automatically_generated_chrec_p (new_step))
}
\f
+/* Convert CHREC for the right hand side of a CHREC.
+   The increment for a pointer type is always sizetype.  */
+tree
+chrec_convert_rhs (tree type, tree chrec, tree at_stmt)
+{
+ if (POINTER_TYPE_P (type))
+ type = sizetype;
+ return chrec_convert (type, chrec, at_stmt);
+}
+
/* Convert CHREC to TYPE. When the analyzer knows the context in
which the CHREC is built, it sets AT_STMT to the statement that
contains the definition of the analyzed variable, otherwise the
tree
chrec_convert_aggressive (tree type, tree chrec)
{
- tree inner_type, left, right, lc, rc;
+ tree inner_type, left, right, lc, rc, rtype;
if (automatically_generated_chrec_p (chrec)
|| TREE_CODE (chrec) != POLYNOMIAL_CHREC)
if (avoid_arithmetics_in_type_p (type))
return NULL_TREE;
+ rtype = POINTER_TYPE_P (type) ? sizetype : type;
+
left = CHREC_LEFT (chrec);
right = CHREC_RIGHT (chrec);
lc = chrec_convert_aggressive (type, left);
if (!lc)
lc = chrec_convert (type, left, NULL_TREE);
- rc = chrec_convert_aggressive (type, right);
+ rc = chrec_convert_aggressive (rtype, right);
if (!rc)
- rc = chrec_convert (type, right, NULL_TREE);
+ rc = chrec_convert (rtype, right, NULL_TREE);
return build_polynomial_chrec (CHREC_VARIABLE (chrec), lc, rc);
}
extern tree chrec_fold_minus (tree, tree, tree);
extern tree chrec_fold_multiply (tree, tree, tree);
extern tree chrec_convert (tree, tree, tree);
+extern tree chrec_convert_rhs (tree, tree, tree);
extern tree chrec_convert_aggressive (tree, tree);
/* Operations. */
|| right == chrec_dont_know)
return chrec_dont_know;
- gcc_assert (TREE_TYPE (left) == TREE_TYPE (right));
+ if (POINTER_TYPE_P (TREE_TYPE (left)))
+ gcc_assert (sizetype == TREE_TYPE (right));
+ else
+ gcc_assert (TREE_TYPE (left) == TREE_TYPE (right));
if (chrec_zerop (right))
return left;
tree type = TREE_TYPE (exp), otype;
tree var0, var1;
tree off0, off1;
+ enum tree_code code;
*var = exp;
STRIP_NOPS (exp);
otype = TREE_TYPE (exp);
+ code = TREE_CODE (exp);
- switch (TREE_CODE (exp))
+ switch (code)
{
case INTEGER_CST:
*var = build_int_cst (type, 0);
*off = fold_convert (ssizetype, exp);
return;
+ case POINTER_PLUS_EXPR:
+ code = PLUS_EXPR;
+ /* FALLTHROUGH */
case PLUS_EXPR:
case MINUS_EXPR:
split_constant_offset (TREE_OPERAND (exp, 0), &var0, &off0);
split_constant_offset (TREE_OPERAND (exp, 1), &var1, &off1);
*var = fold_convert (type, fold_build2 (TREE_CODE (exp), otype,
var0, var1));
- *off = size_binop (TREE_CODE (exp), off0, off1);
+ *off = size_binop (code, off0, off1);
return;
case MULT_EXPR:
case VEC_COND_EXPR:
case PLUS_EXPR:
+ case POINTER_PLUS_EXPR:
case MINUS_EXPR:
case MULT_EXPR:
elt = build1 (ADDR_EXPR, build_pointer_type (TREE_TYPE (elt)),
elt);
addr = fold_convert (ptr_type_node, elt ? elt : base);
- addr = fold_build2 (PLUS_EXPR, ptr_type_node,
- addr, fold_convert (ptr_type_node,
+ addr = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
+ addr, fold_convert (sizetype,
byte_position (field)));
}
else
case INDIRECT_REF:
addr = TREE_OPERAND (t, 0);
base = addr;
- limit = fold_build2 (MINUS_EXPR, ptr_type_node,
- fold_build2 (PLUS_EXPR, ptr_type_node, base, size),
- integer_one_node);
+ limit = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
+ fold_build2 (POINTER_PLUS_EXPR, ptr_type_node, base,
+ size),
+ size_int (-1));
break;
case TARGET_MEM_REF:
addr = tree_mem_ref_addr (ptr_type_node, t);
base = addr;
- limit = fold_build2 (MINUS_EXPR, ptr_type_node,
- fold_build2 (PLUS_EXPR, ptr_type_node, base, size),
- build_int_cst (ptr_type_node, 1));
+ limit = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
+ fold_build2 (POINTER_PLUS_EXPR, ptr_type_node, base,
+ size),
+ size_int (-1));
break;
case ARRAY_RANGE_REF:
bpu = bitsize_int (BITS_PER_UNIT);
ofs = convert (bitsizetype, TREE_OPERAND (t, 2));
rem = size_binop (TRUNC_MOD_EXPR, ofs, bpu);
- ofs = size_binop (TRUNC_DIV_EXPR, ofs, bpu);
+ ofs = fold_convert (sizetype, size_binop (TRUNC_DIV_EXPR, ofs, bpu));
size = convert (bitsizetype, TREE_OPERAND (t, 1));
size = size_binop (PLUS_EXPR, size, rem);
addr = TREE_OPERAND (TREE_OPERAND (t, 0), 0);
addr = convert (ptr_type_node, addr);
- addr = fold_build2 (PLUS_EXPR, ptr_type_node, addr, ofs);
+ addr = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node, addr, ofs);
base = addr;
- limit = fold_build2 (MINUS_EXPR, ptr_type_node,
- fold_build2 (PLUS_EXPR, ptr_type_node, base, size),
- integer_one_node);
+ limit = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
+ fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
+ base, size),
+ size_int (-1));
}
break;
/* Compute object_sizes for PTR, defined to VALUE, which is
- a PLUS_EXPR. Return true if the object size might need reexamination
+ a POINTER_PLUS_EXPR. Return true if the object size might need reexamination
later. */
static bool
{
tree op0 = TREE_OPERAND (value, 0);
tree op1 = TREE_OPERAND (value, 1);
- bool ptr1_p = POINTER_TYPE_P (TREE_TYPE (op0))
- && TREE_CODE (op0) != INTEGER_CST;
- bool ptr2_p = POINTER_TYPE_P (TREE_TYPE (op1))
- && TREE_CODE (op1) != INTEGER_CST;
int object_size_type = osi->object_size_type;
unsigned int varno = SSA_NAME_VERSION (var);
unsigned HOST_WIDE_INT bytes;
- gcc_assert (TREE_CODE (value) == PLUS_EXPR);
+ gcc_assert (TREE_CODE (value) == POINTER_PLUS_EXPR);
if (object_sizes[object_size_type][varno] == unknown[object_size_type])
return false;
- /* Swap operands if needed. */
- if (ptr2_p && !ptr1_p)
- {
- tree tem = op0;
- op0 = op1;
- op1 = tem;
- ptr1_p = true;
- ptr2_p = false;
- }
-
/* Handle PTR + OFFSET here. */
- if (ptr1_p
- && !ptr2_p
- && TREE_CODE (op1) == INTEGER_CST
+ if (TREE_CODE (op1) == INTEGER_CST
&& (TREE_CODE (op0) == SSA_NAME
|| TREE_CODE (op0) == ADDR_EXPR))
{
OSI->object_size_type).
For allocation CALL_EXPR like malloc or calloc object size is the size
of the allocation.
- For pointer PLUS_EXPR where second operand is a constant integer,
+ For POINTER_PLUS_EXPR where second operand is a constant integer,
object size is object size of the first operand minus the constant.
If the constant is bigger than the number of remaining bytes until the
end of the object, object size is 0, but if it is instead a pointer
&& POINTER_TYPE_P (TREE_TYPE (rhs)))
reexamine = merge_object_sizes (osi, var, rhs, 0);
- else if (TREE_CODE (rhs) == PLUS_EXPR)
+ else if (TREE_CODE (rhs) == POINTER_PLUS_EXPR)
reexamine = plus_expr_object_size (osi, var, rhs);
else if (TREE_CODE (rhs) == COND_EXPR)
if (TREE_CODE (rhs) == SSA_NAME)
check_for_plus_in_loops_1 (osi, rhs, depth);
- else if (TREE_CODE (rhs) == PLUS_EXPR)
+ else if (TREE_CODE (rhs) == POINTER_PLUS_EXPR)
{
tree op0 = TREE_OPERAND (rhs, 0);
tree op1 = TREE_OPERAND (rhs, 1);
tree cst, basevar;
- if (TREE_CODE (op0) == SSA_NAME)
- {
- basevar = op0;
- cst = op1;
- }
- else
- {
- basevar = op1;
- cst = op0;
- gcc_assert (TREE_CODE (basevar) == SSA_NAME);
- }
+ basevar = op0;
+ cst = op1;
gcc_assert (TREE_CODE (cst) == INTEGER_CST);
check_for_plus_in_loops_1 (osi, basevar,
rhs = arg;
}
- if (TREE_CODE (rhs) == PLUS_EXPR)
+ if (TREE_CODE (rhs) == POINTER_PLUS_EXPR)
{
tree op0 = TREE_OPERAND (rhs, 0);
tree op1 = TREE_OPERAND (rhs, 1);
tree cst, basevar;
- if (TREE_CODE (op0) == SSA_NAME)
- {
- basevar = op0;
- cst = op1;
- }
- else
- {
- basevar = op1;
- cst = op0;
- gcc_assert (TREE_CODE (basevar) == SSA_NAME);
- }
+ basevar = op0;
+ cst = op1;
gcc_assert (TREE_CODE (cst) == INTEGER_CST);
if (integer_zerop (cst))
else
{
type = TREE_TYPE (iv.base);
- val = fold_build2 (MULT_EXPR, type, iv.step,
- build_int_cst_type (type, iter));
- val = fold_build2 (PLUS_EXPR, type, iv.base, val);
+ if (POINTER_TYPE_P (type))
+ {
+ val = fold_build2 (MULT_EXPR, sizetype, iv.step,
+ size_int (iter));
+ val = fold_build2 (POINTER_PLUS_EXPR, type, iv.base, val);
+ }
+ else
+ {
+ val = fold_build2 (MULT_EXPR, type, iv.step,
+ build_int_cst_type (type, iter));
+ val = fold_build2 (PLUS_EXPR, type, iv.base, val);
+ }
*idx_p = unshare_expr (val);
}
case WIDEN_MULT_EXPR:
case MULT_EXPR:
case PLUS_EXPR:
+ case POINTER_PLUS_EXPR:
case MINUS_EXPR:
case TRUNC_DIV_EXPR:
case CEIL_DIV_EXPR:
case WIDEN_SUM_EXPR:
case PLUS_EXPR:
+ case POINTER_PLUS_EXPR:
case MINUS_EXPR:
return 12;
case VEC_RSHIFT_EXPR:
return "v>>";
+
+ case POINTER_PLUS_EXPR:
+ return "+";
case PLUS_EXPR:
return "+";
}
to_add = chrec_convert (type, to_add, at_stmt);
- right = chrec_convert (type, right, at_stmt);
- right = chrec_fold_plus (type, right, to_add);
+ right = chrec_convert_rhs (type, right, at_stmt);
+ right = chrec_fold_plus (chrec_type (right), right, to_add);
return build_polynomial_chrec (var, left, right);
}
else
left = add_to_evolution_1 (loop_nb, CHREC_LEFT (chrec_before),
to_add, at_stmt);
right = CHREC_RIGHT (chrec_before);
- right = chrec_convert (chrec_type (left), right, at_stmt);
+ right = chrec_convert_rhs (chrec_type (left), right, at_stmt);
return build_polynomial_chrec (CHREC_VARIABLE (chrec_before),
left, right);
}
return chrec_dont_know;
left = chrec_before;
- right = chrec_convert (chrec_type (left), to_add, at_stmt);
+ right = chrec_convert_rhs (chrec_type (left), to_add, at_stmt);
return build_polynomial_chrec (loop_nb, left, right);
}
}
tree rhs0, rhs1;
tree type_rhs = TREE_TYPE (rhs);
tree evol;
+ enum tree_code code;
/* The RHS is one of the following cases:
- an SSA_NAME,
- an INTEGER_CST,
- a PLUS_EXPR,
+ - a POINTER_PLUS_EXPR,
- a MINUS_EXPR,
- an ASSERT_EXPR,
- other cases are not yet handled. */
- switch (TREE_CODE (rhs))
+ code = TREE_CODE (rhs);
+ switch (code)
{
case NOP_EXPR:
/* This assignment is under the form "a_1 = (cast) rhs. */
(loop, SSA_NAME_DEF_STMT (rhs), halting_phi, evolution_of_loop, limit);
break;
+ case POINTER_PLUS_EXPR:
case PLUS_EXPR:
/* This case is under the form "rhs0 + rhs1". */
rhs0 = TREE_OPERAND (rhs, 0);
*evolution_of_loop = add_to_evolution
(loop->num,
chrec_convert (type_rhs, evol, at_stmt),
- PLUS_EXPR, rhs1, at_stmt);
+ code, rhs1, at_stmt);
else if (res == t_false)
{
*evolution_of_loop = add_to_evolution
(loop->num,
chrec_convert (type_rhs, *evolution_of_loop, at_stmt),
- PLUS_EXPR, rhs0, at_stmt);
+ code, rhs0, at_stmt);
else if (res == t_dont_know)
*evolution_of_loop = chrec_dont_know;
*evolution_of_loop = add_to_evolution
(loop->num, chrec_convert (type_rhs, *evolution_of_loop,
at_stmt),
- PLUS_EXPR, rhs1, at_stmt);
+ code, rhs1, at_stmt);
else if (res == t_dont_know)
*evolution_of_loop = chrec_dont_know;
*evolution_of_loop = add_to_evolution
(loop->num, chrec_convert (type_rhs, *evolution_of_loop,
at_stmt),
- PLUS_EXPR, rhs0, at_stmt);
+ code, rhs0, at_stmt);
else if (res == t_dont_know)
*evolution_of_loop = chrec_dont_know;
switch (TREE_CODE (opnd1))
{
+ case POINTER_PLUS_EXPR:
+ opnd10 = TREE_OPERAND (opnd1, 0);
+ opnd11 = TREE_OPERAND (opnd1, 1);
+ chrec10 = analyze_scalar_evolution (loop, opnd10);
+ chrec11 = analyze_scalar_evolution (loop, opnd11);
+ chrec10 = chrec_convert (type, chrec10, at_stmt);
+ chrec11 = chrec_convert (sizetype, chrec11, at_stmt);
+ res = chrec_fold_plus (type, chrec10, chrec11);
+ break;
+
case PLUS_EXPR:
opnd10 = TREE_OPERAND (opnd1, 0);
opnd11 = TREE_OPERAND (opnd1, 1);
return analyze_scalar_evolution_1 (wrto_loop, res, chrec_not_analyzed_yet);
}
-/* Folds EXPR, if it is a cast to pointer, assuming that the created
- polynomial_chrec does not wrap. */
-
-static tree
-fold_used_pointer_cast (tree expr)
-{
- tree op;
- tree type, inner_type;
-
- if (TREE_CODE (expr) != NOP_EXPR && TREE_CODE (expr) != CONVERT_EXPR)
- return expr;
-
- op = TREE_OPERAND (expr, 0);
- if (TREE_CODE (op) != POLYNOMIAL_CHREC)
- return expr;
-
- type = TREE_TYPE (expr);
- inner_type = TREE_TYPE (op);
-
- if (!INTEGRAL_TYPE_P (inner_type)
- || TYPE_PRECISION (inner_type) != TYPE_PRECISION (type))
- return expr;
-
- return build_polynomial_chrec (CHREC_VARIABLE (op),
- chrec_convert (type, CHREC_LEFT (op), NULL_TREE),
- chrec_convert (type, CHREC_RIGHT (op), NULL_TREE));
-}
-
-/* Returns true if EXPR is an expression corresponding to offset of pointer
- in p + offset. */
-
-static bool
-pointer_offset_p (tree expr)
-{
- if (TREE_CODE (expr) == INTEGER_CST)
- return true;
-
- if ((TREE_CODE (expr) == NOP_EXPR || TREE_CODE (expr) == CONVERT_EXPR)
- && INTEGRAL_TYPE_P (TREE_TYPE (TREE_OPERAND (expr, 0))))
- return true;
-
- return false;
-}
-
-/* EXPR is a scalar evolution of a pointer that is dereferenced or used in
- comparison. This means that it must point to a part of some object in
- memory, which enables us to argue about overflows and possibly simplify
- the EXPR. AT_STMT is the statement in which this conversion has to be
- performed. Returns the simplified value.
-
- Currently, for
-
- int i, n;
- int *p;
-
- for (i = -n; i < n; i++)
- *(p + i) = ...;
-
- We generate the following code (assuming that size of int and size_t is
- 4 bytes):
-
- for (i = -n; i < n; i++)
- {
- size_t tmp1, tmp2;
- int *tmp3, *tmp4;
-
- tmp1 = (size_t) i; (1)
- tmp2 = 4 * tmp1; (2)
- tmp3 = (int *) tmp2; (3)
- tmp4 = p + tmp3; (4)
-
- *tmp4 = ...;
- }
-
- We in general assume that pointer arithmetics does not overflow (since its
- behavior is undefined in that case). One of the problems is that our
- translation does not capture this property very well -- (int *) is
- considered unsigned, hence the computation in (4) does overflow if i is
- negative.
-
- This impreciseness creates complications in scev analysis. The scalar
- evolution of i is [-n, +, 1]. Since int and size_t have the same precision
- (in this example), and size_t is unsigned (so we do not care about
- overflows), we succeed to derive that scev of tmp1 is [(size_t) -n, +, 1]
- and scev of tmp2 is [4 * (size_t) -n, +, 4]. With tmp3, we run into
- problem -- [(int *) (4 * (size_t) -n), +, 4] wraps, and since we on several
- places assume that this is not the case for scevs with pointer type, we
- cannot use this scev for tmp3; hence, its scev is
- (int *) [(4 * (size_t) -n), +, 4], and scev of tmp4 is
- p + (int *) [(4 * (size_t) -n), +, 4]. Most of the optimizers are unable to
- work with scevs of this shape.
-
- However, since tmp4 is dereferenced, all its values must belong to a single
- object, and taking into account that the precision of int * and size_t is
- the same, it is impossible for its scev to wrap. Hence, we can derive that
- its evolution is [p + (int *) (4 * (size_t) -n), +, 4], which the optimizers
- can work with.
-
- ??? Maybe we should use different representation for pointer arithmetics,
- however that is a long-term project with a lot of potential for creating
- bugs. */
-
-static tree
-fold_used_pointer (tree expr, tree at_stmt)
-{
- tree op0, op1, new0, new1;
- enum tree_code code = TREE_CODE (expr);
-
- if (code == PLUS_EXPR
- || code == MINUS_EXPR)
- {
- op0 = TREE_OPERAND (expr, 0);
- op1 = TREE_OPERAND (expr, 1);
-
- if (pointer_offset_p (op1))
- {
- new0 = fold_used_pointer (op0, at_stmt);
- new1 = fold_used_pointer_cast (op1);
- }
- else if (code == PLUS_EXPR && pointer_offset_p (op0))
- {
- new0 = fold_used_pointer_cast (op0);
- new1 = fold_used_pointer (op1, at_stmt);
- }
- else
- return expr;
-
- if (new0 == op0 && new1 == op1)
- return expr;
-
- new0 = chrec_convert (TREE_TYPE (expr), new0, at_stmt);
- new1 = chrec_convert (TREE_TYPE (expr), new1, at_stmt);
-
- if (code == PLUS_EXPR)
- expr = chrec_fold_plus (TREE_TYPE (expr), new0, new1);
- else
- expr = chrec_fold_minus (TREE_TYPE (expr), new0, new1);
-
- return expr;
- }
- else
- return fold_used_pointer_cast (expr);
-}
-
-/* Returns true if PTR is dereferenced, or used in comparison. */
-
-static bool
-pointer_used_p (tree ptr)
-{
- use_operand_p use_p;
- imm_use_iterator imm_iter;
- tree stmt, rhs;
- struct ptr_info_def *pi = get_ptr_info (ptr);
-
- /* Check whether the pointer has a memory tag; if it does, it is
- (or at least used to be) dereferenced. */
- if ((pi != NULL && pi->name_mem_tag != NULL)
- || symbol_mem_tag (SSA_NAME_VAR (ptr)))
- return true;
-
- FOR_EACH_IMM_USE_FAST (use_p, imm_iter, ptr)
- {
- stmt = USE_STMT (use_p);
- if (TREE_CODE (stmt) == COND_EXPR)
- return true;
-
- if (TREE_CODE (stmt) != GIMPLE_MODIFY_STMT)
- continue;
-
- rhs = GIMPLE_STMT_OPERAND (stmt, 1);
- if (!COMPARISON_CLASS_P (rhs))
- continue;
-
- if (GIMPLE_STMT_OPERAND (stmt, 0) == ptr
- || GIMPLE_STMT_OPERAND (stmt, 1) == ptr)
- return true;
- }
-
- return false;
-}
-
/* Helper recursive function. */
static tree
case GIMPLE_MODIFY_STMT:
res = interpret_rhs_modify_stmt (loop, def,
GIMPLE_STMT_OPERAND (def, 1), type);
-
- if (POINTER_TYPE_P (type)
- && !automatically_generated_chrec_p (res)
- && pointer_used_p (var))
- res = fold_used_pointer (res, def);
break;
case PHI_NODE:
if (CHREC_LEFT (chrec) != op0
|| CHREC_RIGHT (chrec) != op1)
{
- op1 = chrec_convert (chrec_type (op0), op1, NULL_TREE);
+ op1 = chrec_convert_rhs (chrec_type (op0), op1, NULL_TREE);
chrec = build_polynomial_chrec (CHREC_VARIABLE (chrec), op0, op1);
}
return chrec;
+ case POINTER_PLUS_EXPR:
case PLUS_EXPR:
op0 = instantiate_parameters_1 (loop, TREE_OPERAND (chrec, 0),
flags, cache, size_expr);
|| TREE_OPERAND (chrec, 1) != op1)
{
op0 = chrec_convert (type, op0, NULL_TREE);
- op1 = chrec_convert (type, op1, NULL_TREE);
+ op1 = chrec_convert_rhs (type, op1, NULL_TREE);
chrec = chrec_fold_plus (type, op0, op1);
}
return chrec;
if (addr_off)
{
- addr = fold_convert (type, addr_off);
if (addr_base)
- addr = fold_build2 (PLUS_EXPR, type, addr_base, addr);
+ addr = fold_build2 (POINTER_PLUS_EXPR, type, addr_base, addr_off);
+ else
+ addr = fold_convert (type, addr_off);
}
else if (addr_base)
addr = addr_base;
if (!parts->index)
{
- parts->index = elt;
+ parts->index = fold_convert (sizetype, elt);
return;
}
j++;
continue;
}
-
+
elt = fold_convert (sizetype, addr->elts[i].val);
if (mult_elt)
mult_elt = fold_build2 (op_code, sizetype, mult_elt, elt);
{
atype = TREE_TYPE (parts.base);
parts.base = force_gimple_operand_bsi (bsi,
- fold_build2 (PLUS_EXPR, atype,
+ fold_build2 (POINTER_PLUS_EXPR, atype,
parts.base,
- fold_convert (atype, parts.offset)),
+ fold_convert (sizetype, parts.offset)),
true, NULL_TREE);
}
else
if (t)
return t;
- /* Add in any offset from a PLUS_EXPR. */
- if (TREE_CODE (base) == PLUS_EXPR)
+ /* Add in any offset from a POINTER_PLUS_EXPR. */
+ if (TREE_CODE (base) == POINTER_PLUS_EXPR)
{
tree offset2;
return NULL_TREE;
base = TREE_OPERAND (base, 0);
- offset = int_const_binop (PLUS_EXPR, offset, offset2, 1);
+ offset = fold_convert (sizetype,
+ int_const_binop (PLUS_EXPR, offset, offset2, 1));
}
if (TREE_CODE (base) == ADDR_EXPR)
}
-/* A subroutine of fold_stmt_r. EXPR is a PLUS_EXPR.
+/* A subroutine of fold_stmt_r. EXPR is a POINTER_PLUS_EXPR.
A quaint feature extant in our address arithmetic is that there
can be hidden type changes here. The type of the result need
What we're after here is an expression of the form
(T *)(&array + const)
where the cast doesn't actually exist, but is implicit in the
- type of the PLUS_EXPR. We'd like to turn this into
+ type of the POINTER_PLUS_EXPR. We'd like to turn this into
&array[x]
which may be able to propagate further. */
tree ptr_type = TREE_TYPE (expr);
tree ptd_type;
tree t;
- bool subtract = (TREE_CODE (expr) == MINUS_EXPR);
- /* We're only interested in pointer arithmetic. */
- if (!POINTER_TYPE_P (ptr_type))
- return NULL_TREE;
- /* Canonicalize the integral operand to op1. */
- if (INTEGRAL_TYPE_P (TREE_TYPE (op0)))
- {
- if (subtract)
- return NULL_TREE;
- t = op0, op0 = op1, op1 = t;
- }
+ gcc_assert (TREE_CODE (expr) == POINTER_PLUS_EXPR);
+
/* It had better be a constant. */
if (TREE_CODE (op1) != INTEGER_CST)
return NULL_TREE;
array_idx = int_const_binop (MULT_EXPR, array_idx, elt_size, 0);
/* Update the operands for the next round, or for folding. */
- /* If we're manipulating unsigned types, then folding into negative
- values can produce incorrect results. Particularly if the type
- is smaller than the width of the pointer. */
- if (subtract
- && TYPE_UNSIGNED (TREE_TYPE (op1))
- && tree_int_cst_lt (array_idx, op1))
- return NULL;
- op1 = int_const_binop (subtract ? MINUS_EXPR : PLUS_EXPR,
+ op1 = int_const_binop (PLUS_EXPR,
array_idx, op1, 0);
- subtract = false;
op0 = array_obj;
}
- /* If we weren't able to fold the subtraction into another array reference,
- canonicalize the integer for passing to the array and component ref
- simplification functions. */
- if (subtract)
- {
- if (TYPE_UNSIGNED (TREE_TYPE (op1)))
- return NULL;
- op1 = fold_unary (NEGATE_EXPR, TREE_TYPE (op1), op1);
- /* ??? In theory fold should always produce another integer. */
- if (op1 == NULL || TREE_CODE (op1) != INTEGER_CST)
- return NULL;
- }
-
ptd_type = TREE_TYPE (ptr_type);
/* At which point we can try some of the same things as for indirects. */
recompute_tree_invariant_for_addr_expr (expr);
return NULL_TREE;
- case PLUS_EXPR:
- case MINUS_EXPR:
+ case POINTER_PLUS_EXPR:
t = walk_tree (&TREE_OPERAND (expr, 0), fold_stmt_r, data, NULL);
if (t)
return t;
{
tree index;
- /* The offset must be defined by a simple GIMPLE_MODIFY_STMT statement. */
- if (TREE_CODE (offset) != GIMPLE_MODIFY_STMT)
- return false;
-
- /* The RHS of the statement which defines OFFSET must be a gimple
- cast of another SSA_NAME. */
- offset = GIMPLE_STMT_OPERAND (offset, 1);
- if (!is_gimple_cast (offset))
- return false;
-
- offset = TREE_OPERAND (offset, 0);
- if (TREE_CODE (offset) != SSA_NAME)
- return false;
-
/* Try to find an expression for a proper index. This is either
a multiplication expression by the element size or just the
ssa name we came along in case the element size is one. */
index = offset;
else
{
+ /* Get the offset's defining statement. */
offset = SSA_NAME_DEF_STMT (offset);
- /* The RHS of the statement which defines OFFSET must be a
- multiplication of an object by the size of the array elements. */
+ /* The statement which defines OFFSET before type conversion
+ must be a simple GIMPLE_MODIFY_STMT. */
if (TREE_CODE (offset) != GIMPLE_MODIFY_STMT)
return false;
- offset = GIMPLE_STMT_OPERAND (offset, 1);
+ /* The RHS of the statement which defines OFFSET must be a
+ multiplication of an object by the size of the array elements.
+ This implicitly verifies that the size of the array elements
+ is constant. */
+ offset = GIMPLE_STMT_OPERAND (offset, 1);
if (TREE_CODE (offset) != MULT_EXPR
|| TREE_CODE (TREE_OPERAND (offset, 1)) != INTEGER_CST
|| !simple_cst_equal (TREE_OPERAND (offset, 1),
|| !integer_zerop (TREE_OPERAND (array_ref, 1)))
return false;
- /* If the use of the ADDR_EXPR must be a PLUS_EXPR, or else there
+ /* If the use of the ADDR_EXPR is not a POINTER_PLUS_EXPR, there
is nothing to do. */
- if (TREE_CODE (rhs) != PLUS_EXPR)
+ if (TREE_CODE (rhs) != POINTER_PLUS_EXPR)
return false;
- /* Try to optimize &x[0] + C where C is a multiple of the size
+ /* Try to optimize &x[0] p+ C where C is a multiple of the size
of the elements in X into &x[C/element size]. */
if (TREE_OPERAND (rhs, 0) == name
&& TREE_CODE (TREE_OPERAND (rhs, 1)) == INTEGER_CST)
}
}
- /* Try to optimize &x[0] + OFFSET where OFFSET is defined by
+ /* Try to optimize &x[0] p+ OFFSET where OFFSET is defined by
converting a multiplication of an index by the size of the
array elements, then the result is converted into the proper
type for the arithmetic. */
&& lang_hooks.types_compatible_p (TREE_TYPE (name), TREE_TYPE (rhs)))
{
bool res;
- tree offset_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (rhs, 1));
- res = forward_propagate_addr_into_variable_array_index (offset_stmt,
- def_rhs, use_stmt);
- return res;
- }
-
- /* Same as the previous case, except the operands of the PLUS_EXPR
- were reversed. */
- if (TREE_OPERAND (rhs, 1) == name
- && TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME
- /* Avoid problems with IVopts creating PLUS_EXPRs with a
- different type than their operands. */
- && lang_hooks.types_compatible_p (TREE_TYPE (name), TREE_TYPE (rhs)))
- {
- bool res;
- tree offset_stmt = SSA_NAME_DEF_STMT (TREE_OPERAND (rhs, 0));
- res = forward_propagate_addr_into_variable_array_index (offset_stmt,
+ res = forward_propagate_addr_into_variable_array_index (TREE_OPERAND (rhs, 1),
def_rhs, use_stmt);
return res;
}
determine_base_object (tree expr)
{
enum tree_code code = TREE_CODE (expr);
- tree base, obj, op0, op1;
+ tree base, obj;
/* If this is a pointer casted to any type, we need to determine
the base object for the pointer; so handle conversions before
return fold_convert (ptr_type_node,
build_fold_addr_expr (base));
+ case POINTER_PLUS_EXPR:
+ return determine_base_object (TREE_OPERAND (expr, 0));
+
case PLUS_EXPR:
case MINUS_EXPR:
- op0 = determine_base_object (TREE_OPERAND (expr, 0));
- op1 = determine_base_object (TREE_OPERAND (expr, 1));
-
- if (!op1)
- return op0;
-
- if (!op0)
- return (code == PLUS_EXPR
- ? op1
- : fold_build1 (NEGATE_EXPR, ptr_type_node, op1));
-
- return fold_build2 (code, ptr_type_node, op0, op1);
+ /* Pointer addition is done solely using POINTER_PLUS_EXPR. */
+ gcc_unreachable ();
default:
return fold_convert (ptr_type_node, expr);
symbol_cost = computation_cost (addr) + 1;
address_cost
- = computation_cost (build2 (PLUS_EXPR, type,
+ = computation_cost (build2 (POINTER_PLUS_EXPR, type,
addr,
- build_int_cst (type, 2000))) + 1;
+ build_int_cst (sizetype, 2000))) + 1;
if (dump_file && (dump_flags & TDF_DETAILS))
{
fprintf (dump_file, "force_expr_to_var_cost:\n");
switch (TREE_CODE (expr))
{
+ case POINTER_PLUS_EXPR:
case PLUS_EXPR:
case MINUS_EXPR:
case MULT_EXPR:
mode = TYPE_MODE (TREE_TYPE (expr));
switch (TREE_CODE (expr))
{
+ case POINTER_PLUS_EXPR:
case PLUS_EXPR:
case MINUS_EXPR:
cost = add_cost (mode);
}
}
}
-
+ if (POINTER_TYPE_P (TREE_TYPE (base)))
+ {
+ step = fold_convert (sizetype, step);
+ if (incr_op == MINUS_EXPR)
+ step = fold_build1 (NEGATE_EXPR, sizetype, step);
+ incr_op = POINTER_PLUS_EXPR;
+ }
/* Gimplify the step if necessary. We put the computations in front of the
loop (i.e. the step should be loop invariant). */
step = force_gimple_operand (step, &stmts, true, var);
/* Fallthru. */
case PLUS_EXPR:
+ case POINTER_PLUS_EXPR:
op0 = TREE_OPERAND (expr, 0);
op1 = TREE_OPERAND (expr, 1);
mpz_t mmod;
tree assumption = boolean_true_node, bound, noloop;
bool ret = false;
+ tree type1 = type;
+ if (POINTER_TYPE_P (type))
+ type1 = sizetype;
if (TREE_CODE (mod) != INTEGER_CST)
return false;
if (integer_nonzerop (mod))
mod = fold_build2 (MINUS_EXPR, niter_type, step, mod);
- tmod = fold_convert (type, mod);
+ tmod = fold_convert (type1, mod);
mpz_init (mmod);
mpz_set_double_int (mmod, tree_to_double_int (mod), true);
if (!iv1->no_overflow && !integer_zerop (mod))
{
bound = fold_build2 (MINUS_EXPR, type,
- TYPE_MAX_VALUE (type), tmod);
+ TYPE_MAX_VALUE (type1), tmod);
assumption = fold_build2 (LE_EXPR, boolean_type_node,
iv1->base, bound);
if (integer_zerop (assumption))
else
noloop = fold_build2 (GT_EXPR, boolean_type_node,
iv0->base,
- fold_build2 (PLUS_EXPR, type,
+ fold_build2 (PLUS_EXPR, type1,
iv1->base, tmod));
}
else
iv0->base - MOD <= iv1->base. */
if (!iv0->no_overflow && !integer_zerop (mod))
{
- bound = fold_build2 (PLUS_EXPR, type,
- TYPE_MIN_VALUE (type), tmod);
+ bound = fold_build2 (PLUS_EXPR, type1,
+ TYPE_MIN_VALUE (type1), tmod);
assumption = fold_build2 (GE_EXPR, boolean_type_node,
iv0->base, bound);
if (integer_zerop (assumption))
noloop = boolean_false_node;
else
noloop = fold_build2 (GT_EXPR, boolean_type_node,
- fold_build2 (MINUS_EXPR, type,
+ fold_build2 (MINUS_EXPR, type1,
iv0->base, tmod),
iv1->base);
}
struct tree_niter_desc *niter, bounds *bnds)
{
tree assumption = boolean_true_node, bound, diff;
- tree mbz, mbzl, mbzr;
+ tree mbz, mbzl, mbzr, type1;
bool rolls_p, no_overflow_p;
double_int dstep;
mpz_t mstep, max;
if (rolls_p && no_overflow_p)
return;
+
+ type1 = type;
+ if (POINTER_TYPE_P (type))
+ type1 = sizetype;
/* Now the hard part; we must formulate the assumption(s) as expressions, and
we must be careful not to introduce overflow. */
if (integer_nonzerop (iv0->step))
{
- diff = fold_build2 (MINUS_EXPR, type,
- iv0->step, build_int_cst (type, 1));
+ diff = fold_build2 (MINUS_EXPR, type1,
+ iv0->step, build_int_cst (type1, 1));
/* We need to know that iv0->base >= MIN + iv0->step - 1. Since
0 address never belongs to any object, we can assume this for
pointers. */
if (!POINTER_TYPE_P (type))
{
- bound = fold_build2 (PLUS_EXPR, type,
+ bound = fold_build2 (PLUS_EXPR, type1,
TYPE_MIN_VALUE (type), diff);
assumption = fold_build2 (GE_EXPR, boolean_type_node,
iv0->base, bound);
/* And then we can compute iv0->base - diff, and compare it with
iv1->base. */
- mbzl = fold_build2 (MINUS_EXPR, type, iv0->base, diff);
+ mbzl = fold_build2 (MINUS_EXPR, type1, iv0->base, diff);
mbzr = iv1->base;
}
else
{
- diff = fold_build2 (PLUS_EXPR, type,
- iv1->step, build_int_cst (type, 1));
+ diff = fold_build2 (PLUS_EXPR, type1,
+ iv1->step, build_int_cst (type1, 1));
if (!POINTER_TYPE_P (type))
{
- bound = fold_build2 (PLUS_EXPR, type,
+ bound = fold_build2 (PLUS_EXPR, type1,
TYPE_MAX_VALUE (type), diff);
assumption = fold_build2 (LE_EXPR, boolean_type_node,
iv1->base, bound);
}
mbzl = iv0->base;
- mbzr = fold_build2 (MINUS_EXPR, type, iv1->base, diff);
+ mbzr = fold_build2 (MINUS_EXPR, type1, iv1->base, diff);
}
if (!integer_nonzerop (assumption))
bounds *bnds)
{
tree assumption;
+ tree type1 = type;
+ if (POINTER_TYPE_P (type))
+ type1 = sizetype;
/* Say that IV0 is the control variable. Then IV0 <= IV1 iff
IV0 < IV1 + 1, assuming that IV1 is not equal to the greatest
{
if (integer_nonzerop (iv0->step))
assumption = fold_build2 (NE_EXPR, boolean_type_node,
- iv1->base, TYPE_MAX_VALUE (type));
+ iv1->base, TYPE_MAX_VALUE (type1));
else
assumption = fold_build2 (NE_EXPR, boolean_type_node,
- iv0->base, TYPE_MIN_VALUE (type));
+ iv0->base, TYPE_MIN_VALUE (type1));
if (integer_zerop (assumption))
return false;
}
if (integer_nonzerop (iv0->step))
- iv1->base = fold_build2 (PLUS_EXPR, type,
- iv1->base, build_int_cst (type, 1));
+ iv1->base = fold_build2 (PLUS_EXPR, type1,
+ iv1->base, build_int_cst (type1, 1));
else
- iv0->base = fold_build2 (MINUS_EXPR, type,
- iv0->base, build_int_cst (type, 1));
+ iv0->base = fold_build2 (MINUS_EXPR, type1,
+ iv0->base, build_int_cst (type1, 1));
- bounds_add (bnds, double_int_one, type);
+ bounds_add (bnds, double_int_one, type1);
return number_of_iterations_lt (type, iv0, iv1, niter, never_infinite, bnds);
}
&& !is_gimple_min_invariant (e)
/* And increments and decrements by a constant are simple. */
&& !((TREE_CODE (e) == PLUS_EXPR
- || TREE_CODE (e) == MINUS_EXPR)
+ || TREE_CODE (e) == MINUS_EXPR
+ || TREE_CODE (e) == POINTER_PLUS_EXPR)
&& is_gimple_min_invariant (TREE_OPERAND (e, 1))))
return expr;
return bnd;
case PLUS_EXPR:
+ case POINTER_PLUS_EXPR:
case MINUS_EXPR:
op0 = TREE_OPERAND (val, 0);
op1 = TREE_OPERAND (val, 1);
return false;
istep = int_cst_value (step);
- if (TREE_CODE (ibase) == PLUS_EXPR
+ if (TREE_CODE (ibase) == POINTER_PLUS_EXPR
&& cst_and_fits_in_hwi (TREE_OPERAND (ibase, 1)))
{
idelta = int_cst_value (TREE_OPERAND (ibase, 1));
{
/* Determine the address to prefetch. */
delta = (ahead + ap * ref->prefetch_mod) * ref->group->step;
- addr = fold_build2 (PLUS_EXPR, ptr_type_node,
- addr_base, build_int_cst (ptr_type_node, delta));
+ addr = fold_build2 (POINTER_PLUS_EXPR, ptr_type_node,
+ addr_base, size_int (delta));
addr = force_gimple_operand_bsi (&bsi, unshare_expr (addr), true, NULL);
/* Create the prefetch instruction. */
VEC (ce_s, heap) *temp = NULL;
unsigned int rhsoffset = 0;
- if (TREE_CODE (expr) != PLUS_EXPR
- && TREE_CODE (expr) != MINUS_EXPR)
+ if (TREE_CODE (expr) != POINTER_PLUS_EXPR)
return false;
op0 = TREE_OPERAND (expr, 0);
op1 = TREE_OPERAND (expr, 1);
+ gcc_assert (POINTER_TYPE_P (TREE_TYPE (op0)));
get_constraint_for (op0, &temp);
- if (POINTER_TYPE_P (TREE_TYPE (op0))
- && TREE_CODE (op1) == INTEGER_CST
- && TREE_CODE (expr) == PLUS_EXPR)
- {
- rhsoffset = TREE_INT_CST_LOW (op1) * BITS_PER_UNIT;
- }
- else
- return false;
+ if (TREE_CODE (op1) == INTEGER_CST)
+ rhsoffset = TREE_INT_CST_LOW (op1) * BITS_PER_UNIT;
for (i = 0; VEC_iterate (ce_s, lhsc, i, c); i++)
for (j = 0; VEC_iterate (ce_s, temp, j, c2); j++)
else if (INTEGRAL_TYPE_P (inner_type)
&& INTEGRAL_TYPE_P (outer_type)
&& TYPE_UNSIGNED (inner_type) == TYPE_UNSIGNED (outer_type)
- && TYPE_PRECISION (inner_type) == TYPE_PRECISION (outer_type)
- && simple_cst_equal (TYPE_MAX_VALUE (inner_type), TYPE_MAX_VALUE (outer_type))
- && simple_cst_equal (TYPE_MIN_VALUE (inner_type), TYPE_MIN_VALUE (outer_type)))
+ && TYPE_PRECISION (inner_type) == TYPE_PRECISION (outer_type))
{
+ tree min_inner = fold_convert (outer_type, TYPE_MIN_VALUE (inner_type));
+ tree max_inner = fold_convert (outer_type, TYPE_MAX_VALUE (inner_type));
bool first_boolean = (TREE_CODE (inner_type) == BOOLEAN_TYPE);
bool second_boolean = (TREE_CODE (outer_type) == BOOLEAN_TYPE);
- if (first_boolean == second_boolean)
+ if (simple_cst_equal (max_inner, TYPE_MAX_VALUE (outer_type))
+ && simple_cst_equal (min_inner, TYPE_MIN_VALUE (outer_type))
+ && first_boolean == second_boolean)
return true;
}
continue;
}
- if (TREE_CODE (rhs) == PLUS_EXPR
+ if ((TREE_CODE (rhs) == POINTER_PLUS_EXPR
+ || TREE_CODE (rhs) == PLUS_EXPR)
&& TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME
&& TREE_CODE (TREE_OPERAND (rhs, 1)) == INTEGER_CST
&& host_integerp (TREE_OPERAND (rhs, 1), 1))
continue;
}
- if (TREE_CODE (rhs) == PLUS_EXPR
+ if ((TREE_CODE (rhs) == POINTER_PLUS_EXPR
+ || TREE_CODE (rhs) == PLUS_EXPR)
&& TREE_CODE (TREE_OPERAND (rhs, 0)) == SSA_NAME
&& TREE_CODE (TREE_OPERAND (rhs, 1)) == INTEGER_CST
&& host_integerp (TREE_OPERAND (rhs, 1), 1))
if (! POINTER_TYPE_P (TREE_TYPE (rhs)))
return;
- if ((TREE_CODE (rhs) == PLUS_EXPR
- && TREE_CODE (TREE_OPERAND (rhs, 1)) == INTEGER_CST)
- || TREE_CODE (rhs) == NOP_EXPR
- || TREE_CODE (rhs) == CONVERT_EXPR)
+ if (((TREE_CODE (rhs) == POINTER_PLUS_EXPR
+ || TREE_CODE (rhs) == PLUS_EXPR)
+ && TREE_CODE (TREE_OPERAND (rhs, 1)) == INTEGER_CST)
+ || TREE_CODE (rhs) == NOP_EXPR
+ || TREE_CODE (rhs) == CONVERT_EXPR)
rhs = TREE_OPERAND (rhs, 0);
if (TREE_CODE (rhs) != SSA_NAME
other_ap_temp = (some_type *) ap_temp;
ap = ap_temp;
statements. */
- if ((TREE_CODE (rhs) == PLUS_EXPR
+ if ((TREE_CODE (rhs) == POINTER_PLUS_EXPR
&& TREE_CODE (TREE_OPERAND (rhs, 1)) == INTEGER_CST)
|| TREE_CODE (rhs) == NOP_EXPR
|| TREE_CODE (rhs) == CONVERT_EXPR)
*ass_var = dest;
return true;
- /* TODO -- Handle other codes (NEGATE_EXPR, MINUS_EXPR). */
+ /* TODO -- Handle other codes (NEGATE_EXPR, MINUS_EXPR, POINTER_PLUS_EXPR). */
default:
return false;
/* Create base_offset */
base_offset = size_binop (PLUS_EXPR, base_offset, init);
+ base_offset = fold_convert (sizetype, base_offset);
dest = create_tmp_var (TREE_TYPE (base_offset), "base_off");
add_referenced_var (dest);
base_offset = force_gimple_operand (base_offset, &new_stmt, false, dest);
if (offset)
{
- tree tmp = create_tmp_var (TREE_TYPE (base_offset), "offset");
+ tree tmp = create_tmp_var (sizetype, "offset");
tree step;
/* For interleaved access step we divide STEP by the size of the
}
/* base + base_offset */
- addr_base = fold_build2 (PLUS_EXPR, TREE_TYPE (data_ref_base), data_ref_base,
+ addr_base = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (data_ref_base), data_ref_base,
base_offset);
vect_ptr_type = build_pointer_type (STMT_VINFO_VECTYPE (stmt_info));
tree vectype = STMT_VINFO_VECTYPE (stmt_info);
tree vptr_type = TREE_TYPE (dataref_ptr);
tree ptr_var = SSA_NAME_VAR (dataref_ptr);
- tree update = fold_convert (vptr_type, TYPE_SIZE_UNIT (vectype));
+ tree update = TYPE_SIZE_UNIT (vectype);
tree incr_stmt;
ssa_op_iter iter;
use_operand_p use_p;
tree new_dataref_ptr;
incr_stmt = build_gimple_modify_stmt (ptr_var,
- build2 (PLUS_EXPR, vptr_type,
+ build2 (POINTER_PLUS_EXPR, vptr_type,
dataref_ptr, update));
new_dataref_ptr = make_ssa_name (ptr_var, incr_stmt);
GIMPLE_STMT_OPERAND (incr_stmt, 0) = new_dataref_ptr;
init_expr = unshare_expr (initial_condition_in_loop_num (access_fn,
loop->num));
- ni = fold_build2 (PLUS_EXPR, TREE_TYPE (init_expr),
- fold_build2 (MULT_EXPR, TREE_TYPE (init_expr),
- fold_convert (TREE_TYPE (init_expr),
- niters),
- step_expr),
- init_expr);
+ if (POINTER_TYPE_P (TREE_TYPE (init_expr)))
+ ni = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (init_expr),
+ init_expr,
+ fold_convert (sizetype,
+ fold_build2 (MULT_EXPR, TREE_TYPE (niters),
+ niters, step_expr)));
+ else
+ ni = fold_build2 (PLUS_EXPR, TREE_TYPE (init_expr),
+ fold_build2 (MULT_EXPR, TREE_TYPE (init_expr),
+ fold_convert (TREE_TYPE (init_expr),
+ niters),
+ step_expr),
+ init_expr);
+
+
var = create_tmp_var (TREE_TYPE (init_expr), "tmp");
add_referenced_var (var);
/* Create: byte_misalign = addr & (vectype_size - 1) */
byte_misalign =
- fold_build2 (BIT_AND_EXPR, type, start_addr, vectype_size_minus_1);
+ fold_build2 (BIT_AND_EXPR, type, fold_convert (type, start_addr), vectype_size_minus_1);
/* Create: elem_misalign = byte_misalign / element_size */
elem_misalign =
both integers. */
gcc_assert (POINTER_TYPE_P (TREE_TYPE (val1))
== POINTER_TYPE_P (TREE_TYPE (val2)));
+ /* Convert the two values into the same type. This is needed because
+ sizetype causes sign extension even for unsigned types. */
+ val2 = fold_convert (TREE_TYPE (val1), val2);
+ STRIP_USELESS_TYPE_CONVERSION (val2);
if ((TREE_CODE (val1) == SSA_NAME
|| TREE_CODE (val1) == PLUS_EXPR
}
max = negative_overflow_infinity (TREE_TYPE (var_vr->min));
}
- else
+ else if (!POINTER_TYPE_P (TREE_TYPE (var_vr->min)))
max = fold_build2 (MINUS_EXPR, TREE_TYPE (var_vr->min),
anti_min,
build_int_cst (TREE_TYPE (var_vr->min), 1));
+ else
+ max = fold_build2 (POINTER_PLUS_EXPR, TREE_TYPE (var_vr->min),
+ anti_min,
+ size_int (-1));
min = real_min;
set_value_range (vr_p, VR_RANGE, min, max, vr_p->equiv);
}
meaningful way. Handle only arithmetic operations. */
if (code != PLUS_EXPR
&& code != MINUS_EXPR
+ && code != POINTER_PLUS_EXPR
&& code != MULT_EXPR
&& code != TRUNC_DIV_EXPR
&& code != FLOOR_DIV_EXPR
|| POINTER_TYPE_P (TREE_TYPE (op0))
|| POINTER_TYPE_P (TREE_TYPE (op1)))
{
- /* For pointer types, we are really only interested in asserting
- whether the expression evaluates to non-NULL. FIXME, we used
- to gcc_assert (code == PLUS_EXPR || code == MINUS_EXPR), but
- ivopts is generating expressions with pointer multiplication
- in them. */
- if (code == PLUS_EXPR)
+ if (code == MIN_EXPR || code == MAX_EXPR)
{
- if (range_is_nonnull (&vr0) || range_is_nonnull (&vr1))
+ /* For MIN/MAX expressions with pointers, we only care about
+ nullness, if both are non null, then the result is nonnull.
+ If both are null, then the result is null. Otherwise they
+ are varying. */
+ if (range_is_nonnull (&vr0) && range_is_nonnull (&vr1))
set_value_range_to_nonnull (vr, TREE_TYPE (expr));
else if (range_is_null (&vr0) && range_is_null (&vr1))
set_value_range_to_null (vr, TREE_TYPE (expr));
else
set_value_range_to_varying (vr);
+
+ return;
}
+ gcc_assert (code == POINTER_PLUS_EXPR);
+ /* For pointer types, we are really only interested in asserting
+ whether the expression evaluates to non-NULL. */
+ if (range_is_nonnull (&vr0) || range_is_nonnull (&vr1))
+ set_value_range_to_nonnull (vr, TREE_TYPE (expr));
+ else if (range_is_null (&vr0) && range_is_null (&vr1))
+ set_value_range_to_null (vr, TREE_TYPE (expr));
else
- {
- /* Subtracting from a pointer, may yield 0, so just drop the
- resulting range to varying. */
- set_value_range_to_varying (vr);
- }
+ set_value_range_to_varying (vr);
return;
}
gcc_assert (code != GIMPLE_MODIFY_STMT);
#endif
+ if ((code == MINUS_EXPR || code == PLUS_EXPR || code == MULT_EXPR)
+ && arg0 && arg1 && tt && POINTER_TYPE_P (tt))
+ gcc_assert (TREE_CODE (arg0) == INTEGER_CST && TREE_CODE (arg1) == INTEGER_CST);
+
+ if (code == POINTER_PLUS_EXPR && arg0 && arg1 && tt)
+ gcc_assert (POINTER_TYPE_P (tt) && POINTER_TYPE_P (TREE_TYPE (arg0))
+ && TREE_CODE (TREE_TYPE (arg1)) == INTEGER_TYPE
+ && tree_ssa_useless_type_conversion_1 (sizetype,
+ TREE_TYPE (arg1)));
+
t = make_node_stat (code PASS_MEM_STAT);
TREE_TYPE (t) = tt;
DEFTREECODE (MINUS_EXPR, "minus_expr", tcc_binary, 2)
DEFTREECODE (MULT_EXPR, "mult_expr", tcc_binary, 2)
+/* Pointer addition. The first operand is always a pointer and the
+ second operand is an integer of type sizetype. */
+DEFTREECODE (POINTER_PLUS_EXPR, "pointer_plus_expr", tcc_binary, 2)
+
/* Division for integer result that rounds the quotient toward zero. */
DEFTREECODE (TRUNC_DIV_EXPR, "trunc_div_expr", tcc_binary, 2)
return hi;
case PLUS_EXPR:
+ case POINTER_PLUS_EXPR:
case MINUS_EXPR:
return (const_hash_1 (TREE_OPERAND (exp, 0)) * 9
+ const_hash_1 (TREE_OPERAND (exp, 1)));
}
case PLUS_EXPR:
+ case POINTER_PLUS_EXPR:
case MINUS_EXPR:
case RANGE_EXPR:
return (compare_constant (TREE_OPERAND (t1, 0), TREE_OPERAND (t2, 0))
copy_constant (TREE_IMAGPART (exp)));
case PLUS_EXPR:
+ case POINTER_PLUS_EXPR:
case MINUS_EXPR:
return build2 (TREE_CODE (exp), TREE_TYPE (exp),
copy_constant (TREE_OPERAND (exp, 0)),
break;
case PLUS_EXPR:
+ case POINTER_PLUS_EXPR:
reloc = compute_reloc_for_constant (TREE_OPERAND (exp, 0));
reloc |= compute_reloc_for_constant (TREE_OPERAND (exp, 1));
break;
break;
case PLUS_EXPR:
+ case POINTER_PLUS_EXPR:
case MINUS_EXPR:
output_addressed_constants (TREE_OPERAND (exp, 1));
/* Fall through. */
}
break;
+ case POINTER_PLUS_EXPR:
case PLUS_EXPR:
if (! INTEGRAL_TYPE_P (endtype)
|| TYPE_PRECISION (endtype) >= POINTER_SIZE)