From 8c53c46cebf42cb4f4ac125ca6428c5e9b519f66 Mon Sep 17 00:00:00 2001
From: rsandifo
Date: Wed, 20 Nov 2013 11:57:27 +0000
Subject: [PATCH] gcc/ada/

2013-11-19  Kenneth Zadeck
	    Mike Stump
	    Richard Sandiford

	* gcc-interface/cuintp.c (UI_From_gnu): Use tree_to_shwi.
	* gcc-interface/decl.c (gnat_to_gnu_entity): Use tree_to_uhwi.
	* gcc-interface/utils.c (make_packable_type): Likewise.

gcc/c-family/
2013-11-19  Kenneth Zadeck
	    Mike Stump
	    Richard Sandiford

	* c-ada-spec.c (is_simple_enum): Use tree_to_shwi and tree_to_uhwi
	instead of TREE_INT_CST_LOW, in cases where there is a protecting
	tree_fits_shwi_p or tree_fits_uhwi_p.
	(dump_generic_ada_node): Likewise.
	* c-format.c (check_format_arg): Likewise.
	* c-pretty-print.c (pp_c_integer_constant): Likewise.

gcc/
2013-11-19  Kenneth Zadeck
	    Mike Stump
	    Richard Sandiford

	* alias.c (ao_ref_from_mem): Use tree_to_shwi and tree_to_uhwi
	instead of TREE_INT_CST_LOW, in cases where there is a protecting
	tree_fits_shwi_p or tree_fits_uhwi_p.
	* builtins.c (fold_builtin_powi): Likewise.
	* config/epiphany/epiphany.c (epiphany_special_round_type_align):
	Likewise.
	* dbxout.c (dbxout_symbol): Likewise.
	* expr.c (expand_expr_real_1): Likewise.
	* fold-const.c (fold_single_bit_test, fold_plusminus_mult_expr)
	(fold_binary_loc): Likewise.
	* gimple-fold.c (fold_const_aggregate_ref_1): Likewise.
	* gimple-ssa-strength-reduction.c (stmt_cost): Likewise.
	* omp-low.c (lower_omp_for_lastprivate): Likewise.
	* simplify-rtx.c (delegitimize_mem_from_attrs): Likewise.
	* stor-layout.c (compute_record_mode): Likewise.
	* tree-cfg.c (verify_expr): Likewise.
	* tree-dfa.c (get_ref_base_and_extent): Likewise.
	* tree-pretty-print.c (dump_array_domain): Likewise.
	* tree-sra.c (build_user_friendly_ref_for_offset): Likewise.
	* tree-ssa-ccp.c (fold_builtin_alloca_with_align): Likewise.
	* tree-ssa-loop-ivopts.c (get_loop_invariant_expr_id): Likewise.
	* tree-ssa-math-opts.c (execute_cse_sincos): Likewise.
	* tree-ssa-phiopt.c (hoist_adjacent_loads): Likewise.
	* tree-ssa-reassoc.c (acceptable_pow_call): Likewise.
	* tree-ssa-sccvn.c (copy_reference_ops_from_ref): Likewise.
	(ao_ref_init_from_vn_reference, vn_reference_fold_indirect): Likewise.
	(vn_reference_lookup_3, simplify_binary_expression): Likewise.
	* tree-ssa-structalias.c (bitpos_of_field): Likewise.
	(get_constraint_for_1, push_fields_onto_fieldstack): Likewise.
	(create_variable_info_for_1): Likewise.
	* tree-vect-data-refs.c (vect_compute_data_ref_alignment): Likewise.
	(vect_verify_datarefs_alignment): Likewise.
	(vect_analyze_data_ref_accesses): Likewise.
	(vect_prune_runtime_alias_test_list): Likewise.
	* tree-vectorizer.h (NITERS_KNOWN_P): Likewise.
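The change is mechanical: wherever an access to TREE_INT_CST_LOW is already
dominated by a tree_fits_shwi_p or tree_fits_uhwi_p check, the raw access is
replaced by the matching tree_to_shwi or tree_to_uhwi call.  As an
illustration only (not part of the patch), a minimal sketch of the guarded
idiom; the helper name example_bound_from_tree is hypothetical:

    /* Illustrative sketch only, not part of the patch.  */
    static HOST_WIDE_INT
    example_bound_from_tree (tree t)
    {
      /* tree_fits_shwi_p guarantees the constant fits in a signed
	 HOST_WIDE_INT, so tree_to_shwi (which checks the same condition)
	 is used instead of reading TREE_INT_CST_LOW directly.  */
      if (TREE_CODE (t) == INTEGER_CST && tree_fits_shwi_p (t))
	return tree_to_shwi (t);
      return -1;
    }

The unsigned variant pairs tree_fits_uhwi_p with tree_to_uhwi in the same way.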

git-svn-id: svn+ssh://gcc.gnu.org/svn/gcc/trunk@205103 138bc75d-0d04-0410-961f-82ee72b054a4
---
 gcc/ChangeLog                       | 40 +++++++++++++++++++++++++++++
 gcc/ada/ChangeLog                   |  8 ++++++
 gcc/ada/gcc-interface/cuintp.c      |  4 +--
 gcc/ada/gcc-interface/decl.c        |  2 +-
 gcc/ada/gcc-interface/utils.c       |  2 +-
 gcc/alias.c                         |  4 +--
 gcc/builtins.c                      |  2 +-
 gcc/c-family/ChangeLog              | 11 ++++++++
 gcc/c-family/c-ada-spec.c           |  8 +++---
 gcc/c-family/c-format.c             |  2 +-
 gcc/c-family/c-pretty-print.c       |  4 +--
 gcc/config/epiphany/epiphany.c      |  6 ++---
 gcc/dbxout.c                        |  2 +-
 gcc/expr.c                          |  2 +-
 gcc/fold-const.c                    | 18 ++++++-------
 gcc/gimple-fold.c                   |  4 +--
 gcc/gimple-ssa-strength-reduction.c |  2 +-
 gcc/omp-low.c                       |  2 +-
 gcc/simplify-rtx.c                  |  2 +-
 gcc/stor-layout.c                   |  2 +-
 gcc/tree-cfg.c                      |  4 +--
 gcc/tree-dfa.c                      | 20 +++++++--------
 gcc/tree-pretty-print.c             |  2 +-
 gcc/tree-sra.c                      |  4 +--
 gcc/tree-ssa-ccp.c                  |  2 +-
 gcc/tree-ssa-loop-ivopts.c          |  4 +--
 gcc/tree-ssa-math-opts.c            |  2 +-
 gcc/tree-ssa-phiopt.c               |  6 ++---
 gcc/tree-ssa-reassoc.c              |  2 +-
 gcc/tree-ssa-sccvn.c                | 30 +++++++++++-----------
 gcc/tree-ssa-structalias.c          | 14 +++++-----
 gcc/tree-vect-data-refs.c           | 12 ++++-----
 gcc/tree-vectorizer.h               |  2 +-
 33 files changed, 144 insertions(+), 87 deletions(-)

diff --git a/gcc/ChangeLog b/gcc/ChangeLog
index 17c4c2457b11..d962593581ce 100644
--- a/gcc/ChangeLog
+++ b/gcc/ChangeLog
@@ -1,3 +1,43 @@
+2013-11-20  Kenneth Zadeck
+	    Mike Stump
+	    Richard Sandiford
+
+	* alias.c (ao_ref_from_mem): Use tree_to_shwi and tree_to_uhwi
+	instead of TREE_INT_CST_LOW, in cases where there is a protecting
+	tree_fits_shwi_p or tree_fits_uhwi_p.
+	* builtins.c (fold_builtin_powi): Likewise.
+	* config/epiphany/epiphany.c (epiphany_special_round_type_align):
+	Likewise.
+	* dbxout.c (dbxout_symbol): Likewise.
+	* expr.c (expand_expr_real_1): Likewise.
+	* fold-const.c (fold_single_bit_test, fold_plusminus_mult_expr)
+	(fold_binary_loc): Likewise.
+	* gimple-fold.c (fold_const_aggregate_ref_1): Likewise.
+	* gimple-ssa-strength-reduction.c (stmt_cost): Likewise.
+	* omp-low.c (lower_omp_for_lastprivate): Likewise.
+	* simplify-rtx.c (delegitimize_mem_from_attrs): Likewise.
+	* stor-layout.c (compute_record_mode): Likewise.
+	* tree-cfg.c (verify_expr): Likewise.
+	* tree-dfa.c (get_ref_base_and_extent): Likewise.
+	* tree-pretty-print.c (dump_array_domain): Likewise.
+	* tree-sra.c (build_user_friendly_ref_for_offset): Likewise.
+	* tree-ssa-ccp.c (fold_builtin_alloca_with_align): Likewise.
+	* tree-ssa-loop-ivopts.c (get_loop_invariant_expr_id): Likewise.
+	* tree-ssa-math-opts.c (execute_cse_sincos): Likewise.
+	* tree-ssa-phiopt.c (hoist_adjacent_loads): Likewise.
+	* tree-ssa-reassoc.c (acceptable_pow_call): Likewise.
+	* tree-ssa-sccvn.c (copy_reference_ops_from_ref): Likewise.
+	(ao_ref_init_from_vn_reference, vn_reference_fold_indirect): Likewise.
+	(vn_reference_lookup_3, simplify_binary_expression): Likewise.
+	* tree-ssa-structalias.c (bitpos_of_field): Likewise.
+	(get_constraint_for_1, push_fields_onto_fieldstack): Likewise.
+	(create_variable_info_for_1): Likewise.
+	* tree-vect-data-refs.c (vect_compute_data_ref_alignment): Likewise.
+	(vect_verify_datarefs_alignment): Likewise.
+	(vect_analyze_data_ref_accesses): Likewise.
+	(vect_prune_runtime_alias_test_list): Likewise.
+	* tree-vectorizer.h (NITERS_KNOWN_P): Likewise.
+
 2013-11-20  Richard Sandiford
 
 	* tree-ssa-alias.c (ao_ref_init_from_ptr_and_size): Avoid signed
diff --git a/gcc/ada/ChangeLog b/gcc/ada/ChangeLog
index 4d55405f6948..906bd7e4f511 100644
--- a/gcc/ada/ChangeLog
+++ b/gcc/ada/ChangeLog
@@ -1,3 +1,11 @@
+2013-11-20  Kenneth Zadeck
+	    Mike Stump
+	    Richard Sandiford
+
+	* gcc-interface/cuintp.c (UI_From_gnu): Use tree_to_shwi.
+	* gcc-interface/decl.c (gnat_to_gnu_entity): Use tree_to_uhwi.
+	* gcc-interface/utils.c (make_packable_type): Likewise.
+
 2013-11-18  Richard Sandiford
 
 	* gcc-interface/cuintp.c (UI_From_gnu): Use tree_to_shwi rather than
diff --git a/gcc/ada/gcc-interface/cuintp.c b/gcc/ada/gcc-interface/cuintp.c
index 473140daa2b4..bbe62c17e931 100644
--- a/gcc/ada/gcc-interface/cuintp.c
+++ b/gcc/ada/gcc-interface/cuintp.c
@@ -155,7 +155,7 @@ UI_From_gnu (tree Input)
      in a signed 32-bit integer.  */
   if (tree_fits_shwi_p (Input))
     {
-      HOST_WIDE_INT hw_input = TREE_INT_CST_LOW (Input);
+      HOST_WIDE_INT hw_input = tree_to_shwi (Input);
       if (hw_input == (int) hw_input)
 	return UI_From_Int (hw_input);
     }
@@ -166,7 +166,7 @@ UI_From_gnu (tree Input)
      signed 32-bit integer.  Then a sign test tells whether it fits in a
      signed 64-bit integer.  */
   if (tree_fits_shwi_p (Input))
-    return UI_From_Int (TREE_INT_CST_LOW (Input));
+    return UI_From_Int (tree_to_shwi (Input));
   else if (TREE_INT_CST_HIGH (Input) < 0 && TYPE_UNSIGNED (gnu_type))
     return No_Uint;
 #endif
diff --git a/gcc/ada/gcc-interface/decl.c b/gcc/ada/gcc-interface/decl.c
index 0d72be255a59..ee76a9d160f3 100644
--- a/gcc/ada/gcc-interface/decl.c
+++ b/gcc/ada/gcc-interface/decl.c
@@ -4925,7 +4925,7 @@ gnat_to_gnu_entity (Entity_Id gnat_entity, tree gnu_expr, int definition)
 	  /* Consider an alignment as suspicious if the alignment/size
 	     ratio is greater or equal to the byte/bit ratio.  */
 	  if (tree_fits_uhwi_p (size)
-	      && align >= TREE_INT_CST_LOW (size) * BITS_PER_UNIT)
+	      && align >= tree_to_uhwi (size) * BITS_PER_UNIT)
 	    post_error_ne ("?suspiciously large alignment specified for&",
 			   Expression (Alignment_Clause (gnat_entity)),
 			   gnat_entity);
diff --git a/gcc/ada/gcc-interface/utils.c b/gcc/ada/gcc-interface/utils.c
index 4995ca4e92c6..36e5b2debf45 100644
--- a/gcc/ada/gcc-interface/utils.c
+++ b/gcc/ada/gcc-interface/utils.c
@@ -815,7 +815,7 @@ make_packable_type (tree type, bool in_record)
 
       /* Round the RM size up to a unit boundary to get the minimal size
 	 for a BLKmode record.  Give up if it's already the size.  */
-      new_size = TREE_INT_CST_LOW (TYPE_ADA_SIZE (type));
+      new_size = tree_to_uhwi (TYPE_ADA_SIZE (type));
       new_size = (new_size + BITS_PER_UNIT - 1) & -BITS_PER_UNIT;
       if (new_size == size)
 	return type;
diff --git a/gcc/alias.c b/gcc/alias.c
index 68650f021104..daa667c49d9a 100644
--- a/gcc/alias.c
+++ b/gcc/alias.c
@@ -341,8 +341,8 @@ ao_ref_from_mem (ao_ref *ref, const_rtx mem)
       && (ref->offset < 0
 	  || (DECL_P (ref->base)
 	      && (!tree_fits_uhwi_p (DECL_SIZE (ref->base))
-		  || (TREE_INT_CST_LOW (DECL_SIZE ((ref->base)))
-		      < (unsigned HOST_WIDE_INT)(ref->offset + ref->size))))))
+		  || (tree_to_uhwi (DECL_SIZE (ref->base))
+		      < (unsigned HOST_WIDE_INT) (ref->offset + ref->size))))))
     return false;
 
   return true;
diff --git a/gcc/builtins.c b/gcc/builtins.c
index e96245709c08..9074ee737cd5 100644
--- a/gcc/builtins.c
+++ b/gcc/builtins.c
@@ -8583,7 +8583,7 @@ fold_builtin_powi (location_t loc, tree fndecl ATTRIBUTE_UNUSED,
 
   if (tree_fits_shwi_p (arg1))
     {
-      HOST_WIDE_INT c = TREE_INT_CST_LOW (arg1);
+      HOST_WIDE_INT c = tree_to_shwi (arg1);
 
       /* Evaluate powi at compile-time.  */
       if (TREE_CODE (arg0) == REAL_CST
diff --git a/gcc/c-family/ChangeLog b/gcc/c-family/ChangeLog
index df1c40fcfe8f..4c5c153943e6 100644
--- a/gcc/c-family/ChangeLog
+++ b/gcc/c-family/ChangeLog
@@ -1,3 +1,14 @@
+2013-11-20  Kenneth Zadeck
+	    Mike Stump
+	    Richard Sandiford
+
+	* c-ada-spec.c (is_simple_enum): Use tree_to_shwi and tree_to_uhwi
+	instead of TREE_INT_CST_LOW, in cases where there is a protecting
+	tree_fits_shwi_p or tree_fits_uhwi_p.
+	(dump_generic_ada_node): Likewise.
+	* c-format.c (check_format_arg): Likewise.
+	* c-pretty-print.c (pp_c_integer_constant): Likewise.
+
 2013-11-20  Kenneth Zadeck
 
 	* c-common.c (check_function_arguments_recurse): Use tree_to_uhwi.
diff --git a/gcc/c-family/c-ada-spec.c b/gcc/c-family/c-ada-spec.c
index 731993337894..1724c748af5a 100644
--- a/gcc/c-family/c-ada-spec.c
+++ b/gcc/c-family/c-ada-spec.c
@@ -1803,7 +1803,7 @@ dump_ada_template (pretty_printer *buffer, tree t, int spc)
 static bool
 is_simple_enum (tree node)
 {
-  unsigned HOST_WIDE_INT count = 0;
+  HOST_WIDE_INT count = 0;
   tree value;
 
   for (value = TYPE_VALUES (node); value; value = TREE_CHAIN (value))
@@ -1815,7 +1815,7 @@ is_simple_enum (tree node)
 
 	  if (!tree_fits_shwi_p (int_val))
 	    return false;
-	  else if (TREE_INT_CST_LOW (int_val) != count)
+	  else if (tree_to_shwi (int_val) != count)
 	    return false;
 
 	  count++;
@@ -2206,9 +2206,9 @@ dump_generic_ada_node (pretty_printer *buffer, tree node, tree type, int spc,
 	  if (TREE_TYPE (node) == sizetype)
 	    node = fold_convert (ssizetype, node);
 	  if (tree_fits_shwi_p (node))
-	    pp_wide_integer (buffer, TREE_INT_CST_LOW (node));
+	    pp_wide_integer (buffer, tree_to_shwi (node));
 	  else if (tree_fits_uhwi_p (node))
-	    pp_unsigned_wide_integer (buffer, TREE_INT_CST_LOW (node));
+	    pp_unsigned_wide_integer (buffer, tree_to_uhwi (node));
 	  else
 	    {
 	      tree val = node;
diff --git a/gcc/c-family/c-format.c b/gcc/c-family/c-format.c
index 8f6c0db46ae1..0552c84464fc 100644
--- a/gcc/c-family/c-format.c
+++ b/gcc/c-family/c-format.c
@@ -1540,7 +1540,7 @@ check_format_arg (void *ctx, tree format_tree,
 
 	  if (tree_fits_shwi_p (array_size))
 	    {
-	      HOST_WIDE_INT array_size_value = TREE_INT_CST_LOW (array_size);
+	      HOST_WIDE_INT array_size_value = tree_to_shwi (array_size);
 	      if (array_size_value > 0
 		  && array_size_value == (int) array_size_value
 		  && format_length > array_size_value)
diff --git a/gcc/c-family/c-pretty-print.c b/gcc/c-family/c-pretty-print.c
index af8a3fcdbe93..d1b588033dd0 100644
--- a/gcc/c-family/c-pretty-print.c
+++ b/gcc/c-family/c-pretty-print.c
@@ -918,9 +918,9 @@ pp_c_integer_constant (c_pretty_printer *pp, tree i)
     : TREE_TYPE (i);
 
   if (tree_fits_shwi_p (i))
-    pp_wide_integer (pp, TREE_INT_CST_LOW (i));
+    pp_wide_integer (pp, tree_to_shwi (i));
   else if (tree_fits_uhwi_p (i))
-    pp_unsigned_wide_integer (pp, TREE_INT_CST_LOW (i));
+    pp_unsigned_wide_integer (pp, tree_to_uhwi (i));
   else
     {
       unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (i);
diff --git a/gcc/config/epiphany/epiphany.c b/gcc/config/epiphany/epiphany.c
index 88459e2d4adf..c264cdaee788 100644
--- a/gcc/config/epiphany/epiphany.c
+++ b/gcc/config/epiphany/epiphany.c
@@ -2763,10 +2763,10 @@ epiphany_special_round_type_align (tree type, unsigned computed,
       offset = bit_position (field);
       size = DECL_SIZE (field);
       if (!tree_fits_uhwi_p (offset) || !tree_fits_uhwi_p (size)
-	  || TREE_INT_CST_LOW (offset) >= try_align
-	  || TREE_INT_CST_LOW (size) >= try_align)
+	  || tree_to_uhwi (offset) >= try_align
+	  || tree_to_uhwi (size) >= try_align)
 	return try_align;
-      total = TREE_INT_CST_LOW (offset) + TREE_INT_CST_LOW (size);
+      total = tree_to_uhwi (offset) + tree_to_uhwi (size);
       if (total > max)
 	max = total;
     }
diff --git a/gcc/dbxout.c b/gcc/dbxout.c
index 88596a8d49c2..ad1533e601d8 100644
--- a/gcc/dbxout.c
+++ b/gcc/dbxout.c
@@ -2926,7 +2926,7 @@ dbxout_symbol (tree decl, int local ATTRIBUTE_UNUSED)
 	  if (TREE_CODE (TREE_TYPE (decl)) == INTEGER_TYPE
 	      || TREE_CODE (TREE_TYPE (decl)) == ENUMERAL_TYPE)
 	    {
-	      HOST_WIDE_INT ival = TREE_INT_CST_LOW (DECL_INITIAL (decl));
+	      HOST_WIDE_INT ival = tree_to_shwi (DECL_INITIAL (decl));
 
 	      dbxout_begin_complex_stabs ();
 	      dbxout_symbol_name (decl, NULL, 'c');
diff --git a/gcc/expr.c b/gcc/expr.c
index 53e85e503ee5..968438b2fbac 100644
--- a/gcc/expr.c
+++ b/gcc/expr.c
@@ -9662,7 +9662,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
 	    if (offset == 0
 		&& tree_fits_uhwi_p (TYPE_SIZE (type))
 		&& (GET_MODE_BITSIZE (DECL_MODE (base))
-		    == TREE_INT_CST_LOW (TYPE_SIZE (type))))
+		    == tree_to_uhwi (TYPE_SIZE (type))))
 	      return expand_expr (build1 (VIEW_CONVERT_EXPR, type, base),
 				  target, tmode, modifier);
 	    if (TYPE_MODE (type) == BLKmode)
diff --git a/gcc/fold-const.c b/gcc/fold-const.c
index 4e1c9a1b3cb8..f41ea3834243 100644
--- a/gcc/fold-const.c
+++ b/gcc/fold-const.c
@@ -6648,10 +6648,10 @@ fold_single_bit_test (location_t loc, enum tree_code code,
 	  && TREE_CODE (TREE_OPERAND (inner, 1)) == INTEGER_CST
 	  && tree_fits_uhwi_p (TREE_OPERAND (inner, 1))
 	  && bitnum < TYPE_PRECISION (type)
-	  && (TREE_INT_CST_LOW (TREE_OPERAND (inner, 1))
+	  && (tree_to_uhwi (TREE_OPERAND (inner, 1))
 	      < (unsigned) (TYPE_PRECISION (type) - bitnum)))
 	{
-	  bitnum += TREE_INT_CST_LOW (TREE_OPERAND (inner, 1));
+	  bitnum += tree_to_uhwi (TREE_OPERAND (inner, 1));
 	  inner = TREE_OPERAND (inner, 0);
 	}
 
@@ -7264,8 +7264,8 @@ fold_plusminus_mult_expr (location_t loc, enum tree_code code, tree type,
       HOST_WIDE_INT int01, int11, tmp;
       bool swap = false;
       tree maybe_same;
-      int01 = TREE_INT_CST_LOW (arg01);
-      int11 = TREE_INT_CST_LOW (arg11);
+      int01 = tree_to_shwi (arg01);
+      int11 = tree_to_shwi (arg11);
 
       /* Move min of absolute values to int11.  */
       if (absu_hwi (int01) < absu_hwi (int11))
@@ -12019,7 +12019,7 @@ fold_binary_loc (location_t loc,
       if (POINTER_TYPE_P (TREE_TYPE (arg0)) && tree_fits_uhwi_p (arg1))
 	{
 	  unsigned HOST_WIDE_INT modulus, residue;
-	  unsigned HOST_WIDE_INT low = TREE_INT_CST_LOW (arg1);
+	  unsigned HOST_WIDE_INT low = tree_to_uhwi (arg1);
 
 	  modulus = get_pointer_modulus_and_residue (arg0, &residue,
 						     integer_onep (arg1));
@@ -12650,12 +12650,12 @@ fold_binary_loc (location_t loc,
 
       /* Turn (a OP c1) OP c2 into a OP (c1+c2).  */
       if (TREE_CODE (op0) == code && tree_fits_uhwi_p (arg1)
-	  && TREE_INT_CST_LOW (arg1) < prec
+	  && tree_to_uhwi (arg1) < prec
 	  && tree_fits_uhwi_p (TREE_OPERAND (arg0, 1))
-	  && TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1)) < prec)
+	  && tree_to_uhwi (TREE_OPERAND (arg0, 1)) < prec)
 	{
-	  unsigned int low = (TREE_INT_CST_LOW (TREE_OPERAND (arg0, 1))
-			      + TREE_INT_CST_LOW (arg1));
+	  unsigned int low = (tree_to_uhwi (TREE_OPERAND (arg0, 1))
+			      + tree_to_uhwi (arg1));
 
 	  /* Deal with a OP (c1 + c2) being undefined but (a OP c1) OP c2
 	     being well defined.  */
diff --git a/gcc/gimple-fold.c b/gcc/gimple-fold.c
index 5493c5f3ea0c..891d47c3676d 100644
--- a/gcc/gimple-fold.c
+++ b/gcc/gimple-fold.c
@@ -3067,7 +3067,7 @@ fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
 	      doffset.fits_shwi ()))
 	{
 	  offset = doffset.to_shwi ();
-	  offset *= TREE_INT_CST_LOW (unit_size);
+	  offset *= tree_to_uhwi (unit_size);
 	  offset *= BITS_PER_UNIT;
 
 	  base = TREE_OPERAND (t, 0);
@@ -3083,7 +3083,7 @@ fold_const_aggregate_ref_1 (tree t, tree (*valueize) (tree))
 	  if (!ctor)
 	    return NULL_TREE;
 	  return fold_ctor_reference (TREE_TYPE (t), ctor, offset,
-				      TREE_INT_CST_LOW (unit_size)
+				      tree_to_uhwi (unit_size)
 				      * BITS_PER_UNIT,
 				      base);
 	}
diff --git a/gcc/gimple-ssa-strength-reduction.c b/gcc/gimple-ssa-strength-reduction.c
index 72c62844fa67..3ac9e4d33478 100644
--- a/gcc/gimple-ssa-strength-reduction.c
+++ b/gcc/gimple-ssa-strength-reduction.c
@@ -613,7 +613,7 @@ stmt_cost (gimple gs, bool speed)
       rhs2 = gimple_assign_rhs2 (gs);
 
       if (tree_fits_shwi_p (rhs2))
-	return mult_by_coeff_cost (TREE_INT_CST_LOW (rhs2), lhs_mode, speed);
+	return mult_by_coeff_cost (tree_to_shwi (rhs2), lhs_mode, speed);
 
       gcc_assert (TREE_CODE (rhs1) != INTEGER_CST);
       return mul_cost (speed, lhs_mode);
diff --git a/gcc/omp-low.c b/gcc/omp-low.c
index bf834bfd7b4b..5d7e504b2fa7 100644
--- a/gcc/omp-low.c
+++ b/gcc/omp-low.c
@@ -8836,7 +8836,7 @@ lower_omp_for_lastprivate (struct omp_for_data *fd, gimple_seq *body_p,
 	 type optimizations deduce the value and remove a copy.  */
       if (tree_fits_shwi_p (fd->loop.step))
 	{
-	  HOST_WIDE_INT step = TREE_INT_CST_LOW (fd->loop.step);
+	  HOST_WIDE_INT step = tree_to_shwi (fd->loop.step);
 	  if (step == 1 || step == -1)
 	    cond_code = EQ_EXPR;
 	}
diff --git a/gcc/simplify-rtx.c b/gcc/simplify-rtx.c
index b91ae1ade3f9..791f90dca2b4 100644
--- a/gcc/simplify-rtx.c
+++ b/gcc/simplify-rtx.c
@@ -305,7 +305,7 @@ delegitimize_mem_from_attrs (rtx x)
 	    {
 	      offset += bitpos / BITS_PER_UNIT;
 	      if (toffset)
-		offset += TREE_INT_CST_LOW (toffset);
+		offset += tree_to_shwi (toffset);
 	    }
 	  break;
 	}
diff --git a/gcc/stor-layout.c b/gcc/stor-layout.c
index 0dece714bf8d..6138b63d2d9f 100644
--- a/gcc/stor-layout.c
+++ b/gcc/stor-layout.c
@@ -1625,7 +1625,7 @@ compute_record_mode (tree type)
      does not apply to unions.  */
   if (TREE_CODE (type) == RECORD_TYPE && mode != VOIDmode
       && tree_fits_uhwi_p (TYPE_SIZE (type))
-      && GET_MODE_BITSIZE (mode) == TREE_INT_CST_LOW (TYPE_SIZE (type)))
+      && GET_MODE_BITSIZE (mode) == tree_to_uhwi (TYPE_SIZE (type)))
     SET_TYPE_MODE (type, mode);
   else
     SET_TYPE_MODE (type, mode_for_size_tree (TYPE_SIZE (type), MODE_INT, 1));
diff --git a/gcc/tree-cfg.c b/gcc/tree-cfg.c
index b9fb7195bd2c..f64fc5206565 100644
--- a/gcc/tree-cfg.c
+++ b/gcc/tree-cfg.c
@@ -2715,7 +2715,7 @@ verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
 	    }
 	  if (INTEGRAL_TYPE_P (TREE_TYPE (t))
 	      && (TYPE_PRECISION (TREE_TYPE (t))
-		  != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
+		  != tree_to_uhwi (TREE_OPERAND (t, 1))))
 	    {
 	      error ("integral result type precision does not match "
 		     "field size of BIT_FIELD_REF");
@@ -2724,7 +2724,7 @@ verify_expr (tree *tp, int *walk_subtrees, void *data ATTRIBUTE_UNUSED)
 	  else if (!INTEGRAL_TYPE_P (TREE_TYPE (t))
 		   && TYPE_MODE (TREE_TYPE (t)) != BLKmode
 		   && (GET_MODE_PRECISION (TYPE_MODE (TREE_TYPE (t)))
-		       != TREE_INT_CST_LOW (TREE_OPERAND (t, 1))))
+		       != tree_to_uhwi (TREE_OPERAND (t, 1))))
 	    {
 	      error ("mode precision of non-integral result does not "
 		     "match field size of BIT_FIELD_REF");
diff --git a/gcc/tree-dfa.c b/gcc/tree-dfa.c
index 0d1eb999ccbb..0ecf7a3e1554 100644
--- a/gcc/tree-dfa.c
+++ b/gcc/tree-dfa.c
@@ -413,7 +413,7 @@ get_ref_base_and_extent (tree exp, HOST_WIDE_INT *poffset,
       if (! tree_fits_uhwi_p (size_tree))
 	bitsize = -1;
       else
-	bitsize = TREE_INT_CST_LOW (size_tree);
+	bitsize = tree_to_uhwi (size_tree);
     }
 
   /* Initially, maxsize is the same as the accessed element size.
@@ -461,8 +461,8 @@ get_ref_base_and_extent (tree exp, HOST_WIDE_INT *poffset,
 		  if (tree_fits_shwi_p (fsize)
 		      && tree_fits_shwi_p (ssize)
 		      && doffset.fits_shwi ())
-		    maxsize += ((TREE_INT_CST_LOW (ssize)
-				 - TREE_INT_CST_LOW (fsize))
+		    maxsize += ((tree_to_shwi (ssize)
+				 - tree_to_shwi (fsize))
 				* BITS_PER_UNIT - doffset.to_shwi ());
 		  else
 		    maxsize = -1;
@@ -480,8 +480,7 @@ get_ref_base_and_extent (tree exp, HOST_WIDE_INT *poffset,
 		  && csize
 		  && tree_fits_uhwi_p (csize)
 		  && bit_offset.fits_shwi ())
-		maxsize = TREE_INT_CST_LOW (csize)
-			  - bit_offset.to_shwi ();
+		maxsize = tree_to_uhwi (csize) - bit_offset.to_shwi ();
 	      else
 		maxsize = -1;
 	    }
@@ -524,8 +523,7 @@ get_ref_base_and_extent (tree exp, HOST_WIDE_INT *poffset,
 		  && asize
 		  && tree_fits_uhwi_p (asize)
 		  && bit_offset.fits_shwi ())
-		maxsize = TREE_INT_CST_LOW (asize)
-			  - bit_offset.to_shwi ();
+		maxsize = tree_to_uhwi (asize) - bit_offset.to_shwi ();
 	      else
 		maxsize = -1;
 
@@ -574,7 +572,7 @@ get_ref_base_and_extent (tree exp, HOST_WIDE_INT *poffset,
 	  && (!bit_offset.fits_shwi ()
 	      || !tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (exp)))
 	      || (bit_offset.to_shwi () + maxsize
-		  == (HOST_WIDE_INT) TREE_INT_CST_LOW
+		  == (HOST_WIDE_INT) tree_to_uhwi
 			(TYPE_SIZE (TREE_TYPE (exp))))))
 	maxsize = -1;
 
@@ -611,7 +609,7 @@ get_ref_base_and_extent (tree exp, HOST_WIDE_INT *poffset,
 	  && (!bit_offset.fits_shwi ()
 	      || !tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (exp)))
 	      || (bit_offset.to_shwi () + maxsize
-		  == (HOST_WIDE_INT) TREE_INT_CST_LOW
+		  == (HOST_WIDE_INT) tree_to_uhwi
 			(TYPE_SIZE (TREE_TYPE (exp))))))
 	maxsize = -1;
 
@@ -635,7 +633,7 @@ get_ref_base_and_extent (tree exp, HOST_WIDE_INT *poffset,
 	 base decl.  */
       if (maxsize == -1
 	  && tree_fits_uhwi_p (DECL_SIZE (exp)))
-	maxsize = TREE_INT_CST_LOW (DECL_SIZE (exp)) - hbit_offset;
+	maxsize = tree_to_uhwi (DECL_SIZE (exp)) - hbit_offset;
     }
   else if (CONSTANT_CLASS_P (exp))
     {
@@ -643,7 +641,7 @@ get_ref_base_and_extent (tree exp, HOST_WIDE_INT *poffset,
 	 base type constant.  */
       if (maxsize == -1
 	  && tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (exp))))
-	maxsize = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp))) - hbit_offset;
+	maxsize = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (exp))) - hbit_offset;
     }
 
   /* ??? Due to negative offsets in ARRAY_REF we can end up with
diff --git a/gcc/tree-pretty-print.c b/gcc/tree-pretty-print.c
index 7ed48833fc33..a18b42dc9e52 100644
--- a/gcc/tree-pretty-print.c
+++ b/gcc/tree-pretty-print.c
@@ -275,7 +275,7 @@ dump_array_domain (pretty_printer *buffer, tree domain, int spc, int flags)
 
 	  if (min && max && integer_zerop (min)
 	      && tree_fits_shwi_p (max))
-	    pp_wide_integer (buffer, TREE_INT_CST_LOW (max) + 1);
+	    pp_wide_integer (buffer, tree_to_shwi (max) + 1);
 	  else
 	    {
 	      if (min)
diff --git a/gcc/tree-sra.c b/gcc/tree-sra.c
index 5432048854ee..0102f3fe986e 100644
--- a/gcc/tree-sra.c
+++ b/gcc/tree-sra.c
@@ -1656,12 +1656,12 @@ build_user_friendly_ref_for_offset (tree *res, tree type, HOST_WIDE_INT offset,
 	  tr_pos = bit_position (fld);
 	  if (!tr_pos || !tree_fits_uhwi_p (tr_pos))
 	    continue;
-	  pos = TREE_INT_CST_LOW (tr_pos);
+	  pos = tree_to_uhwi (tr_pos);
 	  gcc_assert (TREE_CODE (type) == RECORD_TYPE || pos == 0);
 	  tr_size = DECL_SIZE (fld);
 	  if (!tr_size || !tree_fits_uhwi_p (tr_size))
 	    continue;
-	  size = TREE_INT_CST_LOW (tr_size);
+	  size = tree_to_uhwi (tr_size);
 	  if (size == 0)
 	    {
 	      if (pos != offset)
diff --git a/gcc/tree-ssa-ccp.c b/gcc/tree-ssa-ccp.c
index 3a9875de2e85..5eb5ffd33a13 100644
--- a/gcc/tree-ssa-ccp.c
+++ b/gcc/tree-ssa-ccp.c
@@ -1886,7 +1886,7 @@ fold_builtin_alloca_with_align (gimple stmt)
       || !tree_fits_uhwi_p (arg))
     return NULL_TREE;
 
-  size = TREE_INT_CST_LOW (arg);
+  size = tree_to_uhwi (arg);
 
   /* Heuristic: don't fold large allocas.  */
   threshold = (unsigned HOST_WIDE_INT)PARAM_VALUE (PARAM_LARGE_STACK_FRAME);
diff --git a/gcc/tree-ssa-loop-ivopts.c b/gcc/tree-ssa-loop-ivopts.c
index 6d7d78ea6e45..476f3a1f286f 100644
--- a/gcc/tree-ssa-loop-ivopts.c
+++ b/gcc/tree-ssa-loop-ivopts.c
@@ -4014,7 +4014,7 @@ get_loop_invariant_expr_id (struct ivopts_data *data, tree ubase,
       tree ind = TREE_OPERAND (usym, 1);
       if (TREE_CODE (ind) == INTEGER_CST
 	  && tree_fits_shwi_p (ind)
-	  && TREE_INT_CST_LOW (ind) == 0)
+	  && tree_to_shwi (ind) == 0)
 	usym = TREE_OPERAND (usym, 0);
     }
   if (TREE_CODE (csym) == ARRAY_REF)
@@ -4022,7 +4022,7 @@ get_loop_invariant_expr_id (struct ivopts_data *data, tree ubase,
       tree ind = TREE_OPERAND (csym, 1);
       if (TREE_CODE (ind) == INTEGER_CST
 	  && tree_fits_shwi_p (ind)
-	  && TREE_INT_CST_LOW (ind) == 0)
+	  && tree_to_shwi (ind) == 0)
 	csym = TREE_OPERAND (csym, 0);
     }
   if (operand_equal_p (usym, csym, 0))
diff --git a/gcc/tree-ssa-math-opts.c b/gcc/tree-ssa-math-opts.c
index ce7116ee4fcd..21750312673b 100644
--- a/gcc/tree-ssa-math-opts.c
+++ b/gcc/tree-ssa-math-opts.c
@@ -1508,7 +1508,7 @@ execute_cse_sincos (void)
 		  if (!tree_fits_shwi_p (arg1))
 		    break;
 
-		  n = TREE_INT_CST_LOW (arg1);
+		  n = tree_to_shwi (arg1);
 		  result = gimple_expand_builtin_powi (&gsi, loc, arg0, n);
 		}
 
diff --git a/gcc/tree-ssa-phiopt.c b/gcc/tree-ssa-phiopt.c
index 54a981935fdd..188143269f51 100644
--- a/gcc/tree-ssa-phiopt.c
+++ b/gcc/tree-ssa-phiopt.c
@@ -1983,9 +1983,9 @@ hoist_adjacent_loads (basic_block bb0, basic_block bb1,
 	  || !tree_fits_uhwi_p (tree_size2))
 	continue;
 
-      offset1 = TREE_INT_CST_LOW (tree_offset1);
-      offset2 = TREE_INT_CST_LOW (tree_offset2);
-      size2 = TREE_INT_CST_LOW (tree_size2);
+      offset1 = tree_to_uhwi (tree_offset1);
+      offset2 = tree_to_uhwi (tree_offset2);
+      size2 = tree_to_uhwi (tree_size2);
       align1 = DECL_ALIGN (field1) % param_align_bits;
 
       if (offset1 % BITS_PER_UNIT != 0)
diff --git a/gcc/tree-ssa-reassoc.c b/gcc/tree-ssa-reassoc.c
index 4c4924c721c2..a67720139e96 100644
--- a/gcc/tree-ssa-reassoc.c
+++ b/gcc/tree-ssa-reassoc.c
@@ -3641,7 +3641,7 @@ acceptable_pow_call (gimple stmt, tree *base, HOST_WIDE_INT *exponent)
       if (!tree_fits_shwi_p (arg1))
 	return false;
 
-      *exponent = TREE_INT_CST_LOW (arg1);
+      *exponent = tree_to_shwi (arg1);
       break;
 
     default:
diff --git a/gcc/tree-ssa-sccvn.c b/gcc/tree-ssa-sccvn.c
index 2b2d0e9b997b..1b9514e21088 100644
--- a/gcc/tree-ssa-sccvn.c
+++ b/gcc/tree-ssa-sccvn.c
@@ -786,7 +786,7 @@ copy_reference_ops_from_ref (tree ref, vec<vn_reference_op_s> *result)
 	  /* The base address gets its own vn_reference_op_s structure.  */
 	  temp.op0 = TREE_OPERAND (ref, 1);
 	  if (tree_fits_shwi_p (TREE_OPERAND (ref, 1)))
-	    temp.off = TREE_INT_CST_LOW (TREE_OPERAND (ref, 1));
+	    temp.off = tree_to_shwi (TREE_OPERAND (ref, 1));
 	  break;
 	case BIT_FIELD_REF:
 	  /* Record bits and position.  */
@@ -950,7 +950,7 @@ ao_ref_init_from_vn_reference (ao_ref *ref,
       if (!tree_fits_uhwi_p (size_tree))
 	size = -1;
       else
-	size = TREE_INT_CST_LOW (size_tree);
+	size = tree_to_uhwi (size_tree);
     }
 
   /* Initially, maxsize is the same as the accessed element size.
@@ -1021,7 +1021,7 @@ ao_ref_init_from_vn_reference (ao_ref *ref,
 	    max_size = -1;
 	  else
 	    {
-	      offset += (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (field))
+	      offset += (tree_to_uhwi (DECL_FIELD_OFFSET (field))
 			 * BITS_PER_UNIT);
 	      offset += TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (field));
 	    }
@@ -1037,9 +1037,9 @@ ao_ref_init_from_vn_reference (ao_ref *ref,
 	    max_size = -1;
 	  else
 	    {
-	      HOST_WIDE_INT hindex = TREE_INT_CST_LOW (op->op0);
-	      hindex -= TREE_INT_CST_LOW (op->op1);
-	      hindex *= TREE_INT_CST_LOW (op->op2);
+	      HOST_WIDE_INT hindex = tree_to_shwi (op->op0);
+	      hindex -= tree_to_shwi (op->op1);
+	      hindex *= tree_to_shwi (op->op2);
 	      hindex *= BITS_PER_UNIT;
 	      offset += hindex;
 	    }
@@ -1168,7 +1168,7 @@ vn_reference_fold_indirect (vec<vn_reference_op_s> *ops,
   mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
   op->op0 = build_fold_addr_expr (addr_base);
   if (tree_fits_shwi_p (mem_op->op0))
-    mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
+    mem_op->off = tree_to_shwi (mem_op->op0);
   else
     mem_op->off = -1;
 }
@@ -1233,7 +1233,7 @@ vn_reference_maybe_forwprop_address (vec<vn_reference_op_s> *ops,
 
   mem_op->op0 = double_int_to_tree (TREE_TYPE (mem_op->op0), off);
   if (tree_fits_shwi_p (mem_op->op0))
-    mem_op->off = TREE_INT_CST_LOW (mem_op->op0);
+    mem_op->off = tree_to_shwi (mem_op->op0);
   else
     mem_op->off = -1;
   if (TREE_CODE (op->op0) == SSA_NAME)
@@ -1604,9 +1604,9 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
       tree base2;
       HOST_WIDE_INT offset2, size2, maxsize2;
       base2 = get_ref_base_and_extent (ref2, &offset2, &size2, &maxsize2);
-      size2 = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2)) * 8;
+      size2 = tree_to_uhwi (gimple_call_arg (def_stmt, 2)) * 8;
       if ((unsigned HOST_WIDE_INT)size2 / 8
-	  == TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2))
+	  == tree_to_uhwi (gimple_call_arg (def_stmt, 2))
 	  && maxsize2 != -1
 	  && operand_equal_p (base, base2, 0)
 	  && offset2 <= offset
@@ -1876,7 +1876,7 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
 	  && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
 	{
 	  lhs = TREE_OPERAND (tem, 0);
-	  lhs_offset += TREE_INT_CST_LOW (TREE_OPERAND (tem, 1));
+	  lhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
 	}
       else if (DECL_P (tem))
 	lhs = build_fold_addr_expr (tem);
@@ -1902,7 +1902,7 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
 	  && tree_fits_uhwi_p (TREE_OPERAND (tem, 1)))
 	{
 	  rhs = TREE_OPERAND (tem, 0);
-	  rhs_offset += TREE_INT_CST_LOW (TREE_OPERAND (tem, 1));
+	  rhs_offset += tree_to_uhwi (TREE_OPERAND (tem, 1));
 	}
       else if (DECL_P (tem))
 	rhs = build_fold_addr_expr (tem);
@@ -1913,7 +1913,7 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
 	  && TREE_CODE (rhs) != ADDR_EXPR)
 	return (void *)-1;
 
-      copy_size = TREE_INT_CST_LOW (gimple_call_arg (def_stmt, 2));
+      copy_size = tree_to_uhwi (gimple_call_arg (def_stmt, 2));
 
       /* The bases of the destination and the references have to agree.  */
       if ((TREE_CODE (base) != MEM_REF
@@ -1929,7 +1929,7 @@ vn_reference_lookup_3 (ao_ref *ref, tree vuse, void *vr_)
       /* And the access has to be contained within the memcpy destination.  */
       at = offset / BITS_PER_UNIT;
       if (TREE_CODE (base) == MEM_REF)
-	at += TREE_INT_CST_LOW (TREE_OPERAND (base, 1));
+	at += tree_to_uhwi (TREE_OPERAND (base, 1));
       if (lhs_offset > at
 	  || lhs_offset + copy_size < at + maxsize / BITS_PER_UNIT)
 	return (void *)-1;
@@ -3237,7 +3237,7 @@ simplify_binary_expression (gimple stmt)
 	  && is_gimple_min_invariant (op0))
 	return build_invariant_address (TREE_TYPE (op0),
 					TREE_OPERAND (op0, 0),
-					TREE_INT_CST_LOW (op1));
+					tree_to_uhwi (op1));
 
   /* Avoid folding if nothing changed.  */
   if (op0 == gimple_assign_rhs1 (stmt)
diff --git a/gcc/tree-ssa-structalias.c b/gcc/tree-ssa-structalias.c
index 688f547d03ad..6174dcf8ecf7 100644
--- a/gcc/tree-ssa-structalias.c
+++ b/gcc/tree-ssa-structalias.c
@@ -3002,8 +3002,8 @@ bitpos_of_field (const tree fdecl)
       || !tree_fits_shwi_p (DECL_FIELD_BIT_OFFSET (fdecl)))
     return -1;
 
-  return (TREE_INT_CST_LOW (DECL_FIELD_OFFSET (fdecl)) * BITS_PER_UNIT
-	  + TREE_INT_CST_LOW (DECL_FIELD_BIT_OFFSET (fdecl)));
+  return (tree_to_shwi (DECL_FIELD_OFFSET (fdecl)) * BITS_PER_UNIT
+	  + tree_to_shwi (DECL_FIELD_BIT_OFFSET (fdecl)));
 }
 
 
@@ -3434,7 +3434,7 @@ get_constraint_for_1 (tree t, vec<ce_s> *results, bool address_p,
 	    {
 	      unsigned HOST_WIDE_INT size;
 	      if (tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (t))))
-		size = TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (t)));
+		size = tree_to_uhwi (TYPE_SIZE (TREE_TYPE (t)));
 	      else
 		size = -1;
 	      for (; curr; curr = vi_next (curr))
@@ -5364,7 +5364,7 @@ push_fields_onto_fieldstack (tree type, vec<fieldoff_s> *fieldstack,
 	    && !pair->has_unknown_size
 	    && pair->offset + (HOST_WIDE_INT)pair->size == offset + foff)
 	  {
-	    pair->size += TREE_INT_CST_LOW (DECL_SIZE (field));
+	    pair->size += tree_to_uhwi (DECL_SIZE (field));
 	  }
 	else
 	  {
@@ -5372,7 +5372,7 @@ push_fields_onto_fieldstack (tree type, vec<fieldoff_s> *fieldstack,
 	    e.offset = offset + foff;
 	    e.has_unknown_size = has_unknown_size;
 	    if (!has_unknown_size)
-	      e.size = TREE_INT_CST_LOW (DECL_SIZE (field));
+	      e.size = tree_to_uhwi (DECL_SIZE (field));
 	    else
 	      e.size = -1;
 	    e.must_have_pointers = must_have_pointers_p;
@@ -5689,7 +5689,7 @@ create_variable_info_for_1 (tree decl, const char *name)
       vi = new_var_info (decl, name);
       vi->offset = 0;
       vi->may_have_pointers = true;
-      vi->fullsize = TREE_INT_CST_LOW (declsize);
+      vi->fullsize = tree_to_uhwi (declsize);
       vi->size = vi->fullsize;
       vi->is_full_var = true;
       fieldstack.release ();
@@ -5697,7 +5697,7 @@ create_variable_info_for_1 (tree decl, const char *name)
     }
 
   vi = new_var_info (decl, name);
-  vi->fullsize = TREE_INT_CST_LOW (declsize);
+  vi->fullsize = tree_to_uhwi (declsize);
   for (i = 0, newvi = vi;
       fieldstack.iterate (i, &fo);
      ++i, newvi = vi_next (newvi))
diff --git a/gcc/tree-vect-data-refs.c b/gcc/tree-vect-data-refs.c
index 259a28461292..42b0d22ec720 100644
--- a/gcc/tree-vect-data-refs.c
+++ b/gcc/tree-vect-data-refs.c
@@ -783,7 +783,7 @@ vect_compute_data_ref_alignment (struct data_reference *dr)
       return false;
     }
 
-  SET_DR_MISALIGNMENT (dr, TREE_INT_CST_LOW (misalign));
+  SET_DR_MISALIGNMENT (dr, tree_to_uhwi (misalign));
 
   if (dump_enabled_p ())
     {
@@ -965,7 +965,7 @@ not_size_aligned (tree exp)
   if (!tree_fits_uhwi_p (TYPE_SIZE (TREE_TYPE (exp))))
     return true;
 
-  return (TREE_INT_CST_LOW (TYPE_SIZE (TREE_TYPE (exp)))
+  return (tree_to_uhwi (TYPE_SIZE (TREE_TYPE (exp)))
 	  > get_object_alignment (exp));
 }
 
@@ -2571,13 +2571,13 @@ vect_analyze_data_ref_accesses (loop_vec_info loop_vinfo, bb_vec_info bb_vinfo)
 	  /* If init_b == init_a + the size of the type * k, we have an
 	     interleaving, and DRA is accessed before DRB.  */
-	  HOST_WIDE_INT type_size_a = TREE_INT_CST_LOW (sza);
+	  HOST_WIDE_INT type_size_a = tree_to_uhwi (sza);
 
 	  if ((init_b - init_a) % type_size_a != 0)
 	    break;
 
 	  /* The step (if not zero) is greater than the difference
 	     between data-refs' inits.  This splits groups into suitable sizes.  */
-	  HOST_WIDE_INT step = TREE_INT_CST_LOW (DR_STEP (dra));
+	  HOST_WIDE_INT step = tree_to_shwi (DR_STEP (dra));
 	  if (step != 0 && step <= (init_b - init_a))
 	    break;
 
@@ -2883,8 +2883,8 @@ vect_prune_runtime_alias_test_list (loop_vec_info loop_vinfo)
 	      || !tree_fits_shwi_p (dr_a2->offset))
 	    continue;
 
-	  HOST_WIDE_INT diff = TREE_INT_CST_LOW (dr_a2->offset)
-			       - TREE_INT_CST_LOW (dr_a1->offset);
+	  HOST_WIDE_INT diff = (tree_to_shwi (dr_a2->offset)
+				- tree_to_shwi (dr_a1->offset));
 
 	  /* Now we check if the following condition is satisfied:
diff --git a/gcc/tree-vectorizer.h b/gcc/tree-vectorizer.h
index a6c5b596e9b6..1634d7fa7f5e 100644
--- a/gcc/tree-vectorizer.h
+++ b/gcc/tree-vectorizer.h
@@ -389,7 +389,7 @@ typedef struct _loop_vec_info {
 
 #define NITERS_KNOWN_P(n)          \
 (tree_fits_shwi_p ((n))            \
-&& TREE_INT_CST_LOW ((n)) > 0)
+&& tree_to_shwi ((n)) > 0)
 
 #define LOOP_VINFO_NITERS_KNOWN_P(L)          \
 NITERS_KNOWN_P ((L)->num_iters)
-- 
2.39.5