git.ipfire.org Git - thirdparty/gcc.git/commitdiff
2013-11-27 Bernd Edlinger <bernd.edlinger@hotmail.de>
authorBernd Edlinger <edlinger@gcc.gnu.org>
Wed, 27 Nov 2013 16:33:01 +0000 (16:33 +0000)
committerBernd Edlinger <edlinger@gcc.gnu.org>
Wed, 27 Nov 2013 16:33:01 +0000 (16:33 +0000)
reverted r205398 on request: Remove parameter keep_aligning from
get_inner_reference.

From-SVN: r205452

22 files changed:
gcc/ChangeLog
gcc/ada/ChangeLog
gcc/ada/gcc-interface/decl.c
gcc/ada/gcc-interface/trans.c
gcc/ada/gcc-interface/utils2.c
gcc/asan.c
gcc/builtins.c
gcc/cfgexpand.c
gcc/config/mips/mips.c
gcc/dbxout.c
gcc/dwarf2out.c
gcc/expr.c
gcc/fold-const.c
gcc/gimple-ssa-strength-reduction.c
gcc/simplify-rtx.c
gcc/tree-affine.c
gcc/tree-data-ref.c
gcc/tree-scalar-evolution.c
gcc/tree-ssa-loop-ivopts.c
gcc/tree-vect-data-refs.c
gcc/tree.h
gcc/tsan.c

index 1ef702c50fee5c6255b32e396ef2ea06c2777864..39a4f1e71926f645ea6f6df9713ea2b9cb774999 100644 (file)
        for the inner loop if collapse_bb is non-NULL.
        (expand_omp_simd): Use cont_bb rather than e->dest as latch.
 
-2013-11-26  Bernd Edlinger  <bernd.edlinger@hotmail.de>
-
-       Remove parameter keep_aligning from get_inner_reference.
-       * tree.h (get_inner_reference): Adjust header.
-       * expr.c (get_inner_reference): Remove parameter keep_aligning.
-       (get_bit_range, expand_assignment,
-       expand_expr_addr_expr_1, expand_expr_real_1): Adjust.
-       * asan.c (instrument_derefs): Adjust.
-       * builtins.c (get_object_alignment_2): Adjust. Remove handling of
-       VIEW_CONVERT_EXPR.
-       * cfgexpand.c (expand_debug_expr): Adjust.
-       * dbxout.c (dbxout_expand_expr): Adjust.
-       * dwarf2out.c (loc_list_for_address_of_addr_expr_of_indirect_ref,
-       loc_list_from_tree, fortran_common): Adjust.
-       * fold-const.c (optimize_bit_field_compare,
-       decode_field_reference, fold_unary_loc, fold_comparison,
-       split_address_to_core_and_offset): Adjust.
-       * gimple-ssa-strength-reduction.c (slsr_process_ref): Adjust.
-       * simplifx-rtx.c (delegitimize_mem_from_attrs): Adjust.
-       * tree-affine.c (tree_to_aff_combination,
-       get_inner_reference_aff): Adjust.
-       * tree-data-ref.c (split_constant_offset_1,
-       dr_analyze_innermost): Adjust.
-       * tree-vect-data-refs.c (vect_check_gather,
-       vect_analyze_data_refs): Adjust.
-       * tree-scalar-evolution.c (interpret_rhs_expr): Adjust.
-       * tree-ssa-loop-ivopts.c (may_be_unaligned_p,
-       split_address_cost): Adjust.
-       * tsan.c (instrument_expr): Adjust.
-       * config/mips/mips.c (r10k_safe_mem_expr_p): Adjust.
-
 2013-11-26  Yufeng Zhang  <yufeng.zhang@arm.com>
 
        * config/arm/arm.c (arm_legitimize_address): Check xop1 is not
index d4908a0d80b830deb2cdc2b0d269668f9a26415a..8bb3462bc5a51aadf7c1b9b5997308c0b4e08098 100644 (file)
@@ -1,10 +1,3 @@
-2013-11-26  Bernd Edlinger  <bernd.edlinger@hotmail.de>
-
-       Remove parameter keep_aligning from get_inner_reference.
-       * gcc-interface/decl.c (elaborate_expression_1): Adjust.
-       * gcc-interface/trans.c (Attribute_to_gnu): Adjust.
-       * gcc-interface/utils2.c (build_unary_op): Adjust.
-
 2013-11-23  Eric Botcazou  <ebotcazou@adacore.com>
 
        * gcc-interface/trans.c (Loop_Statement_to_gnu): Set TREE_SIDE_EFFECTS
index 51adf18e0a32ac058b744f0a9cb765e4bf9f6f34..ee76a9d160f38737015ecdc1f74056f70c0ad704 100644 (file)
@@ -6269,7 +6269,7 @@ elaborate_expression_1 (tree gnu_expr, Entity_Id gnat_entity, tree gnu_name,
          int unsignedp, volatilep;
 
          inner = get_inner_reference (inner, &bitsize, &bitpos, &offset,
-                                      &mode, &unsignedp, &volatilep);
+                                      &mode, &unsignedp, &volatilep, false);
          /* If the offset is variable, err on the side of caution.  */
          if (offset)
            inner = NULL_TREE;
index 6c7a418662fedc637a9608c04884a29c7e977ec7..e533de6dcbf7a33caabf4cdcbb859704fe4d12df 100644 (file)
@@ -2060,7 +2060,7 @@ Attribute_to_gnu (Node_Id gnat_node, tree *gnu_result_type_p, int attribute)
                          && TREE_CODE (gnu_prefix) == FIELD_DECL));
 
        get_inner_reference (gnu_prefix, &bitsize, &bitpos, &gnu_offset,
-                            &mode, &unsignedp, &volatilep);
+                            &mode, &unsignedp, &volatilep, false);
 
        if (TREE_CODE (gnu_prefix) == COMPONENT_REF)
          {
index b6299cbeefada57746aa7be27861e152632c34ed..224a87d87774071749a8855254c74ec7d82f4f67 100644 (file)
@@ -1312,7 +1312,8 @@ build_unary_op (enum tree_code op_code, tree result_type, tree operand)
              int unsignedp, volatilep;
 
              inner = get_inner_reference (operand, &bitsize, &bitpos, &offset,
-                                          &mode, &unsignedp, &volatilep);
+                                          &mode, &unsignedp, &volatilep,
+                                          false);
 
              /* If INNER is a padding type whose field has a self-referential
                 size, convert to that inner type.  We know the offset is zero
index c901e1da380f43a946322a27f84e0bd002e50034..677435e05aed34cdfc96749f4e20046174da9b81 100644 (file)
@@ -1488,7 +1488,7 @@ instrument_derefs (gimple_stmt_iterator *iter, tree t,
   enum machine_mode mode;
   int volatilep = 0, unsignedp = 0;
   tree inner = get_inner_reference (t, &bitsize, &bitpos, &offset,
-                                   &mode, &unsignedp, &volatilep);
+                                   &mode, &unsignedp, &volatilep, false);
   if (bitpos % (size_in_bytes * BITS_PER_UNIT)
       || bitsize != size_in_bytes * BITS_PER_UNIT)
     {
index 3f03b01281b993c55af64dc92841e67f5210ca07..d2248ea5ec617094e0aab6e969e3c9ff1fd627c1 100644 (file)
@@ -329,7 +329,7 @@ get_object_alignment_2 (tree exp, unsigned int *alignp,
   /* Get the innermost object and the constant (bitpos) and possibly
      variable (offset) offset of the access.  */
   exp = get_inner_reference (exp, &bitsize, &bitpos, &offset,
-                            &mode, &unsignedp, &volatilep);
+                            &mode, &unsignedp, &volatilep, true);
 
   /* Extract alignment information from the innermost object and
      possibly adjust bitpos and offset.  */
@@ -360,6 +360,10 @@ get_object_alignment_2 (tree exp, unsigned int *alignp,
       align = DECL_ALIGN (exp);
       known_alignment = true;
     }
+  else if (TREE_CODE (exp) == VIEW_CONVERT_EXPR)
+    {
+      align = TYPE_ALIGN (TREE_TYPE (exp));
+    }
   else if (TREE_CODE (exp) == INDIRECT_REF
           || TREE_CODE (exp) == MEM_REF
           || TREE_CODE (exp) == TARGET_MEM_REF)
index 98983f40b3b1da443e848ae6693328bac458b549..207f8767573f2cfff65414e54f4794b2b8ed5f0e 100644 (file)
@@ -3941,7 +3941,7 @@ expand_debug_expr (tree exp)
        tree offset;
        int volatilep = 0;
        tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
-                                       &mode1, &unsignedp, &volatilep);
+                                       &mode1, &unsignedp, &volatilep, false);
        rtx orig_op0;
 
        if (bitsize == 0)
index 69e67be49739fb58d57755c858456e8e516176b6..36ba6df7a4cfdd7e0c87f78ef44601b3baf035df 100644 (file)
@@ -14948,7 +14948,7 @@ r10k_safe_mem_expr_p (tree expr, unsigned HOST_WIDE_INT offset)
   int unsigned_p, volatile_p;
 
   inner = get_inner_reference (expr, &bitsize, &bitoffset, &var_offset, &mode,
-                              &unsigned_p, &volatile_p);
+                              &unsigned_p, &volatile_p, false);
   if (!DECL_P (inner) || !DECL_SIZE_UNIT (inner) || var_offset)
     return false;
 
index 5988c7e1bd4085acc44b4b5b39346ca61ae805da..bc6a3af0f96e52c4430541c2e49158b30e0f12b8 100644 (file)
@@ -2515,7 +2515,7 @@ dbxout_expand_expr (tree expr)
        rtx x;
 
        tem = get_inner_reference (expr, &bitsize, &bitpos, &offset,
-                                  &mode, &unsignedp, &volatilep);
+                                  &mode, &unsignedp, &volatilep, true);
 
        x = dbxout_expand_expr (tem);
        if (x == NULL || !MEM_P (x))
index 6376306802ab46869e39a97bc46cb7fc7c6b2c6e..3448ec4a98c7f8a3b118952b5f3c680ad0badd5f 100644 (file)
@@ -13934,7 +13934,7 @@ loc_list_for_address_of_addr_expr_of_indirect_ref (tree loc, bool toplev)
 
   obj = get_inner_reference (TREE_OPERAND (loc, 0),
                             &bitsize, &bitpos, &offset, &mode,
-                            &unsignedp, &volatilep);
+                            &unsignedp, &volatilep, false);
   STRIP_NOPS (obj);
   if (bitpos % BITS_PER_UNIT)
     {
@@ -14211,7 +14211,7 @@ loc_list_from_tree (tree loc, int want_address)
        int unsignedp, volatilep = 0;
 
        obj = get_inner_reference (loc, &bitsize, &bitpos, &offset, &mode,
-                                  &unsignedp, &volatilep);
+                                  &unsignedp, &volatilep, false);
 
        gcc_assert (obj != loc);
 
@@ -15521,7 +15521,7 @@ fortran_common (tree decl, HOST_WIDE_INT *value)
     return NULL_TREE;
 
   cvar = get_inner_reference (val_expr, &bitsize, &bitpos, &offset,
-                             &mode, &unsignedp, &volatilep);
+                             &mode, &unsignedp, &volatilep, true);
 
   if (cvar == NULL_TREE
       || TREE_CODE (cvar) != VAR_DECL
index 8f8b5272846c18cb30ce2b05b0e1b034e95c0f4d..4815c886f773889997962abe08393b29ea4fea4d 100644 (file)
@@ -4657,7 +4657,7 @@ get_bit_range (unsigned HOST_WIDE_INT *bitstart,
       int unsignedp;
       int volatilep = 0;
       get_inner_reference (TREE_OPERAND (exp, 0), &rbitsize, &rbitpos,
-                          &roffset, &rmode, &unsignedp, &volatilep);
+                          &roffset, &rmode, &unsignedp, &volatilep, false);
       if ((rbitpos % BITS_PER_UNIT) != 0)
        {
          *bitstart = *bitend = 0;
@@ -4810,7 +4810,7 @@ expand_assignment (tree to, tree from, bool nontemporal)
 
       push_temp_slots ();
       tem = get_inner_reference (to, &bitsize, &bitpos, &offset, &mode1,
-                                &unsignedp, &volatilep);
+                                &unsignedp, &volatilep, true);
 
       /* Make sure bitpos is not negative, it can wreak havoc later.  */
       if (bitpos < 0)
@@ -6652,13 +6652,27 @@ store_field (rtx target, HOST_WIDE_INT bitsize, HOST_WIDE_INT bitpos,
 
    If the field describes a variable-sized object, *PMODE is set to
    BLKmode and *PBITSIZE is set to -1.  An access cannot be made in
-   this case, but the address of the object can be found.  */
+   this case, but the address of the object can be found.
+
+   If KEEP_ALIGNING is true and the target is STRICT_ALIGNMENT, we don't
+   look through nodes that serve as markers of a greater alignment than
+   the one that can be deduced from the expression.  These nodes make it
+   possible for front-ends to prevent temporaries from being created by
+   the middle-end on alignment considerations.  For that purpose, the
+   normal operating mode at high-level is to always pass FALSE so that
+   the ultimate containing object is really returned; moreover, the
+   associated predicate handled_component_p will always return TRUE
+   on these nodes, thus indicating that they are essentially handled
+   by get_inner_reference.  TRUE should only be passed when the caller
+   is scanning the expression in order to build another representation
+   and specifically knows how to handle these nodes; as such, this is
+   the normal operating mode in the RTL expanders.  */
 
 tree
 get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
                     HOST_WIDE_INT *pbitpos, tree *poffset,
                     enum machine_mode *pmode, int *punsignedp,
-                    int *pvolatilep)
+                    int *pvolatilep, bool keep_aligning)
 {
   tree size_tree = 0;
   enum machine_mode mode = VOIDmode;
@@ -6778,6 +6792,14 @@ get_inner_reference (tree exp, HOST_WIDE_INT *pbitsize,
          break;
 
        case VIEW_CONVERT_EXPR:
+         if (keep_aligning && STRICT_ALIGNMENT
+             && (TYPE_ALIGN (TREE_TYPE (exp))
+              > TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0))))
+             && (TYPE_ALIGN (TREE_TYPE (TREE_OPERAND (exp, 0)))
+                 < BIGGEST_ALIGNMENT)
+             && (TYPE_ALIGN_OK (TREE_TYPE (exp))
+                 || TYPE_ALIGN_OK (TREE_TYPE (TREE_OPERAND (exp, 0)))))
+           goto done;
          break;
 
        case MEM_REF:
@@ -7642,7 +7664,7 @@ expand_expr_addr_expr_1 (tree exp, rtx target, enum machine_mode tmode,
         they won't change the final object whose address will be returned
         (they actually exist only for that purpose).  */
       inner = get_inner_reference (exp, &bitsize, &bitpos, &offset,
-                                  &mode1, &unsignedp, &volatilep);
+                                  &mode1, &unsignedp, &volatilep, false);
       break;
     }
 
@@ -9919,7 +9941,7 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
        tree offset;
        int volatilep = 0, must_force_mem;
        tree tem = get_inner_reference (exp, &bitsize, &bitpos, &offset,
-                                       &mode1, &unsignedp, &volatilep);
+                                       &mode1, &unsignedp, &volatilep, true);
        rtx orig_op0, memloc;
        bool mem_attrs_from_type = false;
 
@@ -10280,7 +10302,8 @@ expand_expr_real_1 (tree exp, rtx target, enum machine_mode tmode,
        int volatilep = 0;
        tree tem
          = get_inner_reference (treeop0, &bitsize, &bitpos,
-                                &offset, &mode1, &unsignedp, &volatilep);
+                                &offset, &mode1, &unsignedp, &volatilep,
+                                true);
        rtx orig_op0;
 
        /* ??? We should work harder and deal with non-zero offsets.  */
index fcd7f087be8d66c635f7d10ceaefada3ce1217c7..5cf8ed196d75098bf7645051966c59cb3164216b 100644 (file)
@@ -3503,7 +3503,7 @@ optimize_bit_field_compare (location_t loc, enum tree_code code,
      do anything if the inner expression is a PLACEHOLDER_EXPR since we
      then will no longer be able to replace it.  */
   linner = get_inner_reference (lhs, &lbitsize, &lbitpos, &offset, &lmode,
-                               &lunsignedp, &lvolatilep);
+                               &lunsignedp, &lvolatilep, false);
   if (linner == lhs || lbitsize == GET_MODE_BITSIZE (lmode) || lbitsize < 0
       || offset != 0 || TREE_CODE (linner) == PLACEHOLDER_EXPR || lvolatilep)
     return 0;
@@ -3513,7 +3513,7 @@ optimize_bit_field_compare (location_t loc, enum tree_code code,
      /* If this is not a constant, we can only do something if bit positions,
        sizes, and signedness are the same.  */
      rinner = get_inner_reference (rhs, &rbitsize, &rbitpos, &offset, &rmode,
-                                  &runsignedp, &rvolatilep);
+                                  &runsignedp, &rvolatilep, false);
 
      if (rinner == rhs || lbitpos != rbitpos || lbitsize != rbitsize
         || lunsignedp != runsignedp || offset != 0
@@ -3687,7 +3687,7 @@ decode_field_reference (location_t loc, tree exp, HOST_WIDE_INT *pbitsize,
     }
 
   inner = get_inner_reference (exp, pbitsize, pbitpos, &offset, pmode,
-                              punsignedp, pvolatilep);
+                              punsignedp, pvolatilep, false);
   if ((inner == exp && and_mask == 0)
       || *pbitsize < 0 || offset != 0
       || TREE_CODE (inner) == PLACEHOLDER_EXPR)
@@ -8071,7 +8071,7 @@ fold_unary_loc (location_t loc, enum tree_code code, tree type, tree op0)
          int unsignedp, volatilep;
           tree base = TREE_OPERAND (op0, 0);
          base = get_inner_reference (base, &bitsize, &bitpos, &offset,
-                                     &mode, &unsignedp, &volatilep);
+                                     &mode, &unsignedp, &volatilep, false);
          /* If the reference was to a (constant) zero offset, we can use
             the address of the base if it has the same base type
             as the result type and the pointer type is unqualified.  */
@@ -9096,7 +9096,7 @@ fold_comparison (location_t loc, enum tree_code code, tree type,
        {
          base0 = get_inner_reference (TREE_OPERAND (arg0, 0),
                                       &bitsize, &bitpos0, &offset0, &mode,
-                                      &unsignedp, &volatilep);
+                                      &unsignedp, &volatilep, false);
          if (TREE_CODE (base0) == INDIRECT_REF)
            base0 = TREE_OPERAND (base0, 0);
          else
@@ -9130,7 +9130,7 @@ fold_comparison (location_t loc, enum tree_code code, tree type,
        {
          base1 = get_inner_reference (TREE_OPERAND (arg1, 0),
                                       &bitsize, &bitpos1, &offset1, &mode,
-                                      &unsignedp, &volatilep);
+                                      &unsignedp, &volatilep, false);
          if (TREE_CODE (base1) == INDIRECT_REF)
            base1 = TREE_OPERAND (base1, 0);
          else
@@ -16997,7 +16997,8 @@ split_address_to_core_and_offset (tree exp,
   if (TREE_CODE (exp) == ADDR_EXPR)
     {
       core = get_inner_reference (TREE_OPERAND (exp, 0), &bitsize, pbitpos,
-                                 poffset, &mode, &unsignedp, &volatilep);
+                                 poffset, &mode, &unsignedp, &volatilep,
+                                 false);
       core = build_fold_addr_expr_loc (loc, core);
     }
   else
index 1f88135002141ad11ee9a82d1c0559ced20dcd9c..bc2484b5b1338344ed3d88be4073a58afb4927a3 100644 (file)
@@ -940,7 +940,7 @@ slsr_process_ref (gimple gs)
     return;
 
   base = get_inner_reference (ref_expr, &bitsize, &bitpos, &offset, &mode,
-                             &unsignedp, &volatilep);
+                             &unsignedp, &volatilep, false);
   index = double_int::from_uhwi (bitpos);
 
   if (!restructure_reference (&base, &offset, &index, &type))
index f680e1773a7c0594b070f1ea58adcc50bc5913c7..ec138584c385ed3562e8e02d43925290cb218a44 100644 (file)
@@ -296,7 +296,7 @@ delegitimize_mem_from_attrs (rtx x)
            int unsignedp, volatilep = 0;
 
            decl = get_inner_reference (decl, &bitsize, &bitpos, &toffset,
-                                       &mode, &unsignedp, &volatilep);
+                                       &mode, &unsignedp, &volatilep, false);
            if (bitsize != GET_MODE_BITSIZE (mode)
                || (bitpos % BITS_PER_UNIT)
                || (toffset && !tree_fits_shwi_p (toffset)))
index ea80e7593e6d43cf56b89611169285e8879058c1..f93f186a761cfa84d950570281a2dcb9cf64aa14 100644 (file)
@@ -325,7 +325,8 @@ tree_to_aff_combination (tree expr, tree type, aff_tree *comb)
          return;
        }
       core = get_inner_reference (TREE_OPERAND (expr, 0), &bitsize, &bitpos,
-                                 &toffset, &mode, &unsignedp, &volatilep);
+                                 &toffset, &mode, &unsignedp, &volatilep,
+                                 false);
       if (bitpos % BITS_PER_UNIT != 0)
        break;
       aff_combination_const (comb, type,
@@ -894,7 +895,7 @@ get_inner_reference_aff (tree ref, aff_tree *addr, double_int *size)
   int uns, vol;
   aff_tree tmp;
   tree base = get_inner_reference (ref, &bitsize, &bitpos, &toff, &mode,
-                                  &uns, &vol);
+                                  &uns, &vol, false);
   tree base_addr = build_fold_addr_expr (base);
 
   /* ADDR = &BASE + TOFF + BITPOS / BITS_PER_UNIT.  */
index 559a546d29f2de3dae229bb2f5426015cea10cc7..fef6a716b7a02829f0cd76ef63bdae32813c9a8f 100644 (file)
@@ -619,7 +619,7 @@ split_constant_offset_1 (tree type, tree op0, enum tree_code code, tree op1,
 
        op0 = TREE_OPERAND (op0, 0);
        base = get_inner_reference (op0, &pbitsize, &pbitpos, &poffset,
-                                   &pmode, &punsignedp, &pvolatilep);
+                                   &pmode, &punsignedp, &pvolatilep, false);
 
        if (pbitpos % BITS_PER_UNIT != 0)
          return false;
@@ -769,7 +769,7 @@ dr_analyze_innermost (struct data_reference *dr, struct loop *nest)
     fprintf (dump_file, "analyze_innermost: ");
 
   base = get_inner_reference (ref, &pbitsize, &pbitpos, &poffset,
-                             &pmode, &punsignedp, &pvolatilep);
+                             &pmode, &punsignedp, &pvolatilep, false);
   gcc_assert (base != NULL_TREE);
 
   if (pbitpos % BITS_PER_UNIT != 0)
index ddea81b3b8e4f5153eeb09229d83b529afd73cf3..ada942df389b8d5855b5b2658dc297ba8fe8e219 100644 (file)
@@ -1658,7 +1658,7 @@ interpret_rhs_expr (struct loop *loop, gimple at_stmt,
 
          base = get_inner_reference (TREE_OPERAND (rhs1, 0),
                                      &bitsize, &bitpos, &offset,
-                                     &mode, &unsignedp, &volatilep);
+                                     &mode, &unsignedp, &volatilep, false);
 
          if (TREE_CODE (base) == MEM_REF)
            {
index f790bb180cd01f4b9a4d76a33c76494917c0f126..1f5590a7ac262357e5192b837babd08792a9217f 100644 (file)
@@ -1684,7 +1684,7 @@ may_be_unaligned_p (tree ref, tree step)
      does to check whether the object must be loaded by parts when
      STRICT_ALIGNMENT is true.  */
   base = get_inner_reference (ref, &bitsize, &bitpos, &toffset, &mode,
-                             &unsignedp, &volatilep);
+                             &unsignedp, &volatilep, true);
   base_type = TREE_TYPE (base);
   base_align = get_object_alignment (base);
   base_align = MAX (base_align, TYPE_ALIGN (base_type));
@@ -3781,7 +3781,7 @@ split_address_cost (struct ivopts_data *data,
   int unsignedp, volatilep;
 
   core = get_inner_reference (addr, &bitsize, &bitpos, &toffset, &mode,
-                             &unsignedp, &volatilep);
+                             &unsignedp, &volatilep, false);
 
   if (toffset != 0
       || bitpos % BITS_PER_UNIT != 0
index a61f2a1a8685336f3a5455ddfab4111230ce076f..76a3563f397dc43d02707963d6f386b37bbb075b 100644 (file)
@@ -2971,7 +2971,7 @@ vect_check_gather (gimple stmt, loop_vec_info loop_vinfo, tree *basep,
      SSA_NAME OFF and put the loop invariants into a tree BASE
      that can be gimplified before the loop.  */
   base = get_inner_reference (DR_REF (dr), &pbitsize, &pbitpos, &off,
-                             &pmode, &punsignedp, &pvolatilep);
+                             &pmode, &punsignedp, &pvolatilep, false);
   gcc_assert (base != NULL_TREE && (pbitpos % BITS_PER_UNIT) == 0);
 
   if (TREE_CODE (base) == MEM_REF)
@@ -3518,7 +3518,7 @@ again:
            }
 
          outer_base = get_inner_reference (inner_base, &pbitsize, &pbitpos,
-                         &poffset, &pmode, &punsignedp, &pvolatilep);
+                         &poffset, &pmode, &punsignedp, &pvolatilep, false);
          gcc_assert (outer_base != NULL_TREE);
 
          if (pbitpos % BITS_PER_UNIT != 0)
index 88c8d56bf3ab91ad8d7e25d14766c6f1f4f5d91b..11ab1ce3eb09385bae8f5c03a1b46af04239c291 100644 (file)
@@ -4515,7 +4515,8 @@ extern tree build_personality_function (const char *);
    look for the ultimate containing object, which is returned and specify
    the access position and size.  */
 extern tree get_inner_reference (tree, HOST_WIDE_INT *, HOST_WIDE_INT *,
-                                tree *, enum machine_mode *, int *, int *);
+                                tree *, enum machine_mode *, int *, int *,
+                                bool);
 
 /* Return a tree representing the lower bound of the array mentioned in
    EXP, an ARRAY_REF or an ARRAY_RANGE_REF.  */
index 10b74fd96ee3079f56dcb75051b9068795bf066d..4efcfe565aa0c2ec3b3c6be59148fe2bb2d53e6a 100644 (file)
@@ -121,7 +121,7 @@ instrument_expr (gimple_stmt_iterator gsi, tree expr, bool is_write)
   enum machine_mode mode;
   int volatilep = 0, unsignedp = 0;
   base = get_inner_reference (expr, &bitsize, &bitpos, &offset,
-                             &mode, &unsignedp, &volatilep);
+                             &mode, &unsignedp, &volatilep, false);
 
   /* No need to instrument accesses to decls that don't escape,
      they can't escape to other threads then.  */