git.ipfire.org Git - thirdparty/gcc.git/commitdiff
tree-optimization/119586 - aligned access to unaligned data
authorRichard Biener <rguenther@suse.de>
Wed, 2 Apr 2025 11:12:58 +0000 (13:12 +0200)
committerRichard Biener <rguenth@gcc.gnu.org>
Wed, 2 Apr 2025 12:19:18 +0000 (14:19 +0200)
The following reverts parts of r15-8047, which assumed the alignment
analysis for VMAT_STRIDED_SLP is correct and used aligned accesses
where that analysis allowed them.  As the PR shows, the analysis is
still incorrect, so revert back to assuming we got it wrong.

PR tree-optimization/119586
* tree-vect-stmts.cc (vectorizable_load): Assume we got
alignment analysis for VMAT_STRIDED_SLP wrong.
(vectorizable_store): Likewise.

* gcc.dg/vect/pr119586.c: New testcase.

gcc/testsuite/gcc.dg/vect/pr119586.c [new file with mode: 0644]
gcc/tree-vect-stmts.cc

diff --git a/gcc/testsuite/gcc.dg/vect/pr119586.c b/gcc/testsuite/gcc.dg/vect/pr119586.c
new file mode 100644 (file)
index 0000000..04a00ef
--- /dev/null
@@ -0,0 +1,21 @@
+#include "tree-vect.h"
+
/* Opaque sink for the address of the test array in d(), keeping its
   stores live.  The parameter is named (and voided) so the definition
   is valid before C23, where unnamed parameters were introduced;
   noipa prevents IPA analyses from exploiting the empty body.  */
void __attribute__((noipa)) foo (long *p) { (void) p; }
+void __attribute__((noipa))
+d()
+{
+  /* The inner dimension of 5 longs gives an odd element stride, and
+     only elements 0, 1 and 4 of each row are written, leaving a gap
+     at elements 2 and 3.  Per the commit message, this
+     strided-store-group-with-gap is the shape for which alignment
+     analysis was wrong (PR119586): unrolling can make the group look
+     aligned while the step between subgroups leaves part of it
+     misaligned.  Do not reshape these loops or reorder the stores —
+     the exact access pattern is the reproducer.  */
+  long e[6][8][5];
+  for (int b = 0; b < 6; b++)
+    for (int c = 0; c < 8; c++)
+      {
+        e[b][c][0] = 1;
+        e[b][c][1] = 1;
+        e[b][c][4] = 1;
+      }
+  /* Pass the array to the noipa sink so the stores cannot be
+     optimized away.  */
+  foo (&e[0][0][0]);
+}
/* Driver: verify at runtime that the target supports the required
   vector instructions, then run the reproducer.  */
int main()
{
  check_vect ();
  d ();
  return 0;
}
index 8bd5ea96667d71292b2ca3613065ba07e3ac858d..3005ae6eaaea6e039278c5a85c58318013f11007 100644 (file)
@@ -8906,10 +8906,17 @@ vectorizable_store (vec_info *vinfo,
                }
            }
          unsigned align;
-         if (alignment_support_scheme == dr_aligned)
-           align = known_alignment (DR_TARGET_ALIGNMENT (first_dr_info));
-         else
-           align = dr_alignment (vect_dr_behavior (vinfo, first_dr_info));
+         /* ???  We'd want to use
+              if (alignment_support_scheme == dr_aligned)
+                align = known_alignment (DR_TARGET_ALIGNMENT (first_dr_info));
+            since doing that is what we assume we can in the above checks.
+            But this interferes with groups with gaps where for example
+            VF == 2 makes the group in the unrolled loop aligned but the
+            fact that we advance with step between the two subgroups
+            makes the access to the second unaligned.  See PR119586.
+            We have to anticipate that here or adjust code generation to
+            avoid the misaligned loads by means of permutations.  */
+         align = dr_alignment (vect_dr_behavior (vinfo, first_dr_info));
          /* Alignment is at most the access size if we do multiple stores.  */
          if (nstores > 1)
            align = MIN (tree_to_uhwi (TYPE_SIZE_UNIT (ltype)), align);
@@ -10884,10 +10891,8 @@ vectorizable_load (vec_info *vinfo,
                }
            }
          unsigned align;
-         if (alignment_support_scheme == dr_aligned)
-           align = known_alignment (DR_TARGET_ALIGNMENT (first_dr_info));
-         else
-           align = dr_alignment (vect_dr_behavior (vinfo, first_dr_info));
+         /* ???  The above is still wrong, see vectorizable_store.  */
+         align = dr_alignment (vect_dr_behavior (vinfo, first_dr_info));
          /* Alignment is at most the access size if we do multiple loads.  */
          if (nloads > 1)
            align = MIN (tree_to_uhwi (TYPE_SIZE_UNIT (ltype)), align);