tree-optimization/122502 - avoid folding during imm use walk with active ranger
author    Richard Biener <rguenther@suse.de>
          Fri, 31 Oct 2025 07:57:39 +0000 (08:57 +0100)
committer Richard Biener <rguenther@suse.de>
          Fri, 31 Oct 2025 09:33:58 +0000 (10:33 +0100)
The following works around an unfortunate interaction between ranger
and immediate-use walking.  An actual solution needs more thought.
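
The hazard can be pictured with a minimal, self-contained C++ analogy (not
GCC internals; the names here are made up for illustration): a worker that
may mutate a use list must not be called from inside the walk over that
list, so the patch records the statements during the walk and folds them
afterwards.  A sketch of that pattern:

/* Minimal analogy only.  'process' stands in for fold_stmt with an active
   ranger: it may mutate the use list it was reached from.  */
#include <cstdio>
#include <vector>

struct use_stmt { int id; };

static std::vector<use_stmt> uses = {{1}, {2}, {3}};

static void
process (const use_stmt &s)
{
  if (s.id == 2)
    uses.push_back ({4});      /* mutates the container being walked */
  std::printf ("processed %d\n", s.id);
}

int
main ()
{
  /* Unsafe: 'process' may reallocate 'uses' while we iterate over it,
     invalidating the range-for iterators:
       for (const use_stmt &s : uses) process (s);  */

  /* Safe, mirroring the patch: record the work during the walk, then
     do it once the walk has finished.  */
  std::vector<use_stmt> to_process (uses);
  for (const use_stmt &s : to_process)
    process (s);
  return 0;
}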

PR tree-optimization/122502
* tree-scalar-evolution.cc (final_value_replacement_loop):
Avoid folding from within FOR_EACH_IMM_USE_STMT due to active
ranger.

* gcc.dg/torture/pr122502.c: New testcase.

gcc/testsuite/gcc.dg/torture/pr122502.c [new file with mode: 0644]
gcc/tree-scalar-evolution.cc

diff --git a/gcc/testsuite/gcc.dg/torture/pr122502.c b/gcc/testsuite/gcc.dg/torture/pr122502.c
new file mode 100644
index 0000000..5e2cb2e
--- /dev/null
+++ b/gcc/testsuite/gcc.dg/torture/pr122502.c
@@ -0,0 +1,21 @@
+/* { dg-do compile } */
+
+short int *ts;
+
+void
+c2 (unsigned long long int s4, int ns)
+{
+  short int *b2 = (short int *)&ns;
+
+  while (ns != 0)
+    {
+      int xn;
+
+      for (xn = 0; xn < 3; ++xn)
+        for (*b2 = 0; *b2 < 2; ++*b2)
+          s4 += xn;
+      if (s4 != 0)
+        b2 = ts;
+      ++ns;
+    }
+}
diff --git a/gcc/tree-scalar-evolution.cc b/gcc/tree-scalar-evolution.cc
index 7907893b916cb5ca058650f9be55169970f857d2..9f82abc4b81f121fd72f9349f940ea1ec7656193 100644
--- a/gcc/tree-scalar-evolution.cc
+++ b/gcc/tree-scalar-evolution.cc
@@ -3995,11 +3995,17 @@ final_value_replacement_loop (class loop *loop)
        {
          gimple *use_stmt;
          imm_use_iterator imm_iter;
+         auto_vec<gimple *, 4> to_fold;
          FOR_EACH_IMM_USE_STMT (use_stmt, imm_iter, rslt)
+           if (!stmt_can_throw_internal (cfun, use_stmt))
+             to_fold.safe_push (use_stmt);
+         /* Delay folding until after the immediate use walk is completed
+            as we have an active ranger and that might walk immediate
+            uses of rslt again.  See PR122502.  */
+         for (gimple *use_stmt : to_fold)
            {
              gimple_stmt_iterator gsi = gsi_for_stmt (use_stmt);
-             if (!stmt_can_throw_internal (cfun, use_stmt)
-                 && fold_stmt (&gsi, follow_all_ssa_edges))
+             if (fold_stmt (&gsi, follow_all_ssa_edges))
                update_stmt (gsi_stmt (gsi));
            }
        }