x86: Fix mis-merge of XSAVE ld.so trampoline selection [BZ #22641]
author     Florian Weimer <fweimer@redhat.com>
           Thu, 11 Jan 2018 15:54:40 +0000 (16:54 +0100)
committer  Florian Weimer <fweimer@redhat.com>
           Thu, 11 Jan 2018 15:54:40 +0000 (16:54 +0100)
The change is best viewed with “diff -w”:

@@ -226,6 +226,7 @@ init_cpu_features (struct cpu_features *cpu_features)
    /* Determine if FMA4 is usable.  */
    if (HAS_CPU_FEATURE (FMA4))
      cpu_features->feature[index_FMA4_Usable] |= bit_FMA4_Usable;
+ }

       /* For _dl_runtime_resolve, set xsave_state_size to xsave area
   size + integer register save size and align it to 64 bytes.  */
@@ -292,7 +293,6 @@ init_cpu_features (struct cpu_features *cpu_features)
      }
  }
     }
-    }

 #if !HAS_CPUID
 no_cpuid:

Without this change, XSAVE support will never be selected unless the CPU
also supports AVX, which is not what we want.  For example, if AVX is
disabled, but MPX is supported, the BND registers are not preserved if
we use FXSAVE instead of XSAVE.
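
Concretely, the sizing that now runs whenever CPUID leaf 0xD is available
boils down to a single __cpuid_count query.  The following standalone
sketch is not glibc code: STATE_SAVE_OFFSET is assumed here to be the
64-byte integer register save area, while the real constant is defined in
glibc's x86-64 sysdep.h.  It prints the value that would end up in
cpu_features->xsave_state_size:

#include <stdio.h>
#include <cpuid.h>

#define ALIGN_UP(x, align) (((x) + (align) - 1) & ~((align) - 1))

/* Integer register save area in front of the xsave area; assumed value
   for this sketch, the real STATE_SAVE_OFFSET lives in glibc's
   sysdeps/x86_64/sysdep.h.  */
#define STATE_SAVE_OFFSET 64

int
main (void)
{
  unsigned int eax, ebx, ecx, edx;
  unsigned int max_cpuid;

  /* CPUID leaf 0 returns the highest supported standard leaf in EAX.  */
  __cpuid (0, max_cpuid, ebx, ecx, edx);

  /* After the fix this test no longer sits inside the AVX block.  */
  if (max_cpuid >= 0xd)
    {
      /* CPUID.(EAX=0xD, ECX=0):EBX is the xsave area size for the
         feature set currently enabled in XCR0.  */
      __cpuid_count (0xd, 0, eax, ebx, ecx, edx);
      if (ebx != 0)
        {
          printf ("xsave_state_size = %u\n",
                  ALIGN_UP (ebx + STATE_SAVE_OFFSET, 64));
          return 0;
        }
    }

  printf ("no usable xsave area size; ld.so would fall back to fxsave\n");
  return 0;
}

On a CPU whose XCR0 enables MPX state, the EBX value already accounts for
the BND register components, which is why selecting the xsave-based
trampoline here preserves them even when AVX is disabled.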

This fixes commit 26d289bb92b6d1125536644f607c73617463477d (x86-64:
Use fxsave/xsave/xsavec in _dl_runtime_resolve).

ChangeLog
NEWS
sysdeps/x86/cpu-features.c

diff --git a/ChangeLog b/ChangeLog
index e7db122597070f992aae8b8438bfdf1115b3f69f..95be6382d9787afbb464ba50403c059264822604 100644
--- a/ChangeLog
+++ b/ChangeLog
@@ -1,3 +1,8 @@
+2018-01-11  Florian Weimer  <fweimer@redhat.com>
+
+       * sysdeps/x86/cpu-features.c (init_cpu_features): Move check for
+       XSAVE out of the AVX check.
+
 2017-10-22  H.J. Lu  <hongjiu.lu@intel.com>
 
        [BZ #21265]
diff --git a/NEWS b/NEWS
index eb7cb338e025460cbf5b997a949279d05df67383..7dd7193225de606173b16305601f29bfc6d4ffd5 100644
--- a/NEWS
+++ b/NEWS
@@ -47,6 +47,7 @@ The following bugs are resolved with this release:
   [21289] Fix symbol redirect for fts_set
   [21624] Unsafe alloca allows local attackers to alias stack and heap (CVE-2017-1000366)
   [21666] Avoid .symver on common symbols
+  [22641] x86: Fix mis-merge of XSAVE ld.so trampoline selection
 
 \f
 Version 2.23
diff --git a/sysdeps/x86/cpu-features.c b/sysdeps/x86/cpu-features.c
index 2060fa38e6f0f8fdad25db7a2616967a5976a1ce..316a1180d10cada9107619aba246630a1b70e7ef 100644
--- a/sysdeps/x86/cpu-features.c
+++ b/sysdeps/x86/cpu-features.c
@@ -226,69 +226,69 @@ init_cpu_features (struct cpu_features *cpu_features)
          /* Determine if FMA4 is usable.  */
          if (HAS_CPU_FEATURE (FMA4))
            cpu_features->feature[index_FMA4_Usable] |= bit_FMA4_Usable;
+       }
 
-         /* For _dl_runtime_resolve, set xsave_state_size to xsave area
-            size + integer register save size and align it to 64 bytes.  */
-         if (cpu_features->max_cpuid >= 0xd)
-           {
-             unsigned int eax, ebx, ecx, edx;
+      /* For _dl_runtime_resolve, set xsave_state_size to xsave area
+        size + integer register save size and align it to 64 bytes.  */
+      if (cpu_features->max_cpuid >= 0xd)
+       {
+         unsigned int eax, ebx, ecx, edx;
 
-             __cpuid_count (0xd, 0, eax, ebx, ecx, edx);
-             if (ebx != 0)
-               {
-                 cpu_features->xsave_state_size
+         __cpuid_count (0xd, 0, eax, ebx, ecx, edx);
+         if (ebx != 0)
+           {
+             cpu_features->xsave_state_size
                = ALIGN_UP (ebx + STATE_SAVE_OFFSET, 64);
 
-                 __cpuid_count (0xd, 1, eax, ebx, ecx, edx);
+             __cpuid_count (0xd, 1, eax, ebx, ecx, edx);
 
-                 /* Check if XSAVEC is available.  */
-                 if ((eax & (1 << 1)) != 0)
-                   {
-                     unsigned int xstate_comp_offsets[32];
-                     unsigned int xstate_comp_sizes[32];
-                     unsigned int i;
+             /* Check if XSAVEC is available.  */
+             if ((eax & (1 << 1)) != 0)
+               {
+                 unsigned int xstate_comp_offsets[32];
+                 unsigned int xstate_comp_sizes[32];
+                 unsigned int i;
 
-                     xstate_comp_offsets[0] = 0;
-                     xstate_comp_offsets[1] = 160;
-                     xstate_comp_offsets[2] = 576;
-                     xstate_comp_sizes[0] = 160;
-                     xstate_comp_sizes[1] = 256;
+                 xstate_comp_offsets[0] = 0;
+                 xstate_comp_offsets[1] = 160;
+                 xstate_comp_offsets[2] = 576;
+                 xstate_comp_sizes[0] = 160;
+                 xstate_comp_sizes[1] = 256;
 
-                     for (i = 2; i < 32; i++)
+                 for (i = 2; i < 32; i++)
+                   {
+                     if ((STATE_SAVE_MASK & (1 << i)) != 0)
                        {
-                         if ((STATE_SAVE_MASK & (1 << i)) != 0)
-                           {
-                             __cpuid_count (0xd, i, eax, ebx, ecx, edx);
-                             xstate_comp_sizes[i] = eax;
-                           }
-                         else
-                           {
-                             ecx = 0;
-                             xstate_comp_sizes[i] = 0;
-                           }
-
-                         if (i > 2)
-                           {
-                             xstate_comp_offsets[i]
-                               = (xstate_comp_offsets[i - 1]
-                                  + xstate_comp_sizes[i -1]);
-                             if ((ecx & (1 << 1)) != 0)
-                               xstate_comp_offsets[i]
-                             = ALIGN_UP (xstate_comp_offsets[i], 64);
-                           }
+                         __cpuid_count (0xd, i, eax, ebx, ecx, edx);
+                         xstate_comp_sizes[i] = eax;
+                       }
+                     else
+                       {
+                         ecx = 0;
+                         xstate_comp_sizes[i] = 0;
                        }
 
-                     /* Use XSAVEC.  */
-                     unsigned int size
-                       = xstate_comp_offsets[31] + xstate_comp_sizes[31];
-                     if (size)
+                     if (i > 2)
                        {
-                         cpu_features->xsave_state_size
-                           = ALIGN_UP (size + STATE_SAVE_OFFSET, 64);
-                         cpu_features->feature[index_XSAVEC_Usable]
-                           |= bit_XSAVEC_Usable;
+                         xstate_comp_offsets[i]
+                           = (xstate_comp_offsets[i - 1]
+                              + xstate_comp_sizes[i -1]);
+                         if ((ecx & (1 << 1)) != 0)
+                           xstate_comp_offsets[i]
+                             = ALIGN_UP (xstate_comp_offsets[i], 64);
                        }
                    }
+
+                 /* Use XSAVEC.  */
+                 unsigned int size
+                   = xstate_comp_offsets[31] + xstate_comp_sizes[31];
+                 if (size)
+                   {
+                     cpu_features->xsave_state_size
+                       = ALIGN_UP (size + STATE_SAVE_OFFSET, 64);
+                     cpu_features->feature[index_XSAVEC_Usable]
+                       |= bit_XSAVEC_Usable;
+                   }
                }
            }
        }