return default_floatformat_for_type (gdbarch, name, len);
}
+/* Compute an XCR0 mask based on a target description. Return 0 if
+ the description does not include the required features or is
+ inconsistent (e.g. AVX without SSE). */
+
+static uint64_t
+i386_xcr0_from_tdesc (const struct target_desc *tdesc)
+{
+ if (! tdesc_has_registers (tdesc))
+ return 0;
+
+ const struct tdesc_feature *feature_core;
+
+ const struct tdesc_feature *feature_sse, *feature_avx, *feature_mpx,
+ *feature_avx512, *feature_pkeys;
+
+ /* Get core registers. */
+ feature_core = tdesc_find_feature (tdesc, "org.gnu.gdb.i386.core");
+ if (feature_core == NULL)
+ return 0;
+
+ /* Get SSE registers. */
+ feature_sse = tdesc_find_feature (tdesc, "org.gnu.gdb.i386.sse");
+
+ /* Try AVX registers. */
+ feature_avx = tdesc_find_feature (tdesc, "org.gnu.gdb.i386.avx");
+
+ /* Try MPX registers. */
+ feature_mpx = tdesc_find_feature (tdesc, "org.gnu.gdb.i386.mpx");
+
+ /* Try AVX512 registers. */
+ feature_avx512 = tdesc_find_feature (tdesc, "org.gnu.gdb.i386.avx512");
+
+ /* Try PKEYS registers. */
+ feature_pkeys = tdesc_find_feature (tdesc, "org.gnu.gdb.i386.pkeys");
+
+ /* The XCR0 bits. */
+ uint64_t xcr0 = X86_XSTATE_X87;
+
+ if (feature_sse)
+ xcr0 |= X86_XSTATE_SSE;
+
+ if (feature_avx)
+ {
+ /* AVX register description requires SSE register description. */
+ if (!feature_sse)
+ return 0;
+
+ xcr0 |= X86_XSTATE_AVX;
+ }
+
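+ /* MPX contributes both the bound register (BNDREGS) and bound
+ configuration (BNDCFG) state components. */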
+ if (feature_mpx)
+ xcr0 |= X86_XSTATE_MPX_MASK;
+
+ if (feature_avx512)
+ {
+ /* AVX512 register description requires AVX register description. */
+ if (!feature_avx)
+ return 0;
+
+ xcr0 |= X86_XSTATE_AVX512;
+ }
+
+ if (feature_pkeys)
+ xcr0 |= X86_XSTATE_PKRU;
+
+ return xcr0;
+}
+
static int
i386_validate_tdesc_p (i386_gdbarch_tdep *tdep,
struct tdesc_arch_data *tdesc_data)
x86_xsave_layout xsave_layout = target_fetch_x86_xsave_layout ();
+ /* If the target did not provide an XSAVE layout but the target
+ description includes registers from the XSAVE extended region,
+ use a fallback XSAVE layout. Specifically, this fallback layout
+ is used when writing out a local core dump for a remote
+ target. */
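+ /* If the description does not include any registers from the
+ extended region, the fallback layout stays empty, which is the
+ same as providing no layout at all. */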
+ if (xsave_layout.sizeof_xsave == 0)
+ xsave_layout
+ = i387_fallback_xsave_layout (i386_xcr0_from_tdesc (info.target_desc));
+
/* If there is already a candidate, use it. */
for (arches = gdbarch_list_lookup_by_info (arches, &info);
arches != NULL;
return true;
}
+/* See i387-tdep.h. */
+
+x86_xsave_layout
+i387_fallback_xsave_layout (uint64_t xcr0)
+{
+ x86_xsave_layout layout;
+
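+ /* The checks are ordered from the largest extended state to the
+ smallest; each larger layout also fixes the offsets used by the
+ smaller feature sets it contains. */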
+ if (HAS_PKRU (xcr0))
+ {
+ /* Intel CPUs supporting PKRU. */
+ layout.avx_offset = 576;
+ layout.bndregs_offset = 960;
+ layout.bndcfg_offset = 1024;
+ layout.k_offset = 1088;
+ layout.zmm_h_offset = 1152;
+ layout.zmm_offset = 1664;
+ layout.pkru_offset = 2688;
+ layout.sizeof_xsave = 2696;
+ }
+ else if (HAS_AVX512 (xcr0))
+ {
+ /* Intel CPUs supporting AVX512. */
+ layout.avx_offset = 576;
+ layout.bndregs_offset = 960;
+ layout.bndcfg_offset = 1024;
+ layout.k_offset = 1088;
+ layout.zmm_h_offset = 1152;
+ layout.zmm_offset = 1664;
+ layout.sizeof_xsave = 2688;
+ }
+ else if (HAS_MPX (xcr0))
+ {
+ /* Intel CPUs supporting MPX. */
+ layout.avx_offset = 576;
+ layout.bndregs_offset = 960;
+ layout.bndcfg_offset = 1024;
+ layout.sizeof_xsave = 1088;
+ }
+ else if (HAS_AVX (xcr0))
+ {
+ /* Intel and AMD CPUs supporting AVX. */
+ layout.avx_offset = 576;
+ layout.sizeof_xsave = 832;
+ }
+
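+ /* If only x87 and/or SSE state is enabled there is no extended
+ region to describe, so LAYOUT is returned with its default
+ (empty) values. */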
+ return layout;
+}
+
/* Extract from XSAVE a bitset of the features that are available on the
target, but which have not yet been enabled. */
extern bool i387_guess_xsave_layout (uint64_t xcr0, size_t xsave_size,
x86_xsave_layout &layout);
+/* Compute an XSAVE layout based on the XCR0 bitmask. This is used
+ as a fallback if a target does not provide an XSAVE layout. */
+
+extern x86_xsave_layout i387_fallback_xsave_layout (uint64_t xcr0);
+
/* Similar to i387_supply_fxsave, but use XSAVE extended state. */
extern void i387_supply_xsave (struct regcache *regcache, int regnum,