CPUID_7_0_EBX_HLE, CPUID_7_0_EBX_AVX2,
CPUID_7_0_EBX_INVPCID, CPUID_7_0_EBX_RTM,
CPUID_7_0_EBX_RDSEED */
-#define TCG_7_0_ECX_FEATURES (CPUID_7_0_ECX_PKU | \
+#define TCG_7_0_ECX_FEATURES (CPUID_7_0_ECX_UMIP | CPUID_7_0_ECX_PKU | \
/* CPUID_7_0_ECX_OSPKE is dynamic */ \
CPUID_7_0_ECX_LA57 | CPUID_7_0_ECX_PKS)
#define TCG_7_0_EDX_FEATURES 0
#define HF_IOBPT_SHIFT 24 /* an io breakpoint enabled */
#define HF_MPX_EN_SHIFT 25 /* MPX Enabled (CR4+XCR0+BNDCFGx) */
#define HF_MPX_IU_SHIFT 26 /* BND registers in-use */
+#define HF_UMIP_SHIFT 27 /* CR4.UMIP */
#define HF_CPL_MASK (3 << HF_CPL_SHIFT)
#define HF_INHIBIT_IRQ_MASK (1 << HF_INHIBIT_IRQ_SHIFT)
#define HF_IOBPT_MASK (1 << HF_IOBPT_SHIFT)
#define HF_MPX_EN_MASK (1 << HF_MPX_EN_SHIFT)
#define HF_MPX_IU_MASK (1 << HF_MPX_IU_SHIFT)
+#define HF_UMIP_MASK (1 << HF_UMIP_SHIFT)
/* hflags2 */
(~(target_ulong)(CR4_VME_MASK | CR4_PVI_MASK | CR4_TSD_MASK \
| CR4_DE_MASK | CR4_PSE_MASK | CR4_PAE_MASK \
| CR4_MCE_MASK | CR4_PGE_MASK | CR4_PCE_MASK \
- | CR4_OSFXSR_MASK | CR4_OSXMMEXCPT_MASK |CR4_UMIP_MASK \
+ | CR4_OSFXSR_MASK | CR4_OSXMMEXCPT_MASK | CR4_UMIP_MASK \
| CR4_LA57_MASK \
| CR4_FSGSBASE_MASK | CR4_PCIDE_MASK | CR4_OSXSAVE_MASK \
| CR4_SMEP_MASK | CR4_SMAP_MASK | CR4_PKE_MASK | CR4_PKS_MASK))
}
/* Clear bits we're going to recompute. */
- hflags = env->hflags & ~(HF_OSFXSR_MASK | HF_SMAP_MASK);
+ hflags = env->hflags & ~(HF_OSFXSR_MASK | HF_SMAP_MASK | HF_UMIP_MASK);
/* SSE handling */
if (!(env->features[FEAT_1_EDX] & CPUID_SSE)) {
if (new_cr4 & CR4_SMAP_MASK) {
hflags |= HF_SMAP_MASK;
}
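+ /* CR4.UMIP cannot be set if the CPU does not report UMIP. */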
+ if (!(env->features[FEAT_7_0_ECX] & CPUID_7_0_ECX_UMIP)) {
+ new_cr4 &= ~CR4_UMIP_MASK;
+ }
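+ /* Cache CR4.UMIP in hflags; the translator checks it via s->flags. */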
+ if (new_cr4 & CR4_UMIP_MASK) {
+ hflags |= HF_UMIP_MASK;
+ }
if (!(env->features[FEAT_7_0_ECX] & CPUID_7_0_ECX_PKU)) {
new_cr4 &= ~CR4_PKE_MASK;
case 0: /* sldt */
if (!PE(s) || VM86(s))
goto illegal_op;
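+ /*
+  * With CR4.UMIP set, sldt/str/sgdt/sidt/smsw are privileged;
+  * check_cpl0() raises #GP(0) when executed outside ring 0.
+  */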
+ if (s->flags & HF_UMIP_MASK && !check_cpl0(s)) {
+ break;
+ }
gen_svm_check_intercept(s, SVM_EXIT_LDTR_READ);
tcg_gen_ld32u_tl(s->T0, cpu_env,
offsetof(CPUX86State, ldt.selector));
case 1: /* str */
if (!PE(s) || VM86(s))
goto illegal_op;
+ if (s->flags & HF_UMIP_MASK && !check_cpl0(s)) {
+ break;
+ }
gen_svm_check_intercept(s, SVM_EXIT_TR_READ);
tcg_gen_ld32u_tl(s->T0, cpu_env,
offsetof(CPUX86State, tr.selector));
modrm = x86_ldub_code(env, s);
switch (modrm) {
CASE_MODRM_MEM_OP(0): /* sgdt */
+ if (s->flags & HF_UMIP_MASK && !check_cpl0(s)) {
+ break;
+ }
gen_svm_check_intercept(s, SVM_EXIT_GDTR_READ);
gen_lea_modrm(env, s, modrm);
tcg_gen_ld32u_tl(s->T0,
break;
CASE_MODRM_MEM_OP(1): /* sidt */
+ if (s->flags & HF_UMIP_MASK && !check_cpl0(s)) {
+ break;
+ }
gen_svm_check_intercept(s, SVM_EXIT_IDTR_READ);
gen_lea_modrm(env, s, modrm);
tcg_gen_ld32u_tl(s->T0, cpu_env, offsetof(CPUX86State, idt.limit));
break;
CASE_MODRM_OP(4): /* smsw */
+ if (s->flags & HF_UMIP_MASK && !check_cpl0(s)) {
+ break;
+ }
gen_svm_check_intercept(s, SVM_EXIT_READ_CR0);
tcg_gen_ld_tl(s->T0, cpu_env, offsetof(CPUX86State, cr[0]));
/*