git.ipfire.org Git - thirdparty/kernel/stable.git/commitdiff
KVM: VMX: Explicitly reference RCX as the vmx_vcpu pointer in asm blobs
author: Sean Christopherson <sean.j.christopherson@intel.com>
Thu, 20 Dec 2018 20:25:16 +0000 (12:25 -0800)
committer: Greg Kroah-Hartman <gregkh@linuxfoundation.org>
Thu, 14 May 2020 05:57:21 +0000 (07:57 +0200)
commit 051a2d3e59e51ae49fd56aef34e472832897ce46 upstream.

Use '%% " _ASM_CX"' instead of '%0' to dereference RCX, i.e. the
'struct vcpu_vmx' pointer, in the VM-Enter asm blobs of vmx_vcpu_run()
and nested_vmx_check_vmentry_hw().  Using the symbolic name means that
adding/removing an output parameter(s) requires "rewriting" almost all
of the asm blob, which makes it nearly impossible to understand what's
being changed in even the most minor patches.

Opportunistically improve the code comments.

Signed-off-by: Sean Christopherson <sean.j.christopherson@intel.com>
Reviewed-by: Andi Kleen <ak@linux.intel.com>
Signed-off-by: Paolo Bonzini <pbonzini@redhat.com>
Signed-off-by: Greg Kroah-Hartman <gregkh@linuxfoundation.org>
arch/x86/kvm/vmx.c

index fe5036641c596640d361f6ad7c82444f2a5bc698..5b06a98ffd4cb410e6705aa4b3dc484ab7b8389b 100644 (file)
@@ -10776,9 +10776,9 @@ static void __noclone vmx_vcpu_run(struct kvm_vcpu *vcpu)
                "push %%" _ASM_DX "; push %%" _ASM_BP ";"
                "push %%" _ASM_CX " \n\t" /* placeholder for guest rcx */
                "push %%" _ASM_CX " \n\t"
-               "cmp %%" _ASM_SP ", %c[host_rsp](%0) \n\t"
+               "cmp %%" _ASM_SP ", %c[host_rsp](%%" _ASM_CX ") \n\t"
                "je 1f \n\t"
-               "mov %%" _ASM_SP ", %c[host_rsp](%0) \n\t"
+               "mov %%" _ASM_SP ", %c[host_rsp](%%" _ASM_CX ") \n\t"
                /* Avoid VMWRITE when Enlightened VMCS is in use */
                "test %%" _ASM_SI ", %%" _ASM_SI " \n\t"
                "jz 2f \n\t"
@@ -10788,32 +10788,33 @@ static void __noclone vmx_vcpu_run(struct kvm_vcpu *vcpu)
                __ex(ASM_VMX_VMWRITE_RSP_RDX) "\n\t"
                "1: \n\t"
                /* Reload cr2 if changed */
-               "mov %c[cr2](%0), %%" _ASM_AX " \n\t"
+               "mov %c[cr2](%%" _ASM_CX "), %%" _ASM_AX " \n\t"
                "mov %%cr2, %%" _ASM_DX " \n\t"
                "cmp %%" _ASM_AX ", %%" _ASM_DX " \n\t"
                "je 3f \n\t"
                "mov %%" _ASM_AX", %%cr2 \n\t"
                "3: \n\t"
                /* Check if vmlaunch of vmresume is needed */
-               "cmpb $0, %c[launched](%0) \n\t"
+               "cmpb $0, %c[launched](%%" _ASM_CX ") \n\t"
                /* Load guest registers.  Don't clobber flags. */
-               "mov %c[rax](%0), %%" _ASM_AX " \n\t"
-               "mov %c[rbx](%0), %%" _ASM_BX " \n\t"
-               "mov %c[rdx](%0), %%" _ASM_DX " \n\t"
-               "mov %c[rsi](%0), %%" _ASM_SI " \n\t"
-               "mov %c[rdi](%0), %%" _ASM_DI " \n\t"
-               "mov %c[rbp](%0), %%" _ASM_BP " \n\t"
+               "mov %c[rax](%%" _ASM_CX "), %%" _ASM_AX " \n\t"
+               "mov %c[rbx](%%" _ASM_CX "), %%" _ASM_BX " \n\t"
+               "mov %c[rdx](%%" _ASM_CX "), %%" _ASM_DX " \n\t"
+               "mov %c[rsi](%%" _ASM_CX "), %%" _ASM_SI " \n\t"
+               "mov %c[rdi](%%" _ASM_CX "), %%" _ASM_DI " \n\t"
+               "mov %c[rbp](%%" _ASM_CX "), %%" _ASM_BP " \n\t"
 #ifdef CONFIG_X86_64
-               "mov %c[r8](%0),  %%r8  \n\t"
-               "mov %c[r9](%0),  %%r9  \n\t"
-               "mov %c[r10](%0), %%r10 \n\t"
-               "mov %c[r11](%0), %%r11 \n\t"
-               "mov %c[r12](%0), %%r12 \n\t"
-               "mov %c[r13](%0), %%r13 \n\t"
-               "mov %c[r14](%0), %%r14 \n\t"
-               "mov %c[r15](%0), %%r15 \n\t"
+               "mov %c[r8](%%" _ASM_CX "),  %%r8  \n\t"
+               "mov %c[r9](%%" _ASM_CX "),  %%r9  \n\t"
+               "mov %c[r10](%%" _ASM_CX "), %%r10 \n\t"
+               "mov %c[r11](%%" _ASM_CX "), %%r11 \n\t"
+               "mov %c[r12](%%" _ASM_CX "), %%r12 \n\t"
+               "mov %c[r13](%%" _ASM_CX "), %%r13 \n\t"
+               "mov %c[r14](%%" _ASM_CX "), %%r14 \n\t"
+               "mov %c[r15](%%" _ASM_CX "), %%r15 \n\t"
 #endif
-               "mov %c[rcx](%0), %%" _ASM_CX " \n\t" /* kills %0 (ecx) */
+               /* Load guest RCX.  This kills the vmx_vcpu pointer! */
+               "mov %c[rcx](%%" _ASM_CX "), %%" _ASM_CX " \n\t"
 
                /* Enter guest mode */
                "jne 1f \n\t"
@@ -10821,26 +10822,33 @@ static void __noclone vmx_vcpu_run(struct kvm_vcpu *vcpu)
                "jmp 2f \n\t"
                "1: " __ex(ASM_VMX_VMRESUME) "\n\t"
                "2: "
-               /* Save guest registers, load host registers, keep flags */
-               "mov %0, %c[wordsize](%%" _ASM_SP ") \n\t"
-               "pop %0 \n\t"
-               "setbe %c[fail](%0)\n\t"
-               "mov %%" _ASM_AX ", %c[rax](%0) \n\t"
-               "mov %%" _ASM_BX ", %c[rbx](%0) \n\t"
-               __ASM_SIZE(pop) " %c[rcx](%0) \n\t"
-               "mov %%" _ASM_DX ", %c[rdx](%0) \n\t"
-               "mov %%" _ASM_SI ", %c[rsi](%0) \n\t"
-               "mov %%" _ASM_DI ", %c[rdi](%0) \n\t"
-               "mov %%" _ASM_BP ", %c[rbp](%0) \n\t"
+
+               /* Save guest's RCX to the stack placeholder (see above) */
+               "mov %%" _ASM_CX ", %c[wordsize](%%" _ASM_SP ") \n\t"
+
+               /* Load host's RCX, i.e. the vmx_vcpu pointer */
+               "pop %%" _ASM_CX " \n\t"
+
+               /* Set vmx->fail based on EFLAGS.{CF,ZF} */
+               "setbe %c[fail](%%" _ASM_CX ")\n\t"
+
+               /* Save all guest registers, including RCX from the stack */
+               "mov %%" _ASM_AX ", %c[rax](%%" _ASM_CX ") \n\t"
+               "mov %%" _ASM_BX ", %c[rbx](%%" _ASM_CX ") \n\t"
+               __ASM_SIZE(pop) " %c[rcx](%%" _ASM_CX ") \n\t"
+               "mov %%" _ASM_DX ", %c[rdx](%%" _ASM_CX ") \n\t"
+               "mov %%" _ASM_SI ", %c[rsi](%%" _ASM_CX ") \n\t"
+               "mov %%" _ASM_DI ", %c[rdi](%%" _ASM_CX ") \n\t"
+               "mov %%" _ASM_BP ", %c[rbp](%%" _ASM_CX ") \n\t"
 #ifdef CONFIG_X86_64
-               "mov %%r8,  %c[r8](%0) \n\t"
-               "mov %%r9,  %c[r9](%0) \n\t"
-               "mov %%r10, %c[r10](%0) \n\t"
-               "mov %%r11, %c[r11](%0) \n\t"
-               "mov %%r12, %c[r12](%0) \n\t"
-               "mov %%r13, %c[r13](%0) \n\t"
-               "mov %%r14, %c[r14](%0) \n\t"
-               "mov %%r15, %c[r15](%0) \n\t"
+               "mov %%r8,  %c[r8](%%" _ASM_CX ") \n\t"
+               "mov %%r9,  %c[r9](%%" _ASM_CX ") \n\t"
+               "mov %%r10, %c[r10](%%" _ASM_CX ") \n\t"
+               "mov %%r11, %c[r11](%%" _ASM_CX ") \n\t"
+               "mov %%r12, %c[r12](%%" _ASM_CX ") \n\t"
+               "mov %%r13, %c[r13](%%" _ASM_CX ") \n\t"
+               "mov %%r14, %c[r14](%%" _ASM_CX ") \n\t"
+               "mov %%r15, %c[r15](%%" _ASM_CX ") \n\t"
 
                /*
                 * Clear all general purpose registers (except RSP, which is loaded by
@@ -10860,7 +10868,7 @@ static void __noclone vmx_vcpu_run(struct kvm_vcpu *vcpu)
                "xor %%r15d, %%r15d \n\t"
 #endif
                "mov %%cr2, %%" _ASM_AX "   \n\t"
-               "mov %%" _ASM_AX ", %c[cr2](%0) \n\t"
+               "mov %%" _ASM_AX ", %c[cr2](%%" _ASM_CX ") \n\t"
 
                "xor %%eax, %%eax \n\t"
                "xor %%ebx, %%ebx \n\t"