Skip to content

Commit

Permalink
KVM: VMX: Explicitly reference RCX as the vmx_vcpu pointer in asm blobs
Browse files Browse the repository at this point in the history
Use '%% " _ASM_CX"' instead of '%0' to dereference RCX, i.e. the
'struct vcpu_vmx' pointer, in the VM-Enter asm blobs of vmx_vcpu_run()
and nested_vmx_check_vmentry_hw().  Using the numbered operand '%0'
means that adding or removing an output parameter requires "rewriting"
almost all of the asm blob (every operand gets renumbered), which makes
it nearly impossible to understand what's being changed in even the
most minor patches; the symbolic register name avoids that churn.

Opportunistically improve the code comments.

Signed-off-by: Sean Christopherson <sean.j.christopherson@intel.com>
Reviewed-by: Andi Kleen <ak@linux.intel.com>
Signed-off-by: Paolo Bonzini <pbonzini@redhat.com>
  • Loading branch information
Sean Christopherson authored and Paolo Bonzini committed Dec 21, 2018
1 parent c6ad459 commit 051a2d3
Show file tree
Hide file tree
Showing 2 changed files with 50 additions and 42 deletions.
6 changes: 3 additions & 3 deletions arch/x86/kvm/vmx/nested.c
Original file line number Diff line number Diff line change
Expand Up @@ -2759,17 +2759,17 @@ static int __noclone nested_vmx_check_vmentry_hw(struct kvm_vcpu *vcpu)
asm(
/* Set HOST_RSP */
__ex("vmwrite %%" _ASM_SP ", %%" _ASM_DX) "\n\t"
"mov %%" _ASM_SP ", %c[host_rsp](%0)\n\t"
"mov %%" _ASM_SP ", %c[host_rsp](%% " _ASM_CX")\n\t"

/* Check if vmlaunch or vmresume is needed */
"cmpl $0, %c[launched](%0)\n\t"
"cmpl $0, %c[launched](%% " _ASM_CX")\n\t"
"jne 1f\n\t"
__ex("vmlaunch") "\n\t"
"jmp 2f\n\t"
"1: " __ex("vmresume") "\n\t"
"2: "
/* Set vmx->fail accordingly */
"setbe %c[fail](%0)\n\t"
"setbe %c[fail](%% " _ASM_CX")\n\t"

".pushsection .rodata\n\t"
".global vmx_early_consistency_check_return\n\t"
Expand Down
86 changes: 47 additions & 39 deletions arch/x86/kvm/vmx/vmx.c
Original file line number Diff line number Diff line change
Expand Up @@ -6440,9 +6440,9 @@ static void __noclone vmx_vcpu_run(struct kvm_vcpu *vcpu)
"push %%" _ASM_DX "; push %%" _ASM_BP ";"
"push %%" _ASM_CX " \n\t" /* placeholder for guest rcx */
"push %%" _ASM_CX " \n\t"
"cmp %%" _ASM_SP ", %c[host_rsp](%0) \n\t"
"cmp %%" _ASM_SP ", %c[host_rsp](%%" _ASM_CX ") \n\t"
"je 1f \n\t"
"mov %%" _ASM_SP ", %c[host_rsp](%0) \n\t"
"mov %%" _ASM_SP ", %c[host_rsp](%%" _ASM_CX ") \n\t"
/* Avoid VMWRITE when Enlightened VMCS is in use */
"test %%" _ASM_SI ", %%" _ASM_SI " \n\t"
"jz 2f \n\t"
Expand All @@ -6452,59 +6452,67 @@ static void __noclone vmx_vcpu_run(struct kvm_vcpu *vcpu)
__ex("vmwrite %%" _ASM_SP ", %%" _ASM_DX) "\n\t"
"1: \n\t"
/* Reload cr2 if changed */
"mov %c[cr2](%0), %%" _ASM_AX " \n\t"
"mov %c[cr2](%%" _ASM_CX "), %%" _ASM_AX " \n\t"
"mov %%cr2, %%" _ASM_DX " \n\t"
"cmp %%" _ASM_AX ", %%" _ASM_DX " \n\t"
"je 3f \n\t"
"mov %%" _ASM_AX", %%cr2 \n\t"
"3: \n\t"
/* Check if vmlaunch or vmresume is needed */
"cmpl $0, %c[launched](%0) \n\t"
"cmpl $0, %c[launched](%%" _ASM_CX ") \n\t"
/* Load guest registers. Don't clobber flags. */
"mov %c[rax](%0), %%" _ASM_AX " \n\t"
"mov %c[rbx](%0), %%" _ASM_BX " \n\t"
"mov %c[rdx](%0), %%" _ASM_DX " \n\t"
"mov %c[rsi](%0), %%" _ASM_SI " \n\t"
"mov %c[rdi](%0), %%" _ASM_DI " \n\t"
"mov %c[rbp](%0), %%" _ASM_BP " \n\t"
"mov %c[rax](%%" _ASM_CX "), %%" _ASM_AX " \n\t"
"mov %c[rbx](%%" _ASM_CX "), %%" _ASM_BX " \n\t"
"mov %c[rdx](%%" _ASM_CX "), %%" _ASM_DX " \n\t"
"mov %c[rsi](%%" _ASM_CX "), %%" _ASM_SI " \n\t"
"mov %c[rdi](%%" _ASM_CX "), %%" _ASM_DI " \n\t"
"mov %c[rbp](%%" _ASM_CX "), %%" _ASM_BP " \n\t"
#ifdef CONFIG_X86_64
"mov %c[r8](%0), %%r8 \n\t"
"mov %c[r9](%0), %%r9 \n\t"
"mov %c[r10](%0), %%r10 \n\t"
"mov %c[r11](%0), %%r11 \n\t"
"mov %c[r12](%0), %%r12 \n\t"
"mov %c[r13](%0), %%r13 \n\t"
"mov %c[r14](%0), %%r14 \n\t"
"mov %c[r15](%0), %%r15 \n\t"
"mov %c[r8](%%" _ASM_CX "), %%r8 \n\t"
"mov %c[r9](%%" _ASM_CX "), %%r9 \n\t"
"mov %c[r10](%%" _ASM_CX "), %%r10 \n\t"
"mov %c[r11](%%" _ASM_CX "), %%r11 \n\t"
"mov %c[r12](%%" _ASM_CX "), %%r12 \n\t"
"mov %c[r13](%%" _ASM_CX "), %%r13 \n\t"
"mov %c[r14](%%" _ASM_CX "), %%r14 \n\t"
"mov %c[r15](%%" _ASM_CX "), %%r15 \n\t"
#endif
"mov %c[rcx](%0), %%" _ASM_CX " \n\t" /* kills %0 (ecx) */
/* Load guest RCX. This kills the vmx_vcpu pointer! */
"mov %c[rcx](%%" _ASM_CX "), %%" _ASM_CX " \n\t"

/* Enter guest mode */
"jne 1f \n\t"
__ex("vmlaunch") "\n\t"
"jmp 2f \n\t"
"1: " __ex("vmresume") "\n\t"
"2: "
/* Save guest registers, load host registers, keep flags */
"mov %0, %c[wordsize](%%" _ASM_SP ") \n\t"
"pop %0 \n\t"
"setbe %c[fail](%0)\n\t"
"mov %%" _ASM_AX ", %c[rax](%0) \n\t"
"mov %%" _ASM_BX ", %c[rbx](%0) \n\t"
__ASM_SIZE(pop) " %c[rcx](%0) \n\t"
"mov %%" _ASM_DX ", %c[rdx](%0) \n\t"
"mov %%" _ASM_SI ", %c[rsi](%0) \n\t"
"mov %%" _ASM_DI ", %c[rdi](%0) \n\t"
"mov %%" _ASM_BP ", %c[rbp](%0) \n\t"

/* Save guest's RCX to the stack placeholder (see above) */
"mov %%" _ASM_CX ", %c[wordsize](%%" _ASM_SP ") \n\t"

/* Load host's RCX, i.e. the vmx_vcpu pointer */
"pop %%" _ASM_CX " \n\t"

/* Set vmx->fail based on EFLAGS.{CF,ZF} */
"setbe %c[fail](%%" _ASM_CX ")\n\t"

/* Save all guest registers, including RCX from the stack */
"mov %%" _ASM_AX ", %c[rax](%%" _ASM_CX ") \n\t"
"mov %%" _ASM_BX ", %c[rbx](%%" _ASM_CX ") \n\t"
__ASM_SIZE(pop) " %c[rcx](%%" _ASM_CX ") \n\t"
"mov %%" _ASM_DX ", %c[rdx](%%" _ASM_CX ") \n\t"
"mov %%" _ASM_SI ", %c[rsi](%%" _ASM_CX ") \n\t"
"mov %%" _ASM_DI ", %c[rdi](%%" _ASM_CX ") \n\t"
"mov %%" _ASM_BP ", %c[rbp](%%" _ASM_CX ") \n\t"
#ifdef CONFIG_X86_64
"mov %%r8, %c[r8](%0) \n\t"
"mov %%r9, %c[r9](%0) \n\t"
"mov %%r10, %c[r10](%0) \n\t"
"mov %%r11, %c[r11](%0) \n\t"
"mov %%r12, %c[r12](%0) \n\t"
"mov %%r13, %c[r13](%0) \n\t"
"mov %%r14, %c[r14](%0) \n\t"
"mov %%r15, %c[r15](%0) \n\t"
"mov %%r8, %c[r8](%%" _ASM_CX ") \n\t"
"mov %%r9, %c[r9](%%" _ASM_CX ") \n\t"
"mov %%r10, %c[r10](%%" _ASM_CX ") \n\t"
"mov %%r11, %c[r11](%%" _ASM_CX ") \n\t"
"mov %%r12, %c[r12](%%" _ASM_CX ") \n\t"
"mov %%r13, %c[r13](%%" _ASM_CX ") \n\t"
"mov %%r14, %c[r14](%%" _ASM_CX ") \n\t"
"mov %%r15, %c[r15](%%" _ASM_CX ") \n\t"
/*
* Clear host registers marked as clobbered to prevent
* speculative use.
Expand All @@ -6519,7 +6527,7 @@ static void __noclone vmx_vcpu_run(struct kvm_vcpu *vcpu)
"xor %%r15d, %%r15d \n\t"
#endif
"mov %%cr2, %%" _ASM_AX " \n\t"
"mov %%" _ASM_AX ", %c[cr2](%0) \n\t"
"mov %%" _ASM_AX ", %c[cr2](%%" _ASM_CX ") \n\t"

"xor %%eax, %%eax \n\t"
"xor %%ebx, %%ebx \n\t"
Expand Down

0 comments on commit 051a2d3

Please sign in to comment.