| From: Avi Kivity <avi@redhat.com> |
| Date: Sun, 16 Sep 2012 15:10:59 +0300 |
| Subject: KVM: SVM: Make use of asm.h |
| |
| commit 7454766f7bead388251aedee35a478356a7f4e72 upstream. |
| |
| Use the _ASM_* macros from asm.h for bitness-insensitive register |
| names, instead of rolling our own "R" prefix macro. |
| |
| Signed-off-by: Avi Kivity <avi@redhat.com> |
| Signed-off-by: Marcelo Tosatti <mtosatti@redhat.com> |
| Signed-off-by: Ben Hutchings <ben@decadent.org.uk> |
| --- |
| arch/x86/kvm/svm.c | 46 ++++++++++++++++++++-------------------------- |
| 1 file changed, 20 insertions(+), 26 deletions(-) |
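| |
| For reference: the _ASM_* helpers this patch switches to live in |
| arch/x86/include/asm/asm.h and pick the "e"- or "r"-prefixed register |
| name according to bitness (the real header builds them out of |
| __ASM_SEL()/__ASM_REG() and also covers __ASSEMBLY__ users). A |
| simplified C sketch of the idea, not the exact header contents: |
| |
| #ifdef CONFIG_X86_64 |
| /* 64-bit build: use the "r"-prefixed names (rax, rbx, ...) */ |
| # define __ASM_REG(reg) "r" #reg |
| #else |
| /* 32-bit build: use the "e"-prefixed names (eax, ebx, ...) */ |
| # define __ASM_REG(reg) "e" #reg |
| #endif |
| #define _ASM_AX __ASM_REG(ax) |
| #define _ASM_BX __ASM_REG(bx) |
| #define _ASM_CX __ASM_REG(cx) |
| #define _ASM_DX __ASM_REG(dx) |
| #define _ASM_SI __ASM_REG(si) |
| #define _ASM_DI __ASM_REG(di) |
| #define _ASM_BP __ASM_REG(bp) |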
| |
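| With the sketch above, the string concatenation in svm_vcpu_run() |
| produces the same template text that the old hand-rolled R macro did, |
| for example: |
| |
| /* 64-bit: "push %%" _ASM_BP "; \n\t" -> "push %%rbp; \n\t" */ |
| /* 32-bit: "push %%" _ASM_BP "; \n\t" -> "push %%ebp; \n\t" */ |
| /* i.e. exactly what "push %%"R"bp; \n\t" expanded to before. */ |
| |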
| --- a/arch/x86/kvm/svm.c |
| +++ b/arch/x86/kvm/svm.c |
| @@ -3679,12 +3679,6 @@ static void svm_cancel_injection(struct |
| svm_complete_interrupts(svm); |
| } |
| |
| -#ifdef CONFIG_X86_64 |
| -#define R "r" |
| -#else |
| -#define R "e" |
| -#endif |
| - |
| static void svm_vcpu_run(struct kvm_vcpu *vcpu) |
| { |
| struct vcpu_svm *svm = to_svm(vcpu); |
| @@ -3711,13 +3705,13 @@ static void svm_vcpu_run(struct kvm_vcpu |
| local_irq_enable(); |
| |
| asm volatile ( |
| - "push %%"R"bp; \n\t" |
| - "mov %c[rbx](%[svm]), %%"R"bx \n\t" |
| - "mov %c[rcx](%[svm]), %%"R"cx \n\t" |
| - "mov %c[rdx](%[svm]), %%"R"dx \n\t" |
| - "mov %c[rsi](%[svm]), %%"R"si \n\t" |
| - "mov %c[rdi](%[svm]), %%"R"di \n\t" |
| - "mov %c[rbp](%[svm]), %%"R"bp \n\t" |
| + "push %%" _ASM_BP "; \n\t" |
| + "mov %c[rbx](%[svm]), %%" _ASM_BX " \n\t" |
| + "mov %c[rcx](%[svm]), %%" _ASM_CX " \n\t" |
| + "mov %c[rdx](%[svm]), %%" _ASM_DX " \n\t" |
| + "mov %c[rsi](%[svm]), %%" _ASM_SI " \n\t" |
| + "mov %c[rdi](%[svm]), %%" _ASM_DI " \n\t" |
| + "mov %c[rbp](%[svm]), %%" _ASM_BP " \n\t" |
| #ifdef CONFIG_X86_64 |
| "mov %c[r8](%[svm]), %%r8 \n\t" |
| "mov %c[r9](%[svm]), %%r9 \n\t" |
| @@ -3730,20 +3724,20 @@ static void svm_vcpu_run(struct kvm_vcpu |
| #endif |
| |
| /* Enter guest mode */ |
| - "push %%"R"ax \n\t" |
| - "mov %c[vmcb](%[svm]), %%"R"ax \n\t" |
| + "push %%" _ASM_AX " \n\t" |
| + "mov %c[vmcb](%[svm]), %%" _ASM_AX " \n\t" |
| __ex(SVM_VMLOAD) "\n\t" |
| __ex(SVM_VMRUN) "\n\t" |
| __ex(SVM_VMSAVE) "\n\t" |
| - "pop %%"R"ax \n\t" |
| + "pop %%" _ASM_AX " \n\t" |
| |
| /* Save guest registers, load host registers */ |
| - "mov %%"R"bx, %c[rbx](%[svm]) \n\t" |
| - "mov %%"R"cx, %c[rcx](%[svm]) \n\t" |
| - "mov %%"R"dx, %c[rdx](%[svm]) \n\t" |
| - "mov %%"R"si, %c[rsi](%[svm]) \n\t" |
| - "mov %%"R"di, %c[rdi](%[svm]) \n\t" |
| - "mov %%"R"bp, %c[rbp](%[svm]) \n\t" |
| + "mov %%" _ASM_BX ", %c[rbx](%[svm]) \n\t" |
| + "mov %%" _ASM_CX ", %c[rcx](%[svm]) \n\t" |
| + "mov %%" _ASM_DX ", %c[rdx](%[svm]) \n\t" |
| + "mov %%" _ASM_SI ", %c[rsi](%[svm]) \n\t" |
| + "mov %%" _ASM_DI ", %c[rdi](%[svm]) \n\t" |
| + "mov %%" _ASM_BP ", %c[rbp](%[svm]) \n\t" |
| #ifdef CONFIG_X86_64 |
| "mov %%r8, %c[r8](%[svm]) \n\t" |
| "mov %%r9, %c[r9](%[svm]) \n\t" |
| @@ -3754,7 +3748,7 @@ static void svm_vcpu_run(struct kvm_vcpu |
| "mov %%r14, %c[r14](%[svm]) \n\t" |
| "mov %%r15, %c[r15](%[svm]) \n\t" |
| #endif |
| - "pop %%"R"bp" |
| + "pop %%" _ASM_BP |
| : |
| : [svm]"a"(svm), |
| [vmcb]"i"(offsetof(struct vcpu_svm, vmcb_pa)), |
| @@ -3775,9 +3769,11 @@ static void svm_vcpu_run(struct kvm_vcpu |
| [r15]"i"(offsetof(struct vcpu_svm, vcpu.arch.regs[VCPU_REGS_R15])) |
| #endif |
| : "cc", "memory" |
| - , R"bx", R"cx", R"dx", R"si", R"di" |
| #ifdef CONFIG_X86_64 |
| + , "rbx", "rcx", "rdx", "rsi", "rdi" |
| , "r8", "r9", "r10", "r11" , "r12", "r13", "r14", "r15" |
| +#else |
| + , "ebx", "ecx", "edx", "esi", "edi" |
| #endif |
| ); |
| |
| @@ -3837,8 +3833,6 @@ static void svm_vcpu_run(struct kvm_vcpu |
| mark_all_clean(svm->vmcb); |
| } |
| |
| -#undef R |
| - |
| static void svm_set_cr3(struct kvm_vcpu *vcpu, unsigned long root) |
| { |
| struct vcpu_svm *svm = to_svm(vcpu); |