From: Varun Sethi
Date: Wed, 25 Apr 2012 01:26:43 +0000 (+0000)
Subject: KVM: PPC: bookehv: Use a Macro for saving/restoring guest registers to/from their...
X-Git-Url: https://git.stricted.de/?a=commitdiff_plain;h=185e4188dab6456409cad66c579501dd89487188;p=GitHub%2FLineageOS%2Fandroid_kernel_motorola_exynos9610.git

KVM: PPC: bookehv: Use a Macro for saving/restoring guest registers
to/from their 64 bit copies.

Introduced PPC_STD/PPC_LD macros for saving/restoring guest registers
to/from their 64 bit copies.

Signed-off-by: Varun Sethi
Signed-off-by: Alexander Graf
---

diff --git a/arch/powerpc/include/asm/kvm_asm.h b/arch/powerpc/include/asm/kvm_asm.h
index 097815233284..7d4018dd0e11 100644
--- a/arch/powerpc/include/asm/kvm_asm.h
+++ b/arch/powerpc/include/asm/kvm_asm.h
@@ -20,6 +20,14 @@
 #ifndef __POWERPC_KVM_ASM_H__
 #define __POWERPC_KVM_ASM_H__
 
+#ifdef CONFIG_64BIT
+#define PPC_STD(sreg, offset, areg)  std sreg, (offset)(areg)
+#define PPC_LD(treg, offset, areg)   ld treg, (offset)(areg)
+#else
+#define PPC_STD(sreg, offset, areg)  stw sreg, (offset+4)(areg)
+#define PPC_LD(treg, offset, areg)   lwz treg, (offset+4)(areg)
+#endif
+
 /* IVPR must be 64KiB-aligned. */
 #define VCPU_SIZE_ORDER 4
 #define VCPU_SIZE_LOG   (VCPU_SIZE_ORDER + 12)
diff --git a/arch/powerpc/kvm/bookehv_interrupts.S b/arch/powerpc/kvm/bookehv_interrupts.S
index 909e96e0650c..41d34850f826 100644
--- a/arch/powerpc/kvm/bookehv_interrupts.S
+++ b/arch/powerpc/kvm/bookehv_interrupts.S
@@ -93,11 +93,7 @@ #endif
 
 	oris	r8, r6, MSR_CE@h
-#ifdef CONFIG_64BIT
-	std	r6, (VCPU_SHARED_MSR)(r11)
-#else
-	stw	r6, (VCPU_SHARED_MSR + 4)(r11)
-#endif
+	PPC_STD(r6, VCPU_SHARED_MSR, r11)
 	ori	r8, r8, MSR_ME | MSR_RI
 	PPC_STL	r5, VCPU_PC(r4)
@@ -335,11 +331,7 @@ _GLOBAL(kvmppc_resume_host)
 	stw	r5, VCPU_SHARED_MAS0(r11)
 	mfspr	r7, SPRN_MAS2
 	stw	r6, VCPU_SHARED_MAS1(r11)
-#ifdef CONFIG_64BIT
-	std	r7, (VCPU_SHARED_MAS2)(r11)
-#else
-	stw	r7, (VCPU_SHARED_MAS2 + 4)(r11)
-#endif
+	PPC_STD(r7, VCPU_SHARED_MAS2, r11)
 	mfspr	r5, SPRN_MAS3
 	mfspr	r6, SPRN_MAS4
 	stw	r5, VCPU_SHARED_MAS7_3+4(r11)
@@ -527,11 +519,7 @@ lightweight_exit:
 	stw	r3, VCPU_HOST_MAS6(r4)
 	lwz	r3, VCPU_SHARED_MAS0(r11)
 	lwz	r5, VCPU_SHARED_MAS1(r11)
-#ifdef CONFIG_64BIT
-	ld	r6, (VCPU_SHARED_MAS2)(r11)
-#else
-	lwz	r6, (VCPU_SHARED_MAS2 + 4)(r11)
-#endif
+	PPC_LD(r6, VCPU_SHARED_MAS2, r11)
 	lwz	r7, VCPU_SHARED_MAS7_3+4(r11)
 	lwz	r8, VCPU_SHARED_MAS4(r11)
 	mtspr	SPRN_MAS0, r3
@@ -565,11 +553,7 @@ lightweight_exit:
 	PPC_LL	r6, VCPU_CTR(r4)
 	PPC_LL	r7, VCPU_CR(r4)
 	PPC_LL	r8, VCPU_PC(r4)
-#ifdef CONFIG_64BIT
-	ld	r9, (VCPU_SHARED_MSR)(r11)
-#else
-	lwz	r9, (VCPU_SHARED_MSR + 4)(r11)
-#endif
+	PPC_LD(r9, VCPU_SHARED_MSR, r11)
 	PPC_LL	r0, VCPU_GPR(r0)(r4)
 	PPC_LL	r1, VCPU_GPR(r1)(r4)
 	PPC_LL	r2, VCPU_GPR(r2)(r4)
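
Note on the expansion (illustrative sketch, not part of the patch): guest
MSR and MAS2 are kept in the shared area as 64-bit fields even on 32-bit
hosts. With CONFIG_64BIT the macros emit a full doubleword access; on a
32-bit build only the low 32 bits of the field are accessed, and the +4
offset selects the least-significant word of the 64-bit field, assuming
the big-endian layout used by these Book E targets:

	/* Caller writes one line regardless of word size: */
	PPC_STD(r6, VCPU_SHARED_MSR, r11)
	/* 64-bit build expands to: std r6, (VCPU_SHARED_MSR)(r11)     */
	/* 32-bit build expands to: stw r6, (VCPU_SHARED_MSR + 4)(r11) */

	PPC_LD(r9, VCPU_SHARED_MSR, r11)
	/* 64-bit build expands to: ld  r9, (VCPU_SHARED_MSR)(r11)     */
	/* 32-bit build expands to: lwz r9, (VCPU_SHARED_MSR + 4)(r11) */

Centralizing the word-size choice in PPC_STD/PPC_LD removes the repeated
five-line #ifdef CONFIG_64BIT blocks from the interrupt assembly, in the
same spirit as the existing PPC_STL/PPC_LL macros used alongside them.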