KVM: PPC: bookehv: Use a Macro for saving/restoring guest registers to/from their 64 bit copies
author    Varun Sethi <Varun.Sethi@freescale.com>
Wed, 25 Apr 2012 01:26:43 +0000 (01:26 +0000)
committer Alexander Graf <agraf@suse.de>
Sun, 6 May 2012 14:19:08 +0000 (16:19 +0200)
Introduced PPC_STD/PPC_LD macros for saving/restoring guest registers to/from their 64-bit copies.

Signed-off-by: Varun Sethi <Varun.Sethi@freescale.com>
Signed-off-by: Alexander Graf <agraf@suse.de>
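
[Editor's note, not part of the patch] The fields touched here (VCPU_SHARED_MSR,
VCPU_SHARED_MAS2 -- asm offsets into the vcpu shared area) are 64 bits wide even
on 32-bit hosts, so the 32-bit macro variants access only the low word. A minimal
sketch of the expansion, assuming the usual big-endian layout of these BookE
parts:

    /* PPC_STD(r6, VCPU_SHARED_MSR, r11) expands to:              */
    /*   CONFIG_64BIT=y: std r6, (VCPU_SHARED_MSR)(r11)           */
    /*   CONFIG_64BIT=n: stw r6, (VCPU_SHARED_MSR + 4)(r11)       */
    /* On a big-endian host, the least significant 32 bits of a   */
    /* 64-bit field sit at offset + 4, hence the +4 in the 32-bit */
    /* variant.                                                   */

The payoff is visible in the hunks below: each five-line #ifdef block at the
call sites collapses to a single macro line.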
arch/powerpc/include/asm/kvm_asm.h
arch/powerpc/kvm/bookehv_interrupts.S

diff --git a/arch/powerpc/include/asm/kvm_asm.h b/arch/powerpc/include/asm/kvm_asm.h
index 097815233284050721e16a62333f3da2202cb144..7d4018dd0e112114fb177c93a3f10a1fd09578dc 100644
--- a/arch/powerpc/include/asm/kvm_asm.h
+++ b/arch/powerpc/include/asm/kvm_asm.h
 #ifndef __POWERPC_KVM_ASM_H__
 #define __POWERPC_KVM_ASM_H__
 
+#ifdef CONFIG_64BIT
+#define PPC_STD(sreg, offset, areg)  std sreg, (offset)(areg)
+#define PPC_LD(treg, offset, areg)   ld treg, (offset)(areg)
+#else
+#define PPC_STD(sreg, offset, areg)  stw sreg, (offset+4)(areg)
+#define PPC_LD(treg, offset, areg)   lwz treg, (offset+4)(areg)
+#endif
+
 /* IVPR must be 64KiB-aligned. */
 #define VCPU_SIZE_ORDER 4
 #define VCPU_SIZE_LOG   (VCPU_SIZE_ORDER + 12)
diff --git a/arch/powerpc/kvm/bookehv_interrupts.S b/arch/powerpc/kvm/bookehv_interrupts.S
index 909e96e0650c49cc4f720b1603e98b3e9f6b24be..41d34850f82604b2f75020ab43594f32a96cd5c1 100644
--- a/arch/powerpc/kvm/bookehv_interrupts.S
+++ b/arch/powerpc/kvm/bookehv_interrupts.S
 #endif
 
        oris    r8, r6, MSR_CE@h
-#ifdef CONFIG_64BIT
-       std     r6, (VCPU_SHARED_MSR)(r11)
-#else
-       stw     r6, (VCPU_SHARED_MSR + 4)(r11)
-#endif
+       PPC_STD(r6, VCPU_SHARED_MSR, r11)
        ori     r8, r8, MSR_ME | MSR_RI
        PPC_STL r5, VCPU_PC(r4)
 
@@ -335,11 +331,7 @@ _GLOBAL(kvmppc_resume_host)
        stw     r5, VCPU_SHARED_MAS0(r11)
        mfspr   r7, SPRN_MAS2
        stw     r6, VCPU_SHARED_MAS1(r11)
-#ifdef CONFIG_64BIT
-       std     r7, (VCPU_SHARED_MAS2)(r11)
-#else
-       stw     r7, (VCPU_SHARED_MAS2 + 4)(r11)
-#endif
+       PPC_STD(r7, VCPU_SHARED_MAS2, r11)
        mfspr   r5, SPRN_MAS3
        mfspr   r6, SPRN_MAS4
        stw     r5, VCPU_SHARED_MAS7_3+4(r11)
@@ -527,11 +519,7 @@ lightweight_exit:
        stw     r3, VCPU_HOST_MAS6(r4)
        lwz     r3, VCPU_SHARED_MAS0(r11)
        lwz     r5, VCPU_SHARED_MAS1(r11)
-#ifdef CONFIG_64BIT
-       ld      r6, (VCPU_SHARED_MAS2)(r11)
-#else
-       lwz     r6, (VCPU_SHARED_MAS2 + 4)(r11)
-#endif
+       PPC_LD(r6, VCPU_SHARED_MAS2, r11)
        lwz     r7, VCPU_SHARED_MAS7_3+4(r11)
        lwz     r8, VCPU_SHARED_MAS4(r11)
        mtspr   SPRN_MAS0, r3
@@ -565,11 +553,7 @@ lightweight_exit:
        PPC_LL  r6, VCPU_CTR(r4)
        PPC_LL  r7, VCPU_CR(r4)
        PPC_LL  r8, VCPU_PC(r4)
-#ifdef CONFIG_64BIT
-       ld      r9, (VCPU_SHARED_MSR)(r11)
-#else
-       lwz     r9, (VCPU_SHARED_MSR + 4)(r11)
-#endif
+       PPC_LD(r9, VCPU_SHARED_MSR, r11)
        PPC_LL  r0, VCPU_GPR(r0)(r4)
        PPC_LL  r1, VCPU_GPR(r1)(r4)
        PPC_LL  r2, VCPU_GPR(r2)(r4)