 	msr_check_and_set(MSR_FP);
-	if (current->thread.regs && (current->thread.regs->msr & MSR_FP))
+	if (current->thread.regs && (current->thread.regs->msr & MSR_FP)) {
+		check_if_tm_restore_required(current);
 		__giveup_fpu(current);
+	}
 }
 EXPORT_SYMBOL(enable_kernel_fp);
 #endif /* CONFIG_PPC_FPU */
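For reference, here is how enable_kernel_fp() reads with this hunk applied. This is a reconstruction from the diff plus the surrounding mainline code; the WARN_ON(preemptible()) line and the added comment are assumptions, not part of the patch:

void enable_kernel_fp(void)
{
	WARN_ON(preemptible());	/* assumed from mainline: callers disable preemption */

	msr_check_and_set(MSR_FP);

	if (current->thread.regs && (current->thread.regs->msr & MSR_FP)) {
		/* If the task is mid-transaction, flag it for TM state
		 * restore before its live FP state is given up. */
		check_if_tm_restore_required(current);
		__giveup_fpu(current);
	}
}

The point of the added call: __giveup_fpu() clears MSR_FP in the thread's saved MSR, so without the check the exit path could miss that a task in a transaction still needs its checkpointed FP state restored on return to userspace.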
 	msr_check_and_set(MSR_VEC);
-	if (current->thread.regs && (current->thread.regs->msr & MSR_VEC))
+	if (current->thread.regs && (current->thread.regs->msr & MSR_VEC)) {
+		check_if_tm_restore_required(current);
 		__giveup_altivec(current);
+	}
 }
 EXPORT_SYMBOL(enable_kernel_altivec);
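The same pattern for the VMX path. The helper itself is not part of this diff; assuming it matches its definition in arch/powerpc/kernel/process.c, it looks roughly like this:

#ifdef CONFIG_PPC_TRANSACTIONAL_MEM
static void check_if_tm_restore_required(struct task_struct *tsk)
{
	/*
	 * If we are saving the current thread's registers, and the
	 * thread is in a transactional state, set the TIF_RESTORE_TM
	 * bit so that we know to restore the registers before
	 * returning to userspace.
	 */
	if (tsk == current && tsk->thread.regs &&
	    MSR_TM_ACTIVE(tsk->thread.regs->msr) &&
	    !test_thread_flag(TIF_RESTORE_TM)) {
		tsk->thread.ckpt_regs.msr = tsk->thread.regs->msr;
		set_thread_flag(TIF_RESTORE_TM);
	}
}
#else
static inline void check_if_tm_restore_required(struct task_struct *tsk) { }
#endif

With CONFIG_PPC_TRANSACTIONAL_MEM disabled the helper compiles to an empty stub, which is why the SPE hunk below can add the call unconditionally even though TM (Book3S-64) and SPE (e500) never appear in the same build.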
 	msr_check_and_set(MSR_FP|MSR_VEC|MSR_VSX);
 	if (current->thread.regs && (current->thread.regs->msr & MSR_VSX)) {
+		check_if_tm_restore_required(current);
 		if (current->thread.regs->msr & MSR_FP)
 			__giveup_fpu(current);
 		if (current->thread.regs->msr & MSR_VEC)
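enable_kernel_vsx() differs from the other three: VSX state overlaps the FP and VMX register files, so both are flushed under the one MSR_VSX check, and a single check_if_tm_restore_required() covers all of them. The trailing __giveup_altivec()/__giveup_vsx() calls fall outside the context lines shown above; a reconstruction of the full function, under the same mainline assumptions as before:

void enable_kernel_vsx(void)
{
	WARN_ON(preemptible());

	msr_check_and_set(MSR_FP|MSR_VEC|MSR_VSX);

	if (current->thread.regs && (current->thread.regs->msr & MSR_VSX)) {
		check_if_tm_restore_required(current);	/* one check covers FP, VMX and VSX */
		if (current->thread.regs->msr & MSR_FP)
			__giveup_fpu(current);
		if (current->thread.regs->msr & MSR_VEC)
			__giveup_altivec(current);
		__giveup_vsx(current);
	}
}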
 	msr_check_and_set(MSR_SPE);
-	if (current->thread.regs && (current->thread.regs->msr & MSR_SPE))
+	if (current->thread.regs && (current->thread.regs->msr & MSR_SPE)) {
+		check_if_tm_restore_required(current);
 		__giveup_spe(current);
+	}
 }
 EXPORT_SYMBOL(enable_kernel_spe);
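For illustration, a hypothetical in-kernel caller; the body is a placeholder, but the bracketing pattern is what the WARN_ON(preemptible()) in these functions (assumed from mainline) enforces:

/* Hypothetical user of kernel-mode VMX, e.g. an accelerated memcpy. */
preempt_disable();
enable_kernel_altivec();	/* flushes user VMX state; flags TM restore if a transaction is active */
/* ... use VMX registers here ... */
preempt_enable();

Nothing about this calling convention changes in the patch; the added check_if_tm_restore_required() calls only ensure that a task inside a hardware transaction gets TIF_RESTORE_TM set before its live register state is flushed, so the checkpointed state is recheckpointed on the way back to userspace.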