#ifndef _ASM_INTERRUPT_H
#define _ASM_INTERRUPT_H
+#include <linux/config.h>
#include <asm/hazards.h>
__asm__ (
- ".macro\tlocal_irq_enable\n\t"
- ".set\tpush\n\t"
- ".set\treorder\n\t"
- ".set\tnoat\n\t"
- "mfc0\t$1,$12\n\t"
- "ori\t$1,0x1f\n\t"
- "xori\t$1,0x1e\n\t"
- "mtc0\t$1,$12\n\t"
- "irq_enable_hazard\n\t"
- ".set\tpop\n\t"
- ".endm");
+ " .macro local_irq_enable \n"
+ " .set push \n"
+ " .set reorder \n"
+ " .set noat \n"
+#if defined(CONFIG_CPU_MIPS32_R2) || defined(CONFIG_CPU_MIPS64_R2)
+ " ei \n"
+#else
+ " mfc0 $1,$12 \n"
+ " ori $1,0x1f \n"
+ " xori $1,0x1e \n"
+ " mtc0 $1,$12 \n"
+#endif
+ " irq_enable_hazard \n"
+ " .set pop \n"
+ " .endm");
static inline void local_irq_enable(void)
{
* no nops at all.
*/
__asm__ (
- ".macro\tlocal_irq_disable\n\t"
- ".set\tpush\n\t"
- ".set\tnoat\n\t"
- "mfc0\t$1,$12\n\t"
- "ori\t$1,1\n\t"
- "xori\t$1,1\n\t"
- ".set\tnoreorder\n\t"
- "mtc0\t$1,$12\n\t"
- "irq_disable_hazard\n\t"
- ".set\tpop\n\t"
- ".endm");
+ " .macro local_irq_disable\n"
+ " .set push \n"
+ " .set noat \n"
+#if defined(CONFIG_CPU_MIPS32_R2) || defined(CONFIG_CPU_MIPS64_R2)
+ " di \n"
+#else
+ " mfc0 $1,$12 \n"
+ " ori $1,1 \n"
+ " xori $1,1 \n"
+ " .set noreorder \n"
+ " mtc0 $1,$12 \n"
+#endif
+ " irq_disable_hazard \n"
+ " .set pop \n"
+ " .endm \n");
static inline void local_irq_disable(void)
{
}
__asm__ (
- ".macro\tlocal_save_flags flags\n\t"
- ".set\tpush\n\t"
- ".set\treorder\n\t"
- "mfc0\t\\flags, $12\n\t"
- ".set\tpop\n\t"
- ".endm");
+ " .macro local_save_flags flags \n"
+ " .set push \n"
+ " .set reorder \n"
+ " mfc0 \\flags, $12 \n"
+ " .set pop \n"
+ " .endm \n");
#define local_save_flags(x) \
__asm__ __volatile__( \
: "=r" (x))
__asm__ (
- ".macro\tlocal_irq_save result\n\t"
- ".set\tpush\n\t"
- ".set\treorder\n\t"
- ".set\tnoat\n\t"
- "mfc0\t\\result, $12\n\t"
- "ori\t$1, \\result, 1\n\t"
- "xori\t$1, 1\n\t"
- ".set\tnoreorder\n\t"
- "mtc0\t$1, $12\n\t"
- "irq_disable_hazard\n\t"
- ".set\tpop\n\t"
- ".endm");
+ " .macro local_irq_save result \n"
+ " .set push \n"
+ " .set reorder \n"
+ " .set noat \n"
+#if defined(CONFIG_CPU_MIPS32_R2) || defined(CONFIG_CPU_MIPS64_R2)
+ " di \\result \n"
+#else
+ " mfc0 \\result, $12 \n"
+ " ori $1, \\result, 1 \n"
+ " xori $1, 1 \n"
+ " .set noreorder \n"
+ " mtc0 $1, $12 \n"
+#endif
+ " irq_disable_hazard \n"
+ " .set pop \n"
+ " .endm \n");
#define local_irq_save(x) \
__asm__ __volatile__( \
: "memory")
__asm__ (
- ".macro\tlocal_irq_restore flags\n\t"
- ".set\tnoreorder\n\t"
- ".set\tnoat\n\t"
- "mfc0\t$1, $12\n\t"
- "andi\t\\flags, 1\n\t"
- "ori\t$1, 1\n\t"
- "xori\t$1, 1\n\t"
- "or\t\\flags, $1\n\t"
- "mtc0\t\\flags, $12\n\t"
- "irq_disable_hazard\n\t"
- ".set\tat\n\t"
- ".set\treorder\n\t"
- ".endm");
+ " .macro local_irq_restore flags \n"
+ " .set noreorder \n"
+ " .set noat \n"
+#if (defined(CONFIG_CPU_MIPS32_R2) || defined(CONFIG_CPU_MIPS64_R2)) && \
+ defined(CONFIG_IRQ_CPU)
+ /*
+ * Slow, but doesn't suffer from a relatively unlikely race
+ * condition that has been present since day 1.
+ */
+ " beqz \\flags, 1f \n"
+ " di \n"
+ " ei \n"
+ "1: \n"
+#elif defined(CONFIG_CPU_MIPS32_R2) || defined(CONFIG_CPU_MIPS64_R2)
+ /*
+ * Fast, dangerous. Life is fun, life is good.
+ */
+ " mfc0 $1, $12 \n"
+ " ins $1, \\flags, 0, 1 \n"
+ " mtc0 $1, $12 \n"
+#else
+ " mfc0 $1, $12 \n"
+ " andi \\flags, 1 \n"
+ " ori $1, 1 \n"
+ " xori $1, 1 \n"
+ " or \\flags, $1 \n"
+ " mtc0 \\flags, $12 \n"
+#endif
+ " irq_disable_hazard \n"
+ " .set at \n"
+ " .set reorder \n"
+ " .endm \n");
#define local_irq_restore(flags) \
do { \