: "=&r" (__dummy)); \
} while (0)
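+/*
+ * Grab the exception vector (event code) left behind by the low-level
+ * exception entry: it is kept in the banked r2 (r2_bank) on CPUs with
+ * SR.RB register banking, and handed over in r4 otherwise.
+ */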
+#ifdef CONFIG_CPU_HAS_SR_RB
+#define lookup_exception_vector()	\
+({					\
+	unsigned long _vec;		\
+					\
+	__asm__ __volatile__ (		\
+		"stc r2_bank, %0\n\t"	\
+		: "=r" (_vec)		\
+	);				\
+					\
+	_vec;				\
+})
+#else
+#define lookup_exception_vector()	\
+({					\
+	unsigned long _vec;		\
+	__asm__ __volatile__ (		\
+		"mov r4, %0\n\t"	\
+		: "=r" (_vec)		\
+	);				\
+					\
+	_vec;				\
+})
+#endif
+
int handle_unaligned_access(opcode_t instruction, struct pt_regs *regs,
struct mem_access *ma);
return ret;
}
-#ifdef CONFIG_CPU_HAS_SR_RB
-#define lookup_exception_vector(x) \
- __asm__ __volatile__ ("stc r2_bank, %0\n\t" : "=r" ((x)))
-#else
-#define lookup_exception_vector(x) \
- __asm__ __volatile__ ("mov r4, %0\n\t" : "=r" ((x)))
-#endif
-
/*
* Handle various address error exceptions:
* - instruction address error:
	/* Intentional ifdef */
#ifdef CONFIG_CPU_HAS_SR_RB
-	lookup_exception_vector(error_code);
+	error_code = lookup_exception_vector();
#endif
	oldfs = get_fs();
}
#endif
-	lookup_exception_vector(error_code);
+	error_code = lookup_exception_vector();
	local_irq_enable();
	CHK_REMOTE_DEBUG(regs);
/* not a FPU inst. */
#endif
-	lookup_exception_vector(inst);
+	inst = lookup_exception_vector();
	local_irq_enable();
	CHK_REMOTE_DEBUG(regs);
	struct pt_regs *regs = RELOC_HIDE(&__regs, 0);
	long ex;
-	lookup_exception_vector(ex);
+	ex = lookup_exception_vector();
	die_if_kernel("exception", regs, ex);
}