* License. See the file "COPYING" in the main directory of this archive
* for more details.
*
- * Copyright (C) 2001 - 2005 Tensilica Inc.
+ * Copyright (C) 2001 - 2008 Tensilica Inc.
*/
#ifndef _XTENSA_ATOMIC_H
/*
* This Xtensa implementation assumes that the right mechanism
- * for exclusion is for locking interrupts to level 1.
+ * for exclusion is to lock interrupts at level EXCM_LEVEL.
*
* Locking interrupts looks like this:
*
- * rsil a15, 1
+ * rsil a15, LOCKLEVEL
* <code>
* wsr a15, PS
* rsync
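
As an illustration (not kernel code): a minimal host-side C model of
what the rsil/wsr pair above does to PS.INTLEVEL, with a plain
variable standing in for the PS special register and an assumed
EXCM_LEVEL of 3:

    #include <stdio.h>

    #define PS_INTLEVEL_MASK 0x0000000F
    #define LOCKLEVEL        3      /* example: XCHAL_EXCM_LEVEL == 3 */

    static unsigned ps;             /* stands in for the PS register */

    /* models: rsil a15, LOCKLEVEL (read PS, then raise INTLEVEL) */
    static unsigned rsil_locklevel(void)
    {
            unsigned old = ps;

            ps = (ps & ~PS_INTLEVEL_MASK) | LOCKLEVEL;
            return old;
    }

    int main(void)
    {
            unsigned saved;

            ps = 0;                 /* all interrupt levels enabled */
            saved = rsil_locklevel();
            /* <code>: IRQs at level <= LOCKLEVEL are masked here */
            ps = saved;             /* models: wsr a15, PS; rsync */
            printf("PS.INTLEVEL back to %u\n", ps & PS_INTLEVEL_MASK);
            return 0;
    }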
* License. See the file "COPYING" in the main directory of this archive
* for more details.
*
- * Copyright (C) 2001 - 2005 Tensilica Inc.
+ * Copyright (C) 2001 - 2008 Tensilica Inc.
*/
#ifndef _XTENSA_PROCESSOR_H
/* LOCKLEVEL defines the interrupt level that masks all
* general-purpose interrupts.
*/
-#define LOCKLEVEL 1
+#define LOCKLEVEL XCHAL_EXCM_LEVEL
/* WSBITS and WBBITS are the width of the WINDOWSTART and WINDOWBASE
* registers
#define PS_UM_BIT 5
#define PS_EXCM_BIT 4
#define PS_INTLEVEL_SHIFT 0
+#define PS_INTLEVEL_WIDTH 4
#define PS_INTLEVEL_MASK 0x0000000F
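
PS_INTLEVEL_WIDTH exists so that C code can extract the field the same
way the extui instruction does. A compilable sketch using the values
defined above:

    #define PS_INTLEVEL_SHIFT 0
    #define PS_INTLEVEL_WIDTH 4

    /* equivalent of: extui aX, aPS, PS_INTLEVEL_SHIFT, PS_INTLEVEL_WIDTH */
    static inline unsigned ps_intlevel(unsigned ps)
    {
            return (ps >> PS_INTLEVEL_SHIFT) &
                    ((1u << PS_INTLEVEL_WIDTH) - 1);
    }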
/* DBREAKCn register fields. */
* License. See the file "COPYING" in the main directory of this archive
* for more details.
*
- * Copyright (C) 2001 - 2005 Tensilica Inc.
+ * Copyright (C) 2001 - 2008 Tensilica Inc.
*/
#ifndef _XTENSA_TIMEX_H
#define _INTLEVEL(x) XCHAL_INT ## x ## _LEVEL
#define INTLEVEL(x) _INTLEVEL(x)
-#if INTLEVEL(XCHAL_TIMER0_INTERRUPT) == 1
+#if INTLEVEL(XCHAL_TIMER0_INTERRUPT) <= XCHAL_EXCM_LEVEL
# define LINUX_TIMER 0
# define LINUX_TIMER_INT XCHAL_TIMER0_INTERRUPT
-#elif INTLEVEL(XCHAL_TIMER1_INTERRUPT) == 1
+#elif INTLEVEL(XCHAL_TIMER1_INTERRUPT) <= XCHAL_EXCM_LEVEL
# define LINUX_TIMER 1
# define LINUX_TIMER_INT XCHAL_TIMER1_INTERRUPT
-#elif INTLEVEL(XCHAL_TIMER2_INTERRUPT) == 1
+#elif INTLEVEL(XCHAL_TIMER2_INTERRUPT) <= XCHAL_EXCM_LEVEL
# define LINUX_TIMER 2
# define LINUX_TIMER_INT XCHAL_TIMER2_INTERRUPT
#else
* License. See the file "COPYING" in the main directory of this archive
* for more details.
*
- * Copyright (C) 2004-2007 by Tensilica Inc.
+ * Copyright (C) 2004 - 2008 by Tensilica Inc.
*
* Chris Zankel <chris@zankel.net>
*
* so we can allow exceptions and interrupts (*) again.
* Set PS(EXCM = 0, UM = 0, RING = 0, OWB = 0, WOE = 1, INTLEVEL = X)
*
- * (*) We only allow interrupts if PS.INTLEVEL was not set to 1 before
- * (interrupts disabled) and if this exception is not an interrupt.
+ * (*) We only allow interrupts of a higher priority than the current IRQ
*/
rsr a3, ps
addi a0, a0, -4
movi a2, 1
- extui a3, a3, 0, 1 # a3 = PS.INTLEVEL[0]
- moveqz a3, a2, a0 # a3 = 1 iff interrupt exception
+ extui a3, a3, PS_INTLEVEL_SHIFT, PS_INTLEVEL_WIDTH
+ # a3 = PS.INTLEVEL
+ movnez a2, a3, a3 # a2 = 1: level-1, > 1: high priority
+ moveqz a3, a2, a0 # a3 = IRQ level iff interrupt
movi a2, 1 << PS_WOE_BIT
or a3, a3, a2
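
A hedged C model of the INTLEVEL value this sequence computes. The
a0 == 0 test corresponds to EXCCAUSE == 4 (level-1 interrupt) after the
preceding addi, and PS_WOE_BIT is assumed to be 18 as in the standard
Xtensa PS layout:

    #define PS_WOE_BIT        18
    #define LEVEL1_INTERRUPT   4    /* EXCCAUSE of a level-1 IRQ */

    /* new PS: WOE set, EXCM/UM/RING/OWB clear, INTLEVEL = IRQ level
     * while handling an interrupt (at least 1), or the pre-exception
     * INTLEVEL for any other exception */
    static unsigned new_ps(unsigned old_ps, unsigned exccause)
    {
            unsigned level = old_ps & 0xF;          /* PS.INTLEVEL */

            if (exccause == LEVEL1_INTERRUPT && level == 0)
                    level = 1;
            return (1u << PS_WOE_BIT) | level;
    }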
rsr a0, exccause
l32i a0, a1, PT_DEPC
l32i a3, a1, PT_AREG3
+ _bltui a0, VALID_DOUBLE_EXCEPTION_ADDRESS, 1f
+
+ wsr a0, depc
l32i a2, a1, PT_AREG2
- _bgeui a0, VALID_DOUBLE_EXCEPTION_ADDRESS, 1f
+ l32i a0, a1, PT_AREG0
+ l32i a1, a1, PT_AREG1
+ rfde
+1:
/* Restore a0...a3 and return */
+ rsr a0, ps
+ extui a2, a0, PS_INTLEVEL_SHIFT, PS_INTLEVEL_WIDTH
+ movi a0, 2f
+ slli a2, a2, 4
+ add a0, a2, a0
+ l32i a2, a1, PT_AREG2
+ jx a0
+
+ .macro irq_exit_level level
+ .align 16
+ .if XCHAL_EXCM_LEVEL >= \level
+ l32i a0, a1, PT_PC
+ wsr a0, epc\level
l32i a0, a1, PT_AREG0
l32i a1, a1, PT_AREG1
- rfe
+ rfi \level
+ .endif
+ .endm
-1: wsr a0, depc
+ .align 16
+2:
l32i a0, a1, PT_AREG0
l32i a1, a1, PT_AREG1
- rfde
+ rfe
+
+ .align 16
+ /* no rfi for level-1 irq, handled by rfe above */
+ nop
+
+ irq_exit_level 2
+ irq_exit_level 3
+ irq_exit_level 4
+ irq_exit_level 5
+ irq_exit_level 6
ENDPROC(kernel_exception)
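
To summarize the exit path: the jx above lands in a table of
16-byte-aligned stubs indexed by the current PS.INTLEVEL. Slot 0 ends
in rfe, slot 1 is only an alignment placeholder (level-1 IRQs return
through the rfe, per the comment above), and slot N >= 2 ends in
rfi N. A sketch of the address arithmetic, with a hypothetical
exit_slots standing in for the local label 2:

    extern const char exit_slots[];  /* stands in for label "2:" */

    /* models: movi a0, 2f; slli a2, a2, 4; add a0, a2, a0; jx a0 */
    static const void *exit_stub(unsigned ps_intlevel)
    {
            return exit_slots + (ps_intlevel << 4);
    }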
wsr a1, windowbase
rsync
- movi a1, (1 << PS_WOE_BIT) | 1
+ movi a1, (1 << PS_WOE_BIT) | LOCKLEVEL
wsr a1, ps
rsync
l32i a1, a3, EXC_TABLE_KSTK
wsr a3, excsave1
- movi a4, (1 << PS_WOE_BIT) | 1
+ movi a4, (1 << PS_WOE_BIT) | LOCKLEVEL
wsr a4, ps
rsync
* License. See the file "COPYING" in the main directory of this archive
* for more details.
*
- * Copyright (C) 2001 - 2005 Tensilica Inc.
+ * Copyright (C) 2001 - 2008 Tensilica Inc.
*
* Chris Zankel <chris@zankel.net>
* Marc Gauthier <marc@tensilica.com, marc@alumni.uwaterloo.ca>
wsr a0, cpenable
#endif
- /* Set PS.INTLEVEL=1, PS.WOE=0, kernel stack, PS.EXCM=0
+ /* Set PS.INTLEVEL=LOCKLEVEL, PS.WOE=0, kernel stack, PS.EXCM=0
*
* Note: PS.EXCM must be cleared before using any loop
* instructions; otherwise, they are silently disabled, and
* at most one iteration of the loop is executed.
*/
- movi a1, 1
+ movi a1, LOCKLEVEL
wsr a1, ps
rsync
movi a1, init_thread_union
addi a1, a1, KERNEL_STACK_SIZE
- movi a2, 0x00040001 # WOE=1, INTLEVEL=1, UM=0
+ movi a2, (1 << PS_WOE_BIT) | LOCKLEVEL
+ # WOE=1, INTLEVEL=LOCKLEVEL, UM=0
wsr a2, ps # (enable reg-windows; progmode stack)
rsync
extern char _UserExceptionVector_text_end;
extern char _DoubleExceptionVector_literal_start;
extern char _DoubleExceptionVector_text_end;
+#if XCHAL_EXCM_LEVEL >= 2
+extern char _Level2InterruptVector_text_start;
+extern char _Level2InterruptVector_text_end;
+#endif
+#if XCHAL_EXCM_LEVEL >= 3
+extern char _Level3InterruptVector_text_start;
+extern char _Level3InterruptVector_text_end;
+#endif
+#if XCHAL_EXCM_LEVEL >= 4
+extern char _Level4InterruptVector_text_start;
+extern char _Level4InterruptVector_text_end;
+#endif
+#if XCHAL_EXCM_LEVEL >= 5
+extern char _Level5InterruptVector_text_start;
+extern char _Level5InterruptVector_text_end;
+#endif
+#if XCHAL_EXCM_LEVEL >= 6
+extern char _Level6InterruptVector_text_start;
+extern char _Level6InterruptVector_text_end;
+#endif
+
#ifdef CONFIG_S32C1I_SELFTEST
mem_reserve(__pa(&_DoubleExceptionVector_literal_start),
__pa(&_DoubleExceptionVector_text_end), 0);
+#if XCHAL_EXCM_LEVEL >= 2
+ mem_reserve(__pa(&_Level2InterruptVector_text_start),
+ __pa(&_Level2InterruptVector_text_end), 0);
+#endif
+#if XCHAL_EXCM_LEVEL >= 3
+ mem_reserve(__pa(&_Level3InterruptVector_text_start),
+ __pa(&_Level3InterruptVector_text_end), 0);
+#endif
+#if XCHAL_EXCM_LEVEL >= 4
+ mem_reserve(__pa(&_Level4InterruptVector_text_start),
+ __pa(&_Level4InterruptVector_text_end), 0);
+#endif
+#if XCHAL_EXCM_LEVEL >= 5
+ mem_reserve(__pa(&_Level5InterruptVector_text_start),
+ __pa(&_Level5InterruptVector_text_end), 0);
+#endif
+#if XCHAL_EXCM_LEVEL >= 6
+ mem_reserve(__pa(&_Level6InterruptVector_text_start),
+ __pa(&_Level6InterruptVector_text_end), 0);
+#endif
+
bootmem_init();
#ifdef CONFIG_OF
}
/*
- * Level-1 interrupt.
- * We currently have no priority encoding.
+ * IRQ handler.
+ * PS.INTLEVEL is the current IRQ priority level.
*/
-unsigned long ignored_level1_interrupts;
extern void do_IRQ(int, struct pt_regs *);
-void do_interrupt (struct pt_regs *regs)
+void do_interrupt(struct pt_regs *regs)
{
- unsigned long intread = get_sr (interrupt);
- unsigned long intenable = get_sr (intenable);
- int i, mask;
-
- /* Handle all interrupts (no priorities).
- * (Clear the interrupt before processing, in case it's
- * edge-triggered or software-generated)
- */
+ static const unsigned int_level_mask[] = {
+ 0,
+ XCHAL_INTLEVEL1_MASK,
+ XCHAL_INTLEVEL2_MASK,
+ XCHAL_INTLEVEL3_MASK,
+ XCHAL_INTLEVEL4_MASK,
+ XCHAL_INTLEVEL5_MASK,
+ XCHAL_INTLEVEL6_MASK,
+ XCHAL_INTLEVEL7_MASK,
+ };
+ unsigned level = get_sr(ps) & PS_INTLEVEL_MASK;
+
+ if (WARN_ON_ONCE(level >= ARRAY_SIZE(int_level_mask)))
+ return;
- for (i=0, mask = 1; i < XCHAL_NUM_INTERRUPTS; i++, mask <<= 1) {
- if (mask & (intread & intenable)) {
- set_sr (mask, intclear);
- do_IRQ (i,regs);
+ for (;;) {
+ unsigned intread = get_sr(interrupt);
+ unsigned intenable = get_sr(intenable);
+ unsigned int_at_level = intread & intenable &
+ int_level_mask[level];
+
+ if (!int_at_level)
+ return;
+
+ /*
+ * Clear the interrupt before processing, in case it's
+ * edge-triggered or software-generated
+ */
+ while (int_at_level) {
+ unsigned i = __ffs(int_at_level);
+ unsigned mask = 1 << i;
+
+ int_at_level ^= mask;
+ set_sr(mask, intclear);
+ do_IRQ(i, regs);
}
}
}
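
The inner while loop is the usual lowest-bit-first walk over a
pending-interrupt bitmask. A self-contained host C sketch of the same
idiom, with __builtin_ctz() standing in for the kernel's __ffs():

    #include <stdio.h>

    int main(void)
    {
            unsigned pending = 0x29;        /* IRQs 0, 3 and 5 */

            while (pending) {
                    unsigned i = (unsigned)__builtin_ctz(pending);

                    pending ^= 1u << i;     /* clear before handling */
                    printf("dispatch IRQ %u\n", i);
            }
            return 0;
    }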
unsigned int a0, ps;
__asm__ __volatile__ (
- "movi a14, " __stringify(PS_EXCM_BIT | 1) "\n\t"
+ "movi a14, " __stringify(PS_EXCM_BIT | LOCKLEVEL) "\n\t"
"mov a12, a0\n\t"
"rsr a13, sar\n\t"
"xsr a14, ps\n\t"
* Public License. See the file "COPYING" in the main directory of
* this archive for more details.
*
- * Copyright (C) 2005 Tensilica, Inc.
+ * Copyright (C) 2005 - 2008 Tensilica, Inc.
*
* Chris Zankel <chris@zankel.net>
*
ENDPROC(_DebugInterruptVector)
+
+/*
+ * Medium priority level interrupt vectors
+ *
+ * Each vector takes less than 16 (0x10) bytes and uses no literals;
+ * the extra 8 bytes of code it would otherwise need are placed in
+ * the window vectors area, where there is space. With relocatable
+ * vectors, all vectors lie within a ~4 kB range of each other, so we
+ * can simply jump (J) to another vector without having to use JX.
+ *
+ * The common_exception code receives the current IRQ level in
+ * PS.INTLEVEL and preserves it for the duration of IRQ handling.
+ */
+
+ .macro irq_entry_level level
+
+ .if XCHAL_EXCM_LEVEL >= \level
+ .section .Level\level\()InterruptVector.text, "ax"
+ENTRY(_Level\level\()InterruptVector)
+ wsr a0, epc1
+ rsr a0, epc\level
+ xsr a0, epc1
+ # branch to user or kernel vector
+ j _SimulateUserKernelVectorException
+ .endif
+
+ .endm
+
+ irq_entry_level 2
+ irq_entry_level 3
+ irq_entry_level 4
+ irq_entry_level 5
+ irq_entry_level 6
+
+
/* Window overflow and underflow handlers.
* The handlers must be 64 bytes apart, first starting with the underflow
* handlers underflow-4 to underflow-12, then the overflow handlers
ENDPROC(_WindowOverflow4)
+#if XCHAL_EXCM_LEVEL >= 2
+ /* Not a window vector, but a convenient location (where we know
+ * there's space) for the continuation of the medium-priority
+ * interrupt dispatch code.
+ * On entry here, a0 has already been restored to the interrupted
+ * code's a0 by the xsr in irq_entry_level, and EPC1 holds the
+ * return PC:
+ */
+ .align 4
+_SimulateUserKernelVectorException:
+ wsr a0, excsave2
+ movi a0, 4 # LEVEL1_INTERRUPT cause
+ wsr a0, exccause
+ rsr a0, ps
+ bbsi.l a0, PS_UM_BIT, 1f # branch if user mode
+ rsr a0, excsave2 # restore a0
+ j _KernelExceptionVector # simulate kernel vector exception
+1: rsr a0, excsave2 # restore a0
+ j _UserExceptionVector # simulate user vector exception
+#endif
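
The only decision taken in _SimulateUserKernelVectorException is on
PS.UM. A trivial model of the bbsi.l test, using PS_UM_BIT from
processor.h:

    #define PS_UM_BIT 5

    /* models: bbsi.l a0, PS_UM_BIT, 1f */
    static int take_user_vector(unsigned ps)
    {
            /* 1: _UserExceptionVector, 0: _KernelExceptionVector */
            return (ps >> PS_UM_BIT) & 1;
    }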
+
+
/* 4-Register Window Underflow Vector (Handler) */
ENTRY_ALIGN64(_WindowUnderflow4)
* License. See the file "COPYING" in the main directory of this archive
* for more details.
*
- * Copyright (C) 2001 - 2005 Tensilica Inc.
+ * Copyright (C) 2001 - 2008 Tensilica Inc.
*
* Chris Zankel <chris@zankel.net>
* Marc Gauthier <marc@tensilica.com, marc@alumni.uwaterloo.ca>
RELOCATE_ENTRY(_WindowVectors_text,
.WindowVectors.text);
+#if XCHAL_EXCM_LEVEL >= 2
+ RELOCATE_ENTRY(_Level2InterruptVector_text,
+ .Level2InterruptVector.text);
+#endif
+#if XCHAL_EXCM_LEVEL >= 3
+ RELOCATE_ENTRY(_Level3InterruptVector_text,
+ .Level3InterruptVector.text);
+#endif
+#if XCHAL_EXCM_LEVEL >= 4
+ RELOCATE_ENTRY(_Level4InterruptVector_text,
+ .Level4InterruptVector.text);
+#endif
+#if XCHAL_EXCM_LEVEL >= 5
+ RELOCATE_ENTRY(_Level5InterruptVector_text,
+ .Level5InterruptVector.text);
+#endif
+#if XCHAL_EXCM_LEVEL >= 6
+ RELOCATE_ENTRY(_Level6InterruptVector_text,
+ .Level6InterruptVector.text);
+#endif
RELOCATE_ENTRY(_KernelExceptionVector_text,
.KernelExceptionVector.text);
RELOCATE_ENTRY(_UserExceptionVector_text,
XCHAL_DEBUG_VECTOR_VADDR,
4,
.DebugInterruptVector.literal)
+#undef LAST
+#define LAST .DebugInterruptVector.text
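+/* LAST tracks the most recently placed vector section, so each
+ * conditionally present SECTION_VECTOR below can anchor itself to
+ * whichever section actually precedes it. */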
+#if XCHAL_EXCM_LEVEL >= 2
+ SECTION_VECTOR (_Level2InterruptVector_text,
+ .Level2InterruptVector.text,
+ XCHAL_INTLEVEL2_VECTOR_VADDR,
+ SIZEOF(LAST), LAST)
+# undef LAST
+# define LAST .Level2InterruptVector.text
+#endif
+#if XCHAL_EXCM_LEVEL >= 3
+ SECTION_VECTOR (_Level3InterruptVector_text,
+ .Level3InterruptVector.text,
+ XCHAL_INTLEVEL3_VECTOR_VADDR,
+ SIZEOF(LAST), LAST)
+# undef LAST
+# define LAST .Level3InterruptVector.text
+#endif
+#if XCHAL_EXCM_LEVEL >= 4
+ SECTION_VECTOR (_Level4InterruptVector_text,
+ .Level4InterruptVector.text,
+ XCHAL_INTLEVEL4_VECTOR_VADDR,
+ SIZEOF(LAST), LAST)
+# undef LAST
+# define LAST .Level4InterruptVector.text
+#endif
+#if XCHAL_EXCM_LEVEL >= 5
+ SECTION_VECTOR (_Level5InterruptVector_text,
+ .Level5InterruptVector.text,
+ XCHAL_INTLEVEL5_VECTOR_VADDR,
+ SIZEOF(LAST), LAST)
+# undef LAST
+# define LAST .Level5InterruptVector.text
+#endif
+#if XCHAL_EXCM_LEVEL >= 6
+ SECTION_VECTOR (_Level6InterruptVector_text,
+ .Level6InterruptVector.text,
+ XCHAL_INTLEVEL6_VECTOR_VADDR,
+ SIZEOF(LAST), LAST)
+# undef LAST
+# define LAST .Level6InterruptVector.text
+#endif
SECTION_VECTOR (_KernelExceptionVector_literal,
.KernelExceptionVector.literal,
XCHAL_KERNEL_VECTOR_VADDR - 4,
- SIZEOF(.DebugInterruptVector.text),
- .DebugInterruptVector.text)
+ SIZEOF(LAST), LAST)
+#undef LAST
SECTION_VECTOR (_KernelExceptionVector_text,
.KernelExceptionVector.text,
XCHAL_KERNEL_VECTOR_VADDR,