*
* Chris Dearman (chris@mips.com)
* Copyright (C) 2007 Mips Technologies, Inc.
+ * Copyright (C) 2014 Imagination Technologies Ltd.
*/
#ifndef __ASM_MACH_MIPS_KERNEL_ENTRY_INIT_H
#define __ASM_MACH_MIPS_KERNEL_ENTRY_INIT_H
+ /*
+ * Prepare segments for EVA boot:
+ *
+ * This is needed in case the processor boots in the legacy (non-EVA)
+ * configuration, i.e. SI_EVAReset is de-asserted and Config5.K == 0
+ *
+ * On entry, t1 is loaded with CP0_CONFIG
+ *
+ * ========================= Mappings =============================
+ * Virtual memory Physical memory Mapping
+ * 0x00000000 - 0x7fffffff 0x80000000 - 0xffffffff MUSUK (kuseg)
+ * Flat 2GB physical memory
+ *
+ * 0x80000000 - 0x9fffffff 0x00000000 - 0x1fffffff MUSUK (kseg0)
+ * 0xa0000000 - 0xbfffffff 0x00000000 - 0x1fffffff MUSUK (kseg1)
+ * 0xc0000000 - 0xdfffffff - MK (kseg2)
+ * 0xe0000000 - 0xffffffff - MK (kseg3)
+ *
+ *
+ * Lowmem is expanded to 2GB
+ */
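+ /*
+ * Worked example (illustrative, assuming the standard segmentation
+ * control field layout of AM at bit 4, EU at bit 3, PA at bit 9 and C
+ * at bit 0, with a boot-time Config.K0 CCA of 3, cacheable): the macro
+ * below would program roughly SegCtl0 = 0x001b001b (kseg2/kseg3 MK),
+ * SegCtl1 = 0x004b004a (kseg0 cached CCA 3, kseg1 uncached) and
+ * SegCtl2 = 0x084b0c4b (kuseg onto physical 0x80000000 and up)
+ */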
+ .macro eva_entry
+ /*
+ * Get Config.K0 value and use it to program
+ * the segmentation registers
+ */
+ andi t1, 0x7 /* Config.K0 CCA */
+ move t2, t1
+ ins t2, t1, 16, 3
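+ /*
+ * t2 now holds the CCA in bits 2:0 and 18:16, lining up with the C
+ * fields of both 16-bit halves of a segmentation control register
+ */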
+ /* SegCtl0: kseg2 and kseg3 mapped kernel (MK) */
+ li t0, ((MIPS_SEGCFG_MK << MIPS_SEGCFG_AM_SHIFT) | \
+ (0 << MIPS_SEGCFG_PA_SHIFT) | \
+ (1 << MIPS_SEGCFG_EU_SHIFT)) | \
+ (((MIPS_SEGCFG_MK << MIPS_SEGCFG_AM_SHIFT) | \
+ (0 << MIPS_SEGCFG_PA_SHIFT) | \
+ (1 << MIPS_SEGCFG_EU_SHIFT)) << 16)
+ or t0, t2
+ mtc0 t0, $5, 2
+
+ /* SegCtl1: kseg0 (cached, CCA from Config.K0) and kseg1 (uncached), both MUSUK */
+ li t0, ((MIPS_SEGCFG_MUSUK << MIPS_SEGCFG_AM_SHIFT) | \
+ (0 << MIPS_SEGCFG_PA_SHIFT) | \
+ (2 << MIPS_SEGCFG_C_SHIFT) | \
+ (1 << MIPS_SEGCFG_EU_SHIFT)) | \
+ (((MIPS_SEGCFG_MUSUK << MIPS_SEGCFG_AM_SHIFT) | \
+ (0 << MIPS_SEGCFG_PA_SHIFT) | \
+ (1 << MIPS_SEGCFG_EU_SHIFT)) << 16)
+ ins t0, t1, 16, 3
+ mtc0 t0, $5, 3
+
+ /* SegCtl2: kuseg as MUSUK, mapped flat onto the upper 2GB of physical memory */
+ li t0, ((MIPS_SEGCFG_MUSUK << MIPS_SEGCFG_AM_SHIFT) | \
+ (6 << MIPS_SEGCFG_PA_SHIFT) | \
+ (1 << MIPS_SEGCFG_EU_SHIFT)) | \
+ (((MIPS_SEGCFG_MUSUK << MIPS_SEGCFG_AM_SHIFT) | \
+ (4 << MIPS_SEGCFG_PA_SHIFT) | \
+ (1 << MIPS_SEGCFG_EU_SHIFT)) << 16)
+ or t0, t2
+ mtc0 t0, $5, 4
+
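+ /*
+ * Clear instruction hazards, then set Config5.K so that cacheability
+ * attributes come from the segmentation control registers instead of
+ * Config.K0
+ */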
+ jal mips_ihb
+ mfc0 t0, $16, 5
+ li t2, 0x40000000 /* Config5.K bit */
+ or t0, t0, t2
+ mtc0 t0, $16, 5
+ sync
+ jal mips_ihb
+ .endm
+
.macro kernel_entry_setup
#ifdef CONFIG_MIPS_MT_SMTC
mfc0 t0, CP0_CONFIG
nonmt_processor:
.asciz "SMTC kernel requires the MT ASE to run\n"
__FINIT
-0:
#endif
+
+#ifdef CONFIG_EVA
+ sync
+ ehb
+
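+ /*
+ * Walk the Config register chain (bit 31 is the M bit indicating that
+ * the next Config register is present) and check Config3.SC to verify
+ * that segmentation control is implemented, otherwise bail out to the
+ * error path at 9f
+ */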
+ mfc0 t1, CP0_CONFIG
+ bgez t1, 9f
+ mfc0 t0, CP0_CONFIG, 1
+ bgez t0, 9f
+ mfc0 t0, CP0_CONFIG, 2
+ bgez t0, 9f
+ mfc0 t0, CP0_CONFIG, 3
+ sll t0, t0, 6 /* Config3.SC (bit 25) to bit 31 */
+ bgez t0, 9f
+
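+ /* Segment control is present: switch to the EVA memory layout */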
+ eva_entry
+ b 0f
+9:
+ /* Assume we came from YAMON... */
+ PTR_LA v0, 0x9fc00534 /* YAMON print */
+ lw v0, (v0)
+ move a0, zero
+ PTR_LA a1, nonsc_processor
+ jal v0
+
+ PTR_LA v0, 0x9fc00520 /* YAMON exit */
+ lw v0, (v0)
+ li a0, 1
+ jal v0
+
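+ /* Spin here if the YAMON exit call ever returns */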
+1: b 1b
+ nop
+ __INITDATA
+nonsc_processor:
+ .asciz "EVA kernel requires a MIPS core with Segment Control implemented\n"
+ __FINIT
+#endif /* CONFIG_EVA */
+0:
.endm
/*