Commit | Line | Data |
---|---|---|
2a4aca11 BH |
1 | /* |
2 | * This file contains low-level functions for performing various | |
3 | * types of TLB invalidations on various processors with no hash | |
4 | * table. | |
5 | * | |
6 | * This file implements the following functions for all no-hash | |
7 | * processors. Not all of them are implemented for every variant, and | |
8 | * some are inlined in tlbflush.h | |
9 | * | |
10 | * - tlbil_va | |
11 | * - tlbil_pid | |
12 | * - tlbil_all | |
e7f75ad0 | 13 | * - tlbivax_bcast |
2a4aca11 BH |
14 | * |
15 | * Code mostly moved over from misc_32.S | |
16 | * | |
17 | * Copyright (C) 1995-1996 Gary Thomas (gdt@linuxppc.org) | |
18 | * | |
19 | * Partially rewritten by Cort Dougan (cort@cs.nmt.edu) | |
20 | * Paul Mackerras, Kumar Gala and Benjamin Herrenschmidt. | |
21 | * | |
22 | * This program is free software; you can redistribute it and/or | |
23 | * modify it under the terms of the GNU General Public License | |
24 | * as published by the Free Software Foundation; either version | |
25 | * 2 of the License, or (at your option) any later version. | |
26 | * | |
27 | */ | |
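As a reading aid, here is roughly how these entry points look from C. This is a hedged reconstruction from the register usage below (r3 carries the first argument, r4 the second, and so on); the authoritative declarations live in the tlbflush.h headers and differ between variants and kernel versions:

    extern void _tlbil_all(void);
    extern void _tlbil_pid(unsigned int pid);
    extern void __tlbil_va(unsigned long address, unsigned int pid);
    /* the Book3E variant also takes a page size and an indirect-entry flag */
    extern void _tlbil_va(unsigned long address, unsigned int pid,
                          unsigned int tsize, unsigned int ind);
    extern void _tlbivax_bcast(unsigned long address, unsigned int pid,
                               unsigned int tsize, unsigned int ind);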
28 | ||
29 | #include <asm/reg.h> | |
30 | #include <asm/page.h> | |
31 | #include <asm/cputable.h> | |
32 | #include <asm/mmu.h> | |
33 | #include <asm/ppc_asm.h> | |
34 | #include <asm/asm-offsets.h> | |
35 | #include <asm/processor.h> | |
e7f75ad0 | 36 | #include <asm/bug.h> |
2a4aca11 BH |
37 | |
38 | #if defined(CONFIG_40x) | |
39 | ||
40 | /* | |
41 | * 40x implementation needs only tlbil_va | |
42 | */ | |
d4e167da | 43 | _GLOBAL(__tlbil_va) |
2a4aca11 BH |
44 | /* We run the search with interrupts disabled because we have to change |
45 | * the PID and we don't want to be preempted when that happens. | |
46 | */ | |
47 | mfmsr r5 | |
48 | mfspr r6,SPRN_PID | |
49 | wrteei 0 | |
50 | mtspr SPRN_PID,r4 | |
51 | tlbsx. r3, 0, r3 | |
52 | mtspr SPRN_PID,r6 | |
53 | wrtee r5 | |
54 | bne 1f | |
55 | sync | |
56 | /* There are only 64 TLB entries, so r3 < 64, which means bit 25 is | |
57 | * clear. Since 25 is the V bit in the TLB_TAG, loading this value | |
58 | * will invalidate the TLB entry. */ | |
59 | tlbwe r3, r3, TLB_TAG | |
60 | isync | |
61 | 1: blr | |
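The invalidation trick above is pure bit arithmetic: the index returned by tlbsx. is at most 63, so the value written back to TLB_TAG necessarily has the V bit clear. A minimal C illustration of the invariant (TLB_TAG_V is an illustrative name; bit 25 in IBM numbering of a 32-bit word is mask 1 << (31 - 25)):

    #define TLB_TAG_V (1u << (31 - 25))    /* IBM bit 25 == 0x40 */

    unsigned int tag = index;              /* tlbsx. result, 0..63 */
    /* index < 64 implies (tag & TLB_TAG_V) == 0, so writing 'tag'
     * back via tlbwe stores an entry whose valid bit is clear */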
62 | ||
63 | #elif defined(CONFIG_8xx) | |
64 | ||
65 | /* | |
66 | * Nothing to do for 8xx, everything is inline | |
67 | */ | |
68 | ||
e7f75ad0 | 69 | #elif defined(CONFIG_44x) /* Includes 47x */ |
2a4aca11 BH |
70 | |
71 | /* | |
72 | * 440 implementation uses tlbsx/we for tlbil_va and a full sweep | |
73 | * of the TLB for everything else. | |
74 | */ | |
d4e167da | 75 | _GLOBAL(__tlbil_va) |
2a4aca11 | 76 | mfspr r5,SPRN_MMUCR |
e7f75ad0 DK |
77 | mfmsr r10 |
78 | ||
79 | /* | |
80 | * We write 16 bits of STID since 47x supports that much; we |
81 | * will never be passed out-of-bounds values on 440 (hopefully) |
82 | */ | |
83 | rlwimi r5,r4,0,16,31 | |
2a4aca11 | 84 | |
760ec0e0 BH |
85 | /* We have to run the search with interrupts disabled, otherwise |
86 | * an interrupt which causes a TLB miss can clobber the MMUCR | |
87 | * between the mtspr and the tlbsx. | |
88 | * | |
89 | * Critical and Machine Check interrupts take care of saving |
90 | * and restoring MMUCR, so only normal interrupts need to be |
91 | * handled here. |
92 | */ | |
760ec0e0 | 93 | wrteei 0 |
2a4aca11 | 94 | mtspr SPRN_MMUCR,r5 |
e7f75ad0 DK |
95 | tlbsx. r6,0,r3 |
96 | bne 10f | |
2a4aca11 | 97 | sync |
e7f75ad0 DK |
98 | BEGIN_MMU_FTR_SECTION |
99 | b 2f | |
100 | END_MMU_FTR_SECTION_IFSET(MMU_FTR_TYPE_47x) | |
101 | /* On 440 there are only 64 TLB entries, so r6 < 64, which means bit | |
102 | * 22 is clear. Since 22 is the V bit in the TLB_PAGEID, loading this | |
2a4aca11 BH |
103 | * value will invalidate the TLB entry. |
104 | */ | |
e7f75ad0 | 105 | tlbwe r6,r6,PPC44x_TLB_PAGEID |
2a4aca11 | 106 | isync |
e7f75ad0 DK |
107 | 10: wrtee r10 |
108 | blr | |
109 | 2: | |
110 | #ifdef CONFIG_PPC_47x | |
111 | oris r7,r6,0x8000 /* specify way explicitly */ | |
112 | clrrwi r4,r3,12 /* get an EPN for the hashing with V = 0 */ | |
113 | ori r4,r4,PPC47x_TLBE_SIZE | |
114 | tlbwe r4,r7,0 /* write it */ | |
115 | isync | |
116 | wrtee r10 | |
117 | blr | |
118 | #else /* CONFIG_PPC_47x */ | |
119 | 1: trap | |
120 | EMIT_BUG_ENTRY 1b,__FILE__,__LINE__,0; | |
121 | #endif /* !CONFIG_PPC_47x */ | |
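The MMUCR update at the top of __tlbil_va is a single rotate-and-insert: rlwimi r5,r4,0,16,31 merges the low 16 bits of the PID/STID into the low half of MMUCR. As a hedged C sketch (mmucr and pid standing in for r5 and r4):

    mmucr = (mmucr & 0xffff0000u) | (pid & 0x0000ffffu);

On 440 only the low bits carry a PID, but 47x uses the full 16-bit STID field, which is why all 16 bits are inserted here.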
2a4aca11 BH |
122 | |
123 | _GLOBAL(_tlbil_all) | |
124 | _GLOBAL(_tlbil_pid) | |
e7f75ad0 DK |
125 | BEGIN_MMU_FTR_SECTION |
126 | b 2f | |
127 | END_MMU_FTR_SECTION_IFSET(MMU_FTR_TYPE_47x) | |
2a4aca11 BH |
128 | li r3,0 |
129 | sync | |
130 | ||
131 | /* Load high watermark */ | |
132 | lis r4,tlb_44x_hwater@ha | |
133 | lwz r5,tlb_44x_hwater@l(r4) | |
134 | ||
135 | 1: tlbwe r3,r3,PPC44x_TLB_PAGEID | |
136 | addi r3,r3,1 | |
137 | cmpw 0,r3,r5 | |
138 | ble 1b | |
139 | ||
140 | isync | |
141 | blr | |
e7f75ad0 DK |
142 | 2: |
143 | #ifdef CONFIG_PPC_47x | |
144 | /* 476 variant. There's no simple way to do this, so hopefully we can | |
145 | * limit the number of such full invalidates | |
146 | */ | |
147 | mfmsr r11 /* Interrupts off */ | |
148 | wrteei 0 | |
149 | li r3,-1 /* Current set */ | |
150 | lis r10,tlb_47x_boltmap@h | |
151 | ori r10,r10,tlb_47x_boltmap@l | |
152 | lis r7,0x8000 /* Specify way explicitly */ | |
153 | ||
154 | b 9f /* For each set */ | |
155 | ||
156 | 1: li r9,4 /* Number of ways */ | |
157 | li r4,0 /* Current way */ | |
158 | li r6,0 /* Default entry value 0 */ | |
159 | andi. r0,r8,1 /* Check if way 0 is bolted */ | |
160 | mtctr r9 /* Load way counter */ | |
161 | bne- 3f /* Bolted, skip loading it */ | |
162 | ||
163 | 2: /* For each way */ | |
164 | or r5,r3,r4 /* Make way|index for tlbre */ | |
165 | rlwimi r5,r5,16,8,15 /* Copy index into position */ | |
166 | tlbre r6,r5,0 /* Read entry */ | |
167 | 3: addis r4,r4,0x2000 /* Next way */ | |
168 | andi. r0,r6,PPC47x_TLB0_VALID /* Valid entry ? */ | |
169 | beq 4f /* Nope, skip it */ | |
170 | rlwimi r7,r5,0,1,2 /* Insert way number */ | |
171 | rlwinm r6,r6,0,21,19 /* Clear V */ | |
172 | tlbwe r6,r7,0 /* Write it */ | |
173 | 4: bdnz 2b /* Loop for each way */ | |
174 | srwi r8,r8,1 /* Next boltmap bit */ | |
175 | 9: cmpwi cr1,r3,255 /* Last set done ? */ | |
176 | addi r3,r3,1 /* Next set */ | |
177 | beq cr1,1f /* End of loop */ | |
178 | andi. r0,r3,0x1f /* Need to load a new boltmap word ? */ | |
179 | bne 1b /* No, loop */ | |
180 | lwz r8,0(r10) /* Load boltmap entry */ | |
181 | addi r10,r10,4 /* Next word */ | |
182 | b 1b /* Then loop */ | |
183 | 1: isync /* Sync shadows */ | |
184 | wrtee r11 | |
185 | #else /* CONFIG_PPC_47x */ | |
186 | 1: trap | |
187 | EMIT_BUG_ENTRY 1b,__FILE__,__LINE__,0; | |
188 | #endif /* !CONFIG_PPC_47x */ | |
189 | blr | |
190 | ||
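Unrolled into C, the 47x sweep above walks 256 sets of 4 ways, consuming one bit of tlb_47x_boltmap per set (a set bit means way 0 of that set is bolted and must survive). A hedged sketch, where tlbre()/tlbwe() are hypothetical helpers wrapping the instructions of the same name:

    unsigned int boltword = 0;

    for (int set = 0; set < 256; set++) {
            if ((set & 0x1f) == 0)                  /* new word every 32 sets */
                    boltword = tlb_47x_boltmap[set >> 5];
            for (int way = 0; way < 4; way++) {
                    if (way == 0 && (boltword & 1))
                            continue;               /* bolted, leave it alone */
                    unsigned int entry = tlbre(set, way);
                    if (entry & PPC47x_TLB0_VALID)  /* clear V and write back */
                            tlbwe(set, way, entry & ~PPC47x_TLB0_VALID);
            }
            boltword >>= 1;                         /* next set's bolt bit */
    }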
191 | #ifdef CONFIG_PPC_47x | |
21a06b04 | 192 | |
e7f75ad0 DK |
193 | /* |
194 | * _tlbivax_bcast is only on 47x. We don't bother doing a runtime | |
195 | * check though; it will blow up soon enough if we mistakenly try |
196 | * to use it on a 440. | |
197 | */ | |
198 | _GLOBAL(_tlbivax_bcast) | |
199 | mfspr r5,SPRN_MMUCR | |
200 | mfmsr r10 | |
201 | rlwimi r5,r4,0,16,31 | |
202 | wrteei 0 | |
203 | mtspr SPRN_MMUCR,r5 | |
32412aa2 | 204 | isync |
1afc149d | 205 | PPC_TLBIVAX(0, R3) |
e7f75ad0 DK |
206 | isync |
207 | eieio | |
208 | tlbsync | |
21a06b04 DK |
209 | BEGIN_FTR_SECTION |
210 | b 1f | |
211 | END_FTR_SECTION_IFSET(CPU_FTR_476_DD2) | |
212 | sync | |
213 | wrtee r10 | |
214 | blr | |
215 | /* | |
216 | * DD2 HW could hang if an instruction fetch happens before msync completes. |
217 | * Touch enough instruction cache lines to ensure cache hits | |
218 | */ | |
219 | 1: mflr r9 | |
220 | bl 2f | |
221 | 2: mflr r6 | |
222 | li r7,32 | |
1afc149d | 223 | PPC_ICBT(0,R6,R7) /* touch next cache line */ |
21a06b04 | 224 | add r6,r6,r7 |
1afc149d | 225 | PPC_ICBT(0,R6,R7) /* touch next cache line */ |
21a06b04 | 226 | add r6,r6,r7 |
1afc149d | 227 | PPC_ICBT(0,R6,R7) /* touch next cache line */ |
e7f75ad0 | 228 | sync |
21a06b04 DK |
229 | nop |
230 | nop | |
231 | nop | |
232 | nop | |
233 | nop | |
234 | nop | |
235 | nop | |
236 | nop | |
237 | mtlr r9 | |
e7f75ad0 DK |
238 | wrtee r10 |
239 | blr | |
240 | #endif /* CONFIG_PPC_47x */ | |
2a4aca11 BH |
241 | |
242 | #elif defined(CONFIG_FSL_BOOKE) | |
243 | /* | |
c3071951 KG |
244 | * FSL BookE implementations. |
245 | * | |
246 | * Since the feature sections use _SECTION_ELSE, we need |
247 | * to have the larger code path before the _SECTION_ELSE |
2a4aca11 BH |
248 | */ |
249 | ||
250 | /* | |
251 | * Flush MMU TLB on the local processor | |
252 | */ | |
2a4aca11 | 253 | _GLOBAL(_tlbil_all) |
c3071951 KG |
254 | BEGIN_MMU_FTR_SECTION |
255 | li r3,(MMUCSR0_TLBFI)@l | |
256 | mtspr SPRN_MMUCSR0, r3 | |
257 | 1: | |
258 | mfspr r3,SPRN_MMUCSR0 | |
259 | andi. r3,r3,MMUCSR0_TLBFI@l | |
260 | bne 1b | |
261 | MMU_FTR_SECTION_ELSE | |
962cffbd | 262 | PPC_TLBILX_ALL(0,R0) |
c3071951 KG |
263 | ALT_MMU_FTR_SECTION_END_IFCLR(MMU_FTR_USE_TLBILX) |
264 | msync | |
265 | isync | |
266 | blr | |
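The MMUCSR0 leg of _tlbil_all (taken when MMU_FTR_USE_TLBILX is clear) is a flash invalidate: software sets the TLBFI bit and spins until the hardware clears it again. Roughly, in C, using the kernel's mtspr()/mfspr() accessors:

    mtspr(SPRN_MMUCSR0, MMUCSR0_TLBFI);          /* request flash invalidate */
    while (mfspr(SPRN_MMUCSR0) & MMUCSR0_TLBFI)
            ;                                    /* HW clears the bit when done */

Note that _tlbil_pid falls back to this very same sequence on non-TLBILX parts, since MMUCSR0 has no way to invalidate by PID.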
267 | ||
268 | _GLOBAL(_tlbil_pid) | |
269 | BEGIN_MMU_FTR_SECTION | |
270 | slwi r3,r3,16 | |
271 | mfmsr r10 | |
272 | wrteei 0 | |
273 | mfspr r4,SPRN_MAS6 /* save MAS6 */ | |
274 | mtspr SPRN_MAS6,r3 | |
962cffbd | 275 | PPC_TLBILX_PID(0,R0) |
c3071951 KG |
276 | mtspr SPRN_MAS6,r4 /* restore MAS6 */ |
277 | wrtee r10 | |
278 | MMU_FTR_SECTION_ELSE | |
2a4aca11 BH |
279 | li r3,(MMUCSR0_TLBFI)@l |
280 | mtspr SPRN_MMUCSR0, r3 | |
281 | 1: | |
282 | mfspr r3,SPRN_MMUCSR0 | |
283 | andi. r3,r3,MMUCSR0_TLBFI@l | |
284 | bne 1b | |
c3071951 | 285 | ALT_MMU_FTR_SECTION_END_IFSET(MMU_FTR_USE_TLBILX) |
2a4aca11 BH |
286 | msync |
287 | isync | |
288 | blr | |
289 | ||
290 | /* | |
291 | * Flush MMU TLB for a particular address, but only on the local processor | |
292 | * (no broadcast) | |
293 | */ | |
d4e167da | 294 | _GLOBAL(__tlbil_va) |
2a4aca11 BH |
295 | mfmsr r10 |
296 | wrteei 0 | |
297 | slwi r4,r4,16 | |
c3071951 | 298 | ori r4,r4,(MAS6_ISIZE(BOOK3E_PAGESZ_4K))@l |
2a4aca11 | 299 | mtspr SPRN_MAS6,r4 /* assume AS=0 for now */ |
c3071951 | 300 | BEGIN_MMU_FTR_SECTION |
2a4aca11 BH |
301 | tlbsx 0,r3 |
302 | mfspr r4,SPRN_MAS1 /* check valid */ | |
303 | andis. r3,r4,MAS1_VALID@h | |
304 | beq 1f | |
305 | rlwinm r4,r4,0,1,31 | |
306 | mtspr SPRN_MAS1,r4 | |
307 | tlbwe | |
c3071951 | 308 | MMU_FTR_SECTION_ELSE |
962cffbd | 309 | PPC_TLBILX_VA(0,R3) |
c3071951 | 310 | ALT_MMU_FTR_SECTION_END_IFCLR(MMU_FTR_USE_TLBILX) |
2a4aca11 BH |
311 | msync |
312 | isync | |
313 | 1: wrtee r10 | |
314 | blr | |
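The tlbsx leg of __tlbil_va reads as the following C sketch (interrupt masking elided; the asm statements are illustrative, not the kernel's actual helpers):

    mtspr(SPRN_MAS6, (pid << 16) | MAS6_ISIZE(BOOK3E_PAGESZ_4K)); /* AS = 0 */
    asm volatile("tlbsx 0,%0" : : "r" (address));
    mas1 = mfspr(SPRN_MAS1);
    if (mas1 & MAS1_VALID) {
            mtspr(SPRN_MAS1, mas1 & ~MAS1_VALID);   /* clear V ... */
            asm volatile("tlbwe");                  /* ... and write it back */
    }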
25d21ad6 BH |
315 | #elif defined(CONFIG_PPC_BOOK3E) |
316 | /* | |
317 | * New Book3E (>= 2.06) implementation | |
318 | * | |
319 | * Note: We may be able to get away without the interrupt masking stuff | |
320 | * if we save/restore MAS6 on exceptions that might modify it | |
321 | */ | |
322 | _GLOBAL(_tlbil_pid) | |
323 | slwi r4,r3,MAS6_SPID_SHIFT | |
324 | mfmsr r10 | |
325 | wrteei 0 | |
326 | mtspr SPRN_MAS6,r4 | |
962cffbd | 327 | PPC_TLBILX_PID(0,R0) |
25d21ad6 BH |
328 | wrtee r10 |
329 | msync | |
330 | isync | |
331 | blr | |
332 | ||
333 | _GLOBAL(_tlbil_pid_noind) | |
334 | slwi r4,r3,MAS6_SPID_SHIFT | |
335 | mfmsr r10 | |
336 | ori r4,r4,MAS6_SIND | |
337 | wrteei 0 | |
338 | mtspr SPRN_MAS6,r4 | |
962cffbd | 339 | PPC_TLBILX_PID(0,R0) |
25d21ad6 BH |
340 | wrtee r10 |
341 | msync | |
342 | isync | |
343 | blr | |
344 | ||
345 | _GLOBAL(_tlbil_all) | |
962cffbd | 346 | PPC_TLBILX_ALL(0,R0) |
25d21ad6 BH |
347 | msync |
348 | isync | |
349 | blr | |
350 | ||
351 | _GLOBAL(_tlbil_va) | |
352 | mfmsr r10 | |
353 | wrteei 0 | |
354 | cmpwi cr0,r6,0 | |
355 | slwi r4,r4,MAS6_SPID_SHIFT | |
356 | rlwimi r4,r5,MAS6_ISIZE_SHIFT,MAS6_ISIZE_MASK | |
357 | beq 1f | |
358 | rlwimi r4,r6,MAS6_SIND_SHIFT,MAS6_SIND | |
359 | 1: mtspr SPRN_MAS6,r4 /* assume AS=0 for now */ | |
962cffbd | 360 | PPC_TLBILX_VA(0,R3) |
25d21ad6 BH |
361 | msync |
362 | isync | |
363 | wrtee r10 | |
364 | blr | |
365 | ||
366 | _GLOBAL(_tlbivax_bcast) | |
367 | mfmsr r10 | |
368 | wrteei 0 | |
369 | cmpwi cr0,r6,0 | |
370 | slwi r4,r4,MAS6_SPID_SHIFT | |
371 | rlwimi r4,r5,MAS6_ISIZE_SHIFT,MAS6_ISIZE_MASK | |
372 | beq 1f | |
373 | rlwimi r4,r6,MAS6_SIND_SHIFT,MAS6_SIND | |
374 | 1: mtspr SPRN_MAS6,r4 /* assume AS=0 for now */ | |
962cffbd | 375 | PPC_TLBIVAX(0,R3) |
25d21ad6 BH |
376 | eieio |
377 | tlbsync | |
378 | sync | |
379 | wrtee r10 | |
380 | blr | |
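Both _tlbil_va and _tlbivax_bcast assemble MAS6 the same way before invalidating; the slwi/rlwimi pairs amount to this hedged C sketch:

    mas6 = (pid << MAS6_SPID_SHIFT) |
           ((tsize << MAS6_ISIZE_SHIFT) & MAS6_ISIZE_MASK);
    if (ind)                        /* invalidating an indirect entry? */
            mas6 |= MAS6_SIND;
    mtspr(SPRN_MAS6, mas6);         /* AS = 0 assumed, as the comment notes */

The difference is only in the invalidate itself: _tlbil_va issues a local tlbilx, while _tlbivax_bcast issues tlbivax followed by tlbsync to broadcast the invalidation.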
381 | ||
382 | _GLOBAL(set_context) | |
383 | #ifdef CONFIG_BDI_SWITCH | |
384 | /* Context switch the PTE pointer for the Abatron BDI2000. | |
385 | * The PGDIR is the second parameter. | |
386 | */ | |
387 | lis r5, abatron_pteptrs@h | |
388 | ori r5, r5, abatron_pteptrs@l | |
389 | stw r4, 0x4(r5) | |
390 | #endif | |
391 | mtspr SPRN_PID,r3 | |
392 | isync /* Force context change */ | |
393 | blr | |
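In C terms, set_context(id, pgd) is simply the following (hedged; abatron_pteptrs is the two-slot array the BDI2000 debugger watches, with the pgdir in the second slot):

    #ifdef CONFIG_BDI_SWITCH
            abatron_pteptrs[1] = pgd;   /* the stw to offset 0x4 */
    #endif
            mtspr(SPRN_PID, id);
            isync();                    /* force the context change */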
b62c31ae | 394 | #else |
2a4aca11 BH |
395 | #error Unsupported processor type ! |
396 | #endif | |
78f62237 | 397 | |
55fd766b | 398 | #if defined(CONFIG_PPC_FSL_BOOK3E) |
78f62237 KG |
399 | /* |
400 | * extern void loadcam_entry(unsigned int index) | |
401 | * | |
402 | * Load TLBCAM[index] entry into the L2 CAM MMU |
403 | */ | |
404 | _GLOBAL(loadcam_entry) | |
405 | LOAD_REG_ADDR(r4, TLBCAM) | |
406 | mulli r5,r3,TLBCAM_SIZE | |
407 | add r3,r5,r4 | |
408 | lwz r4,TLBCAM_MAS0(r3) | |
409 | mtspr SPRN_MAS0,r4 | |
410 | lwz r4,TLBCAM_MAS1(r3) | |
411 | mtspr SPRN_MAS1,r4 | |
412 | PPC_LL r4,TLBCAM_MAS2(r3) | |
413 | mtspr SPRN_MAS2,r4 | |
414 | lwz r4,TLBCAM_MAS3(r3) | |
415 | mtspr SPRN_MAS3,r4 | |
416 | BEGIN_MMU_FTR_SECTION | |
417 | lwz r4,TLBCAM_MAS7(r3) | |
418 | mtspr SPRN_MAS7,r4 | |
419 | END_MMU_FTR_SECTION_IFSET(MMU_FTR_BIG_PHYS) | |
420 | isync | |
421 | tlbwe | |
422 | isync | |
423 | blr | |
424 | #endif |