/*
 * drivers/misc/mediatek/kernel/mt_cache_v8.S (branch PULS_20160108)
 * AArch64 cache enable/disable and set/way maintenance helpers.
 */
#include <linux/linkage.h>
#include <linux/init.h>
#include <asm/assembler.h>

	.text
	/* SCTLR_EL1 control bits */
	.equ SCTLR_C_BIT, 0x00000004		/* C: data/unified cache enable */
	.equ SCTLR_I_BIT, 0x00001000		/* I: instruction cache enable */
	/* set/way operation selectors: index into dcsw_loop_table (stride 32B) */
	.equ DCISW, 0x0				/* dc isw  - invalidate by set/way */
	.equ DCCISW, 0x1			/* dc cisw - clean & invalidate by set/way */
	.equ DCCSW, 0x2				/* dc csw  - clean by set/way */
	/* CLIDR_EL1 field layout */
	.equ LOC_SHIFT, 24			/* Level of Coherency field offset */
	.equ CLIDR_FIELD_WIDTH, 3		/* width of each CLIDR cache-type field */
	.equ LEVEL_SHIFT, 1			/* CSSELR_EL1 level field offset */
/*
 * void __enable_icache(void)
 * Set SCTLR_EL1.I to enable the instruction cache.
 * Clobbers: x0.
 */
ENTRY(__enable_icache)
	mrs	x0, SCTLR_EL1
	orr	x0, x0, #SCTLR_I_BIT
	msr	SCTLR_EL1, x0
	isb					// context synchronization: guarantee the
						// SCTLR_EL1 write affects later fetches
	ret
ENDPROC(__enable_icache)
21
/*
 * void __disable_icache(void)
 * Clear SCTLR_EL1.I to disable the instruction cache.
 * Clobbers: x0.
 */
ENTRY(__disable_icache)
	mrs	x0, SCTLR_EL1
	bic	x0, x0, #SCTLR_I_BIT
	msr	SCTLR_EL1, x0
	isb					// context synchronization: guarantee the
						// SCTLR_EL1 write affects later fetches
	ret
ENDPROC(__disable_icache)
28
/*
 * __dis_D - clear SCTLR_EL1.C to disable the data/unified cache.
 * Clobbers x0 ("might pollute x0" in the original note).
 * dsb before the write drains outstanding memory accesses; dsb+isb
 * after it make the new cache state visible to following instructions.
 */
.macro __dis_D
	mrs	x0, SCTLR_EL1
	bic	x0, x0, #SCTLR_C_BIT
	dsb	sy
	msr	SCTLR_EL1, x0
	dsb	sy
	isb	sy
.endm
38
/*
 * void __enable_dcache(void)
 * Set SCTLR_EL1.C to enable the data/unified cache.
 * Clobbers: x0.
 */
ENTRY(__enable_dcache)
	mrs	x0, SCTLR_EL1
	orr	x0, x0, #SCTLR_C_BIT
	dsb	sy				// drain outstanding accesses first
	msr	SCTLR_EL1, x0
	dsb	sy
	isb	sy				// make the new SCTLR value visible
	ret
ENDPROC(__enable_dcache)
48
/*
 * void __disable_dcache(void)
 * Clear SCTLR_EL1.C to disable the data/unified cache.
 * Clobbers: x0.
 */
ENTRY(__disable_dcache)
	mrs	x0, SCTLR_EL1
	bic	x0, x0, #SCTLR_C_BIT
	dsb	sy				// drain outstanding accesses first
	msr	SCTLR_EL1, x0
	dsb	sy
	isb	sy				// make the new SCTLR value visible
	ret
ENDPROC(__disable_dcache)
58
/*
 * void __enable_cache(void)
 * Set SCTLR_EL1.I and SCTLR_EL1.C: enable both instruction and
 * data/unified caches in one register write.
 * Clobbers: x0.
 */
ENTRY(__enable_cache)
	mrs	x0, SCTLR_EL1
	orr	x0, x0, #SCTLR_I_BIT
	orr	x0, x0, #SCTLR_C_BIT
	dsb	sy				// drain outstanding accesses first
	msr	SCTLR_EL1, x0
	dsb	sy
	isb	sy				// make the new SCTLR value visible
	ret
ENDPROC(__enable_cache)
69
/*
 * void __disable_cache(void)
 * Clear SCTLR_EL1.I and SCTLR_EL1.C: disable both instruction and
 * data/unified caches in one register write.
 * Clobbers: x0.
 */
ENTRY(__disable_cache)
	mrs	x0, SCTLR_EL1
	bic	x0, x0, #SCTLR_I_BIT
	bic	x0, x0, #SCTLR_C_BIT
	dsb	sy				// drain outstanding accesses first
	msr	SCTLR_EL1, x0
	dsb	sy
	isb	sy				// make the new SCTLR value visible
	ret
ENDPROC(__disable_cache)
80
/* ---------------------------------------------------------------
 * Data cache operations by set/way to the level specified
 *
 * The main function, do_dcsw_op, requires:
 * x0: the operation type (0-2), per DCISW/DCCISW/DCCSW above
 * x1: the first cache level to operate on (1-based)
 * x3: the last cache level to operate on (1-based)
 * x9: clidr_el1
 * and carries out the operation on each data cache level from the
 * level in x1 up to the level in x3, in sequence.
 *
 * The __inner_dcache_* macros below set up the x0, x1, x3 and x9
 * parameters based on clidr_el1 cache information before invoking
 * the main function.
 * ---------------------------------------------------------------
 */
/*
 * do_dcsw_op
 * Walk cache levels [x1, x3] and perform the set/way dc operation
 * selected by x0 on every set/way of each data/unified level.
 * In:  x0 = operation type (0=isw, 1=cisw, 2=csw)
 *      x1 = first level (1-based), x3 = last level (1-based)
 *      x9 = clidr_el1
 * Clobbers: x0-x11, x14, x16, x17, csselr_el1, flags.
 */
ENTRY(do_dcsw_op)
	lsl	x3, x3, #1			// last level in CSSELR form (level << 1)
	cbz	x3, exit			// no levels requested -> nothing to do
	sub	x1, x1, #1			// first level, 0-based...
	lsl	x1, x1, #1			// ...in CSSELR form
	mov	x10, x1				// x10 = current level selector
	adr	x14, dcsw_loop_table		// compute inner loop address
	add	x14, x14, x0, lsl #5		// inner loop is 8x32-bit instructions
	mov	x0, x9				// x0 = clidr_el1 value
	mov	w8, #1				// constant 1, used to build loop strides
loop:
	add	x2, x10, x10, lsr #1		// work out 3x current cache level
	lsr	x1, x0, x2			// extract cache type bits from clidr
	and	x1, x1, #7			// mask the bits for current cache only
	cmp	x1, #2				// see what cache we have at this level
	b.lt	level_done			// nothing to do if no cache or icache

	msr	csselr_el1, x10			// select current cache level in csselr
	isb					// isb to sync the new cssr&csidr
	mrs	x1, ccsidr_el1			// read the new ccsidr
	and	x2, x1, #7			// extract the length of the cache lines
	add	x2, x2, #4			// add 4 (line length offset)
	ubfx	x4, x1, #3, #10			// maximum way number
	clz	w5, w4				// bit position of way size increment
	lsl	w9, w4, w5			// w9 = aligned max way number
	lsl	w16, w8, w5			// w16 = way number loop decrement
	orr	w9, w10, w9			// w9 = combine way and cache number
	ubfx	w6, w1, #13, #15		// w6 = max set number
	lsl	w17, w8, w2			// w17 = set number loop decrement
	dsb	sy				// barrier before we start this level
	br	x14				// jump to DC operation specific loop

	/*
	 * Inner set/way loop, instantiated once per operation in
	 * dcsw_loop_table.  Each expansion MUST be exactly 8 instructions
	 * (32 bytes) to match the 'x0, lsl #5' table indexing above.
	 * Iterates ways (x9, outer) and sets (w7, inner); w11 holds the
	 * combined level/way/set operand for the dc instruction.
	 */
	.macro dcsw_loop _op
loop2_\_op:
	lsl	w7, w6, w2			// w7 = aligned max set number

loop3_\_op:
	orr	w11, w9, w7			// combine cache, way and set number
	dc	\_op, x11
	subs	w7, w7, w17			// decrement set number
	b.ge	loop3_\_op

	subs	x9, x9, x16			// decrement way number
	b.ge	loop2_\_op

	b	level_done
	.endm

level_done:
	add	x10, x10, #2			// increment cache number
	cmp	x3, x10
	b.gt	loop
	msr	csselr_el1, xzr			// select cache level 0 in csselr
	dsb	sy				// barrier to complete final cache operation
	isb
exit:
	ret
ENDPROC(do_dcsw_op)
154
/*
 * Per-operation inner loops, indexed by operation type (x0) with a
 * 32-byte stride.  The order must match the DCISW (0) / DCCISW (1) /
 * DCCSW (2) encodings defined at the top of the file.
 */
dcsw_loop_table:
	dcsw_loop isw
	dcsw_loop cisw
	dcsw_loop csw
159
/*
 * __inner_dcache_all mode
 * Perform set/way operation \mode (DCISW/DCCISW/DCCSW) on every
 * data/unified cache level from L1 up to the Level of Coherency.
 * Tail-branches to do_dcsw_op, which returns to the original caller.
 */
.macro __inner_dcache_all mode
	mov	x0, \mode
	mov	x1, #1				// first level: L1
	mrs	x9, clidr_el1
	/*
	 * LoC is CLIDR_EL1[26:24] - exactly CLIDR_FIELD_WIDTH (3) bits.
	 * The previous width of 0x7 extracted seven bits, folding the
	 * LoUU field into the last-level count whenever LoUU != 0.
	 */
	ubfx	x3, x9, #LOC_SHIFT, #CLIDR_FIELD_WIDTH	/* LOC as last cache level */
	b	do_dcsw_op
.endm
167
/*
 * __inner_dcache_L1 mode
 * Perform set/way operation \mode on level 1 only.
 * Tail-branches to do_dcsw_op, which returns to the original caller.
 */
.macro __inner_dcache_L1 mode
	mov	x0, \mode
	mov	x1, #1				// first level: L1
	mov	x3, #1				// last level: L1
	mrs	x9, clidr_el1
	b	do_dcsw_op
.endm
175
/*
 * __inner_dcache_L2 mode
 * Perform set/way operation \mode on level 2 only.
 * Tail-branches to do_dcsw_op, which returns to the original caller.
 */
.macro __inner_dcache_L2 mode
	mov	x0, \mode
	mov	x1, #2				// first level: L2
	mov	x3, #2				// last level: L2
	mrs	x9, clidr_el1
	b	do_dcsw_op
.endm
183
/*
 * __inner_dcache_L1_L2 mode
 * Perform set/way operation \mode on levels 1 and 2.
 * Tail-branches to do_dcsw_op, which returns to the original caller.
 */
.macro __inner_dcache_L1_L2 mode
	mov	x0, \mode
	mov	x1, #1				// first level: L1
	mov	x3, #2				// last level: L2
	mrs	x9, clidr_el1
	b	do_dcsw_op
.endm
191
/* Clean & invalidate (dc cisw) all levels up to the Level of Coherency. */
ENTRY(__inner_flush_dcache_all)
	__inner_dcache_all #DCCISW
ENDPROC(__inner_flush_dcache_all)
195
/* Clean & invalidate (dc cisw) level 1 only. */
ENTRY(__inner_flush_dcache_L1)
	__inner_dcache_L1 #DCCISW
ENDPROC(__inner_flush_dcache_L1)
199
/* Clean & invalidate (dc cisw) level 2 only. */
ENTRY(__inner_flush_dcache_L2)
	__inner_dcache_L2 #DCCISW
ENDPROC(__inner_flush_dcache_L2)
203
/* Clean (dc csw) all levels up to the Level of Coherency. */
ENTRY(__inner_clean_dcache_all)
	__inner_dcache_all #DCCSW
ENDPROC(__inner_clean_dcache_all)
207
/* Clean (dc csw) level 1 only. */
ENTRY(__inner_clean_dcache_L1)
	__inner_dcache_L1 #DCCSW
ENDPROC(__inner_clean_dcache_L1)
211
/* Clean (dc csw) level 2 only. */
ENTRY(__inner_clean_dcache_L2)
	__inner_dcache_L2 #DCCSW
ENDPROC(__inner_clean_dcache_L2)
215
/* Invalidate (dc isw) all levels up to the Level of Coherency. */
ENTRY(__inner_inv_dcache_all)
	__inner_dcache_all #DCISW
ENDPROC(__inner_inv_dcache_all)
219
/* Invalidate (dc isw) level 1 only. */
ENTRY(__inner_inv_dcache_L1)
	__inner_dcache_L1 #DCISW
	/* ENDPROC name fixed: previously closed as __inner_clean_dcache_L1,
	   mis-attributing this symbol's .type/.size. */
ENDPROC(__inner_inv_dcache_L1)
223
/* Invalidate (dc isw) level 2 only. */
ENTRY(__inner_inv_dcache_L2)
	__inner_dcache_L2 #DCISW
	/* ENDPROC name fixed: previously closed as __inner_clean_dcache_L2,
	   mis-attributing this symbol's .type/.size. */
ENDPROC(__inner_inv_dcache_L2)
227
/*
 * Disable the data cache, then clean & invalidate L1 by set/way.
 * __dis_D clobbers x0; the macro tail-branches to do_dcsw_op, which
 * returns straight to this function's caller via lr.
 */
ENTRY(__disable_dcache__inner_flush_dcache_L1)
	__dis_D
	__inner_dcache_L1 #DCCISW
ENDPROC(__disable_dcache__inner_flush_dcache_L1)
232
/*
 * Disable the data cache, then clean & invalidate L1 and L2 by set/way.
 * __dis_D clobbers x0; the macro tail-branches to do_dcsw_op, which
 * returns straight to this function's caller via lr.
 */
ENTRY(__disable_dcache__inner_flush_dcache_L1__inner_flush_dcache_L2)
	__dis_D
	__inner_dcache_L1_L2 #DCCISW
ENDPROC(__disable_dcache__inner_flush_dcache_L1__inner_flush_dcache_L2)
237
/*
 * Disable the data cache, then clean (no invalidate) L1 and L2 by set/way.
 * __dis_D clobbers x0; the macro tail-branches to do_dcsw_op, which
 * returns straight to this function's caller via lr.
 */
ENTRY(__disable_dcache__inner_clean_dcache_L1__inner_clean_dcache_L2)
	__dis_D
	__inner_dcache_L1_L2 #DCCSW
ENDPROC(__disable_dcache__inner_clean_dcache_L1__inner_clean_dcache_L2)
242
/*
 * Disable the data cache, clean & invalidate L1, then clean L2.
 * Clobbers: x0-x17 (via do_dcsw_op) plus x12/x13 used as save slots.
 */
ENTRY(__disable_dcache__inner_flush_dcache_L1__inner_clean_dcache_L2)
	__dis_D
	/*
	 * L1 and L2 need different operations (flush vs clean), but the
	 * __inner_dcache_* macros tail-branch to do_dcsw_op, whose ret
	 * goes to whoever issued the last bl - so the first operation
	 * must be a real bl and the link state restored afterwards.
	 * The frame pointer (x29) and lr (x30) are stashed in the
	 * caller-saved temporaries x12/x13 rather than on the stack, to
	 * avoid any memory access while the cache state is in flux.
	 * NOTE: nothing reached from here may corrupt x12 & x13.
	 */
	mov	x12, x29			// save frame pointer
	mov	x13, x30			// save return address
	mov	x29, sp
	bl	__inner_flush_dcache_L1
	mov	x29, x12			// restore frame pointer
	mov	x30, x13			// restore return address
	__inner_dcache_L2 #DCCSW		// tail: clean L2 and return to caller
ENDPROC(__disable_dcache__inner_flush_dcache_L1__inner_clean_dcache_L2)