Merge branch 'bind_unbind' of git://git.kernel.org/pub/scm/linux/kernel/git/gregkh...
[GitHub/LineageOS/android_kernel_motorola_exynos9610.git] / drivers / gpu / drm / radeon / evergreen.c
1 /*
2 * Copyright 2010 Advanced Micro Devices, Inc.
3 *
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
10 *
11 * The above copyright notice and this permission notice shall be included in
12 * all copies or substantial portions of the Software.
13 *
14 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
17 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20 * OTHER DEALINGS IN THE SOFTWARE.
21 *
22 * Authors: Alex Deucher
23 */
24 #include <linux/firmware.h>
25 #include <linux/slab.h>
26 #include <drm/drmP.h>
27 #include "radeon.h"
28 #include "radeon_asic.h"
29 #include "radeon_audio.h"
30 #include <drm/radeon_drm.h>
31 #include "evergreend.h"
32 #include "atom.h"
33 #include "avivod.h"
34 #include "evergreen_reg.h"
35 #include "evergreen_blit_shaders.h"
36 #include "radeon_ucode.h"
37
38 /*
39 * Indirect registers accessor
40 */
41 u32 eg_cg_rreg(struct radeon_device *rdev, u32 reg)
42 {
43 unsigned long flags;
44 u32 r;
45
46 spin_lock_irqsave(&rdev->cg_idx_lock, flags);
47 WREG32(EVERGREEN_CG_IND_ADDR, ((reg) & 0xffff));
48 r = RREG32(EVERGREEN_CG_IND_DATA);
49 spin_unlock_irqrestore(&rdev->cg_idx_lock, flags);
50 return r;
51 }
52
53 void eg_cg_wreg(struct radeon_device *rdev, u32 reg, u32 v)
54 {
55 unsigned long flags;
56
57 spin_lock_irqsave(&rdev->cg_idx_lock, flags);
58 WREG32(EVERGREEN_CG_IND_ADDR, ((reg) & 0xffff));
59 WREG32(EVERGREEN_CG_IND_DATA, (v));
60 spin_unlock_irqrestore(&rdev->cg_idx_lock, flags);
61 }
62
63 u32 eg_pif_phy0_rreg(struct radeon_device *rdev, u32 reg)
64 {
65 unsigned long flags;
66 u32 r;
67
68 spin_lock_irqsave(&rdev->pif_idx_lock, flags);
69 WREG32(EVERGREEN_PIF_PHY0_INDEX, ((reg) & 0xffff));
70 r = RREG32(EVERGREEN_PIF_PHY0_DATA);
71 spin_unlock_irqrestore(&rdev->pif_idx_lock, flags);
72 return r;
73 }
74
75 void eg_pif_phy0_wreg(struct radeon_device *rdev, u32 reg, u32 v)
76 {
77 unsigned long flags;
78
79 spin_lock_irqsave(&rdev->pif_idx_lock, flags);
80 WREG32(EVERGREEN_PIF_PHY0_INDEX, ((reg) & 0xffff));
81 WREG32(EVERGREEN_PIF_PHY0_DATA, (v));
82 spin_unlock_irqrestore(&rdev->pif_idx_lock, flags);
83 }
84
85 u32 eg_pif_phy1_rreg(struct radeon_device *rdev, u32 reg)
86 {
87 unsigned long flags;
88 u32 r;
89
90 spin_lock_irqsave(&rdev->pif_idx_lock, flags);
91 WREG32(EVERGREEN_PIF_PHY1_INDEX, ((reg) & 0xffff));
92 r = RREG32(EVERGREEN_PIF_PHY1_DATA);
93 spin_unlock_irqrestore(&rdev->pif_idx_lock, flags);
94 return r;
95 }
96
97 void eg_pif_phy1_wreg(struct radeon_device *rdev, u32 reg, u32 v)
98 {
99 unsigned long flags;
100
101 spin_lock_irqsave(&rdev->pif_idx_lock, flags);
102 WREG32(EVERGREEN_PIF_PHY1_INDEX, ((reg) & 0xffff));
103 WREG32(EVERGREEN_PIF_PHY1_DATA, (v));
104 spin_unlock_irqrestore(&rdev->pif_idx_lock, flags);
105 }
106
/* Per-CRTC MMIO register block offsets; index by CRTC id (0-5). */
static const u32 crtc_offsets[6] =
{
	EVERGREEN_CRTC0_REGISTER_OFFSET,
	EVERGREEN_CRTC1_REGISTER_OFFSET,
	EVERGREEN_CRTC2_REGISTER_OFFSET,
	EVERGREEN_CRTC3_REGISTER_OFFSET,
	EVERGREEN_CRTC4_REGISTER_OFFSET,
	EVERGREEN_CRTC5_REGISTER_OFFSET
};
116
117 #include "clearstate_evergreen.h"
118
/*
 * Register offsets the RLC saves/restores across power state transitions
 * on Sumo-class parts. Presumably consumed by the RLC save/restore setup
 * code elsewhere in the driver — confirm against sumo_rlc_init() callers.
 */
static const u32 sumo_rlc_save_restore_register_list[] =
{
	0x98fc,
	0x9830,
	0x9834,
	0x9838,
	0x9870,
	0x9874,
	0x8a14,
	0x8b24,
	0x8bcc,
	0x8b10,
	0x8d00,
	0x8d04,
	0x8c00,
	0x8c04,
	0x8c08,
	0x8c0c,
	0x8d8c,
	0x8c20,
	0x8c24,
	0x8c28,
	0x8c18,
	0x8c1c,
	0x8cf0,
	0x8e2c,
	0x8e38,
	0x8c30,
	0x9508,
	0x9688,
	0x9608,
	0x960c,
	0x9610,
	0x9614,
	0x88c4,
	0x88d4,
	0xa008,
	0x900c,
	0x9100,
	0x913c,
	0x98f8,
	0x98f4,
	0x9b7c,
	0x3f8c,
	0x8950,
	0x8954,
	0x8a18,
	0x8b28,
	0x9144,
	0x9148,
	0x914c,
	0x3f90,
	0x3f94,
	0x915c,
	0x9160,
	0x9178,
	0x917c,
	0x9180,
	0x918c,
	0x9190,
	0x9194,
	0x9198,
	0x919c,
	0x91a8,
	0x91ac,
	0x91b0,
	0x91b4,
	0x91b8,
	0x91c4,
	0x91c8,
	0x91cc,
	0x91d0,
	0x91d4,
	0x91e0,
	0x91e4,
	0x91ec,
	0x91f0,
	0x91f4,
	0x9200,
	0x9204,
	0x929c,
	0x9150,
	0x802c,
};
203
/* Forward declarations for this file. */
static void evergreen_gpu_init(struct radeon_device *rdev);
void evergreen_fini(struct radeon_device *rdev);
void evergreen_pcie_gen2_enable(struct radeon_device *rdev);
void evergreen_program_aspm(struct radeon_device *rdev);
/* Helpers implemented by other ASIC files (cayman/cik/si/rv770). */
extern void cayman_cp_int_cntl_setup(struct radeon_device *rdev,
				     int ring, u32 cp_int_cntl);
extern void cayman_vm_decode_fault(struct radeon_device *rdev,
				   u32 status, u32 addr);
void cik_init_cp_pg_table(struct radeon_device *rdev);

extern u32 si_get_csb_size(struct radeon_device *rdev);
extern void si_get_csb_buffer(struct radeon_device *rdev, volatile u32 *buffer);
extern u32 cik_get_csb_size(struct radeon_device *rdev);
extern void cik_get_csb_buffer(struct radeon_device *rdev, volatile u32 *buffer);
extern void rv770_set_clk_bypass_mode(struct radeon_device *rdev);
219
/*
 * "Golden" register settings for Cypress/Hemlock/Juniper/Redwood.
 * Presumably {offset, and_mask, or_value} triples applied by
 * radeon_program_register_sequence() — confirm against its definition.
 */
static const u32 evergreen_golden_registers[] =
{
	0x3f90, 0xffff0000, 0xff000000,
	0x9148, 0xffff0000, 0xff000000,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8a14, 0xffffffff, 0x00000007,
	0x8b10, 0xffffffff, 0x00000000,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000010,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0xffffffff, 0x001000f0,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x10830, 0xffffffff, 0x00000011,
	0x11430, 0xffffffff, 0x00000011,
	0x12030, 0xffffffff, 0x00000011,
	0x12c30, 0xffffffff, 0x00000011,
	0xd02c, 0xffffffff, 0x08421000,
	0x240c, 0xffffffff, 0x00000380,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x28a4c, 0x06000000, 0x06000000,
	0x10c, 0x00000001, 0x00000001,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8c04, 0xffffffff, 0x40600060,
	0x8c08, 0xffffffff, 0x001c001c,
	0x8cf0, 0xffffffff, 0x08e00620,
	0x8c20, 0xffffffff, 0x00800080,
	0x8c24, 0xffffffff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0xffffffff, 0x00001010,
	0x28350, 0xffffffff, 0x00000000,
	0xa008, 0xffffffff, 0x00010000,
	0x5c4, 0xffffffff, 0x00000001,
	0x9508, 0xffffffff, 0x00000002,
	0x913c, 0x0000000f, 0x0000000a
};
265
/*
 * Second golden-register set for Cypress/Hemlock/Juniper/Redwood;
 * applied after evergreen_golden_registers (same triple format).
 */
static const u32 evergreen_golden_registers2[] =
{
	0x2f4c, 0xffffffff, 0x00000000,
	0x54f4, 0xffffffff, 0x00000000,
	0x54f0, 0xffffffff, 0x00000000,
	0x5498, 0xffffffff, 0x00000000,
	0x549c, 0xffffffff, 0x00000000,
	0x5494, 0xffffffff, 0x00000000,
	0x53cc, 0xffffffff, 0x00000000,
	0x53c8, 0xffffffff, 0x00000000,
	0x53c4, 0xffffffff, 0x00000000,
	0x53c0, 0xffffffff, 0x00000000,
	0x53bc, 0xffffffff, 0x00000000,
	0x53b8, 0xffffffff, 0x00000000,
	0x53b4, 0xffffffff, 0x00000000,
	0x53b0, 0xffffffff, 0x00000000
};
283
/*
 * Medium-grain clock gating (MGCG) init sequence for Cypress/Hemlock,
 * in the same {offset, mask, value} triple format as the golden registers.
 * Note 0x802c is written several times to switch banks mid-sequence.
 */
static const u32 cypress_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9208, 0xffffffff, 0x00070000,
	0x920c, 0xffffffff, 0x00030002,
	0x9210, 0xffffffff, 0x00050004,
	0x921c, 0xffffffff, 0x00010006,
	0x9220, 0xffffffff, 0x00090008,
	0x9224, 0xffffffff, 0x00070000,
	0x9228, 0xffffffff, 0x00030002,
	0x922c, 0xffffffff, 0x00050004,
	0x9238, 0xffffffff, 0x00010006,
	0x923c, 0xffffffff, 0x00090008,
	0x9240, 0xffffffff, 0x00070000,
	0x9244, 0xffffffff, 0x00030002,
	0x9248, 0xffffffff, 0x00050004,
	0x9254, 0xffffffff, 0x00010006,
	0x9258, 0xffffffff, 0x00090008,
	0x925c, 0xffffffff, 0x00070000,
	0x9260, 0xffffffff, 0x00030002,
	0x9264, 0xffffffff, 0x00050004,
	0x9270, 0xffffffff, 0x00010006,
	0x9274, 0xffffffff, 0x00090008,
	0x9278, 0xffffffff, 0x00070000,
	0x927c, 0xffffffff, 0x00030002,
	0x9280, 0xffffffff, 0x00050004,
	0x928c, 0xffffffff, 0x00010006,
	0x9290, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0x40010000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9208, 0xffffffff, 0x00070000,
	0x920c, 0xffffffff, 0x00030002,
	0x9210, 0xffffffff, 0x00050004,
	0x921c, 0xffffffff, 0x00010006,
	0x9220, 0xffffffff, 0x00090008,
	0x9224, 0xffffffff, 0x00070000,
	0x9228, 0xffffffff, 0x00030002,
	0x922c, 0xffffffff, 0x00050004,
	0x9238, 0xffffffff, 0x00010006,
	0x923c, 0xffffffff, 0x00090008,
	0x9240, 0xffffffff, 0x00070000,
	0x9244, 0xffffffff, 0x00030002,
	0x9248, 0xffffffff, 0x00050004,
	0x9254, 0xffffffff, 0x00010006,
	0x9258, 0xffffffff, 0x00090008,
	0x925c, 0xffffffff, 0x00070000,
	0x9260, 0xffffffff, 0x00030002,
	0x9264, 0xffffffff, 0x00050004,
	0x9270, 0xffffffff, 0x00010006,
	0x9274, 0xffffffff, 0x00090008,
	0x9278, 0xffffffff, 0x00070000,
	0x927c, 0xffffffff, 0x00030002,
	0x9280, 0xffffffff, 0x00050004,
	0x928c, 0xffffffff, 0x00010006,
	0x9290, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000
};
436
/*
 * MGCG init sequence for Redwood ({offset, mask, value} triples).
 * Shorter than the Cypress table — fewer per-SE register banks to program.
 */
static const u32 redwood_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000
};
508
/*
 * Golden register settings for Cedar ({offset, mask, value} triples).
 * Mostly mirrors evergreen_golden_registers with smaller-chip values
 * (e.g. 0x88d4 and 0x8cf0 differ).
 */
static const u32 cedar_golden_registers[] =
{
	0x3f90, 0xffff0000, 0xff000000,
	0x9148, 0xffff0000, 0xff000000,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8a14, 0xffffffff, 0x00000007,
	0x8b10, 0xffffffff, 0x00000000,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000000,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0xffffffff, 0x001000f0,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x10830, 0xffffffff, 0x00000011,
	0x11430, 0xffffffff, 0x00000011,
	0xd02c, 0xffffffff, 0x08421000,
	0x240c, 0xffffffff, 0x00000380,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x28a4c, 0x06000000, 0x06000000,
	0x10c, 0x00000001, 0x00000001,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8c04, 0xffffffff, 0x40600060,
	0x8c08, 0xffffffff, 0x001c001c,
	0x8cf0, 0xffffffff, 0x08e00410,
	0x8c20, 0xffffffff, 0x00800080,
	0x8c24, 0xffffffff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0xffffffff, 0x00001010,
	0x28350, 0xffffffff, 0x00000000,
	0xa008, 0xffffffff, 0x00010000,
	0x5c4, 0xffffffff, 0x00000001,
	0x9508, 0xffffffff, 0x00000002
};
551
/* MGCG init sequence for Cedar ({offset, mask, value} triples). */
static const u32 cedar_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9178, 0xffffffff, 0x00050000,
	0x917c, 0xffffffff, 0x00030002,
	0x918c, 0xffffffff, 0x00010004,
	0x9190, 0xffffffff, 0x00070006,
	0x9194, 0xffffffff, 0x00050000,
	0x9198, 0xffffffff, 0x00030002,
	0x91a8, 0xffffffff, 0x00010004,
	0x91ac, 0xffffffff, 0x00070006,
	0x91e8, 0xffffffff, 0x00000000,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000
};
605
/* MGCG init sequence for Juniper ({offset, mask, value} triples). */
static const u32 juniper_mgcg_init[] =
{
	0x802c, 0xffffffff, 0xc0000000,
	0x5448, 0xffffffff, 0x00000100,
	0x55e4, 0xffffffff, 0x00000100,
	0x160c, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0xc164, 0xffffffff, 0x00000100,
	0x8a18, 0xffffffff, 0x00000100,
	0x897c, 0xffffffff, 0x06000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9a60, 0xffffffff, 0x00000100,
	0x9868, 0xffffffff, 0x00000100,
	0x8d58, 0xffffffff, 0x00000100,
	0x9510, 0xffffffff, 0x00000100,
	0x949c, 0xffffffff, 0x00000100,
	0x9654, 0xffffffff, 0x00000100,
	0x9030, 0xffffffff, 0x00000100,
	0x9034, 0xffffffff, 0x00000100,
	0x9038, 0xffffffff, 0x00000100,
	0x903c, 0xffffffff, 0x00000100,
	0x9040, 0xffffffff, 0x00000100,
	0xa200, 0xffffffff, 0x00000100,
	0xa204, 0xffffffff, 0x00000100,
	0xa208, 0xffffffff, 0x00000100,
	0xa20c, 0xffffffff, 0x00000100,
	0x971c, 0xffffffff, 0x00000100,
	0xd0c0, 0xffffffff, 0xff000100,
	0x802c, 0xffffffff, 0x40000000,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x9178, 0xffffffff, 0x00070000,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x918c, 0xffffffff, 0x00010006,
	0x9190, 0xffffffff, 0x00090008,
	0x9194, 0xffffffff, 0x00070000,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91a8, 0xffffffff, 0x00010006,
	0x91ac, 0xffffffff, 0x00090008,
	0x91b0, 0xffffffff, 0x00070000,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91c4, 0xffffffff, 0x00010006,
	0x91c8, 0xffffffff, 0x00090008,
	0x91cc, 0xffffffff, 0x00070000,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91e0, 0xffffffff, 0x00010006,
	0x91e4, 0xffffffff, 0x00090008,
	0x91e8, 0xffffffff, 0x00000000,
	0x91ec, 0xffffffff, 0x00070000,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x9200, 0xffffffff, 0x00010006,
	0x9204, 0xffffffff, 0x00090008,
	0x9208, 0xffffffff, 0x00070000,
	0x920c, 0xffffffff, 0x00030002,
	0x9210, 0xffffffff, 0x00050004,
	0x921c, 0xffffffff, 0x00010006,
	0x9220, 0xffffffff, 0x00090008,
	0x9224, 0xffffffff, 0x00070000,
	0x9228, 0xffffffff, 0x00030002,
	0x922c, 0xffffffff, 0x00050004,
	0x9238, 0xffffffff, 0x00010006,
	0x923c, 0xffffffff, 0x00090008,
	0x9240, 0xffffffff, 0x00070000,
	0x9244, 0xffffffff, 0x00030002,
	0x9248, 0xffffffff, 0x00050004,
	0x9254, 0xffffffff, 0x00010006,
	0x9258, 0xffffffff, 0x00090008,
	0x925c, 0xffffffff, 0x00070000,
	0x9260, 0xffffffff, 0x00030002,
	0x9264, 0xffffffff, 0x00050004,
	0x9270, 0xffffffff, 0x00010006,
	0x9274, 0xffffffff, 0x00090008,
	0x9278, 0xffffffff, 0x00070000,
	0x927c, 0xffffffff, 0x00030002,
	0x9280, 0xffffffff, 0x00050004,
	0x928c, 0xffffffff, 0x00010006,
	0x9290, 0xffffffff, 0x00090008,
	0x9294, 0xffffffff, 0x00000000,
	0x929c, 0xffffffff, 0x00000001,
	0x802c, 0xffffffff, 0xc0000000,
	0x977c, 0xffffffff, 0x00000100,
	0x3f80, 0xffffffff, 0x00000100,
	0xa210, 0xffffffff, 0x00000100,
	0xa214, 0xffffffff, 0x00000100,
	0x4d8, 0xffffffff, 0x00000100,
	0x9784, 0xffffffff, 0x00000100,
	0x9698, 0xffffffff, 0x00000100,
	0x4d4, 0xffffffff, 0x00000200,
	0x30cc, 0xffffffff, 0x00000100,
	0x802c, 0xffffffff, 0xc0000000
};
703
/* Golden register settings for SuperSumo ({offset, mask, value} triples). */
static const u32 supersumo_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5c4, 0xffffffff, 0x00000001,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x8c04, 0xffffffff, 0x40600060,
	0x8c08, 0xffffffff, 0x001c001c,
	0x8c20, 0xffffffff, 0x00800080,
	0x8c24, 0xffffffff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0xffffffff, 0x00001010,
	0x918c, 0xffffffff, 0x00010006,
	0x91a8, 0xffffffff, 0x00010006,
	0x91c4, 0xffffffff, 0x00010006,
	0x91e0, 0xffffffff, 0x00010006,
	0x9200, 0xffffffff, 0x00010006,
	0x9150, 0xffffffff, 0x6e944040,
	0x917c, 0xffffffff, 0x00030002,
	0x9180, 0xffffffff, 0x00050004,
	0x9198, 0xffffffff, 0x00030002,
	0x919c, 0xffffffff, 0x00050004,
	0x91b4, 0xffffffff, 0x00030002,
	0x91b8, 0xffffffff, 0x00050004,
	0x91d0, 0xffffffff, 0x00030002,
	0x91d4, 0xffffffff, 0x00050004,
	0x91f0, 0xffffffff, 0x00030002,
	0x91f4, 0xffffffff, 0x00050004,
	0x915c, 0xffffffff, 0x00010000,
	0x9160, 0xffffffff, 0x00030002,
	0x3f90, 0xffff0000, 0xff000000,
	0x9178, 0xffffffff, 0x00070000,
	0x9194, 0xffffffff, 0x00070000,
	0x91b0, 0xffffffff, 0x00070000,
	0x91cc, 0xffffffff, 0x00070000,
	0x91ec, 0xffffffff, 0x00070000,
	0x9148, 0xffff0000, 0xff000000,
	0x9190, 0xffffffff, 0x00090008,
	0x91ac, 0xffffffff, 0x00090008,
	0x91c8, 0xffffffff, 0x00090008,
	0x91e4, 0xffffffff, 0x00090008,
	0x9204, 0xffffffff, 0x00090008,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x929c, 0xffffffff, 0x00000001,
	0x8a18, 0xffffffff, 0x00000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x5644, 0xffffffff, 0x00000100,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8030, 0xffffffff, 0x0000100a,
	0x8a14, 0xffffffff, 0x00000007,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x8b10, 0xffffffff, 0x00000000,
	0x28a4c, 0x06000000, 0x06000000,
	0x4d8, 0xffffffff, 0x00000100,
	0x913c, 0xffff000f, 0x0100000a,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000010,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5e78, 0xffffffff, 0x001000f0,
	0xd02c, 0xffffffff, 0x08421000,
	0xa008, 0xffffffff, 0x00010000,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8cf0, 0x1fffffff, 0x08e00620,
	0x28350, 0xffffffff, 0x00000000,
	0x9508, 0xffffffff, 0x00000002
};
778
/* Extra golden registers applied on Sumo in addition to the SuperSumo set. */
static const u32 sumo_golden_registers[] =
{
	0x900c, 0x00ffffff, 0x0017071f,
	0x8c18, 0xffffffff, 0x10101060,
	0x8c1c, 0xffffffff, 0x00001010,
	0x8c30, 0x0000000f, 0x00000005,
	0x9688, 0x0000000f, 0x00000007
};
787
/* Golden register settings for Wrestler ({offset, mask, value} triples). */
static const u32 wrestler_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5c4, 0xffffffff, 0x00000001,
	0x7030, 0xffffffff, 0x00000011,
	0x7c30, 0xffffffff, 0x00000011,
	0x6104, 0x01000300, 0x00000000,
	0x5bc0, 0x00300000, 0x00000000,
	0x918c, 0xffffffff, 0x00010006,
	0x91a8, 0xffffffff, 0x00010006,
	0x9150, 0xffffffff, 0x6e944040,
	0x917c, 0xffffffff, 0x00030002,
	0x9198, 0xffffffff, 0x00030002,
	0x915c, 0xffffffff, 0x00010000,
	0x3f90, 0xffff0000, 0xff000000,
	0x9178, 0xffffffff, 0x00070000,
	0x9194, 0xffffffff, 0x00070000,
	0x9148, 0xffff0000, 0xff000000,
	0x9190, 0xffffffff, 0x00090008,
	0x91ac, 0xffffffff, 0x00090008,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0x929c, 0xffffffff, 0x00000001,
	0x8a18, 0xffffffff, 0x00000100,
	0x8b28, 0xffffffff, 0x00000100,
	0x9144, 0xffffffff, 0x00000100,
	0x9b7c, 0xffffffff, 0x00000000,
	0x8030, 0xffffffff, 0x0000100a,
	0x8a14, 0xffffffff, 0x00000001,
	0x8b24, 0xffffffff, 0x00ff0fff,
	0x8b10, 0xffffffff, 0x00000000,
	0x28a4c, 0x06000000, 0x06000000,
	0x4d8, 0xffffffff, 0x00000100,
	0x913c, 0xffff000f, 0x0100000a,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0xffffffff, 0x000000c2,
	0x88d4, 0xffffffff, 0x00000010,
	0x8974, 0xffffffff, 0x00000000,
	0xc78, 0x00000080, 0x00000080,
	0x5e78, 0xffffffff, 0x001000f0,
	0xd02c, 0xffffffff, 0x08421000,
	0xa008, 0xffffffff, 0x00010000,
	0x8d00, 0xffffffff, 0x100e4848,
	0x8d04, 0xffffffff, 0x00164745,
	0x8c00, 0xffffffff, 0xe4000003,
	0x8cf0, 0x1fffffff, 0x08e00410,
	0x28350, 0xffffffff, 0x00000000,
	0x9508, 0xffffffff, 0x00000002,
	0x900c, 0xffffffff, 0x0017071f,
	0x8c18, 0xffffffff, 0x10101060,
	0x8c1c, 0xffffffff, 0x00001010
};
840
/* Golden register settings for Barts ({offset, mask, value} triples). */
static const u32 barts_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x3f90, 0xffff0000, 0xff000000,
	0x9148, 0xffff0000, 0xff000000,
	0x3f94, 0xffff0000, 0xff000000,
	0x914c, 0xffff0000, 0xff000000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x70073777, 0x00010001,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02011003,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x02011003,
	0x98fc, 0xffffffff, 0x76543210,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x00000007, 0x02011003,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000007,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00620,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};
889
/* Golden register settings for Turks ({offset, mask, value} triples). */
static const u32 turks_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x8c8, 0x00003000, 0x00001070,
	0x8cc, 0x000fffff, 0x00040035,
	0x3f90, 0xffff0000, 0xfff00000,
	0x9148, 0xffff0000, 0xfff00000,
	0x3f94, 0xffff0000, 0xfff00000,
	0x914c, 0xffff0000, 0xfff00000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x00073007, 0x00010002,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02010002,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x00010002,
	0x98fc, 0xffffffff, 0x33221100,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x33773777, 0x00010002,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000007,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00410,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};
940
/* Golden register settings for Caicos ({offset, mask, value} triples). */
static const u32 caicos_golden_registers[] =
{
	0x5eb4, 0xffffffff, 0x00000002,
	0x5e78, 0x8f311ff1, 0x001000f0,
	0x8c8, 0x00003420, 0x00001450,
	0x8cc, 0x000fffff, 0x00040035,
	0x3f90, 0xffff0000, 0xfffc0000,
	0x9148, 0xffff0000, 0xfffc0000,
	0x3f94, 0xffff0000, 0xfffc0000,
	0x914c, 0xffff0000, 0xfffc0000,
	0xc78, 0x00000080, 0x00000080,
	0xbd4, 0x00073007, 0x00010001,
	0xd02c, 0xbfffff1f, 0x08421000,
	0xd0b8, 0x03773777, 0x02010001,
	0x5bc0, 0x00200000, 0x50100000,
	0x98f8, 0x33773777, 0x02010001,
	0x98fc, 0xffffffff, 0x33221100,
	0x7030, 0x31000311, 0x00000011,
	0x2f48, 0x33773777, 0x02010001,
	0x6b28, 0x00000010, 0x00000012,
	0x7728, 0x00000010, 0x00000012,
	0x10328, 0x00000010, 0x00000012,
	0x10f28, 0x00000010, 0x00000012,
	0x11b28, 0x00000010, 0x00000012,
	0x12728, 0x00000010, 0x00000012,
	0x240c, 0x000007ff, 0x00000380,
	0x8a14, 0xf000001f, 0x00000001,
	0x8b24, 0x3fff3fff, 0x00ff0fff,
	0x8b10, 0x0000ff0f, 0x00000000,
	0x28a4c, 0x07ffffff, 0x06000000,
	0x10c, 0x00000001, 0x00010003,
	0xa02c, 0xffffffff, 0x0000009b,
	0x913c, 0x0000000f, 0x0100000a,
	0x8d00, 0xffff7f7f, 0x100e4848,
	0x8d04, 0x00ffffff, 0x00164745,
	0x8c00, 0xfffc0003, 0xe4000003,
	0x8c04, 0xf8ff00ff, 0x40600060,
	0x8c08, 0x00ff00ff, 0x001c001c,
	0x8cf0, 0x1fff1fff, 0x08e00410,
	0x8c20, 0x0fff0fff, 0x00800080,
	0x8c24, 0x0fff0fff, 0x00800080,
	0x8c18, 0xffffffff, 0x20202078,
	0x8c1c, 0x0000ffff, 0x00001010,
	0x28350, 0x00000f01, 0x00000000,
	0x9508, 0x3700001f, 0x00000002,
	0x960c, 0xffffffff, 0x54763210,
	0x88c4, 0x001f3ae3, 0x000000c2,
	0x88d4, 0x0000001f, 0x00000010,
	0x8974, 0xffffffff, 0x00000000
};
991
992 static void evergreen_init_golden_registers(struct radeon_device *rdev)
993 {
994 switch (rdev->family) {
995 case CHIP_CYPRESS:
996 case CHIP_HEMLOCK:
997 radeon_program_register_sequence(rdev,
998 evergreen_golden_registers,
999 (const u32)ARRAY_SIZE(evergreen_golden_registers));
1000 radeon_program_register_sequence(rdev,
1001 evergreen_golden_registers2,
1002 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
1003 radeon_program_register_sequence(rdev,
1004 cypress_mgcg_init,
1005 (const u32)ARRAY_SIZE(cypress_mgcg_init));
1006 break;
1007 case CHIP_JUNIPER:
1008 radeon_program_register_sequence(rdev,
1009 evergreen_golden_registers,
1010 (const u32)ARRAY_SIZE(evergreen_golden_registers));
1011 radeon_program_register_sequence(rdev,
1012 evergreen_golden_registers2,
1013 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
1014 radeon_program_register_sequence(rdev,
1015 juniper_mgcg_init,
1016 (const u32)ARRAY_SIZE(juniper_mgcg_init));
1017 break;
1018 case CHIP_REDWOOD:
1019 radeon_program_register_sequence(rdev,
1020 evergreen_golden_registers,
1021 (const u32)ARRAY_SIZE(evergreen_golden_registers));
1022 radeon_program_register_sequence(rdev,
1023 evergreen_golden_registers2,
1024 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
1025 radeon_program_register_sequence(rdev,
1026 redwood_mgcg_init,
1027 (const u32)ARRAY_SIZE(redwood_mgcg_init));
1028 break;
1029 case CHIP_CEDAR:
1030 radeon_program_register_sequence(rdev,
1031 cedar_golden_registers,
1032 (const u32)ARRAY_SIZE(cedar_golden_registers));
1033 radeon_program_register_sequence(rdev,
1034 evergreen_golden_registers2,
1035 (const u32)ARRAY_SIZE(evergreen_golden_registers2));
1036 radeon_program_register_sequence(rdev,
1037 cedar_mgcg_init,
1038 (const u32)ARRAY_SIZE(cedar_mgcg_init));
1039 break;
1040 case CHIP_PALM:
1041 radeon_program_register_sequence(rdev,
1042 wrestler_golden_registers,
1043 (const u32)ARRAY_SIZE(wrestler_golden_registers));
1044 break;
1045 case CHIP_SUMO:
1046 radeon_program_register_sequence(rdev,
1047 supersumo_golden_registers,
1048 (const u32)ARRAY_SIZE(supersumo_golden_registers));
1049 break;
1050 case CHIP_SUMO2:
1051 radeon_program_register_sequence(rdev,
1052 supersumo_golden_registers,
1053 (const u32)ARRAY_SIZE(supersumo_golden_registers));
1054 radeon_program_register_sequence(rdev,
1055 sumo_golden_registers,
1056 (const u32)ARRAY_SIZE(sumo_golden_registers));
1057 break;
1058 case CHIP_BARTS:
1059 radeon_program_register_sequence(rdev,
1060 barts_golden_registers,
1061 (const u32)ARRAY_SIZE(barts_golden_registers));
1062 break;
1063 case CHIP_TURKS:
1064 radeon_program_register_sequence(rdev,
1065 turks_golden_registers,
1066 (const u32)ARRAY_SIZE(turks_golden_registers));
1067 break;
1068 case CHIP_CAICOS:
1069 radeon_program_register_sequence(rdev,
1070 caicos_golden_registers,
1071 (const u32)ARRAY_SIZE(caicos_golden_registers));
1072 break;
1073 default:
1074 break;
1075 }
1076 }
1077
1078 /**
1079 * evergreen_get_allowed_info_register - fetch the register for the info ioctl
1080 *
1081 * @rdev: radeon_device pointer
1082 * @reg: register offset in bytes
1083 * @val: register value
1084 *
1085 * Returns 0 for success or -EINVAL for an invalid register
1086 *
1087 */
1088 int evergreen_get_allowed_info_register(struct radeon_device *rdev,
1089 u32 reg, u32 *val)
1090 {
1091 switch (reg) {
1092 case GRBM_STATUS:
1093 case GRBM_STATUS_SE0:
1094 case GRBM_STATUS_SE1:
1095 case SRBM_STATUS:
1096 case SRBM_STATUS2:
1097 case DMA_STATUS_REG:
1098 case UVD_STATUS:
1099 *val = RREG32(reg);
1100 return 0;
1101 default:
1102 return -EINVAL;
1103 }
1104 }
1105
1106 void evergreen_tiling_fields(unsigned tiling_flags, unsigned *bankw,
1107 unsigned *bankh, unsigned *mtaspect,
1108 unsigned *tile_split)
1109 {
1110 *bankw = (tiling_flags >> RADEON_TILING_EG_BANKW_SHIFT) & RADEON_TILING_EG_BANKW_MASK;
1111 *bankh = (tiling_flags >> RADEON_TILING_EG_BANKH_SHIFT) & RADEON_TILING_EG_BANKH_MASK;
1112 *mtaspect = (tiling_flags >> RADEON_TILING_EG_MACRO_TILE_ASPECT_SHIFT) & RADEON_TILING_EG_MACRO_TILE_ASPECT_MASK;
1113 *tile_split = (tiling_flags >> RADEON_TILING_EG_TILE_SPLIT_SHIFT) & RADEON_TILING_EG_TILE_SPLIT_MASK;
1114 switch (*bankw) {
1115 default:
1116 case 1: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_1; break;
1117 case 2: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_2; break;
1118 case 4: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_4; break;
1119 case 8: *bankw = EVERGREEN_ADDR_SURF_BANK_WIDTH_8; break;
1120 }
1121 switch (*bankh) {
1122 default:
1123 case 1: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_1; break;
1124 case 2: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_2; break;
1125 case 4: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_4; break;
1126 case 8: *bankh = EVERGREEN_ADDR_SURF_BANK_HEIGHT_8; break;
1127 }
1128 switch (*mtaspect) {
1129 default:
1130 case 1: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_1; break;
1131 case 2: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_2; break;
1132 case 4: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_4; break;
1133 case 8: *mtaspect = EVERGREEN_ADDR_SURF_MACRO_TILE_ASPECT_8; break;
1134 }
1135 }
1136
1137 static int sumo_set_uvd_clock(struct radeon_device *rdev, u32 clock,
1138 u32 cntl_reg, u32 status_reg)
1139 {
1140 int r, i;
1141 struct atom_clock_dividers dividers;
1142
1143 r = radeon_atom_get_clock_dividers(rdev, COMPUTE_ENGINE_PLL_PARAM,
1144 clock, false, &dividers);
1145 if (r)
1146 return r;
1147
1148 WREG32_P(cntl_reg, dividers.post_div, ~(DCLK_DIR_CNTL_EN|DCLK_DIVIDER_MASK));
1149
1150 for (i = 0; i < 100; i++) {
1151 if (RREG32(status_reg) & DCLK_STATUS)
1152 break;
1153 mdelay(10);
1154 }
1155 if (i == 100)
1156 return -ETIMEDOUT;
1157
1158 return 0;
1159 }
1160
1161 int sumo_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
1162 {
1163 int r = 0;
1164 u32 cg_scratch = RREG32(CG_SCRATCH1);
1165
1166 r = sumo_set_uvd_clock(rdev, vclk, CG_VCLK_CNTL, CG_VCLK_STATUS);
1167 if (r)
1168 goto done;
1169 cg_scratch &= 0xffff0000;
1170 cg_scratch |= vclk / 100; /* Mhz */
1171
1172 r = sumo_set_uvd_clock(rdev, dclk, CG_DCLK_CNTL, CG_DCLK_STATUS);
1173 if (r)
1174 goto done;
1175 cg_scratch &= 0x0000ffff;
1176 cg_scratch |= (dclk / 100) << 16; /* Mhz */
1177
1178 done:
1179 WREG32(CG_SCRATCH1, cg_scratch);
1180
1181 return r;
1182 }
1183
/**
 * evergreen_set_uvd_clocks - reprogram the UVD PLL (UPLL)
 *
 * @rdev: radeon_device pointer
 * @vclk: requested UVD vclk
 * @dclk: requested UVD dclk
 *
 * Bypasses vclk/dclk onto the bus clock, reprograms the UPLL feedback
 * and post dividers for the requested frequencies, then switches the
 * clocks back to the PLL outputs.  If either requested clock is 0 the
 * PLL is left bypassed and put to sleep.  The sequence and delays below
 * are order-critical hardware programming.
 * Returns 0 on success or a negative error code from the divider
 * calculation or a PLL control request.
 */
int evergreen_set_uvd_clocks(struct radeon_device *rdev, u32 vclk, u32 dclk)
{
	/* start off with something large */
	unsigned fb_div = 0, vclk_div = 0, dclk_div = 0;
	int r;

	/* bypass vclk and dclk with bclk */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		VCLK_SRC_SEL(1) | DCLK_SRC_SEL(1),
		~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	/* put PLL in bypass mode */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_BYPASS_EN_MASK, ~UPLL_BYPASS_EN_MASK);

	if (!vclk || !dclk) {
		/* keep the Bypass mode, put PLL to sleep */
		WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
		return 0;
	}

	/* compute feedback and post dividers for the requested clocks */
	r = radeon_uvd_calc_upll_dividers(rdev, vclk, dclk, 125000, 250000,
					  16384, 0x03FFFFFF, 0, 128, 5,
					  &fb_div, &vclk_div, &dclk_div);
	if (r)
		return r;

	/* set VCO_MODE to 1 */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_VCO_MODE_MASK, ~UPLL_VCO_MODE_MASK);

	/* toggle UPLL_SLEEP to 1 then back to 0 */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_SLEEP_MASK, ~UPLL_SLEEP_MASK);
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_SLEEP_MASK);

	/* deassert UPLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

	mdelay(1);

	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
	if (r)
		return r;

	/* assert UPLL_RESET again */
	WREG32_P(CG_UPLL_FUNC_CNTL, UPLL_RESET_MASK, ~UPLL_RESET_MASK);

	/* disable spread spectrum. */
	WREG32_P(CG_UPLL_SPREAD_SPECTRUM, 0, ~SSEN_MASK);

	/* set feedback divider */
	WREG32_P(CG_UPLL_FUNC_CNTL_3, UPLL_FB_DIV(fb_div), ~UPLL_FB_DIV_MASK);

	/* set ref divider to 0 */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_REF_DIV_MASK);

	/* select the spare bit depending on the feedback divider range */
	if (fb_div < 307200)
		WREG32_P(CG_UPLL_FUNC_CNTL_4, 0, ~UPLL_SPARE_ISPARE9);
	else
		WREG32_P(CG_UPLL_FUNC_CNTL_4, UPLL_SPARE_ISPARE9, ~UPLL_SPARE_ISPARE9);

	/* set PDIV_A and PDIV_B */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		UPLL_PDIV_A(vclk_div) | UPLL_PDIV_B(dclk_div),
		~(UPLL_PDIV_A_MASK | UPLL_PDIV_B_MASK));

	/* give the PLL some time to settle */
	mdelay(15);

	/* deassert PLL_RESET */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_RESET_MASK);

	mdelay(15);

	/* switch from bypass mode to normal mode */
	WREG32_P(CG_UPLL_FUNC_CNTL, 0, ~UPLL_BYPASS_EN_MASK);

	r = radeon_uvd_send_upll_ctlreq(rdev, CG_UPLL_FUNC_CNTL);
	if (r)
		return r;

	/* switch VCLK and DCLK selection */
	WREG32_P(CG_UPLL_FUNC_CNTL_2,
		VCLK_SRC_SEL(2) | DCLK_SRC_SEL(2),
		~(VCLK_SRC_SEL_MASK | DCLK_SRC_SEL_MASK));

	mdelay(100);

	return 0;
}
1272
/**
 * evergreen_fix_pci_max_read_req_size - sanitize the PCIe max read request size
 *
 * @rdev: radeon_device pointer
 *
 * Reads the current MAX_READ_REQUEST_SIZE from PCIe config space and
 * forces it to 512 bytes when it holds a value this hardware cannot
 * cope with.
 */
void evergreen_fix_pci_max_read_req_size(struct radeon_device *rdev)
{
	int readrq;
	u16 v;

	readrq = pcie_get_readrq(rdev->pdev);
	/* encode the byte size as a power-of-two exponent relative to 256:
	 * 256 -> 1, 512 -> 2, ... 4096 -> 5; 128 encodes to 0
	 */
	v = ffs(readrq) - 8;
	/* if bios or OS sets MAX_READ_REQUEST_SIZE to an invalid value, fix it
	 * to avoid hangs or performance issues
	 */
	if ((v == 0) || (v == 6) || (v == 7))
		pcie_set_readrq(rdev->pdev, 512);
}
1286
1287 void dce4_program_fmt(struct drm_encoder *encoder)
1288 {
1289 struct drm_device *dev = encoder->dev;
1290 struct radeon_device *rdev = dev->dev_private;
1291 struct radeon_encoder *radeon_encoder = to_radeon_encoder(encoder);
1292 struct radeon_crtc *radeon_crtc = to_radeon_crtc(encoder->crtc);
1293 struct drm_connector *connector = radeon_get_connector_for_encoder(encoder);
1294 int bpc = 0;
1295 u32 tmp = 0;
1296 enum radeon_connector_dither dither = RADEON_FMT_DITHER_DISABLE;
1297
1298 if (connector) {
1299 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1300 bpc = radeon_get_monitor_bpc(connector);
1301 dither = radeon_connector->dither;
1302 }
1303
1304 /* LVDS/eDP FMT is set up by atom */
1305 if (radeon_encoder->devices & ATOM_DEVICE_LCD_SUPPORT)
1306 return;
1307
1308 /* not needed for analog */
1309 if ((radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC1) ||
1310 (radeon_encoder->encoder_id == ENCODER_OBJECT_ID_INTERNAL_KLDSCP_DAC2))
1311 return;
1312
1313 if (bpc == 0)
1314 return;
1315
1316 switch (bpc) {
1317 case 6:
1318 if (dither == RADEON_FMT_DITHER_ENABLE)
1319 /* XXX sort out optimal dither settings */
1320 tmp |= (FMT_FRAME_RANDOM_ENABLE | FMT_HIGHPASS_RANDOM_ENABLE |
1321 FMT_SPATIAL_DITHER_EN);
1322 else
1323 tmp |= FMT_TRUNCATE_EN;
1324 break;
1325 case 8:
1326 if (dither == RADEON_FMT_DITHER_ENABLE)
1327 /* XXX sort out optimal dither settings */
1328 tmp |= (FMT_FRAME_RANDOM_ENABLE | FMT_HIGHPASS_RANDOM_ENABLE |
1329 FMT_RGB_RANDOM_ENABLE |
1330 FMT_SPATIAL_DITHER_EN | FMT_SPATIAL_DITHER_DEPTH);
1331 else
1332 tmp |= (FMT_TRUNCATE_EN | FMT_TRUNCATE_DEPTH);
1333 break;
1334 case 10:
1335 default:
1336 /* not needed */
1337 break;
1338 }
1339
1340 WREG32(FMT_BIT_DEPTH_CONTROL + radeon_crtc->crtc_offset, tmp);
1341 }
1342
1343 static bool dce4_is_in_vblank(struct radeon_device *rdev, int crtc)
1344 {
1345 if (RREG32(EVERGREEN_CRTC_STATUS + crtc_offsets[crtc]) & EVERGREEN_CRTC_V_BLANK)
1346 return true;
1347 else
1348 return false;
1349 }
1350
1351 static bool dce4_is_counter_moving(struct radeon_device *rdev, int crtc)
1352 {
1353 u32 pos1, pos2;
1354
1355 pos1 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
1356 pos2 = RREG32(EVERGREEN_CRTC_STATUS_POSITION + crtc_offsets[crtc]);
1357
1358 if (pos1 != pos2)
1359 return true;
1360 else
1361 return false;
1362 }
1363
1364 /**
1365 * dce4_wait_for_vblank - vblank wait asic callback.
1366 *
1367 * @rdev: radeon_device pointer
1368 * @crtc: crtc to wait for vblank on
1369 *
1370 * Wait for vblank on the requested crtc (evergreen+).
1371 */
1372 void dce4_wait_for_vblank(struct radeon_device *rdev, int crtc)
1373 {
1374 unsigned i = 0;
1375
1376 if (crtc >= rdev->num_crtc)
1377 return;
1378
1379 if (!(RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[crtc]) & EVERGREEN_CRTC_MASTER_EN))
1380 return;
1381
1382 /* depending on when we hit vblank, we may be close to active; if so,
1383 * wait for another frame.
1384 */
1385 while (dce4_is_in_vblank(rdev, crtc)) {
1386 if (i++ % 100 == 0) {
1387 if (!dce4_is_counter_moving(rdev, crtc))
1388 break;
1389 }
1390 }
1391
1392 while (!dce4_is_in_vblank(rdev, crtc)) {
1393 if (i++ % 100 == 0) {
1394 if (!dce4_is_counter_moving(rdev, crtc))
1395 break;
1396 }
1397 }
1398 }
1399
1400 /**
1401 * evergreen_page_flip - pageflip callback.
1402 *
1403 * @rdev: radeon_device pointer
1404 * @crtc_id: crtc to cleanup pageflip on
1405 * @crtc_base: new address of the crtc (GPU MC address)
1406 *
1407 * Triggers the actual pageflip by updating the primary
1408 * surface base address (evergreen+).
1409 */
1410 void evergreen_page_flip(struct radeon_device *rdev, int crtc_id, u64 crtc_base,
1411 bool async)
1412 {
1413 struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
1414
1415 /* update the scanout addresses */
1416 WREG32(EVERGREEN_GRPH_FLIP_CONTROL + radeon_crtc->crtc_offset,
1417 async ? EVERGREEN_GRPH_SURFACE_UPDATE_H_RETRACE_EN : 0);
1418 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + radeon_crtc->crtc_offset,
1419 upper_32_bits(crtc_base));
1420 WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset,
1421 (u32)crtc_base);
1422 /* post the write */
1423 RREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + radeon_crtc->crtc_offset);
1424 }
1425
1426 /**
1427 * evergreen_page_flip_pending - check if page flip is still pending
1428 *
1429 * @rdev: radeon_device pointer
1430 * @crtc_id: crtc to check
1431 *
1432 * Returns the current update pending status.
1433 */
1434 bool evergreen_page_flip_pending(struct radeon_device *rdev, int crtc_id)
1435 {
1436 struct radeon_crtc *radeon_crtc = rdev->mode_info.crtcs[crtc_id];
1437
1438 /* Return current update_pending status: */
1439 return !!(RREG32(EVERGREEN_GRPH_UPDATE + radeon_crtc->crtc_offset) &
1440 EVERGREEN_GRPH_SURFACE_UPDATE_PENDING);
1441 }
1442
/* get temperature in millidegrees */
int evergreen_get_temp(struct radeon_device *rdev)
{
	u32 temp, toffset;
	int actual_temp = 0;

	if (rdev->family == CHIP_JUNIPER) {
		/* Juniper combines the TS0 sensor reading with a per-chip
		 * temperature offset from CG_THERMAL_CTRL
		 */
		toffset = (RREG32(CG_THERMAL_CTRL) & TOFFSET_MASK) >>
			TOFFSET_SHIFT;
		temp = (RREG32(CG_TS0_STATUS) & TS0_ADC_DOUT_MASK) >>
			TS0_ADC_DOUT_SHIFT;

		/* toffset is a 9-bit two's complement value; bit 8 set means
		 * the offset is negative (0x200 - toffset is its magnitude)
		 */
		if (toffset & 0x100)
			actual_temp = temp / 2 - (0x200 - toffset);
		else
			actual_temp = temp / 2 + toffset;

		actual_temp = actual_temp * 1000;

	} else {
		temp = (RREG32(CG_MULT_THERMAL_STATUS) & ASIC_T_MASK) >>
			ASIC_T_SHIFT;

		/* decode the ASIC_T field: bit 10 / bit 9 flag out-of-range
		 * readings which are reported as -256 / 255, bit 8 marks a
		 * negative two's complement value
		 */
		if (temp & 0x400)
			actual_temp = -256;
		else if (temp & 0x200)
			actual_temp = 255;
		else if (temp & 0x100) {
			actual_temp = temp & 0x1ff;
			actual_temp |= ~0x1ff;	/* sign-extend the 9-bit value */
		} else
			actual_temp = temp & 0xff;

		/* raw reading counts half degrees, hence the divide by 2 */
		actual_temp = (actual_temp * 1000) / 2;
	}

	return actual_temp;
}
1481
1482 int sumo_get_temp(struct radeon_device *rdev)
1483 {
1484 u32 temp = RREG32(CG_THERMAL_STATUS) & 0xff;
1485 int actual_temp = temp - 49;
1486
1487 return actual_temp * 1000;
1488 }
1489
1490 /**
1491 * sumo_pm_init_profile - Initialize power profiles callback.
1492 *
1493 * @rdev: radeon_device pointer
1494 *
1495 * Initialize the power states used in profile mode
1496 * (sumo, trinity, SI).
1497 * Used for profile mode only.
1498 */
1499 void sumo_pm_init_profile(struct radeon_device *rdev)
1500 {
1501 int idx;
1502
1503 /* default */
1504 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
1505 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
1506 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
1507 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 0;
1508
1509 /* low,mid sh/mh */
1510 if (rdev->flags & RADEON_IS_MOBILITY)
1511 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
1512 else
1513 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1514
1515 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
1516 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
1517 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
1518 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
1519
1520 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
1521 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
1522 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
1523 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
1524
1525 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
1526 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
1527 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
1528 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 0;
1529
1530 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
1531 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
1532 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
1533 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 0;
1534
1535 /* high sh/mh */
1536 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1537 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
1538 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
1539 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
1540 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx =
1541 rdev->pm.power_state[idx].num_clock_modes - 1;
1542
1543 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
1544 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
1545 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
1546 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx =
1547 rdev->pm.power_state[idx].num_clock_modes - 1;
1548 }
1549
1550 /**
1551 * btc_pm_init_profile - Initialize power profiles callback.
1552 *
1553 * @rdev: radeon_device pointer
1554 *
1555 * Initialize the power states used in profile mode
1556 * (BTC, cayman).
1557 * Used for profile mode only.
1558 */
1559 void btc_pm_init_profile(struct radeon_device *rdev)
1560 {
1561 int idx;
1562
1563 /* default */
1564 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_ps_idx = rdev->pm.default_power_state_index;
1565 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_ps_idx = rdev->pm.default_power_state_index;
1566 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_off_cm_idx = 0;
1567 rdev->pm.profiles[PM_PROFILE_DEFAULT_IDX].dpms_on_cm_idx = 2;
1568 /* starting with BTC, there is one state that is used for both
1569 * MH and SH. Difference is that we always use the high clock index for
1570 * mclk.
1571 */
1572 if (rdev->flags & RADEON_IS_MOBILITY)
1573 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_BATTERY, 0);
1574 else
1575 idx = radeon_pm_get_type_index(rdev, POWER_STATE_TYPE_PERFORMANCE, 0);
1576 /* low sh */
1577 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_ps_idx = idx;
1578 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_ps_idx = idx;
1579 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_off_cm_idx = 0;
1580 rdev->pm.profiles[PM_PROFILE_LOW_SH_IDX].dpms_on_cm_idx = 0;
1581 /* mid sh */
1582 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_ps_idx = idx;
1583 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_ps_idx = idx;
1584 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_off_cm_idx = 0;
1585 rdev->pm.profiles[PM_PROFILE_MID_SH_IDX].dpms_on_cm_idx = 1;
1586 /* high sh */
1587 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_ps_idx = idx;
1588 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_ps_idx = idx;
1589 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_off_cm_idx = 0;
1590 rdev->pm.profiles[PM_PROFILE_HIGH_SH_IDX].dpms_on_cm_idx = 2;
1591 /* low mh */
1592 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_ps_idx = idx;
1593 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_ps_idx = idx;
1594 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_off_cm_idx = 0;
1595 rdev->pm.profiles[PM_PROFILE_LOW_MH_IDX].dpms_on_cm_idx = 0;
1596 /* mid mh */
1597 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_ps_idx = idx;
1598 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_ps_idx = idx;
1599 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_off_cm_idx = 0;
1600 rdev->pm.profiles[PM_PROFILE_MID_MH_IDX].dpms_on_cm_idx = 1;
1601 /* high mh */
1602 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_ps_idx = idx;
1603 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_ps_idx = idx;
1604 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_off_cm_idx = 0;
1605 rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx = 2;
1606 }
1607
1608 /**
1609 * evergreen_pm_misc - set additional pm hw parameters callback.
1610 *
1611 * @rdev: radeon_device pointer
1612 *
1613 * Set non-clock parameters associated with a power state
1614 * (voltage, etc.) (evergreen+).
1615 */
1616 void evergreen_pm_misc(struct radeon_device *rdev)
1617 {
1618 int req_ps_idx = rdev->pm.requested_power_state_index;
1619 int req_cm_idx = rdev->pm.requested_clock_mode_index;
1620 struct radeon_power_state *ps = &rdev->pm.power_state[req_ps_idx];
1621 struct radeon_voltage *voltage = &ps->clock_info[req_cm_idx].voltage;
1622
1623 if (voltage->type == VOLTAGE_SW) {
1624 /* 0xff0x are flags rather then an actual voltage */
1625 if ((voltage->voltage & 0xff00) == 0xff00)
1626 return;
1627 if (voltage->voltage && (voltage->voltage != rdev->pm.current_vddc)) {
1628 radeon_atom_set_voltage(rdev, voltage->voltage, SET_VOLTAGE_TYPE_ASIC_VDDC);
1629 rdev->pm.current_vddc = voltage->voltage;
1630 DRM_DEBUG("Setting: vddc: %d\n", voltage->voltage);
1631 }
1632
1633 /* starting with BTC, there is one state that is used for both
1634 * MH and SH. Difference is that we always use the high clock index for
1635 * mclk and vddci.
1636 */
1637 if ((rdev->pm.pm_method == PM_METHOD_PROFILE) &&
1638 (rdev->family >= CHIP_BARTS) &&
1639 rdev->pm.active_crtc_count &&
1640 ((rdev->pm.profile_index == PM_PROFILE_MID_MH_IDX) ||
1641 (rdev->pm.profile_index == PM_PROFILE_LOW_MH_IDX)))
1642 voltage = &rdev->pm.power_state[req_ps_idx].
1643 clock_info[rdev->pm.profiles[PM_PROFILE_HIGH_MH_IDX].dpms_on_cm_idx].voltage;
1644
1645 /* 0xff0x are flags rather then an actual voltage */
1646 if ((voltage->vddci & 0xff00) == 0xff00)
1647 return;
1648 if (voltage->vddci && (voltage->vddci != rdev->pm.current_vddci)) {
1649 radeon_atom_set_voltage(rdev, voltage->vddci, SET_VOLTAGE_TYPE_ASIC_VDDCI);
1650 rdev->pm.current_vddci = voltage->vddci;
1651 DRM_DEBUG("Setting: vddci: %d\n", voltage->vddci);
1652 }
1653 }
1654 }
1655
1656 /**
1657 * evergreen_pm_prepare - pre-power state change callback.
1658 *
1659 * @rdev: radeon_device pointer
1660 *
1661 * Prepare for a power state change (evergreen+).
1662 */
1663 void evergreen_pm_prepare(struct radeon_device *rdev)
1664 {
1665 struct drm_device *ddev = rdev->ddev;
1666 struct drm_crtc *crtc;
1667 struct radeon_crtc *radeon_crtc;
1668 u32 tmp;
1669
1670 /* disable any active CRTCs */
1671 list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1672 radeon_crtc = to_radeon_crtc(crtc);
1673 if (radeon_crtc->enabled) {
1674 tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1675 tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1676 WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1677 }
1678 }
1679 }
1680
1681 /**
1682 * evergreen_pm_finish - post-power state change callback.
1683 *
1684 * @rdev: radeon_device pointer
1685 *
1686 * Clean up after a power state change (evergreen+).
1687 */
1688 void evergreen_pm_finish(struct radeon_device *rdev)
1689 {
1690 struct drm_device *ddev = rdev->ddev;
1691 struct drm_crtc *crtc;
1692 struct radeon_crtc *radeon_crtc;
1693 u32 tmp;
1694
1695 /* enable any active CRTCs */
1696 list_for_each_entry(crtc, &ddev->mode_config.crtc_list, head) {
1697 radeon_crtc = to_radeon_crtc(crtc);
1698 if (radeon_crtc->enabled) {
1699 tmp = RREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset);
1700 tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
1701 WREG32(EVERGREEN_CRTC_CONTROL + radeon_crtc->crtc_offset, tmp);
1702 }
1703 }
1704 }
1705
1706 /**
1707 * evergreen_hpd_sense - hpd sense callback.
1708 *
1709 * @rdev: radeon_device pointer
1710 * @hpd: hpd (hotplug detect) pin
1711 *
1712 * Checks if a digital monitor is connected (evergreen+).
1713 * Returns true if connected, false if not connected.
1714 */
1715 bool evergreen_hpd_sense(struct radeon_device *rdev, enum radeon_hpd_id hpd)
1716 {
1717 bool connected = false;
1718
1719 switch (hpd) {
1720 case RADEON_HPD_1:
1721 if (RREG32(DC_HPD1_INT_STATUS) & DC_HPDx_SENSE)
1722 connected = true;
1723 break;
1724 case RADEON_HPD_2:
1725 if (RREG32(DC_HPD2_INT_STATUS) & DC_HPDx_SENSE)
1726 connected = true;
1727 break;
1728 case RADEON_HPD_3:
1729 if (RREG32(DC_HPD3_INT_STATUS) & DC_HPDx_SENSE)
1730 connected = true;
1731 break;
1732 case RADEON_HPD_4:
1733 if (RREG32(DC_HPD4_INT_STATUS) & DC_HPDx_SENSE)
1734 connected = true;
1735 break;
1736 case RADEON_HPD_5:
1737 if (RREG32(DC_HPD5_INT_STATUS) & DC_HPDx_SENSE)
1738 connected = true;
1739 break;
1740 case RADEON_HPD_6:
1741 if (RREG32(DC_HPD6_INT_STATUS) & DC_HPDx_SENSE)
1742 connected = true;
1743 break;
1744 default:
1745 break;
1746 }
1747
1748 return connected;
1749 }
1750
1751 /**
1752 * evergreen_hpd_set_polarity - hpd set polarity callback.
1753 *
1754 * @rdev: radeon_device pointer
1755 * @hpd: hpd (hotplug detect) pin
1756 *
1757 * Set the polarity of the hpd pin (evergreen+).
1758 */
1759 void evergreen_hpd_set_polarity(struct radeon_device *rdev,
1760 enum radeon_hpd_id hpd)
1761 {
1762 u32 tmp;
1763 bool connected = evergreen_hpd_sense(rdev, hpd);
1764
1765 switch (hpd) {
1766 case RADEON_HPD_1:
1767 tmp = RREG32(DC_HPD1_INT_CONTROL);
1768 if (connected)
1769 tmp &= ~DC_HPDx_INT_POLARITY;
1770 else
1771 tmp |= DC_HPDx_INT_POLARITY;
1772 WREG32(DC_HPD1_INT_CONTROL, tmp);
1773 break;
1774 case RADEON_HPD_2:
1775 tmp = RREG32(DC_HPD2_INT_CONTROL);
1776 if (connected)
1777 tmp &= ~DC_HPDx_INT_POLARITY;
1778 else
1779 tmp |= DC_HPDx_INT_POLARITY;
1780 WREG32(DC_HPD2_INT_CONTROL, tmp);
1781 break;
1782 case RADEON_HPD_3:
1783 tmp = RREG32(DC_HPD3_INT_CONTROL);
1784 if (connected)
1785 tmp &= ~DC_HPDx_INT_POLARITY;
1786 else
1787 tmp |= DC_HPDx_INT_POLARITY;
1788 WREG32(DC_HPD3_INT_CONTROL, tmp);
1789 break;
1790 case RADEON_HPD_4:
1791 tmp = RREG32(DC_HPD4_INT_CONTROL);
1792 if (connected)
1793 tmp &= ~DC_HPDx_INT_POLARITY;
1794 else
1795 tmp |= DC_HPDx_INT_POLARITY;
1796 WREG32(DC_HPD4_INT_CONTROL, tmp);
1797 break;
1798 case RADEON_HPD_5:
1799 tmp = RREG32(DC_HPD5_INT_CONTROL);
1800 if (connected)
1801 tmp &= ~DC_HPDx_INT_POLARITY;
1802 else
1803 tmp |= DC_HPDx_INT_POLARITY;
1804 WREG32(DC_HPD5_INT_CONTROL, tmp);
1805 break;
1806 case RADEON_HPD_6:
1807 tmp = RREG32(DC_HPD6_INT_CONTROL);
1808 if (connected)
1809 tmp &= ~DC_HPDx_INT_POLARITY;
1810 else
1811 tmp |= DC_HPDx_INT_POLARITY;
1812 WREG32(DC_HPD6_INT_CONTROL, tmp);
1813 break;
1814 default:
1815 break;
1816 }
1817 }
1818
1819 /**
1820 * evergreen_hpd_init - hpd setup callback.
1821 *
1822 * @rdev: radeon_device pointer
1823 *
1824 * Setup the hpd pins used by the card (evergreen+).
1825 * Enable the pin, set the polarity, and enable the hpd interrupts.
1826 */
1827 void evergreen_hpd_init(struct radeon_device *rdev)
1828 {
1829 struct drm_device *dev = rdev->ddev;
1830 struct drm_connector *connector;
1831 unsigned enabled = 0;
1832 u32 tmp = DC_HPDx_CONNECTION_TIMER(0x9c4) |
1833 DC_HPDx_RX_INT_TIMER(0xfa) | DC_HPDx_EN;
1834
1835 list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1836 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1837
1838 if (connector->connector_type == DRM_MODE_CONNECTOR_eDP ||
1839 connector->connector_type == DRM_MODE_CONNECTOR_LVDS) {
1840 /* don't try to enable hpd on eDP or LVDS avoid breaking the
1841 * aux dp channel on imac and help (but not completely fix)
1842 * https://bugzilla.redhat.com/show_bug.cgi?id=726143
1843 * also avoid interrupt storms during dpms.
1844 */
1845 continue;
1846 }
1847 switch (radeon_connector->hpd.hpd) {
1848 case RADEON_HPD_1:
1849 WREG32(DC_HPD1_CONTROL, tmp);
1850 break;
1851 case RADEON_HPD_2:
1852 WREG32(DC_HPD2_CONTROL, tmp);
1853 break;
1854 case RADEON_HPD_3:
1855 WREG32(DC_HPD3_CONTROL, tmp);
1856 break;
1857 case RADEON_HPD_4:
1858 WREG32(DC_HPD4_CONTROL, tmp);
1859 break;
1860 case RADEON_HPD_5:
1861 WREG32(DC_HPD5_CONTROL, tmp);
1862 break;
1863 case RADEON_HPD_6:
1864 WREG32(DC_HPD6_CONTROL, tmp);
1865 break;
1866 default:
1867 break;
1868 }
1869 radeon_hpd_set_polarity(rdev, radeon_connector->hpd.hpd);
1870 if (radeon_connector->hpd.hpd != RADEON_HPD_NONE)
1871 enabled |= 1 << radeon_connector->hpd.hpd;
1872 }
1873 radeon_irq_kms_enable_hpd(rdev, enabled);
1874 }
1875
1876 /**
1877 * evergreen_hpd_fini - hpd tear down callback.
1878 *
1879 * @rdev: radeon_device pointer
1880 *
1881 * Tear down the hpd pins used by the card (evergreen+).
1882 * Disable the hpd interrupts.
1883 */
1884 void evergreen_hpd_fini(struct radeon_device *rdev)
1885 {
1886 struct drm_device *dev = rdev->ddev;
1887 struct drm_connector *connector;
1888 unsigned disabled = 0;
1889
1890 list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
1891 struct radeon_connector *radeon_connector = to_radeon_connector(connector);
1892 switch (radeon_connector->hpd.hpd) {
1893 case RADEON_HPD_1:
1894 WREG32(DC_HPD1_CONTROL, 0);
1895 break;
1896 case RADEON_HPD_2:
1897 WREG32(DC_HPD2_CONTROL, 0);
1898 break;
1899 case RADEON_HPD_3:
1900 WREG32(DC_HPD3_CONTROL, 0);
1901 break;
1902 case RADEON_HPD_4:
1903 WREG32(DC_HPD4_CONTROL, 0);
1904 break;
1905 case RADEON_HPD_5:
1906 WREG32(DC_HPD5_CONTROL, 0);
1907 break;
1908 case RADEON_HPD_6:
1909 WREG32(DC_HPD6_CONTROL, 0);
1910 break;
1911 default:
1912 break;
1913 }
1914 if (radeon_connector->hpd.hpd != RADEON_HPD_NONE)
1915 disabled |= 1 << radeon_connector->hpd.hpd;
1916 }
1917 radeon_irq_kms_disable_hpd(rdev, disabled);
1918 }
1919
1920 /* watermark setup */
1921
/**
 * evergreen_line_buffer_adjust - program the display line buffer split
 *
 * @rdev: radeon_device pointer
 * @radeon_crtc: the crtc to set up
 * @mode: mode on this crtc (NULL/disabled means no allocation)
 * @other_mode: mode on the crtc sharing this crtc's line buffer
 *
 * Picks one of the preset line buffer partitionings for the crtc pair,
 * programs DC_LB_MEMORY_SPLIT (and the DMIF buffer allocation on
 * DCE4.1/DCE5), and returns the size of the allocation granted to this
 * crtc (0 when the crtc is disabled).
 */
static u32 evergreen_line_buffer_adjust(struct radeon_device *rdev,
					struct radeon_crtc *radeon_crtc,
					struct drm_display_mode *mode,
					struct drm_display_mode *other_mode)
{
	u32 tmp, buffer_alloc, i;
	u32 pipe_offset = radeon_crtc->crtc_id * 0x20;
	/*
	 * Line Buffer Setup
	 * There are 3 line buffers, each one shared by 2 display controllers.
	 * DC_LB_MEMORY_SPLIT controls how that line buffer is shared between
	 * the display controllers. The paritioning is done via one of four
	 * preset allocations specified in bits 2:0:
	 * first display controller
	 *  0 - first half of lb (3840 * 2)
	 *  1 - first 3/4 of lb (5760 * 2)
	 *  2 - whole lb (7680 * 2), other crtc must be disabled
	 *  3 - first 1/4 of lb (1920 * 2)
	 * second display controller
	 *  4 - second half of lb (3840 * 2)
	 *  5 - second 3/4 of lb (5760 * 2)
	 *  6 - whole lb (7680 * 2), other crtc must be disabled
	 *  7 - last 1/4 of lb (1920 * 2)
	 */
	/* this can get tricky if we have two large displays on a paired group
	 * of crtcs.  Ideally for multiple large displays we'd assign them to
	 * non-linked crtcs for maximum line buffer allocation.
	 */
	if (radeon_crtc->base.enabled && mode) {
		if (other_mode) {
			tmp = 0; /* 1/2 */
			buffer_alloc = 1;
		} else {
			tmp = 2; /* whole */
			buffer_alloc = 2;
		}
	} else {
		tmp = 0;
		buffer_alloc = 0;
	}

	/* second controller of the pair uses second half of the lb */
	if (radeon_crtc->crtc_id % 2)
		tmp += 4;
	WREG32(DC_LB_MEMORY_SPLIT + radeon_crtc->crtc_offset, tmp);

	/* DCE4.1/DCE5 additionally need DMIF buffers allocated per pipe;
	 * wait for the hardware to acknowledge the allocation
	 */
	if (ASIC_IS_DCE41(rdev) || ASIC_IS_DCE5(rdev)) {
		WREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset,
		       DMIF_BUFFERS_ALLOCATED(buffer_alloc));
		for (i = 0; i < rdev->usec_timeout; i++) {
			if (RREG32(PIPE0_DMIF_BUFFER_CONTROL + pipe_offset) &
			    DMIF_BUFFERS_ALLOCATED_COMPLETED)
				break;
			udelay(1);
		}
	}

	/* report the allocation size; DCE5 line buffers are larger than
	 * DCE4's (see the table in the comment above for the DCE4 sizes)
	 */
	if (radeon_crtc->base.enabled && mode) {
		switch (tmp) {
		case 0:
		case 4:
		default:
			if (ASIC_IS_DCE5(rdev))
				return 4096 * 2;
			else
				return 3840 * 2;
		case 1:
		case 5:
			if (ASIC_IS_DCE5(rdev))
				return 6144 * 2;
			else
				return 5760 * 2;
		case 2:
		case 6:
			if (ASIC_IS_DCE5(rdev))
				return 8192 * 2;
			else
				return 7680 * 2;
		case 3:
		case 7:
			if (ASIC_IS_DCE5(rdev))
				return 2048 * 2;
			else
				return 1920 * 2;
		}
	}

	/* controller not enabled, so no lb used */
	return 0;
}
2012
2013 u32 evergreen_get_number_of_dram_channels(struct radeon_device *rdev)
2014 {
2015 u32 tmp = RREG32(MC_SHARED_CHMAP);
2016
2017 switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
2018 case 0:
2019 default:
2020 return 1;
2021 case 1:
2022 return 2;
2023 case 2:
2024 return 4;
2025 case 3:
2026 return 8;
2027 }
2028 }
2029
/* Input parameters describing one display head for the watermark
 * calculations below (evergreen_dram_bandwidth() and friends). */
struct evergreen_wm_params {
	u32 dram_channels; /* number of dram channels */
	u32 yclk; /* bandwidth per dram data pin in kHz */
	u32 sclk; /* engine clock in kHz */
	u32 disp_clk; /* display clock in kHz */
	u32 src_width; /* viewport width */
	u32 active_time; /* active display time in ns */
	u32 blank_time; /* blank time in ns */
	bool interlaced; /* mode is interlaced */
	fixed20_12 vsc; /* vertical scale ratio */
	u32 num_heads; /* number of active crtcs */
	u32 bytes_per_pixel; /* bytes per pixel display + overlay */
	u32 lb_size; /* line buffer allocated to pipe */
	u32 vtaps; /* vertical scaler taps */
};
2045
2046 static u32 evergreen_dram_bandwidth(struct evergreen_wm_params *wm)
2047 {
2048 /* Calculate DRAM Bandwidth and the part allocated to display. */
2049 fixed20_12 dram_efficiency; /* 0.7 */
2050 fixed20_12 yclk, dram_channels, bandwidth;
2051 fixed20_12 a;
2052
2053 a.full = dfixed_const(1000);
2054 yclk.full = dfixed_const(wm->yclk);
2055 yclk.full = dfixed_div(yclk, a);
2056 dram_channels.full = dfixed_const(wm->dram_channels * 4);
2057 a.full = dfixed_const(10);
2058 dram_efficiency.full = dfixed_const(7);
2059 dram_efficiency.full = dfixed_div(dram_efficiency, a);
2060 bandwidth.full = dfixed_mul(dram_channels, yclk);
2061 bandwidth.full = dfixed_mul(bandwidth, dram_efficiency);
2062
2063 return dfixed_trunc(bandwidth);
2064 }
2065
2066 static u32 evergreen_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
2067 {
2068 /* Calculate DRAM Bandwidth and the part allocated to display. */
2069 fixed20_12 disp_dram_allocation; /* 0.3 to 0.7 */
2070 fixed20_12 yclk, dram_channels, bandwidth;
2071 fixed20_12 a;
2072
2073 a.full = dfixed_const(1000);
2074 yclk.full = dfixed_const(wm->yclk);
2075 yclk.full = dfixed_div(yclk, a);
2076 dram_channels.full = dfixed_const(wm->dram_channels * 4);
2077 a.full = dfixed_const(10);
2078 disp_dram_allocation.full = dfixed_const(3); /* XXX worse case value 0.3 */
2079 disp_dram_allocation.full = dfixed_div(disp_dram_allocation, a);
2080 bandwidth.full = dfixed_mul(dram_channels, yclk);
2081 bandwidth.full = dfixed_mul(bandwidth, disp_dram_allocation);
2082
2083 return dfixed_trunc(bandwidth);
2084 }
2085
2086 static u32 evergreen_data_return_bandwidth(struct evergreen_wm_params *wm)
2087 {
2088 /* Calculate the display Data return Bandwidth */
2089 fixed20_12 return_efficiency; /* 0.8 */
2090 fixed20_12 sclk, bandwidth;
2091 fixed20_12 a;
2092
2093 a.full = dfixed_const(1000);
2094 sclk.full = dfixed_const(wm->sclk);
2095 sclk.full = dfixed_div(sclk, a);
2096 a.full = dfixed_const(10);
2097 return_efficiency.full = dfixed_const(8);
2098 return_efficiency.full = dfixed_div(return_efficiency, a);
2099 a.full = dfixed_const(32);
2100 bandwidth.full = dfixed_mul(a, sclk);
2101 bandwidth.full = dfixed_mul(bandwidth, return_efficiency);
2102
2103 return dfixed_trunc(bandwidth);
2104 }
2105
2106 static u32 evergreen_dmif_request_bandwidth(struct evergreen_wm_params *wm)
2107 {
2108 /* Calculate the DMIF Request Bandwidth */
2109 fixed20_12 disp_clk_request_efficiency; /* 0.8 */
2110 fixed20_12 disp_clk, bandwidth;
2111 fixed20_12 a;
2112
2113 a.full = dfixed_const(1000);
2114 disp_clk.full = dfixed_const(wm->disp_clk);
2115 disp_clk.full = dfixed_div(disp_clk, a);
2116 a.full = dfixed_const(10);
2117 disp_clk_request_efficiency.full = dfixed_const(8);
2118 disp_clk_request_efficiency.full = dfixed_div(disp_clk_request_efficiency, a);
2119 a.full = dfixed_const(32);
2120 bandwidth.full = dfixed_mul(a, disp_clk);
2121 bandwidth.full = dfixed_mul(bandwidth, disp_clk_request_efficiency);
2122
2123 return dfixed_trunc(bandwidth);
2124 }
2125
2126 static u32 evergreen_available_bandwidth(struct evergreen_wm_params *wm)
2127 {
2128 /* Calculate the Available bandwidth. Display can use this temporarily but not in average. */
2129 u32 dram_bandwidth = evergreen_dram_bandwidth(wm);
2130 u32 data_return_bandwidth = evergreen_data_return_bandwidth(wm);
2131 u32 dmif_req_bandwidth = evergreen_dmif_request_bandwidth(wm);
2132
2133 return min(dram_bandwidth, min(data_return_bandwidth, dmif_req_bandwidth));
2134 }
2135
2136 static u32 evergreen_average_bandwidth(struct evergreen_wm_params *wm)
2137 {
2138 /* Calculate the display mode Average Bandwidth
2139 * DisplayMode should contain the source and destination dimensions,
2140 * timing, etc.
2141 */
2142 fixed20_12 bpp;
2143 fixed20_12 line_time;
2144 fixed20_12 src_width;
2145 fixed20_12 bandwidth;
2146 fixed20_12 a;
2147
2148 a.full = dfixed_const(1000);
2149 line_time.full = dfixed_const(wm->active_time + wm->blank_time);
2150 line_time.full = dfixed_div(line_time, a);
2151 bpp.full = dfixed_const(wm->bytes_per_pixel);
2152 src_width.full = dfixed_const(wm->src_width);
2153 bandwidth.full = dfixed_mul(src_width, bpp);
2154 bandwidth.full = dfixed_mul(bandwidth, wm->vsc);
2155 bandwidth.full = dfixed_div(bandwidth, line_time);
2156
2157 return dfixed_trunc(bandwidth);
2158 }
2159
/* Compute the latency watermark (ns) for one head: the worst-case time
 * the head can tolerate between memory requests without underflowing. */
static u32 evergreen_latency_watermark(struct evergreen_wm_params *wm)
{
	/* First calculate the latency in ns */
	u32 mc_latency = 2000; /* 2000 ns. */
	u32 available_bandwidth = evergreen_available_bandwidth(wm);
	/* time for one 512-byte x 8 chunk / cursor line pair at the
	 * available bandwidth (the * 1000 converts MB/s to bytes/us) */
	u32 worst_chunk_return_time = (512 * 8 * 1000) / available_bandwidth;
	u32 cursor_line_pair_return_time = (128 * 4 * 1000) / available_bandwidth;
	u32 dc_latency = 40000000 / wm->disp_clk; /* dc pipe latency */
	/* other heads steal request slots while this head is waiting */
	u32 other_heads_data_return_time = ((wm->num_heads + 1) * worst_chunk_return_time) +
		(wm->num_heads * cursor_line_pair_return_time);
	u32 latency = mc_latency + other_heads_data_return_time + dc_latency;
	u32 max_src_lines_per_dst_line, lb_fill_bw, line_fill_time;
	fixed20_12 a, b, c;

	if (wm->num_heads == 0)
		return 0;

	/* more source lines are fetched per output line when scaling down,
	 * using >= 3 vertical taps, or scanning out interlaced */
	a.full = dfixed_const(2);
	b.full = dfixed_const(1);
	if ((wm->vsc.full > a.full) ||
	    ((wm->vsc.full > b.full) && (wm->vtaps >= 3)) ||
	    (wm->vtaps >= 5) ||
	    ((wm->vsc.full >= a.full) && wm->interlaced))
		max_src_lines_per_dst_line = 4;
	else
		max_src_lines_per_dst_line = 2;

	/* this head's fair share of the available bandwidth */
	a.full = dfixed_const(available_bandwidth);
	b.full = dfixed_const(wm->num_heads);
	a.full = dfixed_div(a, b);

	/* line buffer fill rate, capped by what the display clock can consume */
	lb_fill_bw = min(dfixed_trunc(a), wm->disp_clk * wm->bytes_per_pixel / 1000);

	/* time (ns) to fetch one output line's worth of source pixels */
	a.full = dfixed_const(max_src_lines_per_dst_line * wm->src_width * wm->bytes_per_pixel);
	b.full = dfixed_const(1000);
	c.full = dfixed_const(lb_fill_bw);
	b.full = dfixed_div(c, b);
	a.full = dfixed_div(a, b);
	line_fill_time = dfixed_trunc(a);

	/* if the line cannot be filled within its active time, the excess
	 * adds directly to the latency the head must tolerate */
	if (line_fill_time < wm->active_time)
		return latency;
	else
		return latency + (line_fill_time - wm->active_time);

}
2206
2207 static bool evergreen_average_bandwidth_vs_dram_bandwidth_for_display(struct evergreen_wm_params *wm)
2208 {
2209 if (evergreen_average_bandwidth(wm) <=
2210 (evergreen_dram_bandwidth_for_display(wm) / wm->num_heads))
2211 return true;
2212 else
2213 return false;
2214 };
2215
2216 static bool evergreen_average_bandwidth_vs_available_bandwidth(struct evergreen_wm_params *wm)
2217 {
2218 if (evergreen_average_bandwidth(wm) <=
2219 (evergreen_available_bandwidth(wm) / wm->num_heads))
2220 return true;
2221 else
2222 return false;
2223 };
2224
2225 static bool evergreen_check_latency_hiding(struct evergreen_wm_params *wm)
2226 {
2227 u32 lb_partitions = wm->lb_size / wm->src_width;
2228 u32 line_time = wm->active_time + wm->blank_time;
2229 u32 latency_tolerant_lines;
2230 u32 latency_hiding;
2231 fixed20_12 a;
2232
2233 a.full = dfixed_const(1);
2234 if (wm->vsc.full > a.full)
2235 latency_tolerant_lines = 1;
2236 else {
2237 if (lb_partitions <= (wm->vtaps + 1))
2238 latency_tolerant_lines = 1;
2239 else
2240 latency_tolerant_lines = 2;
2241 }
2242
2243 latency_hiding = (latency_tolerant_lines * line_time + wm->blank_time);
2244
2245 if (evergreen_latency_watermark(wm) <= latency_hiding)
2246 return true;
2247 else
2248 return false;
2249 }
2250
/* Program the latency watermarks and arbitration priority for one crtc.
 * Computes a high-clock set ("A") and a low-clock set ("B") so DPM can
 * switch between them, writes both to the pipe's arbitration registers,
 * and stores the results on the crtc for later DPM use. */
static void evergreen_program_watermarks(struct radeon_device *rdev,
					 struct radeon_crtc *radeon_crtc,
					 u32 lb_size, u32 num_heads)
{
	struct drm_display_mode *mode = &radeon_crtc->base.mode;
	struct evergreen_wm_params wm_low, wm_high;
	u32 dram_channels;
	u32 active_time;
	u32 line_time = 0;
	u32 latency_watermark_a = 0, latency_watermark_b = 0;
	u32 priority_a_mark = 0, priority_b_mark = 0;
	u32 priority_a_cnt = PRIORITY_OFF;
	u32 priority_b_cnt = PRIORITY_OFF;
	u32 pipe_offset = radeon_crtc->crtc_id * 16;
	u32 tmp, arb_control3;
	fixed20_12 a, b, c;

	if (radeon_crtc->base.enabled && num_heads && mode) {
		/* active / total line time in ns at this pixel clock */
		active_time = (u32) div_u64((u64)mode->crtc_hdisplay * 1000000,
					    (u32)mode->clock);
		line_time = (u32) div_u64((u64)mode->crtc_htotal * 1000000,
					  (u32)mode->clock);
		/* register field is 16 bits wide */
		line_time = min(line_time, (u32)65535);
		priority_a_cnt = 0;
		priority_b_cnt = 0;
		dram_channels = evergreen_get_number_of_dram_channels(rdev);

		/* watermark for high clocks */
		if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
			wm_high.yclk =
				radeon_dpm_get_mclk(rdev, false) * 10;
			wm_high.sclk =
				radeon_dpm_get_sclk(rdev, false) * 10;
		} else {
			wm_high.yclk = rdev->pm.current_mclk * 10;
			wm_high.sclk = rdev->pm.current_sclk * 10;
		}

		wm_high.disp_clk = mode->clock;
		wm_high.src_width = mode->crtc_hdisplay;
		wm_high.active_time = active_time;
		wm_high.blank_time = line_time - wm_high.active_time;
		wm_high.interlaced = false;
		if (mode->flags & DRM_MODE_FLAG_INTERLACE)
			wm_high.interlaced = true;
		wm_high.vsc = radeon_crtc->vsc;
		wm_high.vtaps = 1;
		if (radeon_crtc->rmx_type != RMX_OFF)
			wm_high.vtaps = 2;
		wm_high.bytes_per_pixel = 4; /* XXX: get this from fb config */
		wm_high.lb_size = lb_size;
		wm_high.dram_channels = dram_channels;
		wm_high.num_heads = num_heads;

		/* watermark for low clocks */
		if ((rdev->pm.pm_method == PM_METHOD_DPM) && rdev->pm.dpm_enabled) {
			wm_low.yclk =
				radeon_dpm_get_mclk(rdev, true) * 10;
			wm_low.sclk =
				radeon_dpm_get_sclk(rdev, true) * 10;
		} else {
			wm_low.yclk = rdev->pm.current_mclk * 10;
			wm_low.sclk = rdev->pm.current_sclk * 10;
		}

		wm_low.disp_clk = mode->clock;
		wm_low.src_width = mode->crtc_hdisplay;
		wm_low.active_time = active_time;
		wm_low.blank_time = line_time - wm_low.active_time;
		wm_low.interlaced = false;
		if (mode->flags & DRM_MODE_FLAG_INTERLACE)
			wm_low.interlaced = true;
		wm_low.vsc = radeon_crtc->vsc;
		wm_low.vtaps = 1;
		if (radeon_crtc->rmx_type != RMX_OFF)
			wm_low.vtaps = 2;
		wm_low.bytes_per_pixel = 4; /* XXX: get this from fb config */
		wm_low.lb_size = lb_size;
		wm_low.dram_channels = dram_channels;
		wm_low.num_heads = num_heads;

		/* set for high clocks */
		latency_watermark_a = min(evergreen_latency_watermark(&wm_high), (u32)65535);
		/* set for low clocks */
		latency_watermark_b = min(evergreen_latency_watermark(&wm_low), (u32)65535);

		/* possibly force display priority to high */
		/* should really do this at mode validation time... */
		if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_high) ||
		    !evergreen_average_bandwidth_vs_available_bandwidth(&wm_high) ||
		    !evergreen_check_latency_hiding(&wm_high) ||
		    (rdev->disp_priority == 2)) {
			DRM_DEBUG_KMS("force priority a to high\n");
			priority_a_cnt |= PRIORITY_ALWAYS_ON;
		}
		if (!evergreen_average_bandwidth_vs_dram_bandwidth_for_display(&wm_low) ||
		    !evergreen_average_bandwidth_vs_available_bandwidth(&wm_low) ||
		    !evergreen_check_latency_hiding(&wm_low) ||
		    (rdev->disp_priority == 2)) {
			DRM_DEBUG_KMS("force priority b to high\n");
			priority_b_cnt |= PRIORITY_ALWAYS_ON;
		}

		/* priority mark = watermark expressed in 16-pixel units:
		 * watermark * pixel_rate * hsc / 16 */
		a.full = dfixed_const(1000);
		b.full = dfixed_const(mode->clock);
		b.full = dfixed_div(b, a);
		c.full = dfixed_const(latency_watermark_a);
		c.full = dfixed_mul(c, b);
		c.full = dfixed_mul(c, radeon_crtc->hsc);
		c.full = dfixed_div(c, a);
		a.full = dfixed_const(16);
		c.full = dfixed_div(c, a);
		priority_a_mark = dfixed_trunc(c);
		priority_a_cnt |= priority_a_mark & PRIORITY_MARK_MASK;

		a.full = dfixed_const(1000);
		b.full = dfixed_const(mode->clock);
		b.full = dfixed_div(b, a);
		c.full = dfixed_const(latency_watermark_b);
		c.full = dfixed_mul(c, b);
		c.full = dfixed_mul(c, radeon_crtc->hsc);
		c.full = dfixed_div(c, a);
		a.full = dfixed_const(16);
		c.full = dfixed_div(c, a);
		priority_b_mark = dfixed_trunc(c);
		priority_b_cnt |= priority_b_mark & PRIORITY_MARK_MASK;

		/* Save number of lines the linebuffer leads before the scanout */
		radeon_crtc->lb_vblank_lead_lines = DIV_ROUND_UP(lb_size, mode->crtc_hdisplay);
	}

	/* select wm A */
	arb_control3 = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
	tmp = arb_control3;
	tmp &= ~LATENCY_WATERMARK_MASK(3);
	tmp |= LATENCY_WATERMARK_MASK(1);
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
	WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
	       (LATENCY_LOW_WATERMARK(latency_watermark_a) |
		LATENCY_HIGH_WATERMARK(line_time)));
	/* select wm B */
	tmp = RREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset);
	tmp &= ~LATENCY_WATERMARK_MASK(3);
	tmp |= LATENCY_WATERMARK_MASK(2);
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, tmp);
	WREG32(PIPE0_LATENCY_CONTROL + pipe_offset,
	       (LATENCY_LOW_WATERMARK(latency_watermark_b) |
		LATENCY_HIGH_WATERMARK(line_time)));
	/* restore original selection */
	WREG32(PIPE0_ARBITRATION_CONTROL3 + pipe_offset, arb_control3);

	/* write the priority marks */
	WREG32(PRIORITY_A_CNT + radeon_crtc->crtc_offset, priority_a_cnt);
	WREG32(PRIORITY_B_CNT + radeon_crtc->crtc_offset, priority_b_cnt);

	/* save values for DPM */
	radeon_crtc->line_time = line_time;
	radeon_crtc->wm_high = latency_watermark_a;
	radeon_crtc->wm_low = latency_watermark_b;
}
2411
2412 /**
2413 * evergreen_bandwidth_update - update display watermarks callback.
2414 *
2415 * @rdev: radeon_device pointer
2416 *
2417 * Update the display watermarks based on the requested mode(s)
2418 * (evergreen+).
2419 */
2420 void evergreen_bandwidth_update(struct radeon_device *rdev)
2421 {
2422 struct drm_display_mode *mode0 = NULL;
2423 struct drm_display_mode *mode1 = NULL;
2424 u32 num_heads = 0, lb_size;
2425 int i;
2426
2427 if (!rdev->mode_info.mode_config_initialized)
2428 return;
2429
2430 radeon_update_display_priority(rdev);
2431
2432 for (i = 0; i < rdev->num_crtc; i++) {
2433 if (rdev->mode_info.crtcs[i]->base.enabled)
2434 num_heads++;
2435 }
2436 for (i = 0; i < rdev->num_crtc; i += 2) {
2437 mode0 = &rdev->mode_info.crtcs[i]->base.mode;
2438 mode1 = &rdev->mode_info.crtcs[i+1]->base.mode;
2439 lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i], mode0, mode1);
2440 evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i], lb_size, num_heads);
2441 lb_size = evergreen_line_buffer_adjust(rdev, rdev->mode_info.crtcs[i+1], mode1, mode0);
2442 evergreen_program_watermarks(rdev, rdev->mode_info.crtcs[i+1], lb_size, num_heads);
2443 }
2444 }
2445
2446 /**
2447 * evergreen_mc_wait_for_idle - wait for MC idle callback.
2448 *
2449 * @rdev: radeon_device pointer
2450 *
2451 * Wait for the MC (memory controller) to be idle.
2452 * (evergreen+).
2453 * Returns 0 if the MC is idle, -1 if not.
2454 */
2455 int evergreen_mc_wait_for_idle(struct radeon_device *rdev)
2456 {
2457 unsigned i;
2458 u32 tmp;
2459
2460 for (i = 0; i < rdev->usec_timeout; i++) {
2461 /* read MC_STATUS */
2462 tmp = RREG32(SRBM_STATUS) & 0x1F00;
2463 if (!tmp)
2464 return 0;
2465 udelay(1);
2466 }
2467 return -1;
2468 }
2469
2470 /*
2471 * GART
2472 */
2473 void evergreen_pcie_gart_tlb_flush(struct radeon_device *rdev)
2474 {
2475 unsigned i;
2476 u32 tmp;
2477
2478 WREG32(HDP_MEM_COHERENCY_FLUSH_CNTL, 0x1);
2479
2480 WREG32(VM_CONTEXT0_REQUEST_RESPONSE, REQUEST_TYPE(1));
2481 for (i = 0; i < rdev->usec_timeout; i++) {
2482 /* read MC_STATUS */
2483 tmp = RREG32(VM_CONTEXT0_REQUEST_RESPONSE);
2484 tmp = (tmp & RESPONSE_TYPE_MASK) >> RESPONSE_TYPE_SHIFT;
2485 if (tmp == 2) {
2486 pr_warn("[drm] r600 flush TLB failed\n");
2487 return;
2488 }
2489 if (tmp) {
2490 return;
2491 }
2492 udelay(1);
2493 }
2494 }
2495
/* Pin the GART page table in VRAM and program the VM hardware
 * (L2 cache, L1 TLBs, context-0 page table range) to use it.
 * Returns 0 on success or a negative error code. */
static int evergreen_pcie_gart_enable(struct radeon_device *rdev)
{
	u32 tmp;
	int r;

	if (rdev->gart.robj == NULL) {
		dev_err(rdev->dev, "No VRAM object for PCIE GART.\n");
		return -EINVAL;
	}
	r = radeon_gart_table_vram_pin(rdev);
	if (r)
		return r;
	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	if (rdev->flags & RADEON_IS_IGP) {
		/* fusion parts use the FUS_ variants of the MD TLB regs */
		WREG32(FUS_MC_VM_MD_L1_TLB0_CNTL, tmp);
		WREG32(FUS_MC_VM_MD_L1_TLB1_CNTL, tmp);
		WREG32(FUS_MC_VM_MD_L1_TLB2_CNTL, tmp);
	} else {
		WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
		WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
		WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
		/* these chips have an extra MD TLB instance */
		if ((rdev->family == CHIP_JUNIPER) ||
		    (rdev->family == CHIP_CYPRESS) ||
		    (rdev->family == CHIP_HEMLOCK) ||
		    (rdev->family == CHIP_BARTS))
			WREG32(MC_VM_MD_L1_TLB3_CNTL, tmp);
	}
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	/* map GTT range and page table for VM context 0; unmapped
	 * accesses fault to the dummy page */
	WREG32(VM_CONTEXT0_PAGE_TABLE_START_ADDR, rdev->mc.gtt_start >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_END_ADDR, rdev->mc.gtt_end >> 12);
	WREG32(VM_CONTEXT0_PAGE_TABLE_BASE_ADDR, rdev->gart.table_addr >> 12);
	WREG32(VM_CONTEXT0_CNTL, ENABLE_CONTEXT | PAGE_TABLE_DEPTH(0) |
				RANGE_PROTECTION_FAULT_ENABLE_DEFAULT);
	WREG32(VM_CONTEXT0_PROTECTION_FAULT_DEFAULT_ADDR,
	       (u32)(rdev->dummy_page.addr >> 12));
	WREG32(VM_CONTEXT1_CNTL, 0);

	evergreen_pcie_gart_tlb_flush(rdev);
	DRM_INFO("PCIE GART of %uM enabled (table at 0x%016llX).\n",
		 (unsigned)(rdev->mc.gtt_size >> 20),
		 (unsigned long long)rdev->gart.table_addr);
	rdev->gart.ready = true;
	return 0;
}
2553
/* Disable the VM contexts and TLBs and unpin the GART page table. */
static void evergreen_pcie_gart_disable(struct radeon_device *rdev)
{
	u32 tmp;

	/* Disable all tables */
	WREG32(VM_CONTEXT0_CNTL, 0);
	WREG32(VM_CONTEXT1_CNTL, 0);

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_FRAGMENT_PROCESSING |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control: sized but with the enable bits cleared */
	tmp = EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	radeon_gart_table_vram_unpin(rdev);
}
2578
/* Full GART teardown: disable the hardware, then free the page table
 * and the gart bookkeeping (order matters). */
static void evergreen_pcie_gart_fini(struct radeon_device *rdev)
{
	evergreen_pcie_gart_disable(rdev);
	radeon_gart_table_vram_free(rdev);
	radeon_gart_fini(rdev);
}
2585
2586
/* Configure the L2 cache and L1 TLBs for AGP operation: system
 * accesses pass through and both VM contexts stay disabled. */
static void evergreen_agp_enable(struct radeon_device *rdev)
{
	u32 tmp;

	/* Setup L2 cache */
	WREG32(VM_L2_CNTL, ENABLE_L2_CACHE | ENABLE_L2_FRAGMENT_PROCESSING |
				ENABLE_L2_PTE_CACHE_LRU_UPDATE_BY_WRITE |
				EFFECTIVE_L2_QUEUE_SIZE(7));
	WREG32(VM_L2_CNTL2, 0);
	WREG32(VM_L2_CNTL3, BANK_SELECT(0) | CACHE_UPDATE_MODE(2));
	/* Setup TLB control */
	tmp = ENABLE_L1_TLB | ENABLE_L1_FRAGMENT_PROCESSING |
		SYSTEM_ACCESS_MODE_NOT_IN_SYS |
		SYSTEM_APERTURE_UNMAPPED_ACCESS_PASS_THRU |
		EFFECTIVE_L1_TLB_SIZE(5) | EFFECTIVE_L1_QUEUE_SIZE(5);
	WREG32(MC_VM_MD_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MD_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB0_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB1_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB2_CNTL, tmp);
	WREG32(MC_VM_MB_L1_TLB3_CNTL, tmp);
	/* no page tables in AGP mode */
	WREG32(VM_CONTEXT0_CNTL, 0);
	WREG32(VM_CONTEXT1_CNTL, 0);
}
2612
/* Per-instance register offsets for the six DIG encoder blocks. */
static const unsigned ni_dig_offsets[] =
{
	NI_DIG0_REGISTER_OFFSET,
	NI_DIG1_REGISTER_OFFSET,
	NI_DIG2_REGISTER_OFFSET,
	NI_DIG3_REGISTER_OFFSET,
	NI_DIG4_REGISTER_OFFSET,
	NI_DIG5_REGISTER_OFFSET
};

/* Per-instance TX_CONTROL1 registers for the six UNIPHY transmitters. */
static const unsigned ni_tx_offsets[] =
{
	NI_DCIO_UNIPHY0_UNIPHY_TX_CONTROL1,
	NI_DCIO_UNIPHY1_UNIPHY_TX_CONTROL1,
	NI_DCIO_UNIPHY2_UNIPHY_TX_CONTROL1,
	NI_DCIO_UNIPHY3_UNIPHY_TX_CONTROL1,
	NI_DCIO_UNIPHY4_UNIPHY_TX_CONTROL1,
	NI_DCIO_UNIPHY5_UNIPHY_TX_CONTROL1
};

/* Per-instance register offsets for the six DP encoder blocks. */
static const unsigned evergreen_dp_offsets[] =
{
	EVERGREEN_DP0_REGISTER_OFFSET,
	EVERGREEN_DP1_REGISTER_OFFSET,
	EVERGREEN_DP2_REGISTER_OFFSET,
	EVERGREEN_DP3_REGISTER_OFFSET,
	EVERGREEN_DP4_REGISTER_OFFSET,
	EVERGREEN_DP5_REGISTER_OFFSET
};
2642
2643
/*
 * We assume EVERGREEN_CRTC_MASTER_EN is set for the requested crtc.
 * Going from crtc to connector is not reliable here (the lookup should
 * really run in the opposite direction), so instead: if the crtc is
 * enabled, find the dig_fe that selects this crtc and make sure it is
 * enabled.  If such a dig_fe is found, find the dig_be that selects
 * that dig_fe and make sure it is enabled and in DP_SST mode.
 * If UNIPHY_PLL_CONTROL1 is enabled as well, the timing should be
 * disconnected from the dp symbol clocks.
 */
/* Returns true if @crtc_id is driven by an enabled DIG in DP SST mode
 * with its transmitter PLL running; on success *@ret_dig_fe is set to
 * the index of that front-end DIG. */
static bool evergreen_is_dp_sst_stream_enabled(struct radeon_device *rdev,
					       unsigned crtc_id, unsigned *ret_dig_fe)
{
	unsigned i;
	unsigned dig_fe;
	unsigned dig_be;
	unsigned dig_en_be;
	unsigned uniphy_pll;
	unsigned digs_fe_selected;
	unsigned dig_be_mode;
	unsigned dig_fe_mask;
	bool is_enabled = false;
	bool found_crtc = false;

	/* loop through all running dig_fe to find the one feeding this crtc */
	for (i = 0; i < ARRAY_SIZE(ni_dig_offsets); i++) {
		dig_fe = RREG32(NI_DIG_FE_CNTL + ni_dig_offsets[i]);
		if (dig_fe & NI_DIG_FE_CNTL_SYMCLK_FE_ON &&
		    crtc_id == NI_DIG_FE_CNTL_SOURCE_SELECT(dig_fe)) {
			/* found running pipe */
			found_crtc = true;
			dig_fe_mask = 1 << i;
			dig_fe = i;
			break;
		}
	}

	if (found_crtc) {
		/* loop through all running dig_be to find the one that
		 * selects the dig_fe located above */
		for (i = 0; i < ARRAY_SIZE(ni_dig_offsets); i++) {
			dig_be = RREG32(NI_DIG_BE_CNTL + ni_dig_offsets[i]);
			/* is our dig_fe selected by this dig_be? */
			digs_fe_selected = NI_DIG_BE_CNTL_FE_SOURCE_SELECT(dig_be);
			dig_be_mode = NI_DIG_FE_CNTL_MODE(dig_be);
			if (dig_fe_mask & digs_fe_selected &&
			    /* is the dig_be in sst mode? */
			    dig_be_mode == NI_DIG_BE_DPSST) {
				dig_en_be = RREG32(NI_DIG_BE_EN_CNTL +
						   ni_dig_offsets[i]);
				uniphy_pll = RREG32(NI_DCIO_UNIPHY0_PLL_CONTROL1 +
						    ni_tx_offsets[i]);
				/* dig_be enabled and tx is running */
				if (dig_en_be & NI_DIG_BE_EN_CNTL_ENABLE &&
				    dig_en_be & NI_DIG_BE_EN_CNTL_SYMBCLK_ON &&
				    uniphy_pll & NI_DCIO_UNIPHY0_PLL_CONTROL1_ENABLE) {
					is_enabled = true;
					*ret_dig_fe = dig_fe;
					break;
				}
			}
		}
	}

	return is_enabled;
}
2709
/*
 * Blank the dig output while it is in DP SST mode.
 * In that mode the dig ignores the crtc timing, so it has to be
 * blanked directly.
 */
2714 static void evergreen_blank_dp_output(struct radeon_device *rdev,
2715 unsigned dig_fe)
2716 {
2717 unsigned stream_ctrl;
2718 unsigned fifo_ctrl;
2719 unsigned counter = 0;
2720
2721 if (dig_fe >= ARRAY_SIZE(evergreen_dp_offsets)) {
2722 DRM_ERROR("invalid dig_fe %d\n", dig_fe);
2723 return;
2724 }
2725
2726 stream_ctrl = RREG32(EVERGREEN_DP_VID_STREAM_CNTL +
2727 evergreen_dp_offsets[dig_fe]);
2728 if (!(stream_ctrl & EVERGREEN_DP_VID_STREAM_CNTL_ENABLE)) {
2729 DRM_ERROR("dig %d , should be enable\n", dig_fe);
2730 return;
2731 }
2732
2733 stream_ctrl &=~EVERGREEN_DP_VID_STREAM_CNTL_ENABLE;
2734 WREG32(EVERGREEN_DP_VID_STREAM_CNTL +
2735 evergreen_dp_offsets[dig_fe], stream_ctrl);
2736
2737 stream_ctrl = RREG32(EVERGREEN_DP_VID_STREAM_CNTL +
2738 evergreen_dp_offsets[dig_fe]);
2739 while (counter < 32 && stream_ctrl & EVERGREEN_DP_VID_STREAM_STATUS) {
2740 msleep(1);
2741 counter++;
2742 stream_ctrl = RREG32(EVERGREEN_DP_VID_STREAM_CNTL +
2743 evergreen_dp_offsets[dig_fe]);
2744 }
2745 if (counter >= 32 )
2746 DRM_ERROR("counter exceeds %d\n", counter);
2747
2748 fifo_ctrl = RREG32(EVERGREEN_DP_STEER_FIFO + evergreen_dp_offsets[dig_fe]);
2749 fifo_ctrl |= EVERGREEN_DP_STEER_FIFO_RESET;
2750 WREG32(EVERGREEN_DP_STEER_FIFO + evergreen_dp_offsets[dig_fe], fifo_ctrl);
2751
2752 }
2753
/* Quiesce the display side before MC reprogramming: disable VGA, blank
 * every active crtc (recording which were enabled in @save), blank any
 * DP SST outputs, black out the memory controller, and lock the
 * double-buffered display registers. Undone by evergreen_mc_resume(). */
void evergreen_mc_stop(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
	u32 crtc_enabled, tmp, frame_count, blackout;
	int i, j;
	unsigned dig_fe;

	if (!ASIC_IS_NODCE(rdev)) {
		save->vga_render_control = RREG32(VGA_RENDER_CONTROL);
		save->vga_hdp_control = RREG32(VGA_HDP_CONTROL);

		/* disable VGA render */
		WREG32(VGA_RENDER_CONTROL, 0);
	}
	/* blank the display controllers */
	for (i = 0; i < rdev->num_crtc; i++) {
		crtc_enabled = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN;
		if (crtc_enabled) {
			save->crtc_enabled[i] = true;
			if (ASIC_IS_DCE6(rdev)) {
				/* DCE6: blank via BLANK_CONTROL, under update lock */
				tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
				if (!(tmp & EVERGREEN_CRTC_BLANK_DATA_EN)) {
					radeon_wait_for_vblank(rdev, i);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
					tmp |= EVERGREEN_CRTC_BLANK_DATA_EN;
					WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
				}
			} else {
				/* pre-DCE6: stop display read requests instead */
				tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
				if (!(tmp & EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE)) {
					radeon_wait_for_vblank(rdev, i);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
					tmp |= EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
					WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
					WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
				}
			}
			/* wait for the next frame */
			frame_count = radeon_get_vblank_counter(rdev, i);
			for (j = 0; j < rdev->usec_timeout; j++) {
				if (radeon_get_vblank_counter(rdev, i) != frame_count)
					break;
				udelay(1);
			}
			/* We should disable the dig if it drives a DP SST
			 * stream, but we are in radeon_device_init and the
			 * display topology is unknown (it only becomes
			 * available after radeon_modeset_init).
			 * radeon_atom_encoder_dpms_dig would do the job once
			 * properly initialized; for now do it manually. */
			if (ASIC_IS_DCE5(rdev) &&
			    evergreen_is_dp_sst_stream_enabled(rdev, i ,&dig_fe))
				evergreen_blank_dp_output(rdev, dig_fe);
			/* we could remove the 6 lines below */
			/* XXX this is a hack to avoid strange behavior with EFI on certain systems */
			WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
			tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
			tmp &= ~EVERGREEN_CRTC_MASTER_EN;
			WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
			WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			save->crtc_enabled[i] = false;
			/* ***** */
		} else {
			save->crtc_enabled[i] = false;
		}
	}

	radeon_mc_wait_for_idle(rdev);

	blackout = RREG32(MC_SHARED_BLACKOUT_CNTL);
	if ((blackout & BLACKOUT_MODE_MASK) != 1) {
		/* Block CPU access */
		WREG32(BIF_FB_EN, 0);
		/* blackout the MC */
		blackout &= ~BLACKOUT_MODE_MASK;
		WREG32(MC_SHARED_BLACKOUT_CNTL, blackout | 1);
	}
	/* wait for the MC to settle */
	udelay(100);

	/* lock double buffered regs */
	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
			if (!(tmp & EVERGREEN_GRPH_UPDATE_LOCK)) {
				tmp |= EVERGREEN_GRPH_UPDATE_LOCK;
				WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
			}
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
			if (!(tmp & 1)) {
				tmp |= 1;
				WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
			}
		}
	}
}
2851
/**
 * evergreen_mc_resume - restore display state after a memory-controller update
 * @rdev: radeon device structure
 * @save: display state captured by the matching evergreen_mc_stop() call
 *
 * Counterpart of evergreen_mc_stop().  Re-points every CRTC scanout surface
 * at the (possibly relocated) start of VRAM, releases the double-buffered
 * register locks taken in evergreen_mc_stop(), lifts the MC blackout,
 * re-enables CPU framebuffer access through the BIF, unblanks the CRTCs that
 * were active, and finally restores VGA register state.
 */
void evergreen_mc_resume(struct radeon_device *rdev, struct evergreen_mc_save *save)
{
	u32 tmp, frame_count;
	int i, j;

	/* update crtc base addresses: all CRTCs (enabled or not) are pointed
	 * at the new VRAM start so nothing scans out of a stale location */
	for (i = 0; i < rdev->num_crtc; i++) {
		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
		       upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS_HIGH + crtc_offsets[i],
		       upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_GRPH_PRIMARY_SURFACE_ADDRESS + crtc_offsets[i],
		       (u32)rdev->mc.vram_start);
		WREG32(EVERGREEN_GRPH_SECONDARY_SURFACE_ADDRESS + crtc_offsets[i],
		       (u32)rdev->mc.vram_start);
	}

	/* ASICs without a display controller have no VGA block to repoint */
	if (!ASIC_IS_NODCE(rdev)) {
		WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS_HIGH, upper_32_bits(rdev->mc.vram_start));
		WREG32(EVERGREEN_VGA_MEMORY_BASE_ADDRESS, (u32)rdev->mc.vram_start);
	}

	/* unlock regs and wait for update */
	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			/* clear the update mode bits so register writes latch
			 * immediately rather than at a frame boundary */
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i]);
			if ((tmp & 0x7) != 0) {
				tmp &= ~0x7;
				WREG32(EVERGREEN_MASTER_UPDATE_MODE + crtc_offsets[i], tmp);
			}
			/* drop the GRPH double-buffer lock taken in mc_stop */
			tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
			if (tmp & EVERGREEN_GRPH_UPDATE_LOCK) {
				tmp &= ~EVERGREEN_GRPH_UPDATE_LOCK;
				WREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i], tmp);
			}
			/* drop the master update lock (bit 0) as well */
			tmp = RREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i]);
			if (tmp & 1) {
				tmp &= ~1;
				WREG32(EVERGREEN_MASTER_UPDATE_LOCK + crtc_offsets[i], tmp);
			}
			/* poll (up to usec_timeout us) until the new surface
			 * address has actually been latched by the hardware */
			for (j = 0; j < rdev->usec_timeout; j++) {
				tmp = RREG32(EVERGREEN_GRPH_UPDATE + crtc_offsets[i]);
				if ((tmp & EVERGREEN_GRPH_SURFACE_UPDATE_PENDING) == 0)
					break;
				udelay(1);
			}
		}
	}

	/* unblackout the MC */
	tmp = RREG32(MC_SHARED_BLACKOUT_CNTL);
	tmp &= ~BLACKOUT_MODE_MASK;
	WREG32(MC_SHARED_BLACKOUT_CNTL, tmp);
	/* allow CPU access */
	WREG32(BIF_FB_EN, FB_READ_EN | FB_WRITE_EN);

	for (i = 0; i < rdev->num_crtc; i++) {
		if (save->crtc_enabled[i]) {
			if (ASIC_IS_DCE6(rdev)) {
				/* DCE6: unblank via CRTC_BLANK_CONTROL, guarded
				 * by the CRTC update lock */
				tmp = RREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i]);
				tmp &= ~EVERGREEN_CRTC_BLANK_DATA_EN;
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
				WREG32(EVERGREEN_CRTC_BLANK_CONTROL + crtc_offsets[i], tmp);
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			} else {
				/* pre-DCE6: re-enable display read requests */
				tmp = RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]);
				tmp &= ~EVERGREEN_CRTC_DISP_READ_REQUEST_DISABLE;
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 1);
				WREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i], tmp);
				WREG32(EVERGREEN_CRTC_UPDATE_LOCK + crtc_offsets[i], 0);
			}
			/* wait for the next frame */
			frame_count = radeon_get_vblank_counter(rdev, i);
			for (j = 0; j < rdev->usec_timeout; j++) {
				if (radeon_get_vblank_counter(rdev, i) != frame_count)
					break;
				udelay(1);
			}
		}
	}
	if (!ASIC_IS_NODCE(rdev)) {
		/* Unlock vga access */
		WREG32(VGA_HDP_CONTROL, save->vga_hdp_control);
		mdelay(1);
		WREG32(VGA_RENDER_CONTROL, save->vga_render_control);
	}
}
2939
/**
 * evergreen_mc_program - program the memory controller address layout
 * @rdev: radeon device structure
 *
 * Programs the VRAM/AGP aperture ranges into the memory controller while the
 * display controllers are stopped (evergreen_mc_stop()/evergreen_mc_resume()
 * bracket the update so nothing scans out of VRAM mid-change).  Also clears
 * the HDP tiling surface registers and disables the VGA renderer at the end
 * so it cannot overwrite driver-owned VRAM.
 */
void evergreen_mc_program(struct radeon_device *rdev)
{
	struct evergreen_mc_save save;
	u32 tmp;
	int i, j;

	/* Initialize HDP: zero all 32 HDP surface register sets
	 * (5 regs each, 0x18 bytes apart starting at 0x2c14) */
	for (i = 0, j = 0; i < 32; i++, j += 0x18) {
		WREG32((0x2c14 + j), 0x00000000);
		WREG32((0x2c18 + j), 0x00000000);
		WREG32((0x2c1c + j), 0x00000000);
		WREG32((0x2c20 + j), 0x00000000);
		WREG32((0x2c24 + j), 0x00000000);
	}
	WREG32(HDP_REG_COHERENCY_FLUSH_CNTL, 0);

	/* stop scanout and quiesce the MC before touching the apertures */
	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}
	/* Lockout access through VGA aperture*/
	WREG32(VGA_HDP_CONTROL, VGA_MEMORY_DISABLE);
	/* Update configuration: the system aperture must cover both the VRAM
	 * and (on AGP) the GTT range, whichever order they sit in */
	if (rdev->flags & RADEON_IS_AGP) {
		if (rdev->mc.vram_start < rdev->mc.gtt_start) {
			/* VRAM before AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
				rdev->mc.vram_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
				rdev->mc.gtt_end >> 12);
		} else {
			/* VRAM after AGP */
			WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
				rdev->mc.gtt_start >> 12);
			WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
				rdev->mc.vram_end >> 12);
		}
	} else {
		WREG32(MC_VM_SYSTEM_APERTURE_LOW_ADDR,
			rdev->mc.vram_start >> 12);
		WREG32(MC_VM_SYSTEM_APERTURE_HIGH_ADDR,
			rdev->mc.vram_end >> 12);
	}
	/* out-of-aperture accesses are redirected to the scratch page */
	WREG32(MC_VM_SYSTEM_APERTURE_DEFAULT_ADDR, rdev->vram_scratch.gpu_addr >> 12);
	/* llano/ontario only */
	if ((rdev->family == CHIP_PALM) ||
	    (rdev->family == CHIP_SUMO) ||
	    (rdev->family == CHIP_SUMO2)) {
		/* fusion IGPs carry the FB offset in MC_FUS_VM_FB_OFFSET:
		 * top nibble of start/end (in MB) packed into bits 20-27 */
		tmp = RREG32(MC_FUS_VM_FB_OFFSET) & 0x000FFFFF;
		tmp |= ((rdev->mc.vram_end >> 20) & 0xF) << 24;
		tmp |= ((rdev->mc.vram_start >> 20) & 0xF) << 20;
		WREG32(MC_FUS_VM_FB_OFFSET, tmp);
	}
	/* FB location: start/end in 16MB units packed low/high halves */
	tmp = ((rdev->mc.vram_end >> 24) & 0xFFFF) << 16;
	tmp |= ((rdev->mc.vram_start >> 24) & 0xFFFF);
	WREG32(MC_VM_FB_LOCATION, tmp);
	WREG32(HDP_NONSURFACE_BASE, (rdev->mc.vram_start >> 8));
	WREG32(HDP_NONSURFACE_INFO, (2 << 7) | (1 << 30));
	WREG32(HDP_NONSURFACE_SIZE, 0x3FFFFFFF);
	if (rdev->flags & RADEON_IS_AGP) {
		WREG32(MC_VM_AGP_TOP, rdev->mc.gtt_end >> 16);
		WREG32(MC_VM_AGP_BOT, rdev->mc.gtt_start >> 16);
		WREG32(MC_VM_AGP_BASE, rdev->mc.agp_base >> 22);
	} else {
		/* no AGP: program an empty/invalid aperture (BOT > TOP) */
		WREG32(MC_VM_AGP_BASE, 0);
		WREG32(MC_VM_AGP_TOP, 0x0FFFFFFF);
		WREG32(MC_VM_AGP_BOT, 0x0FFFFFFF);
	}
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}
	evergreen_mc_resume(rdev, &save);
	/* we need to own VRAM, so turn off the VGA renderer here
	 * to stop it overwriting our objects */
	rv515_vga_render_disable(rdev);
}
3016
3017 /*
3018 * CP.
3019 */
/**
 * evergreen_ring_ib_execute - emit an indirect buffer on the GFX ring
 * @rdev: radeon device structure
 * @ib: indirect buffer to schedule
 *
 * Writes the packets that make the CP fetch and execute @ib: a mode-control
 * packet selecting DX10/11 mode, an optional rptr write-back (either via a
 * scratch register or a MEM_WRITE to the wb buffer), and finally the
 * INDIRECT_BUFFER packet carrying the IB's GPU address and size.
 * The caller is expected to have locked the ring (visible from the bare
 * radeon_ring_write() calls — TODO confirm against callers).
 */
void evergreen_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib)
{
	struct radeon_ring *ring = &rdev->ring[ib->ring];
	u32 next_rptr;

	/* set to DX10/11 mode */
	radeon_ring_write(ring, PACKET3(PACKET3_MODE_CONTROL, 0));
	radeon_ring_write(ring, 1);

	if (ring->rptr_save_reg) {
		/* +3 for the SET_CONFIG_REG packet emitted here, +4 for the
		 * INDIRECT_BUFFER packet emitted below */
		next_rptr = ring->wptr + 3 + 4;
		radeon_ring_write(ring, PACKET3(PACKET3_SET_CONFIG_REG, 1));
		radeon_ring_write(ring, ((ring->rptr_save_reg -
					  PACKET3_SET_CONFIG_REG_START) >> 2));
		radeon_ring_write(ring, next_rptr);
	} else if (rdev->wb.enabled) {
		/* +5 for this MEM_WRITE packet, +4 for INDIRECT_BUFFER */
		next_rptr = ring->wptr + 5 + 4;
		radeon_ring_write(ring, PACKET3(PACKET3_MEM_WRITE, 3));
		radeon_ring_write(ring, ring->next_rptr_gpu_addr & 0xfffffffc);
		/* high addr bits plus (1 << 18) — presumably a 32-bit data
		 * size select for MEM_WRITE; confirm against PM4 spec */
		radeon_ring_write(ring, (upper_32_bits(ring->next_rptr_gpu_addr) & 0xff) | (1 << 18));
		radeon_ring_write(ring, next_rptr);
		radeon_ring_write(ring, 0);
	}

	radeon_ring_write(ring, PACKET3(PACKET3_INDIRECT_BUFFER, 2));
	radeon_ring_write(ring,
#ifdef __BIG_ENDIAN
			  /* request byte swapping of the fetched IB */
			  (2 << 0) |
#endif
			  (ib->gpu_addr & 0xFFFFFFFC));
	radeon_ring_write(ring, upper_32_bits(ib->gpu_addr) & 0xFF);
	radeon_ring_write(ring, ib->length_dw);
}
3053
3054
/**
 * evergreen_cp_load_microcode - upload PFP and ME firmware to the CP
 * @rdev: radeon device structure
 *
 * Stops the CP, then streams the pre-fetch parser (PFP) and micro engine
 * (ME) ucode images into their instruction RAMs through the indexed
 * ADDR/DATA register ports.  The firmware blobs are stored big-endian and
 * swapped to CPU order word by word.
 *
 * Returns 0 on success, -EINVAL if the firmware blobs were never loaded.
 */
static int evergreen_cp_load_microcode(struct radeon_device *rdev)
{
	const __be32 *fw_data;
	int i;

	if (!rdev->me_fw || !rdev->pfp_fw)
		return -EINVAL;

	r700_cp_stop(rdev);
	WREG32(CP_RB_CNTL,
#ifdef __BIG_ENDIAN
	       BUF_SWAP_32BIT |
#endif
	       RB_NO_UPDATE | RB_BLKSZ(15) | RB_BUFSZ(3));

	/* PFP ucode: reset the write index, stream the image, reset again */
	fw_data = (const __be32 *)rdev->pfp_fw->data;
	WREG32(CP_PFP_UCODE_ADDR, 0);
	for (i = 0; i < EVERGREEN_PFP_UCODE_SIZE; i++)
		WREG32(CP_PFP_UCODE_DATA, be32_to_cpup(fw_data++));
	WREG32(CP_PFP_UCODE_ADDR, 0);

	/* ME ucode goes through the CP_ME_RAM_* port the same way */
	fw_data = (const __be32 *)rdev->me_fw->data;
	WREG32(CP_ME_RAM_WADDR, 0);
	for (i = 0; i < EVERGREEN_PM4_UCODE_SIZE; i++)
		WREG32(CP_ME_RAM_DATA, be32_to_cpup(fw_data++));

	/* leave all ucode address registers at 0 for execution */
	WREG32(CP_PFP_UCODE_ADDR, 0);
	WREG32(CP_ME_RAM_WADDR, 0);
	WREG32(CP_ME_RAM_RADDR, 0);
	return 0;
}
3086
/**
 * evergreen_cp_start - initialize the command processor micro engine
 * @rdev: radeon device structure
 *
 * Emits the ME_INITIALIZE packet, un-halts the CP, then emits the golden
 * clear-state context (evergreen_default_state) plus a handful of fixed
 * register initializations on the GFX ring.  The second ring_lock size,
 * evergreen_default_size + 19, must match the exact number of extra dwords
 * written below (2+2+2+4+5+4 = 19).
 *
 * Returns 0 on success or the radeon_ring_lock() error code.
 */
static int evergreen_cp_start(struct radeon_device *rdev)
{
	struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	int r, i;
	uint32_t cp_me;

	/* 7 dwords: ME_INITIALIZE header + 6 payload words */
	r = radeon_ring_lock(rdev, ring, 7);
	if (r) {
		DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
		return r;
	}
	radeon_ring_write(ring, PACKET3(PACKET3_ME_INITIALIZE, 5));
	radeon_ring_write(ring, 0x1);
	radeon_ring_write(ring, 0x0);
	radeon_ring_write(ring, rdev->config.evergreen.max_hw_contexts - 1);
	radeon_ring_write(ring, PACKET3_ME_INITIALIZE_DEVICE_ID(1));
	radeon_ring_write(ring, 0);
	radeon_ring_write(ring, 0);
	radeon_ring_unlock_commit(rdev, ring, false);

	/* release the ME halt so the CP starts consuming the ring */
	cp_me = 0xff;
	WREG32(CP_ME_CNTL, cp_me);

	r = radeon_ring_lock(rdev, ring, evergreen_default_size + 19);
	if (r) {
		DRM_ERROR("radeon: cp failed to lock ring (%d).\n", r);
		return r;
	}

	/* setup clear context state */
	radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
	radeon_ring_write(ring, PACKET3_PREAMBLE_BEGIN_CLEAR_STATE);

	/* golden register state captured in evergreen_blit_shaders.h */
	for (i = 0; i < evergreen_default_size; i++)
		radeon_ring_write(ring, evergreen_default_state[i]);

	radeon_ring_write(ring, PACKET3(PACKET3_PREAMBLE_CNTL, 0));
	radeon_ring_write(ring, PACKET3_PREAMBLE_END_CLEAR_STATE);

	/* set clear context state */
	radeon_ring_write(ring, PACKET3(PACKET3_CLEAR_STATE, 0));
	radeon_ring_write(ring, 0);

	/* SQ_VTX_BASE_VTX_LOC */
	radeon_ring_write(ring, 0xc0026f00);
	radeon_ring_write(ring, 0x00000000);
	radeon_ring_write(ring, 0x00000000);
	radeon_ring_write(ring, 0x00000000);

	/* Clear consts */
	radeon_ring_write(ring, 0xc0036f00);
	radeon_ring_write(ring, 0x00000bc4);
	radeon_ring_write(ring, 0xffffffff);
	radeon_ring_write(ring, 0xffffffff);
	radeon_ring_write(ring, 0xffffffff);

	radeon_ring_write(ring, 0xc0026900);
	radeon_ring_write(ring, 0x00000316);
	radeon_ring_write(ring, 0x0000000e); /* VGT_VERTEX_REUSE_BLOCK_CNTL */
	radeon_ring_write(ring, 0x00000010); /* */

	radeon_ring_unlock_commit(rdev, ring, false);

	return 0;
}
3152
/**
 * evergreen_cp_resume - bring up the CP ring buffer
 * @rdev: radeon device structure
 *
 * Soft-resets the CP (and the graphics blocks that must be reset with it),
 * programs the GFX ring buffer size, read/write pointers, write-back
 * addresses and base address, then starts the CP via evergreen_cp_start()
 * and verifies it with a ring test.
 *
 * Returns 0 on success or the radeon_ring_test() error code (in which case
 * the ring is marked not ready).
 */
static int evergreen_cp_resume(struct radeon_device *rdev)
{
	struct radeon_ring *ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	u32 tmp;
	u32 rb_bufsz;
	int r;

	/* Reset cp; if cp is reset, then PA, SH, VGT also need to be reset */
	WREG32(GRBM_SOFT_RESET, (SOFT_RESET_CP |
				 SOFT_RESET_PA |
				 SOFT_RESET_SH |
				 SOFT_RESET_VGT |
				 SOFT_RESET_SPI |
				 SOFT_RESET_SX));
	RREG32(GRBM_SOFT_RESET);	/* read back to post the write */
	mdelay(15);
	WREG32(GRBM_SOFT_RESET, 0);
	RREG32(GRBM_SOFT_RESET);

	/* Set ring buffer size (log2 of the size in dwords) */
	rb_bufsz = order_base_2(ring->ring_size / 8);
	tmp = (order_base_2(RADEON_GPU_PAGE_SIZE/8) << 8) | rb_bufsz;
#ifdef __BIG_ENDIAN
	tmp |= BUF_SWAP_32BIT;
#endif
	WREG32(CP_RB_CNTL, tmp);
	WREG32(CP_SEM_WAIT_TIMER, 0x0);
	WREG32(CP_SEM_INCOMPLETE_TIMER_CNTL, 0x0);

	/* Set the write pointer delay */
	WREG32(CP_RB_WPTR_DELAY, 0);

	/* Initialize the ring buffer's read and write pointers
	 * (RB_RPTR_WR_ENA temporarily allows forcing the read pointer) */
	WREG32(CP_RB_CNTL, tmp | RB_RPTR_WR_ENA);
	WREG32(CP_RB_RPTR_WR, 0);
	ring->wptr = 0;
	WREG32(CP_RB_WPTR, ring->wptr);

	/* set the wb address whether it's enabled or not */
	WREG32(CP_RB_RPTR_ADDR,
	       ((rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFFFFFFFC));
	WREG32(CP_RB_RPTR_ADDR_HI, upper_32_bits(rdev->wb.gpu_addr + RADEON_WB_CP_RPTR_OFFSET) & 0xFF);
	WREG32(SCRATCH_ADDR, ((rdev->wb.gpu_addr + RADEON_WB_SCRATCH_OFFSET) >> 8) & 0xFFFFFFFF);

	if (rdev->wb.enabled)
		WREG32(SCRATCH_UMSK, 0xff);
	else {
		/* no write-back: keep the CP from updating rptr in memory */
		tmp |= RB_NO_UPDATE;
		WREG32(SCRATCH_UMSK, 0);
	}

	/* drop RB_RPTR_WR_ENA by rewriting the final CNTL value */
	mdelay(1);
	WREG32(CP_RB_CNTL, tmp);

	WREG32(CP_RB_BASE, ring->gpu_addr >> 8);
	WREG32(CP_DEBUG, (1 << 27) | (1 << 28));

	evergreen_cp_start(rdev);
	ring->ready = true;
	r = radeon_ring_test(rdev, RADEON_RING_TYPE_GFX_INDEX, ring);
	if (r) {
		ring->ready = false;
		return r;
	}
	return 0;
}
3219
3220 /*
3221 * Core functions
3222 */
3223 static void evergreen_gpu_init(struct radeon_device *rdev)
3224 {
3225 u32 gb_addr_config;
3226 u32 mc_shared_chmap, mc_arb_ramcfg;
3227 u32 sx_debug_1;
3228 u32 smx_dc_ctl0;
3229 u32 sq_config;
3230 u32 sq_lds_resource_mgmt;
3231 u32 sq_gpr_resource_mgmt_1;
3232 u32 sq_gpr_resource_mgmt_2;
3233 u32 sq_gpr_resource_mgmt_3;
3234 u32 sq_thread_resource_mgmt;
3235 u32 sq_thread_resource_mgmt_2;
3236 u32 sq_stack_resource_mgmt_1;
3237 u32 sq_stack_resource_mgmt_2;
3238 u32 sq_stack_resource_mgmt_3;
3239 u32 vgt_cache_invalidation;
3240 u32 hdp_host_path_cntl, tmp;
3241 u32 disabled_rb_mask;
3242 int i, j, ps_thread_count;
3243
3244 switch (rdev->family) {
3245 case CHIP_CYPRESS:
3246 case CHIP_HEMLOCK:
3247 rdev->config.evergreen.num_ses = 2;
3248 rdev->config.evergreen.max_pipes = 4;
3249 rdev->config.evergreen.max_tile_pipes = 8;
3250 rdev->config.evergreen.max_simds = 10;
3251 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3252 rdev->config.evergreen.max_gprs = 256;
3253 rdev->config.evergreen.max_threads = 248;
3254 rdev->config.evergreen.max_gs_threads = 32;
3255 rdev->config.evergreen.max_stack_entries = 512;
3256 rdev->config.evergreen.sx_num_of_sets = 4;
3257 rdev->config.evergreen.sx_max_export_size = 256;
3258 rdev->config.evergreen.sx_max_export_pos_size = 64;
3259 rdev->config.evergreen.sx_max_export_smx_size = 192;
3260 rdev->config.evergreen.max_hw_contexts = 8;
3261 rdev->config.evergreen.sq_num_cf_insts = 2;
3262
3263 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3264 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3265 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3266 gb_addr_config = CYPRESS_GB_ADDR_CONFIG_GOLDEN;
3267 break;
3268 case CHIP_JUNIPER:
3269 rdev->config.evergreen.num_ses = 1;
3270 rdev->config.evergreen.max_pipes = 4;
3271 rdev->config.evergreen.max_tile_pipes = 4;
3272 rdev->config.evergreen.max_simds = 10;
3273 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3274 rdev->config.evergreen.max_gprs = 256;
3275 rdev->config.evergreen.max_threads = 248;
3276 rdev->config.evergreen.max_gs_threads = 32;
3277 rdev->config.evergreen.max_stack_entries = 512;
3278 rdev->config.evergreen.sx_num_of_sets = 4;
3279 rdev->config.evergreen.sx_max_export_size = 256;
3280 rdev->config.evergreen.sx_max_export_pos_size = 64;
3281 rdev->config.evergreen.sx_max_export_smx_size = 192;
3282 rdev->config.evergreen.max_hw_contexts = 8;
3283 rdev->config.evergreen.sq_num_cf_insts = 2;
3284
3285 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3286 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3287 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3288 gb_addr_config = JUNIPER_GB_ADDR_CONFIG_GOLDEN;
3289 break;
3290 case CHIP_REDWOOD:
3291 rdev->config.evergreen.num_ses = 1;
3292 rdev->config.evergreen.max_pipes = 4;
3293 rdev->config.evergreen.max_tile_pipes = 4;
3294 rdev->config.evergreen.max_simds = 5;
3295 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3296 rdev->config.evergreen.max_gprs = 256;
3297 rdev->config.evergreen.max_threads = 248;
3298 rdev->config.evergreen.max_gs_threads = 32;
3299 rdev->config.evergreen.max_stack_entries = 256;
3300 rdev->config.evergreen.sx_num_of_sets = 4;
3301 rdev->config.evergreen.sx_max_export_size = 256;
3302 rdev->config.evergreen.sx_max_export_pos_size = 64;
3303 rdev->config.evergreen.sx_max_export_smx_size = 192;
3304 rdev->config.evergreen.max_hw_contexts = 8;
3305 rdev->config.evergreen.sq_num_cf_insts = 2;
3306
3307 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3308 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3309 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3310 gb_addr_config = REDWOOD_GB_ADDR_CONFIG_GOLDEN;
3311 break;
3312 case CHIP_CEDAR:
3313 default:
3314 rdev->config.evergreen.num_ses = 1;
3315 rdev->config.evergreen.max_pipes = 2;
3316 rdev->config.evergreen.max_tile_pipes = 2;
3317 rdev->config.evergreen.max_simds = 2;
3318 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3319 rdev->config.evergreen.max_gprs = 256;
3320 rdev->config.evergreen.max_threads = 192;
3321 rdev->config.evergreen.max_gs_threads = 16;
3322 rdev->config.evergreen.max_stack_entries = 256;
3323 rdev->config.evergreen.sx_num_of_sets = 4;
3324 rdev->config.evergreen.sx_max_export_size = 128;
3325 rdev->config.evergreen.sx_max_export_pos_size = 32;
3326 rdev->config.evergreen.sx_max_export_smx_size = 96;
3327 rdev->config.evergreen.max_hw_contexts = 4;
3328 rdev->config.evergreen.sq_num_cf_insts = 1;
3329
3330 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3331 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3332 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3333 gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
3334 break;
3335 case CHIP_PALM:
3336 rdev->config.evergreen.num_ses = 1;
3337 rdev->config.evergreen.max_pipes = 2;
3338 rdev->config.evergreen.max_tile_pipes = 2;
3339 rdev->config.evergreen.max_simds = 2;
3340 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3341 rdev->config.evergreen.max_gprs = 256;
3342 rdev->config.evergreen.max_threads = 192;
3343 rdev->config.evergreen.max_gs_threads = 16;
3344 rdev->config.evergreen.max_stack_entries = 256;
3345 rdev->config.evergreen.sx_num_of_sets = 4;
3346 rdev->config.evergreen.sx_max_export_size = 128;
3347 rdev->config.evergreen.sx_max_export_pos_size = 32;
3348 rdev->config.evergreen.sx_max_export_smx_size = 96;
3349 rdev->config.evergreen.max_hw_contexts = 4;
3350 rdev->config.evergreen.sq_num_cf_insts = 1;
3351
3352 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3353 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3354 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3355 gb_addr_config = CEDAR_GB_ADDR_CONFIG_GOLDEN;
3356 break;
3357 case CHIP_SUMO:
3358 rdev->config.evergreen.num_ses = 1;
3359 rdev->config.evergreen.max_pipes = 4;
3360 rdev->config.evergreen.max_tile_pipes = 4;
3361 if (rdev->pdev->device == 0x9648)
3362 rdev->config.evergreen.max_simds = 3;
3363 else if ((rdev->pdev->device == 0x9647) ||
3364 (rdev->pdev->device == 0x964a))
3365 rdev->config.evergreen.max_simds = 4;
3366 else
3367 rdev->config.evergreen.max_simds = 5;
3368 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3369 rdev->config.evergreen.max_gprs = 256;
3370 rdev->config.evergreen.max_threads = 248;
3371 rdev->config.evergreen.max_gs_threads = 32;
3372 rdev->config.evergreen.max_stack_entries = 256;
3373 rdev->config.evergreen.sx_num_of_sets = 4;
3374 rdev->config.evergreen.sx_max_export_size = 256;
3375 rdev->config.evergreen.sx_max_export_pos_size = 64;
3376 rdev->config.evergreen.sx_max_export_smx_size = 192;
3377 rdev->config.evergreen.max_hw_contexts = 8;
3378 rdev->config.evergreen.sq_num_cf_insts = 2;
3379
3380 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3381 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3382 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3383 gb_addr_config = SUMO_GB_ADDR_CONFIG_GOLDEN;
3384 break;
3385 case CHIP_SUMO2:
3386 rdev->config.evergreen.num_ses = 1;
3387 rdev->config.evergreen.max_pipes = 4;
3388 rdev->config.evergreen.max_tile_pipes = 4;
3389 rdev->config.evergreen.max_simds = 2;
3390 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3391 rdev->config.evergreen.max_gprs = 256;
3392 rdev->config.evergreen.max_threads = 248;
3393 rdev->config.evergreen.max_gs_threads = 32;
3394 rdev->config.evergreen.max_stack_entries = 512;
3395 rdev->config.evergreen.sx_num_of_sets = 4;
3396 rdev->config.evergreen.sx_max_export_size = 256;
3397 rdev->config.evergreen.sx_max_export_pos_size = 64;
3398 rdev->config.evergreen.sx_max_export_smx_size = 192;
3399 rdev->config.evergreen.max_hw_contexts = 4;
3400 rdev->config.evergreen.sq_num_cf_insts = 2;
3401
3402 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3403 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3404 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3405 gb_addr_config = SUMO2_GB_ADDR_CONFIG_GOLDEN;
3406 break;
3407 case CHIP_BARTS:
3408 rdev->config.evergreen.num_ses = 2;
3409 rdev->config.evergreen.max_pipes = 4;
3410 rdev->config.evergreen.max_tile_pipes = 8;
3411 rdev->config.evergreen.max_simds = 7;
3412 rdev->config.evergreen.max_backends = 4 * rdev->config.evergreen.num_ses;
3413 rdev->config.evergreen.max_gprs = 256;
3414 rdev->config.evergreen.max_threads = 248;
3415 rdev->config.evergreen.max_gs_threads = 32;
3416 rdev->config.evergreen.max_stack_entries = 512;
3417 rdev->config.evergreen.sx_num_of_sets = 4;
3418 rdev->config.evergreen.sx_max_export_size = 256;
3419 rdev->config.evergreen.sx_max_export_pos_size = 64;
3420 rdev->config.evergreen.sx_max_export_smx_size = 192;
3421 rdev->config.evergreen.max_hw_contexts = 8;
3422 rdev->config.evergreen.sq_num_cf_insts = 2;
3423
3424 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3425 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3426 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3427 gb_addr_config = BARTS_GB_ADDR_CONFIG_GOLDEN;
3428 break;
3429 case CHIP_TURKS:
3430 rdev->config.evergreen.num_ses = 1;
3431 rdev->config.evergreen.max_pipes = 4;
3432 rdev->config.evergreen.max_tile_pipes = 4;
3433 rdev->config.evergreen.max_simds = 6;
3434 rdev->config.evergreen.max_backends = 2 * rdev->config.evergreen.num_ses;
3435 rdev->config.evergreen.max_gprs = 256;
3436 rdev->config.evergreen.max_threads = 248;
3437 rdev->config.evergreen.max_gs_threads = 32;
3438 rdev->config.evergreen.max_stack_entries = 256;
3439 rdev->config.evergreen.sx_num_of_sets = 4;
3440 rdev->config.evergreen.sx_max_export_size = 256;
3441 rdev->config.evergreen.sx_max_export_pos_size = 64;
3442 rdev->config.evergreen.sx_max_export_smx_size = 192;
3443 rdev->config.evergreen.max_hw_contexts = 8;
3444 rdev->config.evergreen.sq_num_cf_insts = 2;
3445
3446 rdev->config.evergreen.sc_prim_fifo_size = 0x100;
3447 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3448 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3449 gb_addr_config = TURKS_GB_ADDR_CONFIG_GOLDEN;
3450 break;
3451 case CHIP_CAICOS:
3452 rdev->config.evergreen.num_ses = 1;
3453 rdev->config.evergreen.max_pipes = 2;
3454 rdev->config.evergreen.max_tile_pipes = 2;
3455 rdev->config.evergreen.max_simds = 2;
3456 rdev->config.evergreen.max_backends = 1 * rdev->config.evergreen.num_ses;
3457 rdev->config.evergreen.max_gprs = 256;
3458 rdev->config.evergreen.max_threads = 192;
3459 rdev->config.evergreen.max_gs_threads = 16;
3460 rdev->config.evergreen.max_stack_entries = 256;
3461 rdev->config.evergreen.sx_num_of_sets = 4;
3462 rdev->config.evergreen.sx_max_export_size = 128;
3463 rdev->config.evergreen.sx_max_export_pos_size = 32;
3464 rdev->config.evergreen.sx_max_export_smx_size = 96;
3465 rdev->config.evergreen.max_hw_contexts = 4;
3466 rdev->config.evergreen.sq_num_cf_insts = 1;
3467
3468 rdev->config.evergreen.sc_prim_fifo_size = 0x40;
3469 rdev->config.evergreen.sc_hiz_tile_fifo_size = 0x30;
3470 rdev->config.evergreen.sc_earlyz_tile_fifo_size = 0x130;
3471 gb_addr_config = CAICOS_GB_ADDR_CONFIG_GOLDEN;
3472 break;
3473 }
3474
3475 /* Initialize HDP */
3476 for (i = 0, j = 0; i < 32; i++, j += 0x18) {
3477 WREG32((0x2c14 + j), 0x00000000);
3478 WREG32((0x2c18 + j), 0x00000000);
3479 WREG32((0x2c1c + j), 0x00000000);
3480 WREG32((0x2c20 + j), 0x00000000);
3481 WREG32((0x2c24 + j), 0x00000000);
3482 }
3483
3484 WREG32(GRBM_CNTL, GRBM_READ_TIMEOUT(0xff));
3485 WREG32(SRBM_INT_CNTL, 0x1);
3486 WREG32(SRBM_INT_ACK, 0x1);
3487
3488 evergreen_fix_pci_max_read_req_size(rdev);
3489
3490 mc_shared_chmap = RREG32(MC_SHARED_CHMAP);
3491 if ((rdev->family == CHIP_PALM) ||
3492 (rdev->family == CHIP_SUMO) ||
3493 (rdev->family == CHIP_SUMO2))
3494 mc_arb_ramcfg = RREG32(FUS_MC_ARB_RAMCFG);
3495 else
3496 mc_arb_ramcfg = RREG32(MC_ARB_RAMCFG);
3497
3498 /* setup tiling info dword. gb_addr_config is not adequate since it does
3499 * not have bank info, so create a custom tiling dword.
3500 * bits 3:0 num_pipes
3501 * bits 7:4 num_banks
3502 * bits 11:8 group_size
3503 * bits 15:12 row_size
3504 */
3505 rdev->config.evergreen.tile_config = 0;
3506 switch (rdev->config.evergreen.max_tile_pipes) {
3507 case 1:
3508 default:
3509 rdev->config.evergreen.tile_config |= (0 << 0);
3510 break;
3511 case 2:
3512 rdev->config.evergreen.tile_config |= (1 << 0);
3513 break;
3514 case 4:
3515 rdev->config.evergreen.tile_config |= (2 << 0);
3516 break;
3517 case 8:
3518 rdev->config.evergreen.tile_config |= (3 << 0);
3519 break;
3520 }
3521 /* num banks is 8 on all fusion asics. 0 = 4, 1 = 8, 2 = 16 */
3522 if (rdev->flags & RADEON_IS_IGP)
3523 rdev->config.evergreen.tile_config |= 1 << 4;
3524 else {
3525 switch ((mc_arb_ramcfg & NOOFBANK_MASK) >> NOOFBANK_SHIFT) {
3526 case 0: /* four banks */
3527 rdev->config.evergreen.tile_config |= 0 << 4;
3528 break;
3529 case 1: /* eight banks */
3530 rdev->config.evergreen.tile_config |= 1 << 4;
3531 break;
3532 case 2: /* sixteen banks */
3533 default:
3534 rdev->config.evergreen.tile_config |= 2 << 4;
3535 break;
3536 }
3537 }
3538 rdev->config.evergreen.tile_config |= 0 << 8;
3539 rdev->config.evergreen.tile_config |=
3540 ((gb_addr_config & 0x30000000) >> 28) << 12;
3541
3542 if ((rdev->family >= CHIP_CEDAR) && (rdev->family <= CHIP_HEMLOCK)) {
3543 u32 efuse_straps_4;
3544 u32 efuse_straps_3;
3545
3546 efuse_straps_4 = RREG32_RCU(0x204);
3547 efuse_straps_3 = RREG32_RCU(0x203);
3548 tmp = (((efuse_straps_4 & 0xf) << 4) |
3549 ((efuse_straps_3 & 0xf0000000) >> 28));
3550 } else {
3551 tmp = 0;
3552 for (i = (rdev->config.evergreen.num_ses - 1); i >= 0; i--) {
3553 u32 rb_disable_bitmap;
3554
3555 WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3556 WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3557 rb_disable_bitmap = (RREG32(CC_RB_BACKEND_DISABLE) & 0x00ff0000) >> 16;
3558 tmp <<= 4;
3559 tmp |= rb_disable_bitmap;
3560 }
3561 }
3562 /* enabled rb are just the one not disabled :) */
3563 disabled_rb_mask = tmp;
3564 tmp = 0;
3565 for (i = 0; i < rdev->config.evergreen.max_backends; i++)
3566 tmp |= (1 << i);
3567 /* if all the backends are disabled, fix it up here */
3568 if ((disabled_rb_mask & tmp) == tmp) {
3569 for (i = 0; i < rdev->config.evergreen.max_backends; i++)
3570 disabled_rb_mask &= ~(1 << i);
3571 }
3572
3573 for (i = 0; i < rdev->config.evergreen.num_ses; i++) {
3574 u32 simd_disable_bitmap;
3575
3576 WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3577 WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_INDEX(i));
3578 simd_disable_bitmap = (RREG32(CC_GC_SHADER_PIPE_CONFIG) & 0xffff0000) >> 16;
3579 simd_disable_bitmap |= 0xffffffff << rdev->config.evergreen.max_simds;
3580 tmp <<= 16;
3581 tmp |= simd_disable_bitmap;
3582 }
3583 rdev->config.evergreen.active_simds = hweight32(~tmp);
3584
3585 WREG32(GRBM_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
3586 WREG32(RLC_GFX_INDEX, INSTANCE_BROADCAST_WRITES | SE_BROADCAST_WRITES);
3587
3588 WREG32(GB_ADDR_CONFIG, gb_addr_config);
3589 WREG32(DMIF_ADDR_CONFIG, gb_addr_config);
3590 WREG32(HDP_ADDR_CONFIG, gb_addr_config);
3591 WREG32(DMA_TILING_CONFIG, gb_addr_config);
3592 WREG32(UVD_UDEC_ADDR_CONFIG, gb_addr_config);
3593 WREG32(UVD_UDEC_DB_ADDR_CONFIG, gb_addr_config);
3594 WREG32(UVD_UDEC_DBW_ADDR_CONFIG, gb_addr_config);
3595
3596 if ((rdev->config.evergreen.max_backends == 1) &&
3597 (rdev->flags & RADEON_IS_IGP)) {
3598 if ((disabled_rb_mask & 3) == 1) {
3599 /* RB0 disabled, RB1 enabled */
3600 tmp = 0x11111111;
3601 } else {
3602 /* RB1 disabled, RB0 enabled */
3603 tmp = 0x00000000;
3604 }
3605 } else {
3606 tmp = gb_addr_config & NUM_PIPES_MASK;
3607 tmp = r6xx_remap_render_backend(rdev, tmp, rdev->config.evergreen.max_backends,
3608 EVERGREEN_MAX_BACKENDS, disabled_rb_mask);
3609 }
3610 WREG32(GB_BACKEND_MAP, tmp);
3611
3612 WREG32(CGTS_SYS_TCC_DISABLE, 0);
3613 WREG32(CGTS_TCC_DISABLE, 0);
3614 WREG32(CGTS_USER_SYS_TCC_DISABLE, 0);
3615 WREG32(CGTS_USER_TCC_DISABLE, 0);
3616
3617 /* set HW defaults for 3D engine */
3618 WREG32(CP_QUEUE_THRESHOLDS, (ROQ_IB1_START(0x16) |
3619 ROQ_IB2_START(0x2b)));
3620
3621 WREG32(CP_MEQ_THRESHOLDS, STQ_SPLIT(0x30));
3622
3623 WREG32(TA_CNTL_AUX, (DISABLE_CUBE_ANISO |
3624 SYNC_GRADIENT |
3625 SYNC_WALKER |
3626 SYNC_ALIGNER));
3627
3628 sx_debug_1 = RREG32(SX_DEBUG_1);
3629 sx_debug_1 |= ENABLE_NEW_SMX_ADDRESS;
3630 WREG32(SX_DEBUG_1, sx_debug_1);
3631
3632
3633 smx_dc_ctl0 = RREG32(SMX_DC_CTL0);
3634 smx_dc_ctl0 &= ~NUMBER_OF_SETS(0x1ff);
3635 smx_dc_ctl0 |= NUMBER_OF_SETS(rdev->config.evergreen.sx_num_of_sets);
3636 WREG32(SMX_DC_CTL0, smx_dc_ctl0);
3637
3638 if (rdev->family <= CHIP_SUMO2)
3639 WREG32(SMX_SAR_CTL0, 0x00010000);
3640
3641 WREG32(SX_EXPORT_BUFFER_SIZES, (COLOR_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_size / 4) - 1) |
3642 POSITION_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_pos_size / 4) - 1) |
3643 SMX_BUFFER_SIZE((rdev->config.evergreen.sx_max_export_smx_size / 4) - 1)));
3644
3645 WREG32(PA_SC_FIFO_SIZE, (SC_PRIM_FIFO_SIZE(rdev->config.evergreen.sc_prim_fifo_size) |
3646 SC_HIZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_hiz_tile_fifo_size) |
3647 SC_EARLYZ_TILE_FIFO_SIZE(rdev->config.evergreen.sc_earlyz_tile_fifo_size)));
3648
3649 WREG32(VGT_NUM_INSTANCES, 1);
3650 WREG32(SPI_CONFIG_CNTL, 0);
3651 WREG32(SPI_CONFIG_CNTL_1, VTX_DONE_DELAY(4));
3652 WREG32(CP_PERFMON_CNTL, 0);
3653
3654 WREG32(SQ_MS_FIFO_SIZES, (CACHE_FIFO_SIZE(16 * rdev->config.evergreen.sq_num_cf_insts) |
3655 FETCH_FIFO_HIWATER(0x4) |
3656 DONE_FIFO_HIWATER(0xe0) |
3657 ALU_UPDATE_FIFO_HIWATER(0x8)));
3658
3659 sq_config = RREG32(SQ_CONFIG);
3660 sq_config &= ~(PS_PRIO(3) |
3661 VS_PRIO(3) |
3662 GS_PRIO(3) |
3663 ES_PRIO(3));
3664 sq_config |= (VC_ENABLE |
3665 EXPORT_SRC_C |
3666 PS_PRIO(0) |
3667 VS_PRIO(1) |
3668 GS_PRIO(2) |
3669 ES_PRIO(3));
3670
3671 switch (rdev->family) {
3672 case CHIP_CEDAR:
3673 case CHIP_PALM:
3674 case CHIP_SUMO:
3675 case CHIP_SUMO2:
3676 case CHIP_CAICOS:
3677 /* no vertex cache */
3678 sq_config &= ~VC_ENABLE;
3679 break;
3680 default:
3681 break;
3682 }
3683
3684 sq_lds_resource_mgmt = RREG32(SQ_LDS_RESOURCE_MGMT);
3685
3686 sq_gpr_resource_mgmt_1 = NUM_PS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2))* 12 / 32);
3687 sq_gpr_resource_mgmt_1 |= NUM_VS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 6 / 32);
3688 sq_gpr_resource_mgmt_1 |= NUM_CLAUSE_TEMP_GPRS(4);
3689 sq_gpr_resource_mgmt_2 = NUM_GS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
3690 sq_gpr_resource_mgmt_2 |= NUM_ES_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 4 / 32);
3691 sq_gpr_resource_mgmt_3 = NUM_HS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
3692 sq_gpr_resource_mgmt_3 |= NUM_LS_GPRS((rdev->config.evergreen.max_gprs - (4 * 2)) * 3 / 32);
3693
3694 switch (rdev->family) {
3695 case CHIP_CEDAR:
3696 case CHIP_PALM:
3697 case CHIP_SUMO:
3698 case CHIP_SUMO2:
3699 ps_thread_count = 96;
3700 break;
3701 default:
3702 ps_thread_count = 128;
3703 break;
3704 }
3705
3706 sq_thread_resource_mgmt = NUM_PS_THREADS(ps_thread_count);
3707 sq_thread_resource_mgmt |= NUM_VS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3708 sq_thread_resource_mgmt |= NUM_GS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3709 sq_thread_resource_mgmt |= NUM_ES_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3710 sq_thread_resource_mgmt_2 = NUM_HS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3711 sq_thread_resource_mgmt_2 |= NUM_LS_THREADS((((rdev->config.evergreen.max_threads - ps_thread_count) / 6) / 8) * 8);
3712
3713 sq_stack_resource_mgmt_1 = NUM_PS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3714 sq_stack_resource_mgmt_1 |= NUM_VS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3715 sq_stack_resource_mgmt_2 = NUM_GS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3716 sq_stack_resource_mgmt_2 |= NUM_ES_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3717 sq_stack_resource_mgmt_3 = NUM_HS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3718 sq_stack_resource_mgmt_3 |= NUM_LS_STACK_ENTRIES((rdev->config.evergreen.max_stack_entries * 1) / 6);
3719
3720 WREG32(SQ_CONFIG, sq_config);
3721 WREG32(SQ_GPR_RESOURCE_MGMT_1, sq_gpr_resource_mgmt_1);
3722 WREG32(SQ_GPR_RESOURCE_MGMT_2, sq_gpr_resource_mgmt_2);
3723 WREG32(SQ_GPR_RESOURCE_MGMT_3, sq_gpr_resource_mgmt_3);
3724 WREG32(SQ_THREAD_RESOURCE_MGMT, sq_thread_resource_mgmt);
3725 WREG32(SQ_THREAD_RESOURCE_MGMT_2, sq_thread_resource_mgmt_2);
3726 WREG32(SQ_STACK_RESOURCE_MGMT_1, sq_stack_resource_mgmt_1);
3727 WREG32(SQ_STACK_RESOURCE_MGMT_2, sq_stack_resource_mgmt_2);
3728 WREG32(SQ_STACK_RESOURCE_MGMT_3, sq_stack_resource_mgmt_3);
3729 WREG32(SQ_DYN_GPR_CNTL_PS_FLUSH_REQ, 0);
3730 WREG32(SQ_LDS_RESOURCE_MGMT, sq_lds_resource_mgmt);
3731
3732 WREG32(PA_SC_FORCE_EOV_MAX_CNTS, (FORCE_EOV_MAX_CLK_CNT(4095) |
3733 FORCE_EOV_MAX_REZ_CNT(255)));
3734
3735 switch (rdev->family) {
3736 case CHIP_CEDAR:
3737 case CHIP_PALM:
3738 case CHIP_SUMO:
3739 case CHIP_SUMO2:
3740 case CHIP_CAICOS:
3741 vgt_cache_invalidation = CACHE_INVALIDATION(TC_ONLY);
3742 break;
3743 default:
3744 vgt_cache_invalidation = CACHE_INVALIDATION(VC_AND_TC);
3745 break;
3746 }
3747 vgt_cache_invalidation |= AUTO_INVLD_EN(ES_AND_GS_AUTO);
3748 WREG32(VGT_CACHE_INVALIDATION, vgt_cache_invalidation);
3749
3750 WREG32(VGT_GS_VERTEX_REUSE, 16);
3751 WREG32(PA_SU_LINE_STIPPLE_VALUE, 0);
3752 WREG32(PA_SC_LINE_STIPPLE_STATE, 0);
3753
3754 WREG32(VGT_VERTEX_REUSE_BLOCK_CNTL, 14);
3755 WREG32(VGT_OUT_DEALLOC_CNTL, 16);
3756
3757 WREG32(CB_PERF_CTR0_SEL_0, 0);
3758 WREG32(CB_PERF_CTR0_SEL_1, 0);
3759 WREG32(CB_PERF_CTR1_SEL_0, 0);
3760 WREG32(CB_PERF_CTR1_SEL_1, 0);
3761 WREG32(CB_PERF_CTR2_SEL_0, 0);
3762 WREG32(CB_PERF_CTR2_SEL_1, 0);
3763 WREG32(CB_PERF_CTR3_SEL_0, 0);
3764 WREG32(CB_PERF_CTR3_SEL_1, 0);
3765
3766 /* clear render buffer base addresses */
3767 WREG32(CB_COLOR0_BASE, 0);
3768 WREG32(CB_COLOR1_BASE, 0);
3769 WREG32(CB_COLOR2_BASE, 0);
3770 WREG32(CB_COLOR3_BASE, 0);
3771 WREG32(CB_COLOR4_BASE, 0);
3772 WREG32(CB_COLOR5_BASE, 0);
3773 WREG32(CB_COLOR6_BASE, 0);
3774 WREG32(CB_COLOR7_BASE, 0);
3775 WREG32(CB_COLOR8_BASE, 0);
3776 WREG32(CB_COLOR9_BASE, 0);
3777 WREG32(CB_COLOR10_BASE, 0);
3778 WREG32(CB_COLOR11_BASE, 0);
3779
3780 /* set the shader const cache sizes to 0 */
3781 for (i = SQ_ALU_CONST_BUFFER_SIZE_PS_0; i < 0x28200; i += 4)
3782 WREG32(i, 0);
3783 for (i = SQ_ALU_CONST_BUFFER_SIZE_HS_0; i < 0x29000; i += 4)
3784 WREG32(i, 0);
3785
3786 tmp = RREG32(HDP_MISC_CNTL);
3787 tmp |= HDP_FLUSH_INVALIDATE_CACHE;
3788 WREG32(HDP_MISC_CNTL, tmp);
3789
3790 hdp_host_path_cntl = RREG32(HDP_HOST_PATH_CNTL);
3791 WREG32(HDP_HOST_PATH_CNTL, hdp_host_path_cntl);
3792
3793 WREG32(PA_CL_ENHANCE, CLIP_VTX_REORDER_ENA | NUM_CLIP_SEQ(3));
3794
3795 udelay(50);
3796
3797 }
3798
/**
 * evergreen_mc_init - initialize the memory controller bookkeeping
 *
 * @rdev: radeon_device pointer
 *
 * Determines VRAM channel width and channel count from the RAM config
 * registers, reads the BAR aperture, reads the VRAM size from
 * CONFIG_MEMSIZE, then programs the VRAM/GTT layout and updates the
 * bandwidth info.  Always returns 0.
 */
int evergreen_mc_init(struct radeon_device *rdev)
{
	u32 tmp;
	int chansize, numchan;

	/* Get VRAM informations */
	rdev->mc.vram_is_ddr = true;
	/* Fusion IGPs (Palm/Sumo/Sumo2) expose the arbiter RAM config
	 * at a different register than discrete parts.
	 */
	if ((rdev->family == CHIP_PALM) ||
	    (rdev->family == CHIP_SUMO) ||
	    (rdev->family == CHIP_SUMO2))
		tmp = RREG32(FUS_MC_ARB_RAMCFG);
	else
		tmp = RREG32(MC_ARB_RAMCFG);
	/* channel size in bits: override -> 16, mask set -> 64, else 32 */
	if (tmp & CHANSIZE_OVERRIDE) {
		chansize = 16;
	} else if (tmp & CHANSIZE_MASK) {
		chansize = 64;
	} else {
		chansize = 32;
	}
	/* number of memory channels encoded in MC_SHARED_CHMAP */
	tmp = RREG32(MC_SHARED_CHMAP);
	switch ((tmp & NOOFCHAN_MASK) >> NOOFCHAN_SHIFT) {
	case 0:
	default:
		numchan = 1;
		break;
	case 1:
		numchan = 2;
		break;
	case 2:
		numchan = 4;
		break;
	case 3:
		numchan = 8;
		break;
	}
	rdev->mc.vram_width = numchan * chansize;
	/* Could aper size report 0 ? */
	rdev->mc.aper_base = pci_resource_start(rdev->pdev, 0);
	rdev->mc.aper_size = pci_resource_len(rdev->pdev, 0);
	/* Setup GPU memory space */
	if ((rdev->family == CHIP_PALM) ||
	    (rdev->family == CHIP_SUMO) ||
	    (rdev->family == CHIP_SUMO2)) {
		/* size in bytes on fusion */
		rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE);
		rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE);
	} else {
		/* size in MB on evergreen/cayman/tn */
		rdev->mc.mc_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
		rdev->mc.real_vram_size = RREG32(CONFIG_MEMSIZE) * 1024ULL * 1024ULL;
	}
	rdev->mc.visible_vram_size = rdev->mc.aper_size;
	r700_vram_gtt_location(rdev, &rdev->mc);
	radeon_update_bandwidth_info(rdev);

	return 0;
}
3857
/**
 * evergreen_print_gpu_status_regs - dump GPU status registers to the log
 *
 * @rdev: radeon_device pointer
 *
 * Prints the GRBM/SRBM/CP/DMA status registers via dev_info().  On
 * Cayman and newer a second DMA engine exists at +0x800, so its status
 * is dumped as well.
 */
void evergreen_print_gpu_status_regs(struct radeon_device *rdev)
{
	dev_info(rdev->dev, "  GRBM_STATUS               = 0x%08X\n",
		RREG32(GRBM_STATUS));
	dev_info(rdev->dev, "  GRBM_STATUS_SE0           = 0x%08X\n",
		RREG32(GRBM_STATUS_SE0));
	dev_info(rdev->dev, "  GRBM_STATUS_SE1           = 0x%08X\n",
		RREG32(GRBM_STATUS_SE1));
	dev_info(rdev->dev, "  SRBM_STATUS               = 0x%08X\n",
		RREG32(SRBM_STATUS));
	dev_info(rdev->dev, "  SRBM_STATUS2              = 0x%08X\n",
		RREG32(SRBM_STATUS2));
	dev_info(rdev->dev, "  R_008674_CP_STALLED_STAT1 = 0x%08X\n",
		RREG32(CP_STALLED_STAT1));
	dev_info(rdev->dev, "  R_008678_CP_STALLED_STAT2 = 0x%08X\n",
		RREG32(CP_STALLED_STAT2));
	dev_info(rdev->dev, "  R_00867C_CP_BUSY_STAT     = 0x%08X\n",
		RREG32(CP_BUSY_STAT));
	dev_info(rdev->dev, "  R_008680_CP_STAT          = 0x%08X\n",
		RREG32(CP_STAT));
	dev_info(rdev->dev, "  R_00D034_DMA_STATUS_REG   = 0x%08X\n",
		RREG32(DMA_STATUS_REG));
	if (rdev->family >= CHIP_CAYMAN) {
		/* second DMA engine on cayman+ lives at +0x800 */
		dev_info(rdev->dev, "  R_00D834_DMA_STATUS_REG   = 0x%08X\n",
			 RREG32(DMA_STATUS_REG + 0x800));
	}
}
3885
3886 bool evergreen_is_display_hung(struct radeon_device *rdev)
3887 {
3888 u32 crtc_hung = 0;
3889 u32 crtc_status[6];
3890 u32 i, j, tmp;
3891
3892 for (i = 0; i < rdev->num_crtc; i++) {
3893 if (RREG32(EVERGREEN_CRTC_CONTROL + crtc_offsets[i]) & EVERGREEN_CRTC_MASTER_EN) {
3894 crtc_status[i] = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3895 crtc_hung |= (1 << i);
3896 }
3897 }
3898
3899 for (j = 0; j < 10; j++) {
3900 for (i = 0; i < rdev->num_crtc; i++) {
3901 if (crtc_hung & (1 << i)) {
3902 tmp = RREG32(EVERGREEN_CRTC_STATUS_HV_COUNT + crtc_offsets[i]);
3903 if (tmp != crtc_status[i])
3904 crtc_hung &= ~(1 << i);
3905 }
3906 }
3907 if (crtc_hung == 0)
3908 return false;
3909 udelay(100);
3910 }
3911
3912 return true;
3913 }
3914
/**
 * evergreen_gpu_check_soft_reset - sample busy state and build a reset mask
 *
 * @rdev: radeon_device pointer
 *
 * Reads the GRBM, DMA, SRBM and VM L2 status registers and translates
 * any busy/pending bits into RADEON_RESET_* flags.  An MC-busy
 * indication is dropped from the final mask because the MC is most
 * likely just busy rather than hung.
 * Returns the accumulated reset mask (0 means nothing appears hung).
 */
u32 evergreen_gpu_check_soft_reset(struct radeon_device *rdev)
{
	u32 reset_mask = 0;
	u32 tmp;

	/* GRBM_STATUS */
	tmp = RREG32(GRBM_STATUS);
	if (tmp & (PA_BUSY | SC_BUSY |
		   SH_BUSY | SX_BUSY |
		   TA_BUSY | VGT_BUSY |
		   DB_BUSY | CB_BUSY |
		   SPI_BUSY | VGT_BUSY_NO_DMA))
		reset_mask |= RADEON_RESET_GFX;

	if (tmp & (CF_RQ_PENDING | PF_RQ_PENDING |
		   CP_BUSY | CP_COHERENCY_BUSY))
		reset_mask |= RADEON_RESET_CP;

	if (tmp & GRBM_EE_BUSY)
		reset_mask |= RADEON_RESET_GRBM | RADEON_RESET_GFX | RADEON_RESET_CP;

	/* DMA_STATUS_REG */
	tmp = RREG32(DMA_STATUS_REG);
	if (!(tmp & DMA_IDLE))
		reset_mask |= RADEON_RESET_DMA;

	/* SRBM_STATUS2 */
	tmp = RREG32(SRBM_STATUS2);
	if (tmp & DMA_BUSY)
		reset_mask |= RADEON_RESET_DMA;

	/* SRBM_STATUS */
	tmp = RREG32(SRBM_STATUS);
	if (tmp & (RLC_RQ_PENDING | RLC_BUSY))
		reset_mask |= RADEON_RESET_RLC;

	if (tmp & IH_BUSY)
		reset_mask |= RADEON_RESET_IH;

	if (tmp & SEM_BUSY)
		reset_mask |= RADEON_RESET_SEM;

	if (tmp & GRBM_RQ_PENDING)
		reset_mask |= RADEON_RESET_GRBM;

	if (tmp & VMC_BUSY)
		reset_mask |= RADEON_RESET_VMC;

	if (tmp & (MCB_BUSY | MCB_NON_DISPLAY_BUSY |
		   MCC_BUSY | MCD_BUSY))
		reset_mask |= RADEON_RESET_MC;

	if (evergreen_is_display_hung(rdev))
		reset_mask |= RADEON_RESET_DISPLAY;

	/* VM_L2_STATUS */
	tmp = RREG32(VM_L2_STATUS);
	if (tmp & L2_BUSY)
		reset_mask |= RADEON_RESET_VMC;

	/* Skip MC reset as it's mostly likely not hung, just busy */
	if (reset_mask & RADEON_RESET_MC) {
		DRM_DEBUG("MC busy: 0x%08X, clearing.\n", reset_mask);
		reset_mask &= ~RADEON_RESET_MC;
	}

	return reset_mask;
}
3983
/**
 * evergreen_gpu_soft_reset - soft reset the blocks named in @reset_mask
 *
 * @rdev: radeon_device pointer
 * @reset_mask: RADEON_RESET_* flags selecting which blocks to reset
 *
 * Halts CP parsing (and the DMA ring if requested), stops and idles the
 * MC, then pulses the matching bits in GRBM_SOFT_RESET / SRBM_SOFT_RESET
 * with delays in between, and finally resumes the MC.  The sequence is
 * order-critical; the reads after each soft-reset write are presumably
 * there to post the write before the delay — do not reorder.
 */
static void evergreen_gpu_soft_reset(struct radeon_device *rdev, u32 reset_mask)
{
	struct evergreen_mc_save save;
	u32 grbm_soft_reset = 0, srbm_soft_reset = 0;
	u32 tmp;

	if (reset_mask == 0)
		return;

	dev_info(rdev->dev, "GPU softreset: 0x%08X\n", reset_mask);

	evergreen_print_gpu_status_regs(rdev);

	/* Disable CP parsing/prefetching */
	WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);

	if (reset_mask & RADEON_RESET_DMA) {
		/* Disable DMA */
		tmp = RREG32(DMA_RB_CNTL);
		tmp &= ~DMA_RB_ENABLE;
		WREG32(DMA_RB_CNTL, tmp);
	}

	udelay(50);

	/* stop memory traffic before touching the reset bits */
	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timedout !\n");
	}

	/* translate the requested reset mask into GRBM/SRBM reset bits */
	if (reset_mask & (RADEON_RESET_GFX | RADEON_RESET_COMPUTE)) {
		grbm_soft_reset |= SOFT_RESET_DB |
			SOFT_RESET_CB |
			SOFT_RESET_PA |
			SOFT_RESET_SC |
			SOFT_RESET_SPI |
			SOFT_RESET_SX |
			SOFT_RESET_SH |
			SOFT_RESET_TC |
			SOFT_RESET_TA |
			SOFT_RESET_VC |
			SOFT_RESET_VGT;
	}

	if (reset_mask & RADEON_RESET_CP) {
		grbm_soft_reset |= SOFT_RESET_CP |
			SOFT_RESET_VGT;

		srbm_soft_reset |= SOFT_RESET_GRBM;
	}

	if (reset_mask & RADEON_RESET_DMA)
		srbm_soft_reset |= SOFT_RESET_DMA;

	if (reset_mask & RADEON_RESET_DISPLAY)
		srbm_soft_reset |= SOFT_RESET_DC;

	if (reset_mask & RADEON_RESET_RLC)
		srbm_soft_reset |= SOFT_RESET_RLC;

	if (reset_mask & RADEON_RESET_SEM)
		srbm_soft_reset |= SOFT_RESET_SEM;

	if (reset_mask & RADEON_RESET_IH)
		srbm_soft_reset |= SOFT_RESET_IH;

	if (reset_mask & RADEON_RESET_GRBM)
		srbm_soft_reset |= SOFT_RESET_GRBM;

	if (reset_mask & RADEON_RESET_VMC)
		srbm_soft_reset |= SOFT_RESET_VMC;

	/* the MC is only reset on discrete parts, never on IGPs */
	if (!(rdev->flags & RADEON_IS_IGP)) {
		if (reset_mask & RADEON_RESET_MC)
			srbm_soft_reset |= SOFT_RESET_MC;
	}

	if (grbm_soft_reset) {
		/* assert, delay, then deassert the GRBM reset bits */
		tmp = RREG32(GRBM_SOFT_RESET);
		tmp |= grbm_soft_reset;
		dev_info(rdev->dev, "GRBM_SOFT_RESET=0x%08X\n", tmp);
		WREG32(GRBM_SOFT_RESET, tmp);
		tmp = RREG32(GRBM_SOFT_RESET);

		udelay(50);

		tmp &= ~grbm_soft_reset;
		WREG32(GRBM_SOFT_RESET, tmp);
		tmp = RREG32(GRBM_SOFT_RESET);
	}

	if (srbm_soft_reset) {
		/* assert, delay, then deassert the SRBM reset bits */
		tmp = RREG32(SRBM_SOFT_RESET);
		tmp |= srbm_soft_reset;
		dev_info(rdev->dev, "SRBM_SOFT_RESET=0x%08X\n", tmp);
		WREG32(SRBM_SOFT_RESET, tmp);
		tmp = RREG32(SRBM_SOFT_RESET);

		udelay(50);

		tmp &= ~srbm_soft_reset;
		WREG32(SRBM_SOFT_RESET, tmp);
		tmp = RREG32(SRBM_SOFT_RESET);
	}

	/* Wait a little for things to settle down */
	udelay(50);

	evergreen_mc_resume(rdev, &save);
	udelay(50);

	evergreen_print_gpu_status_regs(rdev);
}
4097
/**
 * evergreen_gpu_pci_config_reset - reset the GPU via PCI config space
 *
 * @rdev: radeon_device pointer
 *
 * Heavier-handed reset than the soft-reset path: halts CP/DMA/RLC,
 * switches clocks to bypass, disables bus mastering and memory access,
 * triggers a PCI config reset, then spins (up to rdev->usec_timeout us)
 * until CONFIG_MEMSIZE no longer reads all-ones, which indicates the
 * ASIC has come back.
 */
void evergreen_gpu_pci_config_reset(struct radeon_device *rdev)
{
	struct evergreen_mc_save save;
	u32 tmp, i;

	dev_info(rdev->dev, "GPU pci config reset\n");

	/* disable dpm? */

	/* Disable CP parsing/prefetching */
	WREG32(CP_ME_CNTL, CP_ME_HALT | CP_PFP_HALT);
	udelay(50);
	/* Disable DMA */
	tmp = RREG32(DMA_RB_CNTL);
	tmp &= ~DMA_RB_ENABLE;
	WREG32(DMA_RB_CNTL, tmp);
	/* XXX other engines? */

	/* halt the rlc */
	r600_rlc_stop(rdev);

	udelay(50);

	/* set mclk/sclk to bypass */
	rv770_set_clk_bypass_mode(rdev);
	/* disable BM */
	pci_clear_master(rdev->pdev);
	/* disable mem access */
	evergreen_mc_stop(rdev, &save);
	if (evergreen_mc_wait_for_idle(rdev)) {
		dev_warn(rdev->dev, "Wait for MC idle timed out !\n");
	}
	/* reset */
	radeon_pci_config_reset(rdev);
	/* wait for asic to come out of reset */
	for (i = 0; i < rdev->usec_timeout; i++) {
		/* all-ones reads mean the device is still in reset */
		if (RREG32(CONFIG_MEMSIZE) != 0xffffffff)
			break;
		udelay(1);
	}
}
4139
4140 int evergreen_asic_reset(struct radeon_device *rdev, bool hard)
4141 {
4142 u32 reset_mask;
4143
4144 if (hard) {
4145 evergreen_gpu_pci_config_reset(rdev);
4146 return 0;
4147 }
4148
4149 reset_mask = evergreen_gpu_check_soft_reset(rdev);
4150
4151 if (reset_mask)
4152 r600_set_bios_scratch_engine_hung(rdev, true);
4153
4154 /* try soft reset */
4155 evergreen_gpu_soft_reset(rdev, reset_mask);
4156
4157 reset_mask = evergreen_gpu_check_soft_reset(rdev);
4158
4159 /* try pci config reset */
4160 if (reset_mask && radeon_hard_reset)
4161 evergreen_gpu_pci_config_reset(rdev);
4162
4163 reset_mask = evergreen_gpu_check_soft_reset(rdev);
4164
4165 if (!reset_mask)
4166 r600_set_bios_scratch_engine_hung(rdev, false);
4167
4168 return 0;
4169 }
4170
4171 /**
4172 * evergreen_gfx_is_lockup - Check if the GFX engine is locked up
4173 *
4174 * @rdev: radeon_device pointer
4175 * @ring: radeon_ring structure holding ring information
4176 *
4177 * Check if the GFX engine is locked up.
4178 * Returns true if the engine appears to be locked up, false if not.
4179 */
4180 bool evergreen_gfx_is_lockup(struct radeon_device *rdev, struct radeon_ring *ring)
4181 {
4182 u32 reset_mask = evergreen_gpu_check_soft_reset(rdev);
4183
4184 if (!(reset_mask & (RADEON_RESET_GFX |
4185 RADEON_RESET_COMPUTE |
4186 RADEON_RESET_CP))) {
4187 radeon_ring_lockup_update(rdev, ring);
4188 return false;
4189 }
4190 return radeon_ring_test_lockup(rdev, ring);
4191 }
4192
4193 /*
4194 * RLC
4195 */
4196 #define RLC_SAVE_RESTORE_LIST_END_MARKER 0x00000000
4197 #define RLC_CLEAR_STATE_END_MARKER 0x00000001
4198
4199 void sumo_rlc_fini(struct radeon_device *rdev)
4200 {
4201 int r;
4202
4203 /* save restore block */
4204 if (rdev->rlc.save_restore_obj) {
4205 r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
4206 if (unlikely(r != 0))
4207 dev_warn(rdev->dev, "(%d) reserve RLC sr bo failed\n", r);
4208 radeon_bo_unpin(rdev->rlc.save_restore_obj);
4209 radeon_bo_unreserve(rdev->rlc.save_restore_obj);
4210
4211 radeon_bo_unref(&rdev->rlc.save_restore_obj);
4212 rdev->rlc.save_restore_obj = NULL;
4213 }
4214
4215 /* clear state block */
4216 if (rdev->rlc.clear_state_obj) {
4217 r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
4218 if (unlikely(r != 0))
4219 dev_warn(rdev->dev, "(%d) reserve RLC c bo failed\n", r);
4220 radeon_bo_unpin(rdev->rlc.clear_state_obj);
4221 radeon_bo_unreserve(rdev->rlc.clear_state_obj);
4222
4223 radeon_bo_unref(&rdev->rlc.clear_state_obj);
4224 rdev->rlc.clear_state_obj = NULL;
4225 }
4226
4227 /* clear state block */
4228 if (rdev->rlc.cp_table_obj) {
4229 r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false);
4230 if (unlikely(r != 0))
4231 dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r);
4232 radeon_bo_unpin(rdev->rlc.cp_table_obj);
4233 radeon_bo_unreserve(rdev->rlc.cp_table_obj);
4234
4235 radeon_bo_unref(&rdev->rlc.cp_table_obj);
4236 rdev->rlc.cp_table_obj = NULL;
4237 }
4238 }
4239
4240 #define CP_ME_TABLE_SIZE 96
4241
/**
 * sumo_rlc_init - allocate and fill the RLC buffer objects
 *
 * @rdev: radeon_device pointer
 *
 * Creates, pins and fills up to three VRAM buffer objects for the RLC:
 * the save/restore register list, the clear-state buffer, and (when
 * rdev->rlc.cp_table_size is set) the CP power-gating table.  This
 * helper is shared by several generations; the family checks select
 * the per-ASIC buffer layouts.  On any failure the partially created
 * objects are torn down via sumo_rlc_fini().
 * Returns 0 on success, negative error code on failure.
 */
int sumo_rlc_init(struct radeon_device *rdev)
{
	const u32 *src_ptr;
	volatile u32 *dst_ptr;
	u32 dws, data, i, j, k, reg_num;
	u32 reg_list_num, reg_list_hdr_blk_index, reg_list_blk_index = 0;
	u64 reg_list_mc_addr;
	const struct cs_section_def *cs_data;
	int r;

	src_ptr = rdev->rlc.reg_list;
	dws = rdev->rlc.reg_list_size;
	if (rdev->family >= CHIP_BONAIRE) {
		/* CIK needs extra room in the sr buffer; exact breakdown
		 * of (5*16)+48+48+64 dwords is not visible here.
		 */
		dws += (5 * 16) + 48 + 48 + 64;
	}
	cs_data = rdev->rlc.cs_data;

	if (src_ptr) {
		/* save restore block */
		if (rdev->rlc.save_restore_obj == NULL) {
			r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
					     RADEON_GEM_DOMAIN_VRAM, 0, NULL,
					     NULL, &rdev->rlc.save_restore_obj);
			if (r) {
				dev_warn(rdev->dev, "(%d) create RLC sr bo failed\n", r);
				return r;
			}
		}

		r = radeon_bo_reserve(rdev->rlc.save_restore_obj, false);
		if (unlikely(r != 0)) {
			sumo_rlc_fini(rdev);
			return r;
		}
		r = radeon_bo_pin(rdev->rlc.save_restore_obj, RADEON_GEM_DOMAIN_VRAM,
				  &rdev->rlc.save_restore_gpu_addr);
		if (r) {
			radeon_bo_unreserve(rdev->rlc.save_restore_obj);
			dev_warn(rdev->dev, "(%d) pin RLC sr bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}

		r = radeon_bo_kmap(rdev->rlc.save_restore_obj, (void **)&rdev->rlc.sr_ptr);
		if (r) {
			dev_warn(rdev->dev, "(%d) map RLC sr bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		/* write the sr buffer */
		dst_ptr = rdev->rlc.sr_ptr;
		if (rdev->family >= CHIP_TAHITI) {
			/* SI: plain little-endian copy of the register list */
			for (i = 0; i < rdev->rlc.reg_list_size; i++)
				dst_ptr[i] = cpu_to_le32(src_ptr[i]);
		} else {
			/* ON/LN/TN */
			/* format:
			 * dw0: (reg2 << 16) | reg1
			 * dw1: reg1 save space
			 * dw2: reg2 save space
			 */
			/* note: i is advanced a second time inside the body
			 * to consume register offsets in pairs
			 */
			for (i = 0; i < dws; i++) {
				data = src_ptr[i] >> 2;
				i++;
				if (i < dws)
					data |= (src_ptr[i] >> 2) << 16;
				j = (((i - 1) * 3) / 2);
				dst_ptr[j] = cpu_to_le32(data);
			}
			j = ((i * 3) / 2);
			dst_ptr[j] = cpu_to_le32(RLC_SAVE_RESTORE_LIST_END_MARKER);
		}
		radeon_bo_kunmap(rdev->rlc.save_restore_obj);
		radeon_bo_unreserve(rdev->rlc.save_restore_obj);
	}

	if (cs_data) {
		/* clear state block */
		if (rdev->family >= CHIP_BONAIRE) {
			rdev->rlc.clear_state_size = dws = cik_get_csb_size(rdev);
		} else if (rdev->family >= CHIP_TAHITI) {
			/* SI prepends a 256-byte header before the CSB */
			rdev->rlc.clear_state_size = si_get_csb_size(rdev);
			dws = rdev->rlc.clear_state_size + (256 / 4);
		} else {
			/* count registers across all sections to size the buffer */
			reg_list_num = 0;
			dws = 0;
			for (i = 0; cs_data[i].section != NULL; i++) {
				for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
					reg_list_num++;
					dws += cs_data[i].section[j].reg_count;
				}
			}
			/* 3 header dwords per list, plus 2 trailing dwords */
			reg_list_blk_index = (3 * reg_list_num + 2);
			dws += reg_list_blk_index;
			rdev->rlc.clear_state_size = dws;
		}

		if (rdev->rlc.clear_state_obj == NULL) {
			r = radeon_bo_create(rdev, dws * 4, PAGE_SIZE, true,
					     RADEON_GEM_DOMAIN_VRAM, 0, NULL,
					     NULL, &rdev->rlc.clear_state_obj);
			if (r) {
				dev_warn(rdev->dev, "(%d) create RLC c bo failed\n", r);
				sumo_rlc_fini(rdev);
				return r;
			}
		}
		r = radeon_bo_reserve(rdev->rlc.clear_state_obj, false);
		if (unlikely(r != 0)) {
			sumo_rlc_fini(rdev);
			return r;
		}
		r = radeon_bo_pin(rdev->rlc.clear_state_obj, RADEON_GEM_DOMAIN_VRAM,
				  &rdev->rlc.clear_state_gpu_addr);
		if (r) {
			radeon_bo_unreserve(rdev->rlc.clear_state_obj);
			dev_warn(rdev->dev, "(%d) pin RLC c bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}

		r = radeon_bo_kmap(rdev->rlc.clear_state_obj, (void **)&rdev->rlc.cs_ptr);
		if (r) {
			dev_warn(rdev->dev, "(%d) map RLC c bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		/* set up the cs buffer */
		dst_ptr = rdev->rlc.cs_ptr;
		if (rdev->family >= CHIP_BONAIRE) {
			cik_get_csb_buffer(rdev, dst_ptr);
		} else if (rdev->family >= CHIP_TAHITI) {
			/* header: GPU address and size of the CSB that follows */
			reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + 256;
			dst_ptr[0] = cpu_to_le32(upper_32_bits(reg_list_mc_addr));
			dst_ptr[1] = cpu_to_le32(lower_32_bits(reg_list_mc_addr));
			dst_ptr[2] = cpu_to_le32(rdev->rlc.clear_state_size);
			si_get_csb_buffer(rdev, &dst_ptr[(256/4)]);
		} else {
			/* headers first, register data blocks after them */
			reg_list_hdr_blk_index = 0;
			reg_list_mc_addr = rdev->rlc.clear_state_gpu_addr + (reg_list_blk_index * 4);
			data = upper_32_bits(reg_list_mc_addr);
			dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
			reg_list_hdr_blk_index++;
			for (i = 0; cs_data[i].section != NULL; i++) {
				for (j = 0; cs_data[i].section[j].extent != NULL; j++) {
					reg_num = cs_data[i].section[j].reg_count;
					/* header dw0: low 32 bits of data address */
					data = reg_list_mc_addr & 0xffffffff;
					dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
					reg_list_hdr_blk_index++;

					/* header dw1: byte offset of first register */
					data = (cs_data[i].section[j].reg_index * 4) & 0xffffffff;
					dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
					reg_list_hdr_blk_index++;

					/* header dw2: flag plus byte length of the block */
					data = 0x08000000 | (reg_num * 4);
					dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(data);
					reg_list_hdr_blk_index++;

					for (k = 0; k < reg_num; k++) {
						data = cs_data[i].section[j].extent[k];
						dst_ptr[reg_list_blk_index + k] = cpu_to_le32(data);
					}
					reg_list_mc_addr += reg_num * 4;
					reg_list_blk_index += reg_num;
				}
			}
			dst_ptr[reg_list_hdr_blk_index] = cpu_to_le32(RLC_CLEAR_STATE_END_MARKER);
		}
		radeon_bo_kunmap(rdev->rlc.clear_state_obj);
		radeon_bo_unreserve(rdev->rlc.clear_state_obj);
	}

	if (rdev->rlc.cp_table_size) {
		if (rdev->rlc.cp_table_obj == NULL) {
			r = radeon_bo_create(rdev, rdev->rlc.cp_table_size,
					     PAGE_SIZE, true,
					     RADEON_GEM_DOMAIN_VRAM, 0, NULL,
					     NULL, &rdev->rlc.cp_table_obj);
			if (r) {
				dev_warn(rdev->dev, "(%d) create RLC cp table bo failed\n", r);
				sumo_rlc_fini(rdev);
				return r;
			}
		}

		r = radeon_bo_reserve(rdev->rlc.cp_table_obj, false);
		if (unlikely(r != 0)) {
			dev_warn(rdev->dev, "(%d) reserve RLC cp table bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		r = radeon_bo_pin(rdev->rlc.cp_table_obj, RADEON_GEM_DOMAIN_VRAM,
				  &rdev->rlc.cp_table_gpu_addr);
		if (r) {
			radeon_bo_unreserve(rdev->rlc.cp_table_obj);
			dev_warn(rdev->dev, "(%d) pin RLC cp_table bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}
		r = radeon_bo_kmap(rdev->rlc.cp_table_obj, (void **)&rdev->rlc.cp_table_ptr);
		if (r) {
			dev_warn(rdev->dev, "(%d) map RLC cp table bo failed\n", r);
			sumo_rlc_fini(rdev);
			return r;
		}

		cik_init_cp_pg_table(rdev);

		radeon_bo_kunmap(rdev->rlc.cp_table_obj);
		radeon_bo_unreserve(rdev->rlc.cp_table_obj);

	}

	return 0;
}
4458
4459 static void evergreen_rlc_start(struct radeon_device *rdev)
4460 {
4461 u32 mask = RLC_ENABLE;
4462
4463 if (rdev->flags & RADEON_IS_IGP) {
4464 mask |= GFX_POWER_GATING_ENABLE | GFX_POWER_GATING_SRC;
4465 }
4466
4467 WREG32(RLC_CNTL, mask);
4468 }
4469
4470 int evergreen_rlc_resume(struct radeon_device *rdev)
4471 {
4472 u32 i;
4473 const __be32 *fw_data;
4474
4475 if (!rdev->rlc_fw)
4476 return -EINVAL;
4477
4478 r600_rlc_stop(rdev);
4479
4480 WREG32(RLC_HB_CNTL, 0);
4481
4482 if (rdev->flags & RADEON_IS_IGP) {
4483 if (rdev->family == CHIP_ARUBA) {
4484 u32 always_on_bitmap =
4485 3 | (3 << (16 * rdev->config.cayman.max_shader_engines));
4486 /* find out the number of active simds */
4487 u32 tmp = (RREG32(CC_GC_SHADER_PIPE_CONFIG) & 0xffff0000) >> 16;
4488 tmp |= 0xffffffff << rdev->config.cayman.max_simds_per_se;
4489 tmp = hweight32(~tmp);
4490 if (tmp == rdev->config.cayman.max_simds_per_se) {
4491 WREG32(TN_RLC_LB_ALWAYS_ACTIVE_SIMD_MASK, always_on_bitmap);
4492 WREG32(TN_RLC_LB_PARAMS, 0x00601004);
4493 WREG32(TN_RLC_LB_INIT_SIMD_MASK, 0xffffffff);
4494 WREG32(TN_RLC_LB_CNTR_INIT, 0x00000000);
4495 WREG32(TN_RLC_LB_CNTR_MAX, 0x00002000);
4496 }
4497 } else {
4498 WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
4499 WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
4500 }
4501 WREG32(TN_RLC_SAVE_AND_RESTORE_BASE, rdev->rlc.save_restore_gpu_addr >> 8);
4502 WREG32(TN_RLC_CLEAR_STATE_RESTORE_BASE, rdev->rlc.clear_state_gpu_addr >> 8);
4503 } else {
4504 WREG32(RLC_HB_BASE, 0);
4505 WREG32(RLC_HB_RPTR, 0);
4506 WREG32(RLC_HB_WPTR, 0);
4507 WREG32(RLC_HB_WPTR_LSB_ADDR, 0);
4508 WREG32(RLC_HB_WPTR_MSB_ADDR, 0);
4509 }
4510 WREG32(RLC_MC_CNTL, 0);
4511 WREG32(RLC_UCODE_CNTL, 0);
4512
4513 fw_data = (const __be32 *)rdev->rlc_fw->data;
4514 if (rdev->family >= CHIP_ARUBA) {
4515 for (i = 0; i < ARUBA_RLC_UCODE_SIZE; i++) {
4516 WREG32(RLC_UCODE_ADDR, i);
4517 WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4518 }
4519 } else if (rdev->family >= CHIP_CAYMAN) {
4520 for (i = 0; i < CAYMAN_RLC_UCODE_SIZE; i++) {
4521 WREG32(RLC_UCODE_ADDR, i);
4522 WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4523 }
4524 } else {
4525 for (i = 0; i < EVERGREEN_RLC_UCODE_SIZE; i++) {
4526 WREG32(RLC_UCODE_ADDR, i);
4527 WREG32(RLC_UCODE_DATA, be32_to_cpup(fw_data++));
4528 }
4529 }
4530 WREG32(RLC_UCODE_ADDR, 0);
4531
4532 evergreen_rlc_start(rdev);
4533
4534 return 0;
4535 }
4536
4537 /* Interrupts */
4538
4539 u32 evergreen_get_vblank_counter(struct radeon_device *rdev, int crtc)
4540 {
4541 if (crtc >= rdev->num_crtc)
4542 return 0;
4543 else
4544 return RREG32(CRTC_STATUS_FRAME_COUNT + crtc_offsets[crtc]);
4545 }
4546
/**
 * evergreen_disable_interrupt_state - force all interrupt sources off
 *
 * @rdev: radeon_device pointer
 *
 * Clears the CP, DMA, GRBM and SRBM interrupt enables, the per-CRTC
 * vblank/vline masks and pageflip (GRPH) interrupt controls, the DAC
 * autodetect interrupts, and the HPD interrupt enables (preserving
 * only the latched HPD polarity bits).
 */
void evergreen_disable_interrupt_state(struct radeon_device *rdev)
{
	u32 tmp;

	if (rdev->family >= CHIP_CAYMAN) {
		/* cayman+ has three CP rings and a second DMA engine */
		cayman_cp_int_cntl_setup(rdev, 0,
					 CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
		cayman_cp_int_cntl_setup(rdev, 1, 0);
		cayman_cp_int_cntl_setup(rdev, 2, 0);
		tmp = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
		WREG32(CAYMAN_DMA1_CNTL, tmp);
	} else
		WREG32(CP_INT_CNTL, CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE);
	tmp = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
	WREG32(DMA_CNTL, tmp);
	WREG32(GRBM_INT_CNTL, 0);
	WREG32(SRBM_INT_CNTL, 0);
	/* per-CRTC vblank/vline interrupt masks */
	WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
	WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	if (rdev->num_crtc >= 4) {
		WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
		WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
		WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
	}

	/* per-CRTC pageflip (GRPH) interrupt controls */
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, 0);
	WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, 0);
	if (rdev->num_crtc >= 4) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, 0);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, 0);
	}
	if (rdev->num_crtc >= 6) {
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, 0);
		WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, 0);
	}

	/* only one DAC on DCE5 */
	/* NOTE(review): DACB is cleared unconditionally while DACA is
	 * skipped on DCE5 — presumably DACA does not exist there;
	 * verify against the DCE5 register spec.
	 */
	if (!ASIC_IS_DCE5(rdev))
		WREG32(DACA_AUTODETECT_INT_CONTROL, 0);
	WREG32(DACB_AUTODETECT_INT_CONTROL, 0);

	/* HPD: keep only the latched polarity bit, drop all enables */
	tmp = RREG32(DC_HPD1_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD1_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD2_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD2_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD3_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD3_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD4_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD4_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD5_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD5_INT_CONTROL, tmp);
	tmp = RREG32(DC_HPD6_INT_CONTROL) & DC_HPDx_INT_POLARITY;
	WREG32(DC_HPD6_INT_CONTROL, tmp);

}
4605
4606 int evergreen_irq_set(struct radeon_device *rdev)
4607 {
4608 u32 cp_int_cntl = CNTX_BUSY_INT_ENABLE | CNTX_EMPTY_INT_ENABLE;
4609 u32 cp_int_cntl1 = 0, cp_int_cntl2 = 0;
4610 u32 crtc1 = 0, crtc2 = 0, crtc3 = 0, crtc4 = 0, crtc5 = 0, crtc6 = 0;
4611 u32 hpd1, hpd2, hpd3, hpd4, hpd5, hpd6;
4612 u32 grbm_int_cntl = 0;
4613 u32 afmt1 = 0, afmt2 = 0, afmt3 = 0, afmt4 = 0, afmt5 = 0, afmt6 = 0;
4614 u32 dma_cntl, dma_cntl1 = 0;
4615 u32 thermal_int = 0;
4616
4617 if (!rdev->irq.installed) {
4618 WARN(1, "Can't enable IRQ/MSI because no handler is installed\n");
4619 return -EINVAL;
4620 }
4621 /* don't enable anything if the ih is disabled */
4622 if (!rdev->ih.enabled) {
4623 r600_disable_interrupts(rdev);
4624 /* force the active interrupt state to all disabled */
4625 evergreen_disable_interrupt_state(rdev);
4626 return 0;
4627 }
4628
4629 hpd1 = RREG32(DC_HPD1_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
4630 hpd2 = RREG32(DC_HPD2_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
4631 hpd3 = RREG32(DC_HPD3_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
4632 hpd4 = RREG32(DC_HPD4_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
4633 hpd5 = RREG32(DC_HPD5_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
4634 hpd6 = RREG32(DC_HPD6_INT_CONTROL) & ~(DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN);
4635 if (rdev->family == CHIP_ARUBA)
4636 thermal_int = RREG32(TN_CG_THERMAL_INT_CTRL) &
4637 ~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);
4638 else
4639 thermal_int = RREG32(CG_THERMAL_INT) &
4640 ~(THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW);
4641
4642 afmt1 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4643 afmt2 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4644 afmt3 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4645 afmt4 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4646 afmt5 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4647 afmt6 = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET) & ~AFMT_AZ_FORMAT_WTRIG_MASK;
4648
4649 dma_cntl = RREG32(DMA_CNTL) & ~TRAP_ENABLE;
4650
4651 if (rdev->family >= CHIP_CAYMAN) {
4652 /* enable CP interrupts on all rings */
4653 if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
4654 DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
4655 cp_int_cntl |= TIME_STAMP_INT_ENABLE;
4656 }
4657 if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP1_INDEX])) {
4658 DRM_DEBUG("evergreen_irq_set: sw int cp1\n");
4659 cp_int_cntl1 |= TIME_STAMP_INT_ENABLE;
4660 }
4661 if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_CP2_INDEX])) {
4662 DRM_DEBUG("evergreen_irq_set: sw int cp2\n");
4663 cp_int_cntl2 |= TIME_STAMP_INT_ENABLE;
4664 }
4665 } else {
4666 if (atomic_read(&rdev->irq.ring_int[RADEON_RING_TYPE_GFX_INDEX])) {
4667 DRM_DEBUG("evergreen_irq_set: sw int gfx\n");
4668 cp_int_cntl |= RB_INT_ENABLE;
4669 cp_int_cntl |= TIME_STAMP_INT_ENABLE;
4670 }
4671 }
4672
4673 if (atomic_read(&rdev->irq.ring_int[R600_RING_TYPE_DMA_INDEX])) {
4674 DRM_DEBUG("r600_irq_set: sw int dma\n");
4675 dma_cntl |= TRAP_ENABLE;
4676 }
4677
4678 if (rdev->family >= CHIP_CAYMAN) {
4679 dma_cntl1 = RREG32(CAYMAN_DMA1_CNTL) & ~TRAP_ENABLE;
4680 if (atomic_read(&rdev->irq.ring_int[CAYMAN_RING_TYPE_DMA1_INDEX])) {
4681 DRM_DEBUG("r600_irq_set: sw int dma1\n");
4682 dma_cntl1 |= TRAP_ENABLE;
4683 }
4684 }
4685
4686 if (rdev->irq.dpm_thermal) {
4687 DRM_DEBUG("dpm thermal\n");
4688 thermal_int |= THERM_INT_MASK_HIGH | THERM_INT_MASK_LOW;
4689 }
4690
4691 if (rdev->irq.crtc_vblank_int[0] ||
4692 atomic_read(&rdev->irq.pflip[0])) {
4693 DRM_DEBUG("evergreen_irq_set: vblank 0\n");
4694 crtc1 |= VBLANK_INT_MASK;
4695 }
4696 if (rdev->irq.crtc_vblank_int[1] ||
4697 atomic_read(&rdev->irq.pflip[1])) {
4698 DRM_DEBUG("evergreen_irq_set: vblank 1\n");
4699 crtc2 |= VBLANK_INT_MASK;
4700 }
4701 if (rdev->irq.crtc_vblank_int[2] ||
4702 atomic_read(&rdev->irq.pflip[2])) {
4703 DRM_DEBUG("evergreen_irq_set: vblank 2\n");
4704 crtc3 |= VBLANK_INT_MASK;
4705 }
4706 if (rdev->irq.crtc_vblank_int[3] ||
4707 atomic_read(&rdev->irq.pflip[3])) {
4708 DRM_DEBUG("evergreen_irq_set: vblank 3\n");
4709 crtc4 |= VBLANK_INT_MASK;
4710 }
4711 if (rdev->irq.crtc_vblank_int[4] ||
4712 atomic_read(&rdev->irq.pflip[4])) {
4713 DRM_DEBUG("evergreen_irq_set: vblank 4\n");
4714 crtc5 |= VBLANK_INT_MASK;
4715 }
4716 if (rdev->irq.crtc_vblank_int[5] ||
4717 atomic_read(&rdev->irq.pflip[5])) {
4718 DRM_DEBUG("evergreen_irq_set: vblank 5\n");
4719 crtc6 |= VBLANK_INT_MASK;
4720 }
4721 if (rdev->irq.hpd[0]) {
4722 DRM_DEBUG("evergreen_irq_set: hpd 1\n");
4723 hpd1 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
4724 }
4725 if (rdev->irq.hpd[1]) {
4726 DRM_DEBUG("evergreen_irq_set: hpd 2\n");
4727 hpd2 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
4728 }
4729 if (rdev->irq.hpd[2]) {
4730 DRM_DEBUG("evergreen_irq_set: hpd 3\n");
4731 hpd3 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
4732 }
4733 if (rdev->irq.hpd[3]) {
4734 DRM_DEBUG("evergreen_irq_set: hpd 4\n");
4735 hpd4 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
4736 }
4737 if (rdev->irq.hpd[4]) {
4738 DRM_DEBUG("evergreen_irq_set: hpd 5\n");
4739 hpd5 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
4740 }
4741 if (rdev->irq.hpd[5]) {
4742 DRM_DEBUG("evergreen_irq_set: hpd 6\n");
4743 hpd6 |= DC_HPDx_INT_EN | DC_HPDx_RX_INT_EN;
4744 }
4745 if (rdev->irq.afmt[0]) {
4746 DRM_DEBUG("evergreen_irq_set: hdmi 0\n");
4747 afmt1 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4748 }
4749 if (rdev->irq.afmt[1]) {
4750 DRM_DEBUG("evergreen_irq_set: hdmi 1\n");
4751 afmt2 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4752 }
4753 if (rdev->irq.afmt[2]) {
4754 DRM_DEBUG("evergreen_irq_set: hdmi 2\n");
4755 afmt3 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4756 }
4757 if (rdev->irq.afmt[3]) {
4758 DRM_DEBUG("evergreen_irq_set: hdmi 3\n");
4759 afmt4 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4760 }
4761 if (rdev->irq.afmt[4]) {
4762 DRM_DEBUG("evergreen_irq_set: hdmi 4\n");
4763 afmt5 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4764 }
4765 if (rdev->irq.afmt[5]) {
4766 DRM_DEBUG("evergreen_irq_set: hdmi 5\n");
4767 afmt6 |= AFMT_AZ_FORMAT_WTRIG_MASK;
4768 }
4769
4770 if (rdev->family >= CHIP_CAYMAN) {
4771 cayman_cp_int_cntl_setup(rdev, 0, cp_int_cntl);
4772 cayman_cp_int_cntl_setup(rdev, 1, cp_int_cntl1);
4773 cayman_cp_int_cntl_setup(rdev, 2, cp_int_cntl2);
4774 } else
4775 WREG32(CP_INT_CNTL, cp_int_cntl);
4776
4777 WREG32(DMA_CNTL, dma_cntl);
4778
4779 if (rdev->family >= CHIP_CAYMAN)
4780 WREG32(CAYMAN_DMA1_CNTL, dma_cntl1);
4781
4782 WREG32(GRBM_INT_CNTL, grbm_int_cntl);
4783
4784 WREG32(INT_MASK + EVERGREEN_CRTC0_REGISTER_OFFSET, crtc1);
4785 WREG32(INT_MASK + EVERGREEN_CRTC1_REGISTER_OFFSET, crtc2);
4786 if (rdev->num_crtc >= 4) {
4787 WREG32(INT_MASK + EVERGREEN_CRTC2_REGISTER_OFFSET, crtc3);
4788 WREG32(INT_MASK + EVERGREEN_CRTC3_REGISTER_OFFSET, crtc4);
4789 }
4790 if (rdev->num_crtc >= 6) {
4791 WREG32(INT_MASK + EVERGREEN_CRTC4_REGISTER_OFFSET, crtc5);
4792 WREG32(INT_MASK + EVERGREEN_CRTC5_REGISTER_OFFSET, crtc6);
4793 }
4794
4795 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET,
4796 GRPH_PFLIP_INT_MASK);
4797 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET,
4798 GRPH_PFLIP_INT_MASK);
4799 if (rdev->num_crtc >= 4) {
4800 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET,
4801 GRPH_PFLIP_INT_MASK);
4802 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET,
4803 GRPH_PFLIP_INT_MASK);
4804 }
4805 if (rdev->num_crtc >= 6) {
4806 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET,
4807 GRPH_PFLIP_INT_MASK);
4808 WREG32(GRPH_INT_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET,
4809 GRPH_PFLIP_INT_MASK);
4810 }
4811
4812 WREG32(DC_HPD1_INT_CONTROL, hpd1);
4813 WREG32(DC_HPD2_INT_CONTROL, hpd2);
4814 WREG32(DC_HPD3_INT_CONTROL, hpd3);
4815 WREG32(DC_HPD4_INT_CONTROL, hpd4);
4816 WREG32(DC_HPD5_INT_CONTROL, hpd5);
4817 WREG32(DC_HPD6_INT_CONTROL, hpd6);
4818 if (rdev->family == CHIP_ARUBA)
4819 WREG32(TN_CG_THERMAL_INT_CTRL, thermal_int);
4820 else
4821 WREG32(CG_THERMAL_INT, thermal_int);
4822
4823 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, afmt1);
4824 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, afmt2);
4825 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, afmt3);
4826 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, afmt4);
4827 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, afmt5);
4828 WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, afmt6);
4829
4830 /* posting read */
4831 RREG32(SRBM_STATUS);
4832
4833 return 0;
4834 }
4835
/**
 * evergreen_irq_ack - latch and acknowledge display interrupt sources
 *
 * @rdev: radeon_device pointer
 *
 * Snapshots every display-related interrupt status register into
 * rdev->irq.stat_regs.evergreen (evergreen_irq_process() parses the
 * cached copies rather than re-reading hardware), then writes back the
 * ack/clear bits for each source that is currently asserted: pageflip,
 * vblank/vline, HPD, HPD RX and AFMT (HDMI audio) triggers.
 */
static void evergreen_irq_ack(struct radeon_device *rdev)
{
	u32 tmp;

	/* latch the raw status registers for later parsing */
	rdev->irq.stat_regs.evergreen.disp_int = RREG32(DISP_INTERRUPT_STATUS);
	rdev->irq.stat_regs.evergreen.disp_int_cont = RREG32(DISP_INTERRUPT_STATUS_CONTINUE);
	rdev->irq.stat_regs.evergreen.disp_int_cont2 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE2);
	rdev->irq.stat_regs.evergreen.disp_int_cont3 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE3);
	rdev->irq.stat_regs.evergreen.disp_int_cont4 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE4);
	rdev->irq.stat_regs.evergreen.disp_int_cont5 = RREG32(DISP_INTERRUPT_STATUS_CONTINUE5);
	rdev->irq.stat_regs.evergreen.d1grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
	rdev->irq.stat_regs.evergreen.d2grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
	/* only read back the CRTC pair registers the ASIC actually has */
	if (rdev->num_crtc >= 4) {
		rdev->irq.stat_regs.evergreen.d3grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
		rdev->irq.stat_regs.evergreen.d4grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
	}
	if (rdev->num_crtc >= 6) {
		rdev->irq.stat_regs.evergreen.d5grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
		rdev->irq.stat_regs.evergreen.d6grph_int = RREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);
	}

	rdev->irq.stat_regs.evergreen.afmt_status1 = RREG32(AFMT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET);
	rdev->irq.stat_regs.evergreen.afmt_status2 = RREG32(AFMT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET);
	rdev->irq.stat_regs.evergreen.afmt_status3 = RREG32(AFMT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET);
	rdev->irq.stat_regs.evergreen.afmt_status4 = RREG32(AFMT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET);
	rdev->irq.stat_regs.evergreen.afmt_status5 = RREG32(AFMT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET);
	rdev->irq.stat_regs.evergreen.afmt_status6 = RREG32(AFMT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET);

	/* ack pageflip and vblank/vline events on CRTC0/CRTC1 */
	if (rdev->irq.stat_regs.evergreen.d1grph_int & GRPH_PFLIP_INT_OCCURRED)
		WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
	if (rdev->irq.stat_regs.evergreen.d2grph_int & GRPH_PFLIP_INT_OCCURRED)
		WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
	if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT)
		WREG32(VBLANK_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VBLANK_ACK);
	if (rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT)
		WREG32(VLINE_STATUS + EVERGREEN_CRTC0_REGISTER_OFFSET, VLINE_ACK);
	if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT)
		WREG32(VBLANK_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VBLANK_ACK);
	if (rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT)
		WREG32(VLINE_STATUS + EVERGREEN_CRTC1_REGISTER_OFFSET, VLINE_ACK);

	/* ditto for CRTC2/CRTC3 when present */
	if (rdev->num_crtc >= 4) {
		if (rdev->irq.stat_regs.evergreen.d3grph_int & GRPH_PFLIP_INT_OCCURRED)
			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
		if (rdev->irq.stat_regs.evergreen.d4grph_int & GRPH_PFLIP_INT_OCCURRED)
			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
		if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT)
			WREG32(VBLANK_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VBLANK_ACK);
		if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT)
			WREG32(VLINE_STATUS + EVERGREEN_CRTC2_REGISTER_OFFSET, VLINE_ACK);
		if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT)
			WREG32(VBLANK_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VBLANK_ACK);
		if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT)
			WREG32(VLINE_STATUS + EVERGREEN_CRTC3_REGISTER_OFFSET, VLINE_ACK);
	}

	/* and CRTC4/CRTC5 when present */
	if (rdev->num_crtc >= 6) {
		if (rdev->irq.stat_regs.evergreen.d5grph_int & GRPH_PFLIP_INT_OCCURRED)
			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
		if (rdev->irq.stat_regs.evergreen.d6grph_int & GRPH_PFLIP_INT_OCCURRED)
			WREG32(GRPH_INT_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, GRPH_PFLIP_INT_CLEAR);
		if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT)
			WREG32(VBLANK_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VBLANK_ACK);
		if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT)
			WREG32(VLINE_STATUS + EVERGREEN_CRTC4_REGISTER_OFFSET, VLINE_ACK);
		if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT)
			WREG32(VBLANK_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VBLANK_ACK);
		if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT)
			WREG32(VLINE_STATUS + EVERGREEN_CRTC5_REGISTER_OFFSET, VLINE_ACK);
	}

	/* HPD (hotplug detect) acks are read-modify-write: set the ack bit
	 * without disturbing the enable bits in the same register.
	 */
	if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT) {
		tmp = RREG32(DC_HPD1_INT_CONTROL);
		tmp |= DC_HPDx_INT_ACK;
		WREG32(DC_HPD1_INT_CONTROL, tmp);
	}
	if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT) {
		tmp = RREG32(DC_HPD2_INT_CONTROL);
		tmp |= DC_HPDx_INT_ACK;
		WREG32(DC_HPD2_INT_CONTROL, tmp);
	}
	if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT) {
		tmp = RREG32(DC_HPD3_INT_CONTROL);
		tmp |= DC_HPDx_INT_ACK;
		WREG32(DC_HPD3_INT_CONTROL, tmp);
	}
	if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT) {
		tmp = RREG32(DC_HPD4_INT_CONTROL);
		tmp |= DC_HPDx_INT_ACK;
		WREG32(DC_HPD4_INT_CONTROL, tmp);
	}
	if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT) {
		tmp = RREG32(DC_HPD5_INT_CONTROL);
		tmp |= DC_HPDx_INT_ACK;
		WREG32(DC_HPD5_INT_CONTROL, tmp);
	}
	if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT) {
		tmp = RREG32(DC_HPD6_INT_CONTROL);
		tmp |= DC_HPDx_INT_ACK;
		WREG32(DC_HPD6_INT_CONTROL, tmp);
	}

	/* HPD RX (DP short-pulse) interrupts, same read-modify-write ack */
	if (rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_RX_INTERRUPT) {
		tmp = RREG32(DC_HPD1_INT_CONTROL);
		tmp |= DC_HPDx_RX_INT_ACK;
		WREG32(DC_HPD1_INT_CONTROL, tmp);
	}
	if (rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_RX_INTERRUPT) {
		tmp = RREG32(DC_HPD2_INT_CONTROL);
		tmp |= DC_HPDx_RX_INT_ACK;
		WREG32(DC_HPD2_INT_CONTROL, tmp);
	}
	if (rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_RX_INTERRUPT) {
		tmp = RREG32(DC_HPD3_INT_CONTROL);
		tmp |= DC_HPDx_RX_INT_ACK;
		WREG32(DC_HPD3_INT_CONTROL, tmp);
	}
	if (rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_RX_INTERRUPT) {
		tmp = RREG32(DC_HPD4_INT_CONTROL);
		tmp |= DC_HPDx_RX_INT_ACK;
		WREG32(DC_HPD4_INT_CONTROL, tmp);
	}
	if (rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_RX_INTERRUPT) {
		tmp = RREG32(DC_HPD5_INT_CONTROL);
		tmp |= DC_HPDx_RX_INT_ACK;
		WREG32(DC_HPD5_INT_CONTROL, tmp);
	}
	if (rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_RX_INTERRUPT) {
		tmp = RREG32(DC_HPD6_INT_CONTROL);
		tmp |= DC_HPDx_RX_INT_ACK;
		WREG32(DC_HPD6_INT_CONTROL, tmp);
	}

	/* AFMT (HDMI audio format-change) write triggers, per CRTC */
	if (rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG) {
		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET);
		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC0_REGISTER_OFFSET, tmp);
	}
	if (rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG) {
		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET);
		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC1_REGISTER_OFFSET, tmp);
	}
	if (rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG) {
		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET);
		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC2_REGISTER_OFFSET, tmp);
	}
	if (rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG) {
		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET);
		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC3_REGISTER_OFFSET, tmp);
	}
	if (rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG) {
		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET);
		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC4_REGISTER_OFFSET, tmp);
	}
	if (rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG) {
		tmp = RREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET);
		tmp |= AFMT_AZ_FORMAT_WTRIG_ACK;
		WREG32(AFMT_AUDIO_PACKET_CONTROL + EVERGREEN_CRTC5_REGISTER_OFFSET, tmp);
	}
}
5000
/**
 * evergreen_irq_disable - fully disable interrupt generation
 *
 * @rdev: radeon_device pointer
 *
 * Masks interrupts at the IH controller, waits briefly for any in-flight
 * interrupt to land, acknowledges whatever is still pending, then programs
 * all per-source display interrupt enables back to their disabled state.
 */
static void evergreen_irq_disable(struct radeon_device *rdev)
{
	r600_disable_interrupts(rdev);
	/* Wait and acknowledge irq */
	mdelay(1);
	evergreen_irq_ack(rdev);
	evergreen_disable_interrupt_state(rdev);
}
5009
/**
 * evergreen_irq_suspend - quiesce interrupts for suspend
 *
 * @rdev: radeon_device pointer
 *
 * Disables and acks all interrupts, then stops the RLC so no further
 * interrupt traffic is generated while the device is suspended.
 */
void evergreen_irq_suspend(struct radeon_device *rdev)
{
	evergreen_irq_disable(rdev);
	r600_rlc_stop(rdev);
}
5015
5016 static u32 evergreen_get_ih_wptr(struct radeon_device *rdev)
5017 {
5018 u32 wptr, tmp;
5019
5020 if (rdev->wb.enabled)
5021 wptr = le32_to_cpu(rdev->wb.wb[R600_WB_IH_WPTR_OFFSET/4]);
5022 else
5023 wptr = RREG32(IH_RB_WPTR);
5024
5025 if (wptr & RB_OVERFLOW) {
5026 wptr &= ~RB_OVERFLOW;
5027 /* When a ring buffer overflow happen start parsing interrupt
5028 * from the last not overwritten vector (wptr + 16). Hopefully
5029 * this should allow us to catchup.
5030 */
5031 dev_warn(rdev->dev, "IH ring buffer overflow (0x%08X, 0x%08X, 0x%08X)\n",
5032 wptr, rdev->ih.rptr, (wptr + 16) & rdev->ih.ptr_mask);
5033 rdev->ih.rptr = (wptr + 16) & rdev->ih.ptr_mask;
5034 tmp = RREG32(IH_RB_CNTL);
5035 tmp |= IH_WPTR_OVERFLOW_CLEAR;
5036 WREG32(IH_RB_CNTL, tmp);
5037 }
5038 return (wptr & rdev->ih.ptr_mask);
5039 }
5040
5041 int evergreen_irq_process(struct radeon_device *rdev)
5042 {
5043 u32 wptr;
5044 u32 rptr;
5045 u32 src_id, src_data;
5046 u32 ring_index;
5047 bool queue_hotplug = false;
5048 bool queue_hdmi = false;
5049 bool queue_dp = false;
5050 bool queue_thermal = false;
5051 u32 status, addr;
5052
5053 if (!rdev->ih.enabled || rdev->shutdown)
5054 return IRQ_NONE;
5055
5056 wptr = evergreen_get_ih_wptr(rdev);
5057
5058 restart_ih:
5059 /* is somebody else already processing irqs? */
5060 if (atomic_xchg(&rdev->ih.lock, 1))
5061 return IRQ_NONE;
5062
5063 rptr = rdev->ih.rptr;
5064 DRM_DEBUG("evergreen_irq_process start: rptr %d, wptr %d\n", rptr, wptr);
5065
5066 /* Order reading of wptr vs. reading of IH ring data */
5067 rmb();
5068
5069 /* display interrupts */
5070 evergreen_irq_ack(rdev);
5071
5072 while (rptr != wptr) {
5073 /* wptr/rptr are in bytes! */
5074 ring_index = rptr / 4;
5075 src_id = le32_to_cpu(rdev->ih.ring[ring_index]) & 0xff;
5076 src_data = le32_to_cpu(rdev->ih.ring[ring_index + 1]) & 0xfffffff;
5077
5078 switch (src_id) {
5079 case 1: /* D1 vblank/vline */
5080 switch (src_data) {
5081 case 0: /* D1 vblank */
5082 if (!(rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VBLANK_INTERRUPT))
5083 DRM_DEBUG("IH: D1 vblank - IH event w/o asserted irq bit?\n");
5084
5085 if (rdev->irq.crtc_vblank_int[0]) {
5086 drm_handle_vblank(rdev->ddev, 0);
5087 rdev->pm.vblank_sync = true;
5088 wake_up(&rdev->irq.vblank_queue);
5089 }
5090 if (atomic_read(&rdev->irq.pflip[0]))
5091 radeon_crtc_handle_vblank(rdev, 0);
5092 rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VBLANK_INTERRUPT;
5093 DRM_DEBUG("IH: D1 vblank\n");
5094
5095 break;
5096 case 1: /* D1 vline */
5097 if (!(rdev->irq.stat_regs.evergreen.disp_int & LB_D1_VLINE_INTERRUPT))
5098 DRM_DEBUG("IH: D1 vline - IH event w/o asserted irq bit?\n");
5099
5100 rdev->irq.stat_regs.evergreen.disp_int &= ~LB_D1_VLINE_INTERRUPT;
5101 DRM_DEBUG("IH: D1 vline\n");
5102
5103 break;
5104 default:
5105 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5106 break;
5107 }
5108 break;
5109 case 2: /* D2 vblank/vline */
5110 switch (src_data) {
5111 case 0: /* D2 vblank */
5112 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VBLANK_INTERRUPT))
5113 DRM_DEBUG("IH: D2 vblank - IH event w/o asserted irq bit?\n");
5114
5115 if (rdev->irq.crtc_vblank_int[1]) {
5116 drm_handle_vblank(rdev->ddev, 1);
5117 rdev->pm.vblank_sync = true;
5118 wake_up(&rdev->irq.vblank_queue);
5119 }
5120 if (atomic_read(&rdev->irq.pflip[1]))
5121 radeon_crtc_handle_vblank(rdev, 1);
5122 rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VBLANK_INTERRUPT;
5123 DRM_DEBUG("IH: D2 vblank\n");
5124
5125 break;
5126 case 1: /* D2 vline */
5127 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & LB_D2_VLINE_INTERRUPT))
5128 DRM_DEBUG("IH: D2 vline - IH event w/o asserted irq bit?\n");
5129
5130 rdev->irq.stat_regs.evergreen.disp_int_cont &= ~LB_D2_VLINE_INTERRUPT;
5131 DRM_DEBUG("IH: D2 vline\n");
5132
5133 break;
5134 default:
5135 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5136 break;
5137 }
5138 break;
5139 case 3: /* D3 vblank/vline */
5140 switch (src_data) {
5141 case 0: /* D3 vblank */
5142 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VBLANK_INTERRUPT))
5143 DRM_DEBUG("IH: D3 vblank - IH event w/o asserted irq bit?\n");
5144
5145 if (rdev->irq.crtc_vblank_int[2]) {
5146 drm_handle_vblank(rdev->ddev, 2);
5147 rdev->pm.vblank_sync = true;
5148 wake_up(&rdev->irq.vblank_queue);
5149 }
5150 if (atomic_read(&rdev->irq.pflip[2]))
5151 radeon_crtc_handle_vblank(rdev, 2);
5152 rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VBLANK_INTERRUPT;
5153 DRM_DEBUG("IH: D3 vblank\n");
5154
5155 break;
5156 case 1: /* D3 vline */
5157 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & LB_D3_VLINE_INTERRUPT))
5158 DRM_DEBUG("IH: D3 vline - IH event w/o asserted irq bit?\n");
5159
5160 rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~LB_D3_VLINE_INTERRUPT;
5161 DRM_DEBUG("IH: D3 vline\n");
5162
5163 break;
5164 default:
5165 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5166 break;
5167 }
5168 break;
5169 case 4: /* D4 vblank/vline */
5170 switch (src_data) {
5171 case 0: /* D4 vblank */
5172 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VBLANK_INTERRUPT))
5173 DRM_DEBUG("IH: D4 vblank - IH event w/o asserted irq bit?\n");
5174
5175 if (rdev->irq.crtc_vblank_int[3]) {
5176 drm_handle_vblank(rdev->ddev, 3);
5177 rdev->pm.vblank_sync = true;
5178 wake_up(&rdev->irq.vblank_queue);
5179 }
5180 if (atomic_read(&rdev->irq.pflip[3]))
5181 radeon_crtc_handle_vblank(rdev, 3);
5182 rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VBLANK_INTERRUPT;
5183 DRM_DEBUG("IH: D4 vblank\n");
5184
5185 break;
5186 case 1: /* D4 vline */
5187 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & LB_D4_VLINE_INTERRUPT))
5188 DRM_DEBUG("IH: D4 vline - IH event w/o asserted irq bit?\n");
5189
5190 rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~LB_D4_VLINE_INTERRUPT;
5191 DRM_DEBUG("IH: D4 vline\n");
5192
5193 break;
5194 default:
5195 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5196 break;
5197 }
5198 break;
5199 case 5: /* D5 vblank/vline */
5200 switch (src_data) {
5201 case 0: /* D5 vblank */
5202 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VBLANK_INTERRUPT))
5203 DRM_DEBUG("IH: D5 vblank - IH event w/o asserted irq bit?\n");
5204
5205 if (rdev->irq.crtc_vblank_int[4]) {
5206 drm_handle_vblank(rdev->ddev, 4);
5207 rdev->pm.vblank_sync = true;
5208 wake_up(&rdev->irq.vblank_queue);
5209 }
5210 if (atomic_read(&rdev->irq.pflip[4]))
5211 radeon_crtc_handle_vblank(rdev, 4);
5212 rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VBLANK_INTERRUPT;
5213 DRM_DEBUG("IH: D5 vblank\n");
5214
5215 break;
5216 case 1: /* D5 vline */
5217 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & LB_D5_VLINE_INTERRUPT))
5218 DRM_DEBUG("IH: D5 vline - IH event w/o asserted irq bit?\n");
5219
5220 rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~LB_D5_VLINE_INTERRUPT;
5221 DRM_DEBUG("IH: D5 vline\n");
5222
5223 break;
5224 default:
5225 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5226 break;
5227 }
5228 break;
5229 case 6: /* D6 vblank/vline */
5230 switch (src_data) {
5231 case 0: /* D6 vblank */
5232 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VBLANK_INTERRUPT))
5233 DRM_DEBUG("IH: D6 vblank - IH event w/o asserted irq bit?\n");
5234
5235 if (rdev->irq.crtc_vblank_int[5]) {
5236 drm_handle_vblank(rdev->ddev, 5);
5237 rdev->pm.vblank_sync = true;
5238 wake_up(&rdev->irq.vblank_queue);
5239 }
5240 if (atomic_read(&rdev->irq.pflip[5]))
5241 radeon_crtc_handle_vblank(rdev, 5);
5242 rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VBLANK_INTERRUPT;
5243 DRM_DEBUG("IH: D6 vblank\n");
5244
5245 break;
5246 case 1: /* D6 vline */
5247 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & LB_D6_VLINE_INTERRUPT))
5248 DRM_DEBUG("IH: D6 vline - IH event w/o asserted irq bit?\n");
5249
5250 rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~LB_D6_VLINE_INTERRUPT;
5251 DRM_DEBUG("IH: D6 vline\n");
5252
5253 break;
5254 default:
5255 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5256 break;
5257 }
5258 break;
5259 case 8: /* D1 page flip */
5260 case 10: /* D2 page flip */
5261 case 12: /* D3 page flip */
5262 case 14: /* D4 page flip */
5263 case 16: /* D5 page flip */
5264 case 18: /* D6 page flip */
5265 DRM_DEBUG("IH: D%d flip\n", ((src_id - 8) >> 1) + 1);
5266 if (radeon_use_pflipirq > 0)
5267 radeon_crtc_handle_flip(rdev, (src_id - 8) >> 1);
5268 break;
5269 case 42: /* HPD hotplug */
5270 switch (src_data) {
5271 case 0:
5272 if (!(rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_INTERRUPT))
5273 DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5274
5275 rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_INTERRUPT;
5276 queue_hotplug = true;
5277 DRM_DEBUG("IH: HPD1\n");
5278 break;
5279 case 1:
5280 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_INTERRUPT))
5281 DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5282
5283 rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_INTERRUPT;
5284 queue_hotplug = true;
5285 DRM_DEBUG("IH: HPD2\n");
5286 break;
5287 case 2:
5288 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_INTERRUPT))
5289 DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5290
5291 rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_INTERRUPT;
5292 queue_hotplug = true;
5293 DRM_DEBUG("IH: HPD3\n");
5294 break;
5295 case 3:
5296 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_INTERRUPT))
5297 DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5298
5299 rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_INTERRUPT;
5300 queue_hotplug = true;
5301 DRM_DEBUG("IH: HPD4\n");
5302 break;
5303 case 4:
5304 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_INTERRUPT))
5305 DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5306
5307 rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_INTERRUPT;
5308 queue_hotplug = true;
5309 DRM_DEBUG("IH: HPD5\n");
5310 break;
5311 case 5:
5312 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_INTERRUPT))
5313 DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5314
5315 rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_INTERRUPT;
5316 queue_hotplug = true;
5317 DRM_DEBUG("IH: HPD6\n");
5318 break;
5319 case 6:
5320 if (!(rdev->irq.stat_regs.evergreen.disp_int & DC_HPD1_RX_INTERRUPT))
5321 DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5322
5323 rdev->irq.stat_regs.evergreen.disp_int &= ~DC_HPD1_RX_INTERRUPT;
5324 queue_dp = true;
5325 DRM_DEBUG("IH: HPD_RX 1\n");
5326 break;
5327 case 7:
5328 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont & DC_HPD2_RX_INTERRUPT))
5329 DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5330
5331 rdev->irq.stat_regs.evergreen.disp_int_cont &= ~DC_HPD2_RX_INTERRUPT;
5332 queue_dp = true;
5333 DRM_DEBUG("IH: HPD_RX 2\n");
5334 break;
5335 case 8:
5336 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont2 & DC_HPD3_RX_INTERRUPT))
5337 DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5338
5339 rdev->irq.stat_regs.evergreen.disp_int_cont2 &= ~DC_HPD3_RX_INTERRUPT;
5340 queue_dp = true;
5341 DRM_DEBUG("IH: HPD_RX 3\n");
5342 break;
5343 case 9:
5344 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont3 & DC_HPD4_RX_INTERRUPT))
5345 DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5346
5347 rdev->irq.stat_regs.evergreen.disp_int_cont3 &= ~DC_HPD4_RX_INTERRUPT;
5348 queue_dp = true;
5349 DRM_DEBUG("IH: HPD_RX 4\n");
5350 break;
5351 case 10:
5352 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont4 & DC_HPD5_RX_INTERRUPT))
5353 DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5354
5355 rdev->irq.stat_regs.evergreen.disp_int_cont4 &= ~DC_HPD5_RX_INTERRUPT;
5356 queue_dp = true;
5357 DRM_DEBUG("IH: HPD_RX 5\n");
5358 break;
5359 case 11:
5360 if (!(rdev->irq.stat_regs.evergreen.disp_int_cont5 & DC_HPD6_RX_INTERRUPT))
5361 DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5362
5363 rdev->irq.stat_regs.evergreen.disp_int_cont5 &= ~DC_HPD6_RX_INTERRUPT;
5364 queue_dp = true;
5365 DRM_DEBUG("IH: HPD_RX 6\n");
5366 break;
5367 default:
5368 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5369 break;
5370 }
5371 break;
5372 case 44: /* hdmi */
5373 switch (src_data) {
5374 case 0:
5375 if (!(rdev->irq.stat_regs.evergreen.afmt_status1 & AFMT_AZ_FORMAT_WTRIG))
5376 DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5377
5378 rdev->irq.stat_regs.evergreen.afmt_status1 &= ~AFMT_AZ_FORMAT_WTRIG;
5379 queue_hdmi = true;
5380 DRM_DEBUG("IH: HDMI0\n");
5381 break;
5382 case 1:
5383 if (!(rdev->irq.stat_regs.evergreen.afmt_status2 & AFMT_AZ_FORMAT_WTRIG))
5384 DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5385
5386 rdev->irq.stat_regs.evergreen.afmt_status2 &= ~AFMT_AZ_FORMAT_WTRIG;
5387 queue_hdmi = true;
5388 DRM_DEBUG("IH: HDMI1\n");
5389 break;
5390 case 2:
5391 if (!(rdev->irq.stat_regs.evergreen.afmt_status3 & AFMT_AZ_FORMAT_WTRIG))
5392 DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5393
5394 rdev->irq.stat_regs.evergreen.afmt_status3 &= ~AFMT_AZ_FORMAT_WTRIG;
5395 queue_hdmi = true;
5396 DRM_DEBUG("IH: HDMI2\n");
5397 break;
5398 case 3:
5399 if (!(rdev->irq.stat_regs.evergreen.afmt_status4 & AFMT_AZ_FORMAT_WTRIG))
5400 DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5401
5402 rdev->irq.stat_regs.evergreen.afmt_status4 &= ~AFMT_AZ_FORMAT_WTRIG;
5403 queue_hdmi = true;
5404 DRM_DEBUG("IH: HDMI3\n");
5405 break;
5406 case 4:
5407 if (!(rdev->irq.stat_regs.evergreen.afmt_status5 & AFMT_AZ_FORMAT_WTRIG))
5408 DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5409
5410 rdev->irq.stat_regs.evergreen.afmt_status5 &= ~AFMT_AZ_FORMAT_WTRIG;
5411 queue_hdmi = true;
5412 DRM_DEBUG("IH: HDMI4\n");
5413 break;
5414 case 5:
5415 if (!(rdev->irq.stat_regs.evergreen.afmt_status6 & AFMT_AZ_FORMAT_WTRIG))
5416 DRM_DEBUG("IH: IH event w/o asserted irq bit?\n");
5417
5418 rdev->irq.stat_regs.evergreen.afmt_status6 &= ~AFMT_AZ_FORMAT_WTRIG;
5419 queue_hdmi = true;
5420 DRM_DEBUG("IH: HDMI5\n");
5421 break;
5422 default:
5423 DRM_ERROR("Unhandled interrupt: %d %d\n", src_id, src_data);
5424 break;
5425 }
5426 case 96:
5427 DRM_ERROR("SRBM_READ_ERROR: 0x%x\n", RREG32(SRBM_READ_ERROR));
5428 WREG32(SRBM_INT_ACK, 0x1);
5429 break;
5430 case 124: /* UVD */
5431 DRM_DEBUG("IH: UVD int: 0x%08x\n", src_data);
5432 radeon_fence_process(rdev, R600_RING_TYPE_UVD_INDEX);
5433 break;
5434 case 146:
5435 case 147:
5436 addr = RREG32(VM_CONTEXT1_PROTECTION_FAULT_ADDR);
5437 status = RREG32(VM_CONTEXT1_PROTECTION_FAULT_STATUS);
5438 /* reset addr and status */
5439 WREG32_P(VM_CONTEXT1_CNTL2, 1, ~1);
5440 if (addr == 0x0 && status == 0x0)
5441 break;
5442 dev_err(rdev->dev, "GPU fault detected: %d 0x%08x\n", src_id, src_data);
5443 dev_err(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_ADDR 0x%08X\n",
5444 addr);
5445 dev_err(rdev->dev, " VM_CONTEXT1_PROTECTION_FAULT_STATUS 0x%08X\n",
5446 status);
5447 cayman_vm_decode_fault(rdev, status, addr);
5448 break;
5449 case 176: /* CP_INT in ring buffer */
5450 case 177: /* CP_INT in IB1 */
5451 case 178: /* CP_INT in IB2 */
5452 DRM_DEBUG("IH: CP int: 0x%08x\n", src_data);
5453 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5454 break;
5455 case 181: /* CP EOP event */
5456 DRM_DEBUG("IH: CP EOP\n");
5457 if (rdev->family >= CHIP_CAYMAN) {
5458 switch (src_data) {
5459 case 0:
5460 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5461 break;
5462 case 1:
5463 radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP1_INDEX);
5464 break;
5465 case 2:
5466 radeon_fence_process(rdev, CAYMAN_RING_TYPE_CP2_INDEX);
5467 break;
5468 }
5469 } else
5470 radeon_fence_process(rdev, RADEON_RING_TYPE_GFX_INDEX);
5471 break;
5472 case 224: /* DMA trap event */
5473 DRM_DEBUG("IH: DMA trap\n");
5474 radeon_fence_process(rdev, R600_RING_TYPE_DMA_INDEX);
5475 break;
5476 case 230: /* thermal low to high */
5477 DRM_DEBUG("IH: thermal low to high\n");
5478 rdev->pm.dpm.thermal.high_to_low = false;
5479 queue_thermal = true;
5480 break;
5481 case 231: /* thermal high to low */
5482 DRM_DEBUG("IH: thermal high to low\n");
5483 rdev->pm.dpm.thermal.high_to_low = true;
5484 queue_thermal = true;
5485 break;
5486 case 233: /* GUI IDLE */
5487 DRM_DEBUG("IH: GUI idle\n");
5488 break;
5489 case 244: /* DMA trap event */
5490 if (rdev->family >= CHIP_CAYMAN) {
5491 DRM_DEBUG("IH: DMA1 trap\n");
5492 radeon_fence_process(rdev, CAYMAN_RING_TYPE_DMA1_INDEX);
5493 }
5494 break;
5495 default:
5496 DRM_DEBUG("Unhandled interrupt: %d %d\n", src_id, src_data);
5497 break;
5498 }
5499
5500 /* wptr/rptr are in bytes! */
5501 rptr += 16;
5502 rptr &= rdev->ih.ptr_mask;
5503 WREG32(IH_RB_RPTR, rptr);
5504 }
5505 if (queue_dp)
5506 schedule_work(&rdev->dp_work);
5507 if (queue_hotplug)
5508 schedule_delayed_work(&rdev->hotplug_work, 0);
5509 if (queue_hdmi)
5510 schedule_work(&rdev->audio_work);
5511 if (queue_thermal && rdev->pm.dpm_enabled)
5512 schedule_work(&rdev->pm.dpm.thermal.work);
5513 rdev->ih.rptr = rptr;
5514 atomic_set(&rdev->ih.lock, 0);
5515
5516 /* make sure wptr hasn't changed while processing */
5517 wptr = evergreen_get_ih_wptr(rdev);
5518 if (wptr != rptr)
5519 goto restart_ih;
5520
5521 return IRQ_HANDLED;
5522 }
5523
5524 static void evergreen_uvd_init(struct radeon_device *rdev)
5525 {
5526 int r;
5527
5528 if (!rdev->has_uvd)
5529 return;
5530
5531 r = radeon_uvd_init(rdev);
5532 if (r) {
5533 dev_err(rdev->dev, "failed UVD (%d) init.\n", r);
5534 /*
5535 * At this point rdev->uvd.vcpu_bo is NULL which trickles down
5536 * to early fails uvd_v2_2_resume() and thus nothing happens
5537 * there. So it is pointless to try to go through that code
5538 * hence why we disable uvd here.
5539 */
5540 rdev->has_uvd = 0;
5541 return;
5542 }
5543 rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_obj = NULL;
5544 r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_UVD_INDEX], 4096);
5545 }
5546
5547 static void evergreen_uvd_start(struct radeon_device *rdev)
5548 {
5549 int r;
5550
5551 if (!rdev->has_uvd)
5552 return;
5553
5554 r = uvd_v2_2_resume(rdev);
5555 if (r) {
5556 dev_err(rdev->dev, "failed UVD resume (%d).\n", r);
5557 goto error;
5558 }
5559 r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_UVD_INDEX);
5560 if (r) {
5561 dev_err(rdev->dev, "failed initializing UVD fences (%d).\n", r);
5562 goto error;
5563 }
5564 return;
5565
5566 error:
5567 rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size = 0;
5568 }
5569
5570 static void evergreen_uvd_resume(struct radeon_device *rdev)
5571 {
5572 struct radeon_ring *ring;
5573 int r;
5574
5575 if (!rdev->has_uvd || !rdev->ring[R600_RING_TYPE_UVD_INDEX].ring_size)
5576 return;
5577
5578 ring = &rdev->ring[R600_RING_TYPE_UVD_INDEX];
5579 r = radeon_ring_init(rdev, ring, ring->ring_size, 0, PACKET0(UVD_NO_OP, 0));
5580 if (r) {
5581 dev_err(rdev->dev, "failed initializing UVD ring (%d).\n", r);
5582 return;
5583 }
5584 r = uvd_v1_0_init(rdev);
5585 if (r) {
5586 dev_err(rdev->dev, "failed initializing UVD (%d).\n", r);
5587 return;
5588 }
5589 }
5590
/* Bring the ASIC fully up: program the MC, enable address translation,
 * load microcode, start the CP/DMA/UVD rings and enable interrupts.
 * Called from both evergreen_init() and evergreen_resume(); the order
 * of the steps below is hardware-mandated — do not reorder.
 * Returns 0 on success or a negative error code.
 */
static int evergreen_startup(struct radeon_device *rdev)
{
	struct radeon_ring *ring;
	int r;

	/* enable pcie gen2 link */
	evergreen_pcie_gen2_enable(rdev);
	/* enable aspm */
	evergreen_program_aspm(rdev);

	/* scratch needs to be initialized before MC */
	r = r600_vram_scratch_init(rdev);
	if (r)
		return r;

	evergreen_mc_program(rdev);

	/* DCE5 (NI) parts need the MC microcode loaded here; when DPM is
	 * enabled it is handled elsewhere, hence the dpm_enabled check.
	 */
	if (ASIC_IS_DCE5(rdev) && !rdev->pm.dpm_enabled) {
		r = ni_mc_load_microcode(rdev);
		if (r) {
			DRM_ERROR("Failed to load MC firmware!\n");
			return r;
		}
	}

	/* system address translation: AGP aperture or PCIE GART */
	if (rdev->flags & RADEON_IS_AGP) {
		evergreen_agp_enable(rdev);
	} else {
		r = evergreen_pcie_gart_enable(rdev);
		if (r)
			return r;
	}
	evergreen_gpu_init(rdev);

	/* allocate rlc buffers */
	if (rdev->flags & RADEON_IS_IGP) {
		rdev->rlc.reg_list = sumo_rlc_save_restore_register_list;
		rdev->rlc.reg_list_size =
			(u32)ARRAY_SIZE(sumo_rlc_save_restore_register_list);
		rdev->rlc.cs_data = evergreen_cs_data;
		r = sumo_rlc_init(rdev);
		if (r) {
			DRM_ERROR("Failed to init rlc BOs!\n");
			return r;
		}
	}

	/* allocate wb buffer */
	r = radeon_wb_init(rdev);
	if (r)
		return r;

	r = radeon_fence_driver_start_ring(rdev, RADEON_RING_TYPE_GFX_INDEX);
	if (r) {
		dev_err(rdev->dev, "failed initializing CP fences (%d).\n", r);
		return r;
	}

	r = radeon_fence_driver_start_ring(rdev, R600_RING_TYPE_DMA_INDEX);
	if (r) {
		dev_err(rdev->dev, "failed initializing DMA fences (%d).\n", r);
		return r;
	}

	/* UVD failure is non-fatal: evergreen_uvd_start() zeroes the UVD
	 * ring size on error, making evergreen_uvd_resume() a no-op below.
	 */
	evergreen_uvd_start(rdev);

	/* Enable IRQ */
	if (!rdev->irq.installed) {
		r = radeon_irq_kms_init(rdev);
		if (r)
			return r;
	}

	r = r600_irq_init(rdev);
	if (r) {
		DRM_ERROR("radeon: IH init failed (%d).\n", r);
		radeon_irq_kms_fini(rdev);
		return r;
	}
	evergreen_irq_set(rdev);

	/* bring up the GFX (CP) ring */
	ring = &rdev->ring[RADEON_RING_TYPE_GFX_INDEX];
	r = radeon_ring_init(rdev, ring, ring->ring_size, RADEON_WB_CP_RPTR_OFFSET,
			     RADEON_CP_PACKET2);
	if (r)
		return r;

	/* bring up the async DMA ring */
	ring = &rdev->ring[R600_RING_TYPE_DMA_INDEX];
	r = radeon_ring_init(rdev, ring, ring->ring_size, R600_WB_DMA_RPTR_OFFSET,
			     DMA_PACKET(DMA_PACKET_NOP, 0, 0));
	if (r)
		return r;

	r = evergreen_cp_load_microcode(rdev);
	if (r)
		return r;
	r = evergreen_cp_resume(rdev);
	if (r)
		return r;
	r = r600_dma_resume(rdev);
	if (r)
		return r;

	evergreen_uvd_resume(rdev);

	r = radeon_ib_pool_init(rdev);
	if (r) {
		dev_err(rdev->dev, "IB initialization failed (%d).\n", r);
		return r;
	}

	r = radeon_audio_init(rdev);
	if (r) {
		DRM_ERROR("radeon: audio init failed\n");
		return r;
	}

	return 0;
}
5710
5711 int evergreen_resume(struct radeon_device *rdev)
5712 {
5713 int r;
5714
5715 /* reset the asic, the gfx blocks are often in a bad state
5716 * after the driver is unloaded or after a resume
5717 */
5718 if (radeon_asic_reset(rdev))
5719 dev_warn(rdev->dev, "GPU reset failed !\n");
5720 /* Do not reset GPU before posting, on rv770 hw unlike on r500 hw,
5721 * posting will perform necessary task to bring back GPU into good
5722 * shape.
5723 */
5724 /* post card */
5725 atom_asic_init(rdev->mode_info.atom_context);
5726
5727 /* init golden registers */
5728 evergreen_init_golden_registers(rdev);
5729
5730 if (rdev->pm.pm_method == PM_METHOD_DPM)
5731 radeon_pm_resume(rdev);
5732
5733 rdev->accel_working = true;
5734 r = evergreen_startup(rdev);
5735 if (r) {
5736 DRM_ERROR("evergreen startup failed on resume\n");
5737 rdev->accel_working = false;
5738 return r;
5739 }
5740
5741 return r;
5742
5743 }
5744
/* Quiesce the GPU for suspend: stop power management and audio, shut
 * down UVD, halt the CP and DMA engines, then disable interrupts,
 * writeback and the GART.  The order mirrors the reverse of startup.
 * Always returns 0.
 */
int evergreen_suspend(struct radeon_device *rdev)
{
	radeon_pm_suspend(rdev);
	radeon_audio_fini(rdev);
	/* only tear down UVD if it was successfully initialized */
	if (rdev->has_uvd) {
		uvd_v1_0_fini(rdev);
		radeon_uvd_suspend(rdev);
	}
	r700_cp_stop(rdev);
	r600_dma_stop(rdev);
	evergreen_irq_suspend(rdev);
	radeon_wb_disable(rdev);
	evergreen_pcie_gart_disable(rdev);

	return 0;
}
5761
/* The plan is to move initialization into this function and use
 * helper functions so that radeon_device_init does pretty much
 * nothing more than call asic-specific functions.  This should
 * also allow us to remove a bunch of callback functions like
 * vram_info.
 */
/* One-time driver init for evergreen-class ASICs: read and (if needed)
 * post the BIOS, reset the ASIC, set up clocks, memory, firmware and
 * rings, then run evergreen_startup().  A startup failure here only
 * disables acceleration (accel_working = false) and still returns 0 so
 * modesetting keeps working; fatal setup errors return negative codes.
 */
int evergreen_init(struct radeon_device *rdev)
{
	int r;

	/* Read BIOS */
	if (!radeon_get_bios(rdev)) {
		if (ASIC_IS_AVIVO(rdev))
			return -EINVAL;
	}
	/* Must be an ATOMBIOS */
	if (!rdev->is_atom_bios) {
		dev_err(rdev->dev, "Expecting atombios for evergreen GPU\n");
		return -EINVAL;
	}
	r = radeon_atombios_init(rdev);
	if (r)
		return r;
	/* reset the asic, the gfx blocks are often in a bad state
	 * after the driver is unloaded or after a resume
	 */
	if (radeon_asic_reset(rdev))
		dev_warn(rdev->dev, "GPU reset failed !\n");
	/* Post card if necessary */
	if (!radeon_card_posted(rdev)) {
		if (!rdev->bios) {
			dev_err(rdev->dev, "Card not posted and no BIOS - ignoring\n");
			return -EINVAL;
		}
		DRM_INFO("GPU not posted. posting now...\n");
		atom_asic_init(rdev->mode_info.atom_context);
	}
	/* init golden registers */
	evergreen_init_golden_registers(rdev);
	/* Initialize scratch registers */
	r600_scratch_init(rdev);
	/* Initialize surface registers */
	radeon_surface_init(rdev);
	/* Initialize clocks */
	radeon_get_clock_info(rdev->ddev);
	/* Fence driver */
	r = radeon_fence_driver_init(rdev);
	if (r)
		return r;
	/* initialize AGP; failure just disables AGP, it is not fatal */
	if (rdev->flags & RADEON_IS_AGP) {
		r = radeon_agp_init(rdev);
		if (r)
			radeon_agp_disable(rdev);
	}
	/* initialize memory controller */
	r = evergreen_mc_init(rdev);
	if (r)
		return r;
	/* Memory manager */
	r = radeon_bo_init(rdev);
	if (r)
		return r;

	/* fetch the required firmware blobs unless already cached;
	 * DCE5 (NI) parts additionally need MC firmware
	 */
	if (ASIC_IS_DCE5(rdev)) {
		if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw || !rdev->mc_fw) {
			r = ni_init_microcode(rdev);
			if (r) {
				DRM_ERROR("Failed to load firmware!\n");
				return r;
			}
		}
	} else {
		if (!rdev->me_fw || !rdev->pfp_fw || !rdev->rlc_fw) {
			r = r600_init_microcode(rdev);
			if (r) {
				DRM_ERROR("Failed to load firmware!\n");
				return r;
			}
		}
	}

	/* Initialize power management */
	radeon_pm_init(rdev);

	/* describe the GFX and DMA rings; the actual ring buffers are
	 * allocated later in evergreen_startup()
	 */
	rdev->ring[RADEON_RING_TYPE_GFX_INDEX].ring_obj = NULL;
	r600_ring_init(rdev, &rdev->ring[RADEON_RING_TYPE_GFX_INDEX], 1024 * 1024);

	rdev->ring[R600_RING_TYPE_DMA_INDEX].ring_obj = NULL;
	r600_ring_init(rdev, &rdev->ring[R600_RING_TYPE_DMA_INDEX], 64 * 1024);

	evergreen_uvd_init(rdev);

	rdev->ih.ring_obj = NULL;
	r600_ih_ring_init(rdev, 64 * 1024);

	r = r600_pcie_gart_init(rdev);
	if (r)
		return r;

	rdev->accel_working = true;
	r = evergreen_startup(rdev);
	if (r) {
		/* acceleration is torn down but init still succeeds so
		 * the display side remains usable
		 */
		dev_err(rdev->dev, "disabling GPU acceleration\n");
		r700_cp_fini(rdev);
		r600_dma_fini(rdev);
		r600_irq_fini(rdev);
		if (rdev->flags & RADEON_IS_IGP)
			sumo_rlc_fini(rdev);
		radeon_wb_fini(rdev);
		radeon_ib_pool_fini(rdev);
		radeon_irq_kms_fini(rdev);
		evergreen_pcie_gart_fini(rdev);
		rdev->accel_working = false;
	}

	/* Don't start up if the MC ucode is missing on BTC parts.
	 * The default clocks and voltages before the MC ucode
	 * is loaded are not sufficient for advanced operations.
	 */
	if (ASIC_IS_DCE5(rdev)) {
		if (!rdev->mc_fw && !(rdev->flags & RADEON_IS_IGP)) {
			DRM_ERROR("radeon: MC ucode required for NI+.\n");
			return -EINVAL;
		}
	}

	return 0;
}
5891
/* Full driver teardown: undo everything evergreen_init() and
 * evergreen_startup() set up, in roughly the reverse order of
 * initialization, and release the cached BIOS copy last.
 */
void evergreen_fini(struct radeon_device *rdev)
{
	radeon_pm_fini(rdev);
	radeon_audio_fini(rdev);
	r700_cp_fini(rdev);
	r600_dma_fini(rdev);
	r600_irq_fini(rdev);
	/* the RLC save/restore BOs only exist on IGP parts */
	if (rdev->flags & RADEON_IS_IGP)
		sumo_rlc_fini(rdev);
	radeon_wb_fini(rdev);
	radeon_ib_pool_fini(rdev);
	radeon_irq_kms_fini(rdev);
	uvd_v1_0_fini(rdev);
	radeon_uvd_fini(rdev);
	evergreen_pcie_gart_fini(rdev);
	r600_vram_scratch_fini(rdev);
	radeon_gem_fini(rdev);
	radeon_fence_driver_fini(rdev);
	radeon_agp_fini(rdev);
	radeon_bo_fini(rdev);
	radeon_atombios_fini(rdev);
	kfree(rdev->bios);
	rdev->bios = NULL;
}
5916
/* Switch the PCIE link to gen2 (5.0 GT/s) speeds when both the GPU
 * and the upstream bus support it.  Controlled by the
 * radeon.pcie_gen2 module parameter; a no-op on IGPs, non-PCIE and
 * x2 boards, or when gen2 is already active.
 */
void evergreen_pcie_gen2_enable(struct radeon_device *rdev)
{
	u32 link_width_cntl, speed_cntl;

	/* user opt-out via radeon.pcie_gen2=0 */
	if (radeon_pcie_gen2 == 0)
		return;

	if (rdev->flags & RADEON_IS_IGP)
		return;

	if (!(rdev->flags & RADEON_IS_PCIE))
		return;

	/* x2 cards have a special sequence */
	if (ASIC_IS_X2(rdev))
		return;

	/* the upstream bus must support at least 5.0 GT/s */
	if ((rdev->pdev->bus->max_bus_speed != PCIE_SPEED_5_0GT) &&
		(rdev->pdev->bus->max_bus_speed != PCIE_SPEED_8_0GT))
		return;

	speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
	if (speed_cntl & LC_CURRENT_DATA_RATE) {
		DRM_INFO("PCIE gen 2 link speeds already enabled\n");
		return;
	}

	DRM_INFO("enabling PCIE gen 2 link speeds, disable with radeon.pcie_gen2=0\n");

	if ((speed_cntl & LC_OTHER_SIDE_EVER_SENT_GEN2) ||
	    (speed_cntl & LC_OTHER_SIDE_SUPPORTS_GEN2)) {
		/* the link partner can do gen2: allow width upconfigure */
		link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
		link_width_cntl &= ~LC_UPCONFIGURE_DIS;
		WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);

		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
		speed_cntl &= ~LC_TARGET_LINK_SPEED_OVERRIDE_EN;
		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);

		/* pulse LC_CLR_FAILED_SPD_CHANGE_CNT (set then clear) to
		 * reset the failed-speed-change counter
		 */
		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
		speed_cntl |= LC_CLR_FAILED_SPD_CHANGE_CNT;
		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);

		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
		speed_cntl &= ~LC_CLR_FAILED_SPD_CHANGE_CNT;
		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);

		/* finally strap the link into gen2 mode */
		speed_cntl = RREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL);
		speed_cntl |= LC_GEN2_EN_STRAP;
		WREG32_PCIE_PORT(PCIE_LC_SPEED_CNTL, speed_cntl);

	} else {
		link_width_cntl = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
		/* XXX: only disable it if gen1 bridge vendor == 0x111d or 0x1106 */
		if (1)
			link_width_cntl |= LC_UPCONFIGURE_DIS;
		else
			link_width_cntl &= ~LC_UPCONFIGURE_DIS;
		WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, link_width_cntl);
	}
}
5979
/* Program PCIE Active State Power Management (L0s/L1 inactivity
 * timers) and the associated PHY PLL power-down behaviour.  A no-op
 * when the radeon.aspm module parameter is 0 or the device is not
 * PCIE.  Register writes are skipped when the value is unchanged.
 */
void evergreen_program_aspm(struct radeon_device *rdev)
{
	u32 data, orig;
	u32 pcie_lc_cntl, pcie_lc_cntl_old;
	bool disable_l0s, disable_l1 = false, disable_plloff_in_l1 = false;
	/* fusion_platform = true
	 * if the system is a fusion system
	 * (APU or DGPU in a fusion system).
	 * todo: check if the system is a fusion platform.
	 */
	bool fusion_platform = false;

	if (radeon_aspm == 0)
		return;

	if (!(rdev->flags & RADEON_IS_PCIE))
		return;

	/* families on which L0s is left disabled */
	switch (rdev->family) {
	case CHIP_CYPRESS:
	case CHIP_HEMLOCK:
	case CHIP_JUNIPER:
	case CHIP_REDWOOD:
	case CHIP_CEDAR:
	case CHIP_SUMO:
	case CHIP_SUMO2:
	case CHIP_PALM:
	case CHIP_ARUBA:
		disable_l0s = true;
		break;
	default:
		disable_l0s = false;
		break;
	}

	if (rdev->flags & RADEON_IS_IGP)
		fusion_platform = true; /* XXX also dGPUs in a fusion system */

	/* PIF pairing mode: fusion systems clear MULTI_PIF, discrete set it */
	data = orig = RREG32_PIF_PHY0(PB0_PIF_PAIRING);
	if (fusion_platform)
		data &= ~MULTI_PIF;
	else
		data |= MULTI_PIF;
	if (data != orig)
		WREG32_PIF_PHY0(PB0_PIF_PAIRING, data);

	data = orig = RREG32_PIF_PHY1(PB1_PIF_PAIRING);
	if (fusion_platform)
		data &= ~MULTI_PIF;
	else
		data |= MULTI_PIF;
	if (data != orig)
		WREG32_PIF_PHY1(PB1_PIF_PAIRING, data);

	/* L0s/L1 inactivity timers; values differ for NI (BARTS+) parts */
	pcie_lc_cntl = pcie_lc_cntl_old = RREG32_PCIE_PORT(PCIE_LC_CNTL);
	pcie_lc_cntl &= ~(LC_L0S_INACTIVITY_MASK | LC_L1_INACTIVITY_MASK);
	if (!disable_l0s) {
		if (rdev->family >= CHIP_BARTS)
			pcie_lc_cntl |= LC_L0S_INACTIVITY(7);
		else
			pcie_lc_cntl |= LC_L0S_INACTIVITY(3);
	}

	if (!disable_l1) {
		if (rdev->family >= CHIP_BARTS)
			pcie_lc_cntl |= LC_L1_INACTIVITY(7);
		else
			pcie_lc_cntl |= LC_L1_INACTIVITY(8);

		/* program the PLL power states used while the link is in L1
		 * on both PHYs (PB0/PB1)
		 */
		if (!disable_plloff_in_l1) {
			data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
			data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
			data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
			if (data != orig)
				WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);

			data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
			data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
			data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
			if (data != orig)
				WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);

			data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
			data &= ~(PLL_POWER_STATE_IN_OFF_0_MASK | PLL_POWER_STATE_IN_TXS2_0_MASK);
			data |= PLL_POWER_STATE_IN_OFF_0(7) | PLL_POWER_STATE_IN_TXS2_0(7);
			if (data != orig)
				WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);

			data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
			data &= ~(PLL_POWER_STATE_IN_OFF_1_MASK | PLL_POWER_STATE_IN_TXS2_1_MASK);
			data |= PLL_POWER_STATE_IN_OFF_1(7) | PLL_POWER_STATE_IN_TXS2_1(7);
			if (data != orig)
				WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);

			/* NI and newer additionally set the PLL ramp-up times */
			if (rdev->family >= CHIP_BARTS) {
				data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0);
				data &= ~PLL_RAMP_UP_TIME_0_MASK;
				data |= PLL_RAMP_UP_TIME_0(4);
				if (data != orig)
					WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_0, data);

				data = orig = RREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1);
				data &= ~PLL_RAMP_UP_TIME_1_MASK;
				data |= PLL_RAMP_UP_TIME_1(4);
				if (data != orig)
					WREG32_PIF_PHY0(PB0_PIF_PWRDOWN_1, data);

				data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0);
				data &= ~PLL_RAMP_UP_TIME_0_MASK;
				data |= PLL_RAMP_UP_TIME_0(4);
				if (data != orig)
					WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_0, data);

				data = orig = RREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1);
				data &= ~PLL_RAMP_UP_TIME_1_MASK;
				data |= PLL_RAMP_UP_TIME_1(4);
				if (data != orig)
					WREG32_PIF_PHY1(PB1_PIF_PWRDOWN_1, data);
			}

			data = orig = RREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL);
			data &= ~LC_DYN_LANES_PWR_STATE_MASK;
			data |= LC_DYN_LANES_PWR_STATE(3);
			if (data != orig)
				WREG32_PCIE_PORT(PCIE_LC_LINK_WIDTH_CNTL, data);

			/* NI and newer: set the LS2 exit time on both PHYs */
			if (rdev->family >= CHIP_BARTS) {
				data = orig = RREG32_PIF_PHY0(PB0_PIF_CNTL);
				data &= ~LS2_EXIT_TIME_MASK;
				data |= LS2_EXIT_TIME(1);
				if (data != orig)
					WREG32_PIF_PHY0(PB0_PIF_CNTL, data);

				data = orig = RREG32_PIF_PHY1(PB1_PIF_CNTL);
				data &= ~LS2_EXIT_TIME_MASK;
				data |= LS2_EXIT_TIME(1);
				if (data != orig)
					WREG32_PIF_PHY1(PB1_PIF_CNTL, data);
			}
		}
	}

	/* evergreen parts only */
	if (rdev->family < CHIP_BARTS)
		pcie_lc_cntl |= LC_PMI_TO_L1_DIS;

	if (pcie_lc_cntl != pcie_lc_cntl_old)
		WREG32_PCIE_PORT(PCIE_LC_CNTL, pcie_lc_cntl);
}