/*
 * Copyright 2008 Advanced Micro Devices, Inc.
 * Copyright 2008 Red Hat Inc.
 * Copyright 2009 Jerome Glisse.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Dave Airlie
 *          Alex Deucher
 *          Jerome Glisse
 */
#ifndef __RADEON_ASIC_H__
#define __RADEON_ASIC_H__

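/*
 * This header declares the per-ASIC hardware callbacks and defines one
 * struct radeon_asic function-pointer table per chip family.  The driver
 * core selects the table matching the detected chip and drives the
 * hardware through it, which lets newer families reuse callbacks from
 * older ones (e.g. r300 reusing the r100 CP and writeback helpers).
 */
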
/*
 * common functions
 */
void radeon_legacy_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
void radeon_legacy_set_clock_gating(struct radeon_device *rdev, int enable);

void radeon_atom_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
void radeon_atom_set_memory_clock(struct radeon_device *rdev, uint32_t mem_clock);
void radeon_atom_set_clock_gating(struct radeon_device *rdev, int enable);

/*
 * r100,rv100,rs100,rv200,rs200,r200,rv250,rs300,rv280
 */
int r100_init(struct radeon_device *rdev);
int r200_init(struct radeon_device *rdev);
uint32_t r100_mm_rreg(struct radeon_device *rdev, uint32_t reg);
void r100_mm_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void r100_errata(struct radeon_device *rdev);
void r100_vram_info(struct radeon_device *rdev);
void r100_vga_set_state(struct radeon_device *rdev, bool state);
int r100_gpu_reset(struct radeon_device *rdev);
int r100_mc_init(struct radeon_device *rdev);
void r100_mc_fini(struct radeon_device *rdev);
u32 r100_get_vblank_counter(struct radeon_device *rdev, int crtc);
int r100_wb_init(struct radeon_device *rdev);
void r100_wb_fini(struct radeon_device *rdev);
int r100_pci_gart_init(struct radeon_device *rdev);
void r100_pci_gart_fini(struct radeon_device *rdev);
int r100_pci_gart_enable(struct radeon_device *rdev);
void r100_pci_gart_disable(struct radeon_device *rdev);
void r100_pci_gart_tlb_flush(struct radeon_device *rdev);
int r100_pci_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
int r100_cp_init(struct radeon_device *rdev, unsigned ring_size);
void r100_cp_fini(struct radeon_device *rdev);
void r100_cp_disable(struct radeon_device *rdev);
void r100_cp_commit(struct radeon_device *rdev);
void r100_ring_start(struct radeon_device *rdev);
int r100_irq_set(struct radeon_device *rdev);
int r100_irq_process(struct radeon_device *rdev);
void r100_fence_ring_emit(struct radeon_device *rdev,
                          struct radeon_fence *fence);
int r100_cs_parse(struct radeon_cs_parser *p);
void r100_pll_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
uint32_t r100_pll_rreg(struct radeon_device *rdev, uint32_t reg);
int r100_copy_blit(struct radeon_device *rdev,
                   uint64_t src_offset,
                   uint64_t dst_offset,
                   unsigned num_pages,
                   struct radeon_fence *fence);
int r100_set_surface_reg(struct radeon_device *rdev, int reg,
                         uint32_t tiling_flags, uint32_t pitch,
                         uint32_t offset, uint32_t obj_size);
int r100_clear_surface_reg(struct radeon_device *rdev, int reg);
void r100_bandwidth_update(struct radeon_device *rdev);
void r100_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
int r100_ib_test(struct radeon_device *rdev);
int r100_ring_test(struct radeon_device *rdev);

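/*
 * Per-family dispatch tables.  A NULL entry means the operation is not
 * wired up for that family, either because no such path exists for it
 * (r100 has no DMA copy path, so .copy_dma stays NULL) or because the
 * family's own init/resume code takes care of it.
 */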
static struct radeon_asic r100_asic = {
        .init = &r100_init,
        .errata = &r100_errata,
        .vram_info = &r100_vram_info,
        .vga_set_state = &r100_vga_set_state,
        .gpu_reset = &r100_gpu_reset,
        .mc_init = &r100_mc_init,
        .mc_fini = &r100_mc_fini,
        .wb_init = &r100_wb_init,
        .wb_fini = &r100_wb_fini,
        .gart_init = &r100_pci_gart_init,
        .gart_fini = &r100_pci_gart_fini,
        .gart_enable = &r100_pci_gart_enable,
        .gart_disable = &r100_pci_gart_disable,
        .gart_tlb_flush = &r100_pci_gart_tlb_flush,
        .gart_set_page = &r100_pci_gart_set_page,
        .cp_init = &r100_cp_init,
        .cp_fini = &r100_cp_fini,
        .cp_disable = &r100_cp_disable,
        .cp_commit = &r100_cp_commit,
        .ring_start = &r100_ring_start,
        .ring_test = &r100_ring_test,
        .ring_ib_execute = &r100_ring_ib_execute,
        .ib_test = &r100_ib_test,
        .irq_set = &r100_irq_set,
        .irq_process = &r100_irq_process,
        .get_vblank_counter = &r100_get_vblank_counter,
        .fence_ring_emit = &r100_fence_ring_emit,
        .cs_parse = &r100_cs_parse,
        .copy_blit = &r100_copy_blit,
        .copy_dma = NULL,
        .copy = &r100_copy_blit,
        .set_engine_clock = &radeon_legacy_set_engine_clock,
        .set_memory_clock = NULL,
        .set_pcie_lanes = NULL,
        .set_clock_gating = &radeon_legacy_set_clock_gating,
        .set_surface_reg = r100_set_surface_reg,
        .clear_surface_reg = r100_clear_surface_reg,
        .bandwidth_update = &r100_bandwidth_update,
};


/*
 * r300,r350,rv350,rv380
 */
int r300_init(struct radeon_device *rdev);
void r300_errata(struct radeon_device *rdev);
void r300_vram_info(struct radeon_device *rdev);
int r300_gpu_reset(struct radeon_device *rdev);
int r300_mc_init(struct radeon_device *rdev);
void r300_mc_fini(struct radeon_device *rdev);
void r300_ring_start(struct radeon_device *rdev);
void r300_fence_ring_emit(struct radeon_device *rdev,
                          struct radeon_fence *fence);
int r300_cs_parse(struct radeon_cs_parser *p);
int rv370_pcie_gart_init(struct radeon_device *rdev);
void rv370_pcie_gart_fini(struct radeon_device *rdev);
int rv370_pcie_gart_enable(struct radeon_device *rdev);
void rv370_pcie_gart_disable(struct radeon_device *rdev);
void rv370_pcie_gart_tlb_flush(struct radeon_device *rdev);
int rv370_pcie_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
uint32_t rv370_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
void rv370_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rv370_set_pcie_lanes(struct radeon_device *rdev, int lanes);
int r300_copy_dma(struct radeon_device *rdev,
                  uint64_t src_offset,
                  uint64_t dst_offset,
                  unsigned num_pages,
                  struct radeon_fence *fence);

static struct radeon_asic r300_asic = {
        .init = &r300_init,
        .errata = &r300_errata,
        .vram_info = &r300_vram_info,
        .vga_set_state = &r100_vga_set_state,
        .gpu_reset = &r300_gpu_reset,
        .mc_init = &r300_mc_init,
        .mc_fini = &r300_mc_fini,
        .wb_init = &r100_wb_init,
        .wb_fini = &r100_wb_fini,
        .gart_init = &r100_pci_gart_init,
        .gart_fini = &r100_pci_gart_fini,
        .gart_enable = &r100_pci_gart_enable,
        .gart_disable = &r100_pci_gart_disable,
        .gart_tlb_flush = &r100_pci_gart_tlb_flush,
        .gart_set_page = &r100_pci_gart_set_page,
        .cp_init = &r100_cp_init,
        .cp_fini = &r100_cp_fini,
        .cp_disable = &r100_cp_disable,
        .cp_commit = &r100_cp_commit,
        .ring_start = &r300_ring_start,
        .ring_test = &r100_ring_test,
        .ring_ib_execute = &r100_ring_ib_execute,
        .ib_test = &r100_ib_test,
        .irq_set = &r100_irq_set,
        .irq_process = &r100_irq_process,
        .get_vblank_counter = &r100_get_vblank_counter,
        .fence_ring_emit = &r300_fence_ring_emit,
        .cs_parse = &r300_cs_parse,
        .copy_blit = &r100_copy_blit,
        .copy_dma = &r300_copy_dma,
        .copy = &r100_copy_blit,
        .set_engine_clock = &radeon_legacy_set_engine_clock,
        .set_memory_clock = NULL,
        .set_pcie_lanes = &rv370_set_pcie_lanes,
        .set_clock_gating = &radeon_legacy_set_clock_gating,
        .set_surface_reg = r100_set_surface_reg,
        .clear_surface_reg = r100_clear_surface_reg,
        .bandwidth_update = &r100_bandwidth_update,
};

/*
 * r420,r423,rv410
 */
extern int r420_init(struct radeon_device *rdev);
extern void r420_fini(struct radeon_device *rdev);
extern int r420_suspend(struct radeon_device *rdev);
extern int r420_resume(struct radeon_device *rdev);
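/*
 * The r420 family follows the newer init path: memory controller, GART,
 * CP and writeback bring-up is handled inside r420_init()/r420_resume()
 * and torn down in r420_fini()/r420_suspend(), so the corresponding
 * per-operation hooks below are intentionally left NULL.
 */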
static struct radeon_asic r420_asic = {
        .init = &r420_init,
        .fini = &r420_fini,
        .suspend = &r420_suspend,
        .resume = &r420_resume,
        .errata = NULL,
        .vram_info = NULL,
        .vga_set_state = &r100_vga_set_state,
        .gpu_reset = &r300_gpu_reset,
        .mc_init = NULL,
        .mc_fini = NULL,
        .wb_init = NULL,
        .wb_fini = NULL,
        .gart_enable = NULL,
        .gart_disable = NULL,
        .gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
        .gart_set_page = &rv370_pcie_gart_set_page,
        .cp_init = NULL,
        .cp_fini = NULL,
        .cp_disable = NULL,
        .cp_commit = &r100_cp_commit,
        .ring_start = &r300_ring_start,
        .ring_test = &r100_ring_test,
        .ring_ib_execute = &r100_ring_ib_execute,
        .ib_test = NULL,
        .irq_set = &r100_irq_set,
        .irq_process = &r100_irq_process,
        .get_vblank_counter = &r100_get_vblank_counter,
        .fence_ring_emit = &r300_fence_ring_emit,
        .cs_parse = &r300_cs_parse,
        .copy_blit = &r100_copy_blit,
        .copy_dma = &r300_copy_dma,
        .copy = &r100_copy_blit,
        .set_engine_clock = &radeon_atom_set_engine_clock,
        .set_memory_clock = &radeon_atom_set_memory_clock,
        .set_pcie_lanes = &rv370_set_pcie_lanes,
        .set_clock_gating = &radeon_atom_set_clock_gating,
        .set_surface_reg = r100_set_surface_reg,
        .clear_surface_reg = r100_clear_surface_reg,
        .bandwidth_update = &r100_bandwidth_update,
};


/*
 * rs400,rs480
 */
void rs400_errata(struct radeon_device *rdev);
void rs400_vram_info(struct radeon_device *rdev);
int rs400_mc_init(struct radeon_device *rdev);
void rs400_mc_fini(struct radeon_device *rdev);
int rs400_gart_init(struct radeon_device *rdev);
void rs400_gart_fini(struct radeon_device *rdev);
int rs400_gart_enable(struct radeon_device *rdev);
void rs400_gart_disable(struct radeon_device *rdev);
void rs400_gart_tlb_flush(struct radeon_device *rdev);
int rs400_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
uint32_t rs400_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs400_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
static struct radeon_asic rs400_asic = {
        .init = &r300_init,
        .errata = &rs400_errata,
        .vram_info = &rs400_vram_info,
        .vga_set_state = &r100_vga_set_state,
        .gpu_reset = &r300_gpu_reset,
        .mc_init = &rs400_mc_init,
        .mc_fini = &rs400_mc_fini,
        .wb_init = &r100_wb_init,
        .wb_fini = &r100_wb_fini,
        .gart_init = &rs400_gart_init,
        .gart_fini = &rs400_gart_fini,
        .gart_enable = &rs400_gart_enable,
        .gart_disable = &rs400_gart_disable,
        .gart_tlb_flush = &rs400_gart_tlb_flush,
        .gart_set_page = &rs400_gart_set_page,
        .cp_init = &r100_cp_init,
        .cp_fini = &r100_cp_fini,
        .cp_disable = &r100_cp_disable,
        .cp_commit = &r100_cp_commit,
        .ring_start = &r300_ring_start,
        .ring_test = &r100_ring_test,
        .ring_ib_execute = &r100_ring_ib_execute,
        .ib_test = &r100_ib_test,
        .irq_set = &r100_irq_set,
        .irq_process = &r100_irq_process,
        .get_vblank_counter = &r100_get_vblank_counter,
        .fence_ring_emit = &r300_fence_ring_emit,
        .cs_parse = &r300_cs_parse,
        .copy_blit = &r100_copy_blit,
        .copy_dma = &r300_copy_dma,
        .copy = &r100_copy_blit,
        .set_engine_clock = &radeon_legacy_set_engine_clock,
        .set_memory_clock = NULL,
        .set_pcie_lanes = NULL,
        .set_clock_gating = &radeon_legacy_set_clock_gating,
        .set_surface_reg = r100_set_surface_reg,
        .clear_surface_reg = r100_clear_surface_reg,
        .bandwidth_update = &r100_bandwidth_update,
};


/*
 * rs600.
 */
int rs600_init(struct radeon_device *rdev);
void rs600_errata(struct radeon_device *rdev);
void rs600_vram_info(struct radeon_device *rdev);
int rs600_mc_init(struct radeon_device *rdev);
void rs600_mc_fini(struct radeon_device *rdev);
int rs600_irq_set(struct radeon_device *rdev);
int rs600_irq_process(struct radeon_device *rdev);
u32 rs600_get_vblank_counter(struct radeon_device *rdev, int crtc);
int rs600_gart_init(struct radeon_device *rdev);
void rs600_gart_fini(struct radeon_device *rdev);
int rs600_gart_enable(struct radeon_device *rdev);
void rs600_gart_disable(struct radeon_device *rdev);
void rs600_gart_tlb_flush(struct radeon_device *rdev);
int rs600_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
uint32_t rs600_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs600_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rs600_bandwidth_update(struct radeon_device *rdev);
static struct radeon_asic rs600_asic = {
        .init = &rs600_init,
        .errata = &rs600_errata,
        .vram_info = &rs600_vram_info,
        .vga_set_state = &r100_vga_set_state,
        .gpu_reset = &r300_gpu_reset,
        .mc_init = &rs600_mc_init,
        .mc_fini = &rs600_mc_fini,
        .wb_init = &r100_wb_init,
        .wb_fini = &r100_wb_fini,
        .gart_init = &rs600_gart_init,
        .gart_fini = &rs600_gart_fini,
        .gart_enable = &rs600_gart_enable,
        .gart_disable = &rs600_gart_disable,
        .gart_tlb_flush = &rs600_gart_tlb_flush,
        .gart_set_page = &rs600_gart_set_page,
        .cp_init = &r100_cp_init,
        .cp_fini = &r100_cp_fini,
        .cp_disable = &r100_cp_disable,
        .cp_commit = &r100_cp_commit,
        .ring_start = &r300_ring_start,
        .ring_test = &r100_ring_test,
        .ring_ib_execute = &r100_ring_ib_execute,
        .ib_test = &r100_ib_test,
        .irq_set = &rs600_irq_set,
        .irq_process = &rs600_irq_process,
        .get_vblank_counter = &rs600_get_vblank_counter,
        .fence_ring_emit = &r300_fence_ring_emit,
        .cs_parse = &r300_cs_parse,
        .copy_blit = &r100_copy_blit,
        .copy_dma = &r300_copy_dma,
        .copy = &r100_copy_blit,
        .set_engine_clock = &radeon_atom_set_engine_clock,
        .set_memory_clock = &radeon_atom_set_memory_clock,
        .set_pcie_lanes = NULL,
        .set_clock_gating = &radeon_atom_set_clock_gating,
        .bandwidth_update = &rs600_bandwidth_update,
};


/*
 * rs690,rs740
 */
void rs690_errata(struct radeon_device *rdev);
void rs690_vram_info(struct radeon_device *rdev);
int rs690_mc_init(struct radeon_device *rdev);
void rs690_mc_fini(struct radeon_device *rdev);
uint32_t rs690_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs690_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rs690_bandwidth_update(struct radeon_device *rdev);
static struct radeon_asic rs690_asic = {
        .init = &rs600_init,
        .errata = &rs690_errata,
        .vram_info = &rs690_vram_info,
        .vga_set_state = &r100_vga_set_state,
        .gpu_reset = &r300_gpu_reset,
        .mc_init = &rs690_mc_init,
        .mc_fini = &rs690_mc_fini,
        .wb_init = &r100_wb_init,
        .wb_fini = &r100_wb_fini,
        .gart_init = &rs400_gart_init,
        .gart_fini = &rs400_gart_fini,
        .gart_enable = &rs400_gart_enable,
        .gart_disable = &rs400_gart_disable,
        .gart_tlb_flush = &rs400_gart_tlb_flush,
        .gart_set_page = &rs400_gart_set_page,
        .cp_init = &r100_cp_init,
        .cp_fini = &r100_cp_fini,
        .cp_disable = &r100_cp_disable,
        .cp_commit = &r100_cp_commit,
        .ring_start = &r300_ring_start,
        .ring_test = &r100_ring_test,
        .ring_ib_execute = &r100_ring_ib_execute,
        .ib_test = &r100_ib_test,
        .irq_set = &rs600_irq_set,
        .irq_process = &rs600_irq_process,
        .get_vblank_counter = &rs600_get_vblank_counter,
        .fence_ring_emit = &r300_fence_ring_emit,
        .cs_parse = &r300_cs_parse,
        .copy_blit = &r100_copy_blit,
        .copy_dma = &r300_copy_dma,
        .copy = &r300_copy_dma,
        .set_engine_clock = &radeon_atom_set_engine_clock,
        .set_memory_clock = &radeon_atom_set_memory_clock,
        .set_pcie_lanes = NULL,
        .set_clock_gating = &radeon_atom_set_clock_gating,
        .set_surface_reg = r100_set_surface_reg,
        .clear_surface_reg = r100_clear_surface_reg,
        .bandwidth_update = &rs690_bandwidth_update,
};


/*
 * rv515
 */
int rv515_init(struct radeon_device *rdev);
void rv515_errata(struct radeon_device *rdev);
void rv515_vram_info(struct radeon_device *rdev);
int rv515_gpu_reset(struct radeon_device *rdev);
int rv515_mc_init(struct radeon_device *rdev);
void rv515_mc_fini(struct radeon_device *rdev);
uint32_t rv515_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rv515_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rv515_ring_start(struct radeon_device *rdev);
uint32_t rv515_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
void rv515_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rv515_bandwidth_update(struct radeon_device *rdev);
static struct radeon_asic rv515_asic = {
        .init = &rv515_init,
        .errata = &rv515_errata,
        .vram_info = &rv515_vram_info,
        .vga_set_state = &r100_vga_set_state,
        .gpu_reset = &rv515_gpu_reset,
        .mc_init = &rv515_mc_init,
        .mc_fini = &rv515_mc_fini,
        .wb_init = &r100_wb_init,
        .wb_fini = &r100_wb_fini,
        .gart_init = &rv370_pcie_gart_init,
        .gart_fini = &rv370_pcie_gart_fini,
        .gart_enable = &rv370_pcie_gart_enable,
        .gart_disable = &rv370_pcie_gart_disable,
        .gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
        .gart_set_page = &rv370_pcie_gart_set_page,
        .cp_init = &r100_cp_init,
        .cp_fini = &r100_cp_fini,
        .cp_disable = &r100_cp_disable,
        .cp_commit = &r100_cp_commit,
        .ring_start = &rv515_ring_start,
        .ring_test = &r100_ring_test,
        .ring_ib_execute = &r100_ring_ib_execute,
        .ib_test = &r100_ib_test,
        .irq_set = &rs600_irq_set,
        .irq_process = &rs600_irq_process,
        .get_vblank_counter = &rs600_get_vblank_counter,
        .fence_ring_emit = &r300_fence_ring_emit,
        .cs_parse = &r300_cs_parse,
        .copy_blit = &r100_copy_blit,
        .copy_dma = &r300_copy_dma,
        .copy = &r100_copy_blit,
        .set_engine_clock = &radeon_atom_set_engine_clock,
        .set_memory_clock = &radeon_atom_set_memory_clock,
        .set_pcie_lanes = &rv370_set_pcie_lanes,
        .set_clock_gating = &radeon_atom_set_clock_gating,
        .set_surface_reg = r100_set_surface_reg,
        .clear_surface_reg = r100_clear_surface_reg,
        .bandwidth_update = &rv515_bandwidth_update,
};


/*
 * r520,rv530,rv560,rv570,r580
 */
void r520_errata(struct radeon_device *rdev);
void r520_vram_info(struct radeon_device *rdev);
int r520_mc_init(struct radeon_device *rdev);
void r520_mc_fini(struct radeon_device *rdev);
void r520_bandwidth_update(struct radeon_device *rdev);
static struct radeon_asic r520_asic = {
        .init = &rv515_init,
        .errata = &r520_errata,
        .vram_info = &r520_vram_info,
        .vga_set_state = &r100_vga_set_state,
        .gpu_reset = &rv515_gpu_reset,
        .mc_init = &r520_mc_init,
        .mc_fini = &r520_mc_fini,
        .wb_init = &r100_wb_init,
        .wb_fini = &r100_wb_fini,
        .gart_init = &rv370_pcie_gart_init,
        .gart_fini = &rv370_pcie_gart_fini,
        .gart_enable = &rv370_pcie_gart_enable,
        .gart_disable = &rv370_pcie_gart_disable,
        .gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
        .gart_set_page = &rv370_pcie_gart_set_page,
        .cp_init = &r100_cp_init,
        .cp_fini = &r100_cp_fini,
        .cp_disable = &r100_cp_disable,
        .cp_commit = &r100_cp_commit,
        .ring_start = &rv515_ring_start,
        .ring_test = &r100_ring_test,
        .ring_ib_execute = &r100_ring_ib_execute,
        .ib_test = &r100_ib_test,
        .irq_set = &rs600_irq_set,
        .irq_process = &rs600_irq_process,
        .get_vblank_counter = &rs600_get_vblank_counter,
        .fence_ring_emit = &r300_fence_ring_emit,
        .cs_parse = &r300_cs_parse,
        .copy_blit = &r100_copy_blit,
        .copy_dma = &r300_copy_dma,
        .copy = &r100_copy_blit,
        .set_engine_clock = &radeon_atom_set_engine_clock,
        .set_memory_clock = &radeon_atom_set_memory_clock,
        .set_pcie_lanes = &rv370_set_pcie_lanes,
        .set_clock_gating = &radeon_atom_set_clock_gating,
        .set_surface_reg = r100_set_surface_reg,
        .clear_surface_reg = r100_clear_surface_reg,
        .bandwidth_update = &r520_bandwidth_update,
};

/*
 * r600,rv610,rv630,rv620,rv635,rv670,rs780,rs880
 */
int r600_init(struct radeon_device *rdev);
void r600_fini(struct radeon_device *rdev);
int r600_suspend(struct radeon_device *rdev);
int r600_resume(struct radeon_device *rdev);
void r600_vga_set_state(struct radeon_device *rdev, bool state);
int r600_wb_init(struct radeon_device *rdev);
void r600_wb_fini(struct radeon_device *rdev);
void r600_cp_commit(struct radeon_device *rdev);
void r600_pcie_gart_tlb_flush(struct radeon_device *rdev);
uint32_t r600_pciep_rreg(struct radeon_device *rdev, uint32_t reg);
void r600_pciep_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
int r600_cs_parse(struct radeon_cs_parser *p);
void r600_fence_ring_emit(struct radeon_device *rdev,
                          struct radeon_fence *fence);
int r600_copy_dma(struct radeon_device *rdev,
                  uint64_t src_offset,
                  uint64_t dst_offset,
                  unsigned num_pages,
                  struct radeon_fence *fence);
int r600_irq_process(struct radeon_device *rdev);
int r600_irq_set(struct radeon_device *rdev);
int r600_gpu_reset(struct radeon_device *rdev);
int r600_set_surface_reg(struct radeon_device *rdev, int reg,
                         uint32_t tiling_flags, uint32_t pitch,
                         uint32_t offset, uint32_t obj_size);
int r600_clear_surface_reg(struct radeon_device *rdev, int reg);
void r600_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
int r600_ib_test(struct radeon_device *rdev);
int r600_ring_test(struct radeon_device *rdev);
int r600_copy_blit(struct radeon_device *rdev,
                   uint64_t src_offset, uint64_t dst_offset,
                   unsigned num_pages, struct radeon_fence *fence);

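/*
 * Like r420, the r600 and rv770 families use the newer init path: memory
 * controller, GART and CP setup is driven from their init/resume code,
 * so several hooks in the two tables below are intentionally NULL.
 */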
static struct radeon_asic r600_asic = {
        .errata = NULL,
        .init = &r600_init,
        .fini = &r600_fini,
        .suspend = &r600_suspend,
        .resume = &r600_resume,
        .cp_commit = &r600_cp_commit,
        .vram_info = NULL,
        .vga_set_state = &r600_vga_set_state,
        .gpu_reset = &r600_gpu_reset,
        .mc_init = NULL,
        .mc_fini = NULL,
        .wb_init = &r600_wb_init,
        .wb_fini = &r600_wb_fini,
        .gart_enable = NULL,
        .gart_disable = NULL,
        .gart_tlb_flush = &r600_pcie_gart_tlb_flush,
        .gart_set_page = &rs600_gart_set_page,
        .cp_init = NULL,
        .cp_fini = NULL,
        .cp_disable = NULL,
        .ring_start = NULL,
        .ring_test = &r600_ring_test,
        .ring_ib_execute = &r600_ring_ib_execute,
        .ib_test = &r600_ib_test,
        .irq_set = &r600_irq_set,
        .irq_process = &r600_irq_process,
        .fence_ring_emit = &r600_fence_ring_emit,
        .cs_parse = &r600_cs_parse,
        .copy_blit = &r600_copy_blit,
        .copy_dma = &r600_copy_blit,
        .copy = &r600_copy_blit,
        .set_engine_clock = &radeon_atom_set_engine_clock,
        .set_memory_clock = &radeon_atom_set_memory_clock,
        .set_pcie_lanes = NULL,
        .set_clock_gating = &radeon_atom_set_clock_gating,
        .set_surface_reg = r600_set_surface_reg,
        .clear_surface_reg = r600_clear_surface_reg,
        .bandwidth_update = &r520_bandwidth_update,
};

/*
 * rv770,rv730,rv710,rv740
 */
int rv770_init(struct radeon_device *rdev);
void rv770_fini(struct radeon_device *rdev);
int rv770_suspend(struct radeon_device *rdev);
int rv770_resume(struct radeon_device *rdev);
int rv770_gpu_reset(struct radeon_device *rdev);

static struct radeon_asic rv770_asic = {
        .errata = NULL,
        .init = &rv770_init,
        .fini = &rv770_fini,
        .suspend = &rv770_suspend,
        .resume = &rv770_resume,
        .cp_commit = &r600_cp_commit,
        .vram_info = NULL,
        .gpu_reset = &rv770_gpu_reset,
        .vga_set_state = &r600_vga_set_state,
        .mc_init = NULL,
        .mc_fini = NULL,
        .wb_init = &r600_wb_init,
        .wb_fini = &r600_wb_fini,
        .gart_enable = NULL,
        .gart_disable = NULL,
        .gart_tlb_flush = &r600_pcie_gart_tlb_flush,
        .gart_set_page = &rs600_gart_set_page,
        .cp_init = NULL,
        .cp_fini = NULL,
        .cp_disable = NULL,
        .ring_start = NULL,
        .ring_test = &r600_ring_test,
        .ring_ib_execute = &r600_ring_ib_execute,
        .ib_test = &r600_ib_test,
        .irq_set = &r600_irq_set,
        .irq_process = &r600_irq_process,
        .fence_ring_emit = &r600_fence_ring_emit,
        .cs_parse = &r600_cs_parse,
        .copy_blit = &r600_copy_blit,
        .copy_dma = &r600_copy_blit,
        .copy = &r600_copy_blit,
        .set_engine_clock = &radeon_atom_set_engine_clock,
        .set_memory_clock = &radeon_atom_set_memory_clock,
        .set_pcie_lanes = NULL,
        .set_clock_gating = &radeon_atom_set_clock_gating,
        .set_surface_reg = r600_set_surface_reg,
        .clear_surface_reg = r600_clear_surface_reg,
        .bandwidth_update = &r520_bandwidth_update,
};

#endif