/*
 * Copyright 2008 Advanced Micro Devices, Inc.
 * Copyright 2008 Red Hat Inc.
 * Copyright 2009 Jerome Glisse.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Dave Airlie
 *          Alex Deucher
 *          Jerome Glisse
 */
#ifndef __RADEON_ASIC_H__
#define __RADEON_ASIC_H__

/*
 * common functions
 */
void radeon_legacy_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
void radeon_legacy_set_clock_gating(struct radeon_device *rdev, int enable);

void radeon_atom_set_engine_clock(struct radeon_device *rdev, uint32_t eng_clock);
void radeon_atom_set_memory_clock(struct radeon_device *rdev, uint32_t mem_clock);
void radeon_atom_set_clock_gating(struct radeon_device *rdev, int enable);
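
/*
 * The radeon_legacy_* helpers program clocks through the legacy (COMBIOS)
 * code paths, while the radeon_atom_* helpers go through AtomBIOS command
 * tables.  Each per-ASIC table below picks one set or the other: the
 * r100/r300/rs400 tables use the legacy variants, while r420 and newer
 * parts use the Atom variants.
 */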

/*
 * r100,rv100,rs100,rv200,rs200,r200,rv250,rs300,rv280
 */
int r100_init(struct radeon_device *rdev);
int r200_init(struct radeon_device *rdev);
uint32_t r100_mm_rreg(struct radeon_device *rdev, uint32_t reg);
void r100_mm_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void r100_errata(struct radeon_device *rdev);
void r100_vram_info(struct radeon_device *rdev);
void r100_vga_set_state(struct radeon_device *rdev, bool state);
int r100_gpu_reset(struct radeon_device *rdev);
int r100_mc_init(struct radeon_device *rdev);
void r100_mc_fini(struct radeon_device *rdev);
u32 r100_get_vblank_counter(struct radeon_device *rdev, int crtc);
int r100_wb_init(struct radeon_device *rdev);
void r100_wb_fini(struct radeon_device *rdev);
int r100_pci_gart_init(struct radeon_device *rdev);
void r100_pci_gart_fini(struct radeon_device *rdev);
int r100_pci_gart_enable(struct radeon_device *rdev);
void r100_pci_gart_disable(struct radeon_device *rdev);
void r100_pci_gart_tlb_flush(struct radeon_device *rdev);
int r100_pci_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
int r100_cp_init(struct radeon_device *rdev, unsigned ring_size);
void r100_cp_fini(struct radeon_device *rdev);
void r100_cp_disable(struct radeon_device *rdev);
void r100_cp_commit(struct radeon_device *rdev);
void r100_ring_start(struct radeon_device *rdev);
int r100_irq_set(struct radeon_device *rdev);
int r100_irq_process(struct radeon_device *rdev);
void r100_fence_ring_emit(struct radeon_device *rdev,
			  struct radeon_fence *fence);
int r100_cs_parse(struct radeon_cs_parser *p);
void r100_pll_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
uint32_t r100_pll_rreg(struct radeon_device *rdev, uint32_t reg);
int r100_copy_blit(struct radeon_device *rdev,
		   uint64_t src_offset,
		   uint64_t dst_offset,
		   unsigned num_pages,
		   struct radeon_fence *fence);
int r100_set_surface_reg(struct radeon_device *rdev, int reg,
			 uint32_t tiling_flags, uint32_t pitch,
			 uint32_t offset, uint32_t obj_size);
int r100_clear_surface_reg(struct radeon_device *rdev, int reg);
void r100_bandwidth_update(struct radeon_device *rdev);
void r100_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
int r100_ib_test(struct radeon_device *rdev);
int r100_ring_test(struct radeon_device *rdev);

static struct radeon_asic r100_asic = {
	.init = &r100_init,
	.errata = &r100_errata,
	.vram_info = &r100_vram_info,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r100_gpu_reset,
	.mc_init = &r100_mc_init,
	.mc_fini = &r100_mc_fini,
	.wb_init = &r100_wb_init,
	.wb_fini = &r100_wb_fini,
	.gart_init = &r100_pci_gart_init,
	.gart_fini = &r100_pci_gart_fini,
	.gart_enable = &r100_pci_gart_enable,
	.gart_disable = &r100_pci_gart_disable,
	.gart_tlb_flush = &r100_pci_gart_tlb_flush,
	.gart_set_page = &r100_pci_gart_set_page,
	.cp_init = &r100_cp_init,
	.cp_fini = &r100_cp_fini,
	.cp_disable = &r100_cp_disable,
	.cp_commit = &r100_cp_commit,
	.ring_start = &r100_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.ib_test = &r100_ib_test,
	.irq_set = &r100_irq_set,
	.irq_process = &r100_irq_process,
	.get_vblank_counter = &r100_get_vblank_counter,
	.fence_ring_emit = &r100_fence_ring_emit,
	.cs_parse = &r100_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = NULL,
	.copy = &r100_copy_blit,
	.set_engine_clock = &radeon_legacy_set_engine_clock,
	.set_memory_clock = NULL,
	.set_pcie_lanes = NULL,
	.set_clock_gating = &radeon_legacy_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &r100_bandwidth_update,
};
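
/*
 * Each radeon_asic table in this file is a vtable of per-family hooks; the
 * rest of the driver dispatches through whichever table was selected for
 * the device at init time rather than calling r100_/r300_/r600_ functions
 * directly (the wrapper macros doing that live outside this header, in
 * radeon.h).  A minimal, purely illustrative sketch of that dispatch, using
 * a hypothetical helper name and assuming rdev->asic points at one of the
 * tables in this file (for r100_asic, ->init resolves to r100_init, and so
 * on):
 *
 *	static int radeon_asic_example_startup(struct radeon_device *rdev)
 *	{
 *		int r = rdev->asic->init(rdev);
 *		if (r)
 *			return r;
 *		rdev->asic->ring_start(rdev);
 *		return rdev->asic->ib_test ? rdev->asic->ib_test(rdev) : 0;
 *	}
 *
 * NULL entries (such as .copy_dma in r100_asic) mean the family provides no
 * such hook, so callers must check for NULL before dispatching.
 */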


/*
 * r300,r350,rv350,rv380
 */
int r300_init(struct radeon_device *rdev);
void r300_errata(struct radeon_device *rdev);
void r300_vram_info(struct radeon_device *rdev);
int r300_gpu_reset(struct radeon_device *rdev);
int r300_mc_init(struct radeon_device *rdev);
void r300_mc_fini(struct radeon_device *rdev);
void r300_ring_start(struct radeon_device *rdev);
void r300_fence_ring_emit(struct radeon_device *rdev,
			  struct radeon_fence *fence);
int r300_cs_parse(struct radeon_cs_parser *p);
int rv370_pcie_gart_init(struct radeon_device *rdev);
void rv370_pcie_gart_fini(struct radeon_device *rdev);
int rv370_pcie_gart_enable(struct radeon_device *rdev);
void rv370_pcie_gart_disable(struct radeon_device *rdev);
void rv370_pcie_gart_tlb_flush(struct radeon_device *rdev);
int rv370_pcie_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
uint32_t rv370_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
void rv370_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rv370_set_pcie_lanes(struct radeon_device *rdev, int lanes);
int r300_copy_dma(struct radeon_device *rdev,
		  uint64_t src_offset,
		  uint64_t dst_offset,
		  unsigned num_pages,
		  struct radeon_fence *fence);

static struct radeon_asic r300_asic = {
	.init = &r300_init,
	.errata = &r300_errata,
	.vram_info = &r300_vram_info,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r300_gpu_reset,
	.mc_init = &r300_mc_init,
	.mc_fini = &r300_mc_fini,
	.wb_init = &r100_wb_init,
	.wb_fini = &r100_wb_fini,
	.gart_init = &r100_pci_gart_init,
	.gart_fini = &r100_pci_gart_fini,
	.gart_enable = &r100_pci_gart_enable,
	.gart_disable = &r100_pci_gart_disable,
	.gart_tlb_flush = &r100_pci_gart_tlb_flush,
	.gart_set_page = &r100_pci_gart_set_page,
	.cp_init = &r100_cp_init,
	.cp_fini = &r100_cp_fini,
	.cp_disable = &r100_cp_disable,
	.cp_commit = &r100_cp_commit,
	.ring_start = &r300_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.ib_test = &r100_ib_test,
	.irq_set = &r100_irq_set,
	.irq_process = &r100_irq_process,
	.get_vblank_counter = &r100_get_vblank_counter,
	.fence_ring_emit = &r300_fence_ring_emit,
	.cs_parse = &r300_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = &r300_copy_dma,
	.copy = &r100_copy_blit,
	.set_engine_clock = &radeon_legacy_set_engine_clock,
	.set_memory_clock = NULL,
	.set_pcie_lanes = &rv370_set_pcie_lanes,
	.set_clock_gating = &radeon_legacy_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &r100_bandwidth_update,
};

/*
 * r420,r423,rv410
 */
extern int r420_init(struct radeon_device *rdev);
extern void r420_fini(struct radeon_device *rdev);
extern int r420_suspend(struct radeon_device *rdev);
extern int r420_resume(struct radeon_device *rdev);
static struct radeon_asic r420_asic = {
	.init = &r420_init,
	.fini = &r420_fini,
	.suspend = &r420_suspend,
	.resume = &r420_resume,
	.errata = NULL,
	.vram_info = NULL,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r300_gpu_reset,
	.mc_init = NULL,
	.mc_fini = NULL,
	.wb_init = NULL,
	.wb_fini = NULL,
	.gart_enable = NULL,
	.gart_disable = NULL,
	.gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
	.gart_set_page = &rv370_pcie_gart_set_page,
	.cp_init = NULL,
	.cp_fini = NULL,
	.cp_disable = NULL,
	.cp_commit = &r100_cp_commit,
	.ring_start = &r300_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.ib_test = NULL,
	.irq_set = &r100_irq_set,
	.irq_process = &r100_irq_process,
	.get_vblank_counter = &r100_get_vblank_counter,
	.fence_ring_emit = &r300_fence_ring_emit,
	.cs_parse = &r300_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = &r300_copy_dma,
	.copy = &r100_copy_blit,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.set_pcie_lanes = &rv370_set_pcie_lanes,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &r100_bandwidth_update,
};

/*
 * rs400,rs480
 */
void rs400_errata(struct radeon_device *rdev);
void rs400_vram_info(struct radeon_device *rdev);
int rs400_mc_init(struct radeon_device *rdev);
void rs400_mc_fini(struct radeon_device *rdev);
int rs400_gart_init(struct radeon_device *rdev);
void rs400_gart_fini(struct radeon_device *rdev);
int rs400_gart_enable(struct radeon_device *rdev);
void rs400_gart_disable(struct radeon_device *rdev);
void rs400_gart_tlb_flush(struct radeon_device *rdev);
int rs400_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
uint32_t rs400_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs400_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
static struct radeon_asic rs400_asic = {
	.init = &r300_init,
	.errata = &rs400_errata,
	.vram_info = &rs400_vram_info,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r300_gpu_reset,
	.mc_init = &rs400_mc_init,
	.mc_fini = &rs400_mc_fini,
	.wb_init = &r100_wb_init,
	.wb_fini = &r100_wb_fini,
	.gart_init = &rs400_gart_init,
	.gart_fini = &rs400_gart_fini,
	.gart_enable = &rs400_gart_enable,
	.gart_disable = &rs400_gart_disable,
	.gart_tlb_flush = &rs400_gart_tlb_flush,
	.gart_set_page = &rs400_gart_set_page,
	.cp_init = &r100_cp_init,
	.cp_fini = &r100_cp_fini,
	.cp_disable = &r100_cp_disable,
	.cp_commit = &r100_cp_commit,
	.ring_start = &r300_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.ib_test = &r100_ib_test,
	.irq_set = &r100_irq_set,
	.irq_process = &r100_irq_process,
	.get_vblank_counter = &r100_get_vblank_counter,
	.fence_ring_emit = &r300_fence_ring_emit,
	.cs_parse = &r300_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = &r300_copy_dma,
	.copy = &r100_copy_blit,
	.set_engine_clock = &radeon_legacy_set_engine_clock,
	.set_memory_clock = NULL,
	.set_pcie_lanes = NULL,
	.set_clock_gating = &radeon_legacy_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &r100_bandwidth_update,
};


/*
 * rs600.
 */
int rs600_init(struct radeon_device *rdev);
void rs600_errata(struct radeon_device *rdev);
void rs600_vram_info(struct radeon_device *rdev);
int rs600_mc_init(struct radeon_device *rdev);
void rs600_mc_fini(struct radeon_device *rdev);
int rs600_irq_set(struct radeon_device *rdev);
int rs600_irq_process(struct radeon_device *rdev);
u32 rs600_get_vblank_counter(struct radeon_device *rdev, int crtc);
int rs600_gart_init(struct radeon_device *rdev);
void rs600_gart_fini(struct radeon_device *rdev);
int rs600_gart_enable(struct radeon_device *rdev);
void rs600_gart_disable(struct radeon_device *rdev);
void rs600_gart_tlb_flush(struct radeon_device *rdev);
int rs600_gart_set_page(struct radeon_device *rdev, int i, uint64_t addr);
uint32_t rs600_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs600_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rs600_bandwidth_update(struct radeon_device *rdev);
static struct radeon_asic rs600_asic = {
	.init = &rs600_init,
	.errata = &rs600_errata,
	.vram_info = &rs600_vram_info,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r300_gpu_reset,
	.mc_init = &rs600_mc_init,
	.mc_fini = &rs600_mc_fini,
	.wb_init = &r100_wb_init,
	.wb_fini = &r100_wb_fini,
	.gart_init = &rs600_gart_init,
	.gart_fini = &rs600_gart_fini,
	.gart_enable = &rs600_gart_enable,
	.gart_disable = &rs600_gart_disable,
	.gart_tlb_flush = &rs600_gart_tlb_flush,
	.gart_set_page = &rs600_gart_set_page,
	.cp_init = &r100_cp_init,
	.cp_fini = &r100_cp_fini,
	.cp_disable = &r100_cp_disable,
	.cp_commit = &r100_cp_commit,
	.ring_start = &r300_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.ib_test = &r100_ib_test,
	.irq_set = &rs600_irq_set,
	.irq_process = &rs600_irq_process,
	.get_vblank_counter = &rs600_get_vblank_counter,
	.fence_ring_emit = &r300_fence_ring_emit,
	.cs_parse = &r300_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = &r300_copy_dma,
	.copy = &r100_copy_blit,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.set_pcie_lanes = NULL,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.bandwidth_update = &rs600_bandwidth_update,
};


/*
 * rs690,rs740
 */
void rs690_errata(struct radeon_device *rdev);
void rs690_vram_info(struct radeon_device *rdev);
int rs690_mc_init(struct radeon_device *rdev);
void rs690_mc_fini(struct radeon_device *rdev);
uint32_t rs690_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rs690_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rs690_bandwidth_update(struct radeon_device *rdev);
static struct radeon_asic rs690_asic = {
	.init = &rs600_init,
	.errata = &rs690_errata,
	.vram_info = &rs690_vram_info,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &r300_gpu_reset,
	.mc_init = &rs690_mc_init,
	.mc_fini = &rs690_mc_fini,
	.wb_init = &r100_wb_init,
	.wb_fini = &r100_wb_fini,
	.gart_init = &rs400_gart_init,
	.gart_fini = &rs400_gart_fini,
	.gart_enable = &rs400_gart_enable,
	.gart_disable = &rs400_gart_disable,
	.gart_tlb_flush = &rs400_gart_tlb_flush,
	.gart_set_page = &rs400_gart_set_page,
	.cp_init = &r100_cp_init,
	.cp_fini = &r100_cp_fini,
	.cp_disable = &r100_cp_disable,
	.cp_commit = &r100_cp_commit,
	.ring_start = &r300_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.ib_test = &r100_ib_test,
	.irq_set = &rs600_irq_set,
	.irq_process = &rs600_irq_process,
	.get_vblank_counter = &rs600_get_vblank_counter,
	.fence_ring_emit = &r300_fence_ring_emit,
	.cs_parse = &r300_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = &r300_copy_dma,
	.copy = &r300_copy_dma,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.set_pcie_lanes = NULL,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &rs690_bandwidth_update,
};


/*
 * rv515
 */
int rv515_init(struct radeon_device *rdev);
void rv515_fini(struct radeon_device *rdev);
int rv515_gpu_reset(struct radeon_device *rdev);
uint32_t rv515_mc_rreg(struct radeon_device *rdev, uint32_t reg);
void rv515_mc_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rv515_ring_start(struct radeon_device *rdev);
uint32_t rv515_pcie_rreg(struct radeon_device *rdev, uint32_t reg);
void rv515_pcie_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
void rv515_bandwidth_update(struct radeon_device *rdev);
int rv515_resume(struct radeon_device *rdev);
int rv515_suspend(struct radeon_device *rdev);
static struct radeon_asic rv515_asic = {
	.init = &rv515_init,
	.fini = &rv515_fini,
	.suspend = &rv515_suspend,
	.resume = &rv515_resume,
	.errata = NULL,
	.vram_info = NULL,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &rv515_gpu_reset,
	.mc_init = NULL,
	.mc_fini = NULL,
	.wb_init = NULL,
	.wb_fini = NULL,
	.gart_init = &rv370_pcie_gart_init,
	.gart_fini = &rv370_pcie_gart_fini,
	.gart_enable = NULL,
	.gart_disable = NULL,
	.gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
	.gart_set_page = &rv370_pcie_gart_set_page,
	.cp_init = NULL,
	.cp_fini = NULL,
	.cp_disable = NULL,
	.cp_commit = &r100_cp_commit,
	.ring_start = &rv515_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.ib_test = NULL,
	.irq_set = &rs600_irq_set,
	.irq_process = &rs600_irq_process,
	.get_vblank_counter = &rs600_get_vblank_counter,
	.fence_ring_emit = &r300_fence_ring_emit,
	.cs_parse = &r300_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = &r300_copy_dma,
	.copy = &r100_copy_blit,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.set_pcie_lanes = &rv370_set_pcie_lanes,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &rv515_bandwidth_update,
};


/*
 * r520,rv530,rv560,rv570,r580
 */
int r520_init(struct radeon_device *rdev);
int r520_resume(struct radeon_device *rdev);
static struct radeon_asic r520_asic = {
	.init = &r520_init,
	.fini = &rv515_fini,
	.suspend = &rv515_suspend,
	.resume = &r520_resume,
	.errata = NULL,
	.vram_info = NULL,
	.vga_set_state = &r100_vga_set_state,
	.gpu_reset = &rv515_gpu_reset,
	.mc_init = NULL,
	.mc_fini = NULL,
	.wb_init = NULL,
	.wb_fini = NULL,
	.gart_init = NULL,
	.gart_fini = NULL,
	.gart_enable = NULL,
	.gart_disable = NULL,
	.gart_tlb_flush = &rv370_pcie_gart_tlb_flush,
	.gart_set_page = &rv370_pcie_gart_set_page,
	.cp_init = NULL,
	.cp_fini = NULL,
	.cp_disable = NULL,
	.cp_commit = &r100_cp_commit,
	.ring_start = &rv515_ring_start,
	.ring_test = &r100_ring_test,
	.ring_ib_execute = &r100_ring_ib_execute,
	.ib_test = NULL,
	.irq_set = &rs600_irq_set,
	.irq_process = &rs600_irq_process,
	.get_vblank_counter = &rs600_get_vblank_counter,
	.fence_ring_emit = &r300_fence_ring_emit,
	.cs_parse = &r300_cs_parse,
	.copy_blit = &r100_copy_blit,
	.copy_dma = &r300_copy_dma,
	.copy = &r100_copy_blit,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.set_pcie_lanes = &rv370_set_pcie_lanes,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.set_surface_reg = r100_set_surface_reg,
	.clear_surface_reg = r100_clear_surface_reg,
	.bandwidth_update = &rv515_bandwidth_update,
};

/*
 * r600,rv610,rv630,rv620,rv635,rv670,rs780,rs880
 */
int r600_init(struct radeon_device *rdev);
void r600_fini(struct radeon_device *rdev);
int r600_suspend(struct radeon_device *rdev);
int r600_resume(struct radeon_device *rdev);
void r600_vga_set_state(struct radeon_device *rdev, bool state);
int r600_wb_init(struct radeon_device *rdev);
void r600_wb_fini(struct radeon_device *rdev);
void r600_cp_commit(struct radeon_device *rdev);
void r600_pcie_gart_tlb_flush(struct radeon_device *rdev);
uint32_t r600_pciep_rreg(struct radeon_device *rdev, uint32_t reg);
void r600_pciep_wreg(struct radeon_device *rdev, uint32_t reg, uint32_t v);
int r600_cs_parse(struct radeon_cs_parser *p);
void r600_fence_ring_emit(struct radeon_device *rdev,
			  struct radeon_fence *fence);
int r600_copy_dma(struct radeon_device *rdev,
		  uint64_t src_offset,
		  uint64_t dst_offset,
		  unsigned num_pages,
		  struct radeon_fence *fence);
int r600_irq_process(struct radeon_device *rdev);
int r600_irq_set(struct radeon_device *rdev);
int r600_gpu_reset(struct radeon_device *rdev);
int r600_set_surface_reg(struct radeon_device *rdev, int reg,
			 uint32_t tiling_flags, uint32_t pitch,
			 uint32_t offset, uint32_t obj_size);
int r600_clear_surface_reg(struct radeon_device *rdev, int reg);
void r600_ring_ib_execute(struct radeon_device *rdev, struct radeon_ib *ib);
int r600_ib_test(struct radeon_device *rdev);
int r600_ring_test(struct radeon_device *rdev);
int r600_copy_blit(struct radeon_device *rdev,
		   uint64_t src_offset, uint64_t dst_offset,
		   unsigned num_pages, struct radeon_fence *fence);

static struct radeon_asic r600_asic = {
	.errata = NULL,
	.init = &r600_init,
	.fini = &r600_fini,
	.suspend = &r600_suspend,
	.resume = &r600_resume,
	.cp_commit = &r600_cp_commit,
	.vram_info = NULL,
	.vga_set_state = &r600_vga_set_state,
	.gpu_reset = &r600_gpu_reset,
	.mc_init = NULL,
	.mc_fini = NULL,
	.wb_init = &r600_wb_init,
	.wb_fini = &r600_wb_fini,
	.gart_enable = NULL,
	.gart_disable = NULL,
	.gart_tlb_flush = &r600_pcie_gart_tlb_flush,
	.gart_set_page = &rs600_gart_set_page,
	.cp_init = NULL,
	.cp_fini = NULL,
	.cp_disable = NULL,
	.ring_start = NULL,
	.ring_test = &r600_ring_test,
	.ring_ib_execute = &r600_ring_ib_execute,
	.ib_test = &r600_ib_test,
	.irq_set = &r600_irq_set,
	.irq_process = &r600_irq_process,
	.fence_ring_emit = &r600_fence_ring_emit,
	.cs_parse = &r600_cs_parse,
	.copy_blit = &r600_copy_blit,
	.copy_dma = &r600_copy_blit,
	.copy = &r600_copy_blit,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.set_pcie_lanes = NULL,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.set_surface_reg = r600_set_surface_reg,
	.clear_surface_reg = r600_clear_surface_reg,
	.bandwidth_update = &rv515_bandwidth_update,
};

/*
 * rv770,rv730,rv710,rv740
 */
int rv770_init(struct radeon_device *rdev);
void rv770_fini(struct radeon_device *rdev);
int rv770_suspend(struct radeon_device *rdev);
int rv770_resume(struct radeon_device *rdev);
int rv770_gpu_reset(struct radeon_device *rdev);

static struct radeon_asic rv770_asic = {
	.errata = NULL,
	.init = &rv770_init,
	.fini = &rv770_fini,
	.suspend = &rv770_suspend,
	.resume = &rv770_resume,
	.cp_commit = &r600_cp_commit,
	.vram_info = NULL,
	.gpu_reset = &rv770_gpu_reset,
	.vga_set_state = &r600_vga_set_state,
	.mc_init = NULL,
	.mc_fini = NULL,
	.wb_init = &r600_wb_init,
	.wb_fini = &r600_wb_fini,
	.gart_enable = NULL,
	.gart_disable = NULL,
	.gart_tlb_flush = &r600_pcie_gart_tlb_flush,
	.gart_set_page = &rs600_gart_set_page,
	.cp_init = NULL,
	.cp_fini = NULL,
	.cp_disable = NULL,
	.ring_start = NULL,
	.ring_test = &r600_ring_test,
	.ring_ib_execute = &r600_ring_ib_execute,
	.ib_test = &r600_ib_test,
	.irq_set = &r600_irq_set,
	.irq_process = &r600_irq_process,
	.fence_ring_emit = &r600_fence_ring_emit,
	.cs_parse = &r600_cs_parse,
	.copy_blit = &r600_copy_blit,
	.copy_dma = &r600_copy_blit,
	.copy = &r600_copy_blit,
	.set_engine_clock = &radeon_atom_set_engine_clock,
	.set_memory_clock = &radeon_atom_set_memory_clock,
	.set_pcie_lanes = NULL,
	.set_clock_gating = &radeon_atom_set_clock_gating,
	.set_surface_reg = r600_set_surface_reg,
	.clear_surface_reg = r600_clear_surface_reg,
	.bandwidth_update = &rv515_bandwidth_update,
};

#endif