include cleanup: Update gfp.h and slab.h includes to prepare for breaking implicit...
drivers/gpu/drm/nouveau/nouveau_state.c
/*
 * Copyright 2005 Stephane Marchesin
 * Copyright 2008 Stuart Bennett
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
 * PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
 * DEALINGS IN THE SOFTWARE.
 */

#include <linux/swab.h>
#include <linux/slab.h>
#include "drmP.h"
#include "drm.h"
#include "drm_sarea.h"
#include "drm_crtc_helper.h"
#include <linux/vgaarb.h>
#include <linux/vga_switcheroo.h>

#include "nouveau_drv.h"
#include "nouveau_drm.h"
#include "nv50_display.h"

static int nouveau_stub_init(struct drm_device *dev) { return 0; }
static void nouveau_stub_takedown(struct drm_device *dev) {}

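/* Fill in the engine function pointers for this chipset.  The upper nibble
 * of the chipset id selects the hardware family (NV04/NV10/NV20/NV30/
 * NV40/NV50), and each family gets its own instmem, PMC, PTIMER, PFB,
 * PGRAPH and PFIFO hooks.
 */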
static int nouveau_init_engine_ptrs(struct drm_device *dev)
{
        struct drm_nouveau_private *dev_priv = dev->dev_private;
        struct nouveau_engine *engine = &dev_priv->engine;

        switch (dev_priv->chipset & 0xf0) {
        case 0x00:
                engine->instmem.init = nv04_instmem_init;
                engine->instmem.takedown = nv04_instmem_takedown;
                engine->instmem.suspend = nv04_instmem_suspend;
                engine->instmem.resume = nv04_instmem_resume;
                engine->instmem.populate = nv04_instmem_populate;
                engine->instmem.clear = nv04_instmem_clear;
                engine->instmem.bind = nv04_instmem_bind;
                engine->instmem.unbind = nv04_instmem_unbind;
                engine->instmem.prepare_access = nv04_instmem_prepare_access;
                engine->instmem.finish_access = nv04_instmem_finish_access;
                engine->mc.init = nv04_mc_init;
                engine->mc.takedown = nv04_mc_takedown;
                engine->timer.init = nv04_timer_init;
                engine->timer.read = nv04_timer_read;
                engine->timer.takedown = nv04_timer_takedown;
                engine->fb.init = nv04_fb_init;
                engine->fb.takedown = nv04_fb_takedown;
                engine->graph.grclass = nv04_graph_grclass;
                engine->graph.init = nv04_graph_init;
                engine->graph.takedown = nv04_graph_takedown;
                engine->graph.fifo_access = nv04_graph_fifo_access;
                engine->graph.channel = nv04_graph_channel;
                engine->graph.create_context = nv04_graph_create_context;
                engine->graph.destroy_context = nv04_graph_destroy_context;
                engine->graph.load_context = nv04_graph_load_context;
                engine->graph.unload_context = nv04_graph_unload_context;
                engine->fifo.channels = 16;
                engine->fifo.init = nv04_fifo_init;
                engine->fifo.takedown = nouveau_stub_takedown;
                engine->fifo.disable = nv04_fifo_disable;
                engine->fifo.enable = nv04_fifo_enable;
                engine->fifo.reassign = nv04_fifo_reassign;
                engine->fifo.cache_flush = nv04_fifo_cache_flush;
                engine->fifo.cache_pull = nv04_fifo_cache_pull;
                engine->fifo.channel_id = nv04_fifo_channel_id;
                engine->fifo.create_context = nv04_fifo_create_context;
                engine->fifo.destroy_context = nv04_fifo_destroy_context;
                engine->fifo.load_context = nv04_fifo_load_context;
                engine->fifo.unload_context = nv04_fifo_unload_context;
                break;
        case 0x10:
                engine->instmem.init = nv04_instmem_init;
                engine->instmem.takedown = nv04_instmem_takedown;
                engine->instmem.suspend = nv04_instmem_suspend;
                engine->instmem.resume = nv04_instmem_resume;
                engine->instmem.populate = nv04_instmem_populate;
                engine->instmem.clear = nv04_instmem_clear;
                engine->instmem.bind = nv04_instmem_bind;
                engine->instmem.unbind = nv04_instmem_unbind;
                engine->instmem.prepare_access = nv04_instmem_prepare_access;
                engine->instmem.finish_access = nv04_instmem_finish_access;
                engine->mc.init = nv04_mc_init;
                engine->mc.takedown = nv04_mc_takedown;
                engine->timer.init = nv04_timer_init;
                engine->timer.read = nv04_timer_read;
                engine->timer.takedown = nv04_timer_takedown;
                engine->fb.init = nv10_fb_init;
                engine->fb.takedown = nv10_fb_takedown;
                engine->fb.set_region_tiling = nv10_fb_set_region_tiling;
                engine->graph.grclass = nv10_graph_grclass;
                engine->graph.init = nv10_graph_init;
                engine->graph.takedown = nv10_graph_takedown;
                engine->graph.channel = nv10_graph_channel;
                engine->graph.create_context = nv10_graph_create_context;
                engine->graph.destroy_context = nv10_graph_destroy_context;
                engine->graph.fifo_access = nv04_graph_fifo_access;
                engine->graph.load_context = nv10_graph_load_context;
                engine->graph.unload_context = nv10_graph_unload_context;
                engine->graph.set_region_tiling = nv10_graph_set_region_tiling;
                engine->fifo.channels = 32;
                engine->fifo.init = nv10_fifo_init;
                engine->fifo.takedown = nouveau_stub_takedown;
                engine->fifo.disable = nv04_fifo_disable;
                engine->fifo.enable = nv04_fifo_enable;
                engine->fifo.reassign = nv04_fifo_reassign;
                engine->fifo.cache_flush = nv04_fifo_cache_flush;
                engine->fifo.cache_pull = nv04_fifo_cache_pull;
                engine->fifo.channel_id = nv10_fifo_channel_id;
                engine->fifo.create_context = nv10_fifo_create_context;
                engine->fifo.destroy_context = nv10_fifo_destroy_context;
                engine->fifo.load_context = nv10_fifo_load_context;
                engine->fifo.unload_context = nv10_fifo_unload_context;
                break;
        case 0x20:
                engine->instmem.init = nv04_instmem_init;
                engine->instmem.takedown = nv04_instmem_takedown;
                engine->instmem.suspend = nv04_instmem_suspend;
                engine->instmem.resume = nv04_instmem_resume;
                engine->instmem.populate = nv04_instmem_populate;
                engine->instmem.clear = nv04_instmem_clear;
                engine->instmem.bind = nv04_instmem_bind;
                engine->instmem.unbind = nv04_instmem_unbind;
                engine->instmem.prepare_access = nv04_instmem_prepare_access;
                engine->instmem.finish_access = nv04_instmem_finish_access;
                engine->mc.init = nv04_mc_init;
                engine->mc.takedown = nv04_mc_takedown;
                engine->timer.init = nv04_timer_init;
                engine->timer.read = nv04_timer_read;
                engine->timer.takedown = nv04_timer_takedown;
                engine->fb.init = nv10_fb_init;
                engine->fb.takedown = nv10_fb_takedown;
                engine->fb.set_region_tiling = nv10_fb_set_region_tiling;
                engine->graph.grclass = nv20_graph_grclass;
                engine->graph.init = nv20_graph_init;
                engine->graph.takedown = nv20_graph_takedown;
                engine->graph.channel = nv10_graph_channel;
                engine->graph.create_context = nv20_graph_create_context;
                engine->graph.destroy_context = nv20_graph_destroy_context;
                engine->graph.fifo_access = nv04_graph_fifo_access;
                engine->graph.load_context = nv20_graph_load_context;
                engine->graph.unload_context = nv20_graph_unload_context;
                engine->graph.set_region_tiling = nv20_graph_set_region_tiling;
                engine->fifo.channels = 32;
                engine->fifo.init = nv10_fifo_init;
                engine->fifo.takedown = nouveau_stub_takedown;
                engine->fifo.disable = nv04_fifo_disable;
                engine->fifo.enable = nv04_fifo_enable;
                engine->fifo.reassign = nv04_fifo_reassign;
                engine->fifo.cache_flush = nv04_fifo_cache_flush;
                engine->fifo.cache_pull = nv04_fifo_cache_pull;
                engine->fifo.channel_id = nv10_fifo_channel_id;
                engine->fifo.create_context = nv10_fifo_create_context;
                engine->fifo.destroy_context = nv10_fifo_destroy_context;
                engine->fifo.load_context = nv10_fifo_load_context;
                engine->fifo.unload_context = nv10_fifo_unload_context;
                break;
        case 0x30:
                engine->instmem.init = nv04_instmem_init;
                engine->instmem.takedown = nv04_instmem_takedown;
                engine->instmem.suspend = nv04_instmem_suspend;
                engine->instmem.resume = nv04_instmem_resume;
                engine->instmem.populate = nv04_instmem_populate;
                engine->instmem.clear = nv04_instmem_clear;
                engine->instmem.bind = nv04_instmem_bind;
                engine->instmem.unbind = nv04_instmem_unbind;
                engine->instmem.prepare_access = nv04_instmem_prepare_access;
                engine->instmem.finish_access = nv04_instmem_finish_access;
                engine->mc.init = nv04_mc_init;
                engine->mc.takedown = nv04_mc_takedown;
                engine->timer.init = nv04_timer_init;
                engine->timer.read = nv04_timer_read;
                engine->timer.takedown = nv04_timer_takedown;
                engine->fb.init = nv10_fb_init;
                engine->fb.takedown = nv10_fb_takedown;
                engine->fb.set_region_tiling = nv10_fb_set_region_tiling;
                engine->graph.grclass = nv30_graph_grclass;
                engine->graph.init = nv30_graph_init;
                engine->graph.takedown = nv20_graph_takedown;
                engine->graph.fifo_access = nv04_graph_fifo_access;
                engine->graph.channel = nv10_graph_channel;
                engine->graph.create_context = nv20_graph_create_context;
                engine->graph.destroy_context = nv20_graph_destroy_context;
                engine->graph.load_context = nv20_graph_load_context;
                engine->graph.unload_context = nv20_graph_unload_context;
                engine->graph.set_region_tiling = nv20_graph_set_region_tiling;
                engine->fifo.channels = 32;
                engine->fifo.init = nv10_fifo_init;
                engine->fifo.takedown = nouveau_stub_takedown;
                engine->fifo.disable = nv04_fifo_disable;
                engine->fifo.enable = nv04_fifo_enable;
                engine->fifo.reassign = nv04_fifo_reassign;
                engine->fifo.cache_flush = nv04_fifo_cache_flush;
                engine->fifo.cache_pull = nv04_fifo_cache_pull;
                engine->fifo.channel_id = nv10_fifo_channel_id;
                engine->fifo.create_context = nv10_fifo_create_context;
                engine->fifo.destroy_context = nv10_fifo_destroy_context;
                engine->fifo.load_context = nv10_fifo_load_context;
                engine->fifo.unload_context = nv10_fifo_unload_context;
                break;
        case 0x40:
        case 0x60:
                engine->instmem.init = nv04_instmem_init;
                engine->instmem.takedown = nv04_instmem_takedown;
                engine->instmem.suspend = nv04_instmem_suspend;
                engine->instmem.resume = nv04_instmem_resume;
                engine->instmem.populate = nv04_instmem_populate;
                engine->instmem.clear = nv04_instmem_clear;
                engine->instmem.bind = nv04_instmem_bind;
                engine->instmem.unbind = nv04_instmem_unbind;
                engine->instmem.prepare_access = nv04_instmem_prepare_access;
                engine->instmem.finish_access = nv04_instmem_finish_access;
                engine->mc.init = nv40_mc_init;
                engine->mc.takedown = nv40_mc_takedown;
                engine->timer.init = nv04_timer_init;
                engine->timer.read = nv04_timer_read;
                engine->timer.takedown = nv04_timer_takedown;
                engine->fb.init = nv40_fb_init;
                engine->fb.takedown = nv40_fb_takedown;
                engine->fb.set_region_tiling = nv40_fb_set_region_tiling;
                engine->graph.grclass = nv40_graph_grclass;
                engine->graph.init = nv40_graph_init;
                engine->graph.takedown = nv40_graph_takedown;
                engine->graph.fifo_access = nv04_graph_fifo_access;
                engine->graph.channel = nv40_graph_channel;
                engine->graph.create_context = nv40_graph_create_context;
                engine->graph.destroy_context = nv40_graph_destroy_context;
                engine->graph.load_context = nv40_graph_load_context;
                engine->graph.unload_context = nv40_graph_unload_context;
                engine->graph.set_region_tiling = nv40_graph_set_region_tiling;
                engine->fifo.channels = 32;
                engine->fifo.init = nv40_fifo_init;
                engine->fifo.takedown = nouveau_stub_takedown;
                engine->fifo.disable = nv04_fifo_disable;
                engine->fifo.enable = nv04_fifo_enable;
                engine->fifo.reassign = nv04_fifo_reassign;
                engine->fifo.cache_flush = nv04_fifo_cache_flush;
                engine->fifo.cache_pull = nv04_fifo_cache_pull;
                engine->fifo.channel_id = nv10_fifo_channel_id;
                engine->fifo.create_context = nv40_fifo_create_context;
                engine->fifo.destroy_context = nv40_fifo_destroy_context;
                engine->fifo.load_context = nv40_fifo_load_context;
                engine->fifo.unload_context = nv40_fifo_unload_context;
                break;
        case 0x50:
        case 0x80: /* gotta love NVIDIA's consistency.. */
        case 0x90:
        case 0xA0:
                engine->instmem.init = nv50_instmem_init;
                engine->instmem.takedown = nv50_instmem_takedown;
                engine->instmem.suspend = nv50_instmem_suspend;
                engine->instmem.resume = nv50_instmem_resume;
                engine->instmem.populate = nv50_instmem_populate;
                engine->instmem.clear = nv50_instmem_clear;
                engine->instmem.bind = nv50_instmem_bind;
                engine->instmem.unbind = nv50_instmem_unbind;
                engine->instmem.prepare_access = nv50_instmem_prepare_access;
                engine->instmem.finish_access = nv50_instmem_finish_access;
                engine->mc.init = nv50_mc_init;
                engine->mc.takedown = nv50_mc_takedown;
                engine->timer.init = nv04_timer_init;
                engine->timer.read = nv04_timer_read;
                engine->timer.takedown = nv04_timer_takedown;
                engine->fb.init = nouveau_stub_init;
                engine->fb.takedown = nouveau_stub_takedown;
                engine->graph.grclass = nv50_graph_grclass;
                engine->graph.init = nv50_graph_init;
                engine->graph.takedown = nv50_graph_takedown;
                engine->graph.fifo_access = nv50_graph_fifo_access;
                engine->graph.channel = nv50_graph_channel;
                engine->graph.create_context = nv50_graph_create_context;
                engine->graph.destroy_context = nv50_graph_destroy_context;
                engine->graph.load_context = nv50_graph_load_context;
                engine->graph.unload_context = nv50_graph_unload_context;
                engine->fifo.channels = 128;
                engine->fifo.init = nv50_fifo_init;
                engine->fifo.takedown = nv50_fifo_takedown;
                engine->fifo.disable = nv04_fifo_disable;
                engine->fifo.enable = nv04_fifo_enable;
                engine->fifo.reassign = nv04_fifo_reassign;
                engine->fifo.channel_id = nv50_fifo_channel_id;
                engine->fifo.create_context = nv50_fifo_create_context;
                engine->fifo.destroy_context = nv50_fifo_destroy_context;
                engine->fifo.load_context = nv50_fifo_load_context;
                engine->fifo.unload_context = nv50_fifo_unload_context;
                break;
        default:
                NV_ERROR(dev, "NV%02x unsupported\n", dev_priv->chipset);
                return 1;
        }

        return 0;
}

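/* VGA arbiter decode callback: enable or disable legacy VGA decoding on
 * this card and report which I/O and memory ranges it still decodes.
 */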
static unsigned int
nouveau_vga_set_decode(void *priv, bool state)
{
        struct drm_device *dev = priv;
        struct drm_nouveau_private *dev_priv = dev->dev_private;

        if (dev_priv->chipset >= 0x40)
                nv_wr32(dev, 0x88054, state);
        else
                nv_wr32(dev, 0x1854, state);

        if (state)
                return VGA_RSRC_LEGACY_IO | VGA_RSRC_LEGACY_MEM |
                       VGA_RSRC_NORMAL_IO | VGA_RSRC_NORMAL_MEM;
        else
                return VGA_RSRC_NORMAL_IO | VGA_RSRC_NORMAL_MEM;
}

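/* Allocate the driver's own channel and attach DMA objects covering VRAM
 * (NvDmaVRAM) and the GART aperture (NvDmaGART) to it.
 */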
static int
nouveau_card_init_channel(struct drm_device *dev)
{
        struct drm_nouveau_private *dev_priv = dev->dev_private;
        struct nouveau_gpuobj *gpuobj;
        int ret;

        ret = nouveau_channel_alloc(dev, &dev_priv->channel,
                                    (struct drm_file *)-2,
                                    NvDmaFB, NvDmaTT);
        if (ret)
                return ret;

        gpuobj = NULL;
        ret = nouveau_gpuobj_dma_new(dev_priv->channel, NV_CLASS_DMA_IN_MEMORY,
                                     0, nouveau_mem_fb_amount(dev),
                                     NV_DMA_ACCESS_RW, NV_DMA_TARGET_VIDMEM,
                                     &gpuobj);
        if (ret)
                goto out_err;

        ret = nouveau_gpuobj_ref_add(dev, dev_priv->channel, NvDmaVRAM,
                                     gpuobj, NULL);
        if (ret)
                goto out_err;

        gpuobj = NULL;
        ret = nouveau_gpuobj_gart_dma_new(dev_priv->channel, 0,
                                          dev_priv->gart_info.aper_size,
                                          NV_DMA_ACCESS_RW, &gpuobj, NULL);
        if (ret)
                goto out_err;

        ret = nouveau_gpuobj_ref_add(dev, dev_priv->channel, NvDmaGART,
                                     gpuobj, NULL);
        if (ret)
                goto out_err;

        return 0;
out_err:
        nouveau_gpuobj_del(dev, &gpuobj);
        nouveau_channel_free(dev_priv->channel);
        dev_priv->channel = NULL;
        return ret;
}

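/* vga_switcheroo hooks: suspend or resume the device when the mux switches
 * away from or back to nouveau, and only allow switching while no client
 * has the DRM device open.
 */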
static void nouveau_switcheroo_set_state(struct pci_dev *pdev,
                                         enum vga_switcheroo_state state)
{
        pm_message_t pmm = { .event = PM_EVENT_SUSPEND };
        if (state == VGA_SWITCHEROO_ON) {
                printk(KERN_ERR "VGA switcheroo: switched nouveau on\n");
                nouveau_pci_resume(pdev);
        } else {
                printk(KERN_ERR "VGA switcheroo: switched nouveau off\n");
                nouveau_pci_suspend(pdev, pmm);
        }
}

static bool nouveau_switcheroo_can_switch(struct pci_dev *pdev)
{
        struct drm_device *dev = pci_get_drvdata(pdev);
        bool can_switch;

        spin_lock(&dev->count_lock);
        can_switch = (dev->open_count == 0);
        spin_unlock(&dev->count_lock);
        return can_switch;
}

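/* Bring the card up: register with the VGA arbiter and vga_switcheroo,
 * parse the BIOS, then initialise instance memory, the memory manager and
 * each engine (PMC, PTIMER, PFB, PGRAPH, PFIFO) in order, unwinding in
 * reverse on failure.
 */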
int
nouveau_card_init(struct drm_device *dev)
{
        struct drm_nouveau_private *dev_priv = dev->dev_private;
        struct nouveau_engine *engine;
        int ret;

        NV_DEBUG(dev, "prev state = %d\n", dev_priv->init_state);

        if (dev_priv->init_state == NOUVEAU_CARD_INIT_DONE)
                return 0;

        vga_client_register(dev->pdev, dev, NULL, nouveau_vga_set_decode);
        vga_switcheroo_register_client(dev->pdev, nouveau_switcheroo_set_state,
                                       nouveau_switcheroo_can_switch);

        /* Initialise internal driver API hooks */
        ret = nouveau_init_engine_ptrs(dev);
        if (ret)
                goto out;
        engine = &dev_priv->engine;
        dev_priv->init_state = NOUVEAU_CARD_INIT_FAILED;
        spin_lock_init(&dev_priv->context_switch_lock);

        /* Parse BIOS tables / Run init tables if card not POSTed */
        if (drm_core_check_feature(dev, DRIVER_MODESET)) {
                ret = nouveau_bios_init(dev);
                if (ret)
                        goto out;
        }

        ret = nouveau_gpuobj_early_init(dev);
        if (ret)
                goto out_bios;

        /* Initialise instance memory, must happen before mem_init so we
         * know exactly how much VRAM we're able to use for "normal"
         * purposes.
         */
        ret = engine->instmem.init(dev);
        if (ret)
                goto out_gpuobj_early;

        /* Setup the memory manager */
        ret = nouveau_mem_init(dev);
        if (ret)
                goto out_instmem;

        ret = nouveau_gpuobj_init(dev);
        if (ret)
                goto out_mem;

        /* PMC */
        ret = engine->mc.init(dev);
        if (ret)
                goto out_gpuobj;

        /* PTIMER */
        ret = engine->timer.init(dev);
        if (ret)
                goto out_mc;

        /* PFB */
        ret = engine->fb.init(dev);
        if (ret)
                goto out_timer;

        if (nouveau_noaccel)
                engine->graph.accel_blocked = true;
        else {
                /* PGRAPH */
                ret = engine->graph.init(dev);
                if (ret)
                        goto out_fb;

                /* PFIFO */
                ret = engine->fifo.init(dev);
                if (ret)
                        goto out_graph;
        }

        /* This calls irq_preinstall, registers the irq handler and
         * calls irq_postinstall
         */
        ret = drm_irq_install(dev);
        if (ret)
                goto out_fifo;

        ret = drm_vblank_init(dev, 0);
        if (ret)
                goto out_irq;

        /* what about PVIDEO/PCRTC/PRAMDAC etc? */

        if (!engine->graph.accel_blocked) {
                ret = nouveau_card_init_channel(dev);
                if (ret)
                        goto out_irq;
        }

        if (drm_core_check_feature(dev, DRIVER_MODESET)) {
                if (dev_priv->card_type >= NV_50)
                        ret = nv50_display_create(dev);
                else
                        ret = nv04_display_create(dev);
                if (ret)
                        goto out_irq;
        }

        ret = nouveau_backlight_init(dev);
        if (ret)
                NV_ERROR(dev, "Error %d registering backlight\n", ret);

        dev_priv->init_state = NOUVEAU_CARD_INIT_DONE;

        if (drm_core_check_feature(dev, DRIVER_MODESET))
                drm_helper_initial_config(dev);

        return 0;

out_irq:
        drm_irq_uninstall(dev);
out_fifo:
        if (!nouveau_noaccel)
                engine->fifo.takedown(dev);
out_graph:
        if (!nouveau_noaccel)
                engine->graph.takedown(dev);
out_fb:
        engine->fb.takedown(dev);
out_timer:
        engine->timer.takedown(dev);
out_mc:
        engine->mc.takedown(dev);
out_gpuobj:
        nouveau_gpuobj_takedown(dev);
out_mem:
        nouveau_mem_close(dev);
out_instmem:
        engine->instmem.takedown(dev);
out_gpuobj_early:
        nouveau_gpuobj_late_takedown(dev);
out_bios:
        nouveau_bios_takedown(dev);
out:
        vga_client_register(dev->pdev, NULL, NULL, NULL);
        return ret;
}

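/* Tear the card down in roughly the reverse order of nouveau_card_init(). */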
static void nouveau_card_takedown(struct drm_device *dev)
{
        struct drm_nouveau_private *dev_priv = dev->dev_private;
        struct nouveau_engine *engine = &dev_priv->engine;

        NV_DEBUG(dev, "prev state = %d\n", dev_priv->init_state);

        if (dev_priv->init_state != NOUVEAU_CARD_INIT_DOWN) {
                nouveau_backlight_exit(dev);

                if (dev_priv->channel) {
                        nouveau_channel_free(dev_priv->channel);
                        dev_priv->channel = NULL;
                }

                if (!nouveau_noaccel) {
                        engine->fifo.takedown(dev);
                        engine->graph.takedown(dev);
                }
                engine->fb.takedown(dev);
                engine->timer.takedown(dev);
                engine->mc.takedown(dev);

                mutex_lock(&dev->struct_mutex);
                ttm_bo_clean_mm(&dev_priv->ttm.bdev, TTM_PL_VRAM);
                ttm_bo_clean_mm(&dev_priv->ttm.bdev, TTM_PL_TT);
                mutex_unlock(&dev->struct_mutex);
                nouveau_sgdma_takedown(dev);

                nouveau_gpuobj_takedown(dev);
                nouveau_mem_close(dev);
                engine->instmem.takedown(dev);

                if (drm_core_check_feature(dev, DRIVER_MODESET))
                        drm_irq_uninstall(dev);

                nouveau_gpuobj_late_takedown(dev);
                nouveau_bios_takedown(dev);

                vga_client_register(dev->pdev, NULL, NULL, NULL);

                dev_priv->init_state = NOUVEAU_CARD_INIT_DOWN;
        }
}

/* here a client dies, release the stuff that was allocated for its
 * file_priv */
void nouveau_preclose(struct drm_device *dev, struct drm_file *file_priv)
{
        nouveau_channel_cleanup(dev, file_priv);
}

/* first module load, setup the mmio/fb mapping */
/* KMS: we need mmio at load time, not when the first drm client opens. */
int nouveau_firstopen(struct drm_device *dev)
{
        return 0;
}

/* if we have an OF card, copy vbios to RAMIN */
static void nouveau_OF_copy_vbios_to_ramin(struct drm_device *dev)
{
#if defined(__powerpc__)
        int size, i;
        const uint32_t *bios;
        struct device_node *dn = pci_device_to_OF_node(dev->pdev);
        if (!dn) {
                NV_INFO(dev, "Unable to get the OF node\n");
                return;
        }

        bios = of_get_property(dn, "NVDA,BMP", &size);
        if (bios) {
                for (i = 0; i < size; i += 4)
                        nv_wi32(dev, i, bios[i/4]);
                NV_INFO(dev, "OF bios successfully copied (%d bytes)\n", size);
        } else {
                NV_INFO(dev, "Unable to get the OF bios\n");
        }
#endif
}

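/* First stage of device initialisation: map the register BAR, detect the
 * chipset from PMC_BOOT_0, map the RAMIN aperture and, for KMS, initialise
 * the card immediately.
 */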
int nouveau_load(struct drm_device *dev, unsigned long flags)
{
        struct drm_nouveau_private *dev_priv;
        uint32_t reg0;
        resource_size_t mmio_start_offs;

        dev_priv = kzalloc(sizeof(*dev_priv), GFP_KERNEL);
        if (!dev_priv)
                return -ENOMEM;
        dev->dev_private = dev_priv;
        dev_priv->dev = dev;

        dev_priv->flags = flags & NOUVEAU_FLAGS;
        dev_priv->init_state = NOUVEAU_CARD_INIT_DOWN;

        NV_DEBUG(dev, "vendor: 0x%X device: 0x%X class: 0x%X\n",
                 dev->pci_vendor, dev->pci_device, dev->pdev->class);

        dev_priv->wq = create_workqueue("nouveau");
        if (!dev_priv->wq)
                return -EINVAL;

        /* resource 0 is mmio regs */
        /* resource 1 is linear FB */
        /* resource 2 is RAMIN (mmio regs + 0x1000000) */
        /* resource 6 is bios */

        /* map the mmio regs */
        mmio_start_offs = pci_resource_start(dev->pdev, 0);
        dev_priv->mmio = ioremap(mmio_start_offs, 0x00800000);
        if (!dev_priv->mmio) {
                NV_ERROR(dev, "Unable to initialize the mmio mapping. "
                         "Please report your setup to " DRIVER_EMAIL "\n");
                return -EINVAL;
        }
        NV_DEBUG(dev, "regs mapped ok at 0x%llx\n",
                 (unsigned long long)mmio_start_offs);

#ifdef __BIG_ENDIAN
        /* Put the card in BE mode if it's not */
        if (nv_rd32(dev, NV03_PMC_BOOT_1))
                nv_wr32(dev, NV03_PMC_BOOT_1, 0x00000001);

        DRM_MEMORYBARRIER();
#endif

        /* Time to determine the card architecture */
        reg0 = nv_rd32(dev, NV03_PMC_BOOT_0);

        /* We're dealing with >=NV10 */
        if ((reg0 & 0x0f000000) > 0) {
                /* Bits 27-20 contain the architecture in hex */
                dev_priv->chipset = (reg0 & 0xff00000) >> 20;
        /* NV04 or NV05 */
        } else if ((reg0 & 0xff00fff0) == 0x20004000) {
                if (reg0 & 0x00f00000)
                        dev_priv->chipset = 0x05;
                else
                        dev_priv->chipset = 0x04;
        } else
                dev_priv->chipset = 0xff;

        switch (dev_priv->chipset & 0xf0) {
        case 0x00:
        case 0x10:
        case 0x20:
        case 0x30:
                dev_priv->card_type = dev_priv->chipset & 0xf0;
                break;
        case 0x40:
        case 0x60:
                dev_priv->card_type = NV_40;
                break;
        case 0x50:
        case 0x80:
        case 0x90:
        case 0xa0:
                dev_priv->card_type = NV_50;
                break;
        default:
                NV_INFO(dev, "Unsupported chipset 0x%08x\n", reg0);
                return -EINVAL;
        }

        NV_INFO(dev, "Detected an NV%2x generation card (0x%08x)\n",
                dev_priv->card_type, reg0);

        /* map larger RAMIN aperture on NV40 cards */
        dev_priv->ramin = NULL;
        if (dev_priv->card_type >= NV_40) {
                int ramin_bar = 2;
                if (pci_resource_len(dev->pdev, ramin_bar) == 0)
                        ramin_bar = 3;

                dev_priv->ramin_size = pci_resource_len(dev->pdev, ramin_bar);
                dev_priv->ramin = ioremap(
                                pci_resource_start(dev->pdev, ramin_bar),
                                dev_priv->ramin_size);
                if (!dev_priv->ramin) {
                        NV_ERROR(dev, "Failed to init RAMIN mapping, "
                                 "limited instance memory available\n");
                }
        }

        /* On older cards (or if the above failed), create a map covering
         * the BAR0 PRAMIN aperture */
        if (!dev_priv->ramin) {
                dev_priv->ramin_size = 1 * 1024 * 1024;
                dev_priv->ramin = ioremap(mmio_start_offs + NV_RAMIN,
                                          dev_priv->ramin_size);
                if (!dev_priv->ramin) {
                        NV_ERROR(dev, "Failed to map BAR0 PRAMIN.\n");
                        return -ENOMEM;
                }
        }

        nouveau_OF_copy_vbios_to_ramin(dev);

        /* Special flags */
        if (dev->pci_device == 0x01a0)
                dev_priv->flags |= NV_NFORCE;
        else if (dev->pci_device == 0x01f0)
                dev_priv->flags |= NV_NFORCE2;

        /* For kernel modesetting, init card now and bring up fbcon */
        if (drm_core_check_feature(dev, DRIVER_MODESET)) {
                int ret = nouveau_card_init(dev);
                if (ret)
                        return ret;
        }

        return 0;
}

static void nouveau_close(struct drm_device *dev)
{
        struct drm_nouveau_private *dev_priv = dev->dev_private;

        /* In the case of an error dev_priv may not be allocated yet */
        if (dev_priv)
                nouveau_card_takedown(dev);
}

/* KMS: we need mmio at load time, not when the first drm client opens. */
void nouveau_lastclose(struct drm_device *dev)
{
        if (drm_core_check_feature(dev, DRIVER_MODESET))
                return;

        nouveau_close(dev);
}

int nouveau_unload(struct drm_device *dev)
{
        struct drm_nouveau_private *dev_priv = dev->dev_private;

        if (drm_core_check_feature(dev, DRIVER_MODESET)) {
                if (dev_priv->card_type >= NV_50)
                        nv50_display_destroy(dev);
                else
                        nv04_display_destroy(dev);
                nouveau_close(dev);
        }

        iounmap(dev_priv->mmio);
        iounmap(dev_priv->ramin);

        kfree(dev_priv);
        dev->dev_private = NULL;
        return 0;
}

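/* GETPARAM ioctl: report chipset, bus type and memory layout details to
 * userspace.
 */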
int nouveau_ioctl_getparam(struct drm_device *dev, void *data,
                           struct drm_file *file_priv)
{
        struct drm_nouveau_private *dev_priv = dev->dev_private;
        struct drm_nouveau_getparam *getparam = data;

        NOUVEAU_CHECK_INITIALISED_WITH_RETURN;

        switch (getparam->param) {
        case NOUVEAU_GETPARAM_CHIPSET_ID:
                getparam->value = dev_priv->chipset;
                break;
        case NOUVEAU_GETPARAM_PCI_VENDOR:
                getparam->value = dev->pci_vendor;
                break;
        case NOUVEAU_GETPARAM_PCI_DEVICE:
                getparam->value = dev->pci_device;
                break;
        case NOUVEAU_GETPARAM_BUS_TYPE:
                if (drm_device_is_agp(dev))
                        getparam->value = NV_AGP;
                else if (drm_device_is_pcie(dev))
                        getparam->value = NV_PCIE;
                else
                        getparam->value = NV_PCI;
                break;
        case NOUVEAU_GETPARAM_FB_PHYSICAL:
                getparam->value = dev_priv->fb_phys;
                break;
        case NOUVEAU_GETPARAM_AGP_PHYSICAL:
                getparam->value = dev_priv->gart_info.aper_base;
                break;
        case NOUVEAU_GETPARAM_PCI_PHYSICAL:
                if (dev->sg) {
                        getparam->value = (unsigned long)dev->sg->virtual;
                } else {
                        NV_ERROR(dev, "Requested PCIGART address, "
                                 "while no PCIGART was created\n");
                        return -EINVAL;
                }
                break;
        case NOUVEAU_GETPARAM_FB_SIZE:
                getparam->value = dev_priv->fb_available_size;
                break;
        case NOUVEAU_GETPARAM_AGP_SIZE:
                getparam->value = dev_priv->gart_info.aper_size;
                break;
        case NOUVEAU_GETPARAM_VM_VRAM_BASE:
                getparam->value = dev_priv->vm_vram_base;
                break;
        case NOUVEAU_GETPARAM_GRAPH_UNITS:
                /* NV40 and NV50 versions are quite different, but register
                 * address is the same. User is supposed to know the card
                 * family anyway... */
                if (dev_priv->chipset >= 0x40) {
                        getparam->value = nv_rd32(dev, NV40_PMC_GRAPH_UNITS);
                        break;
                }
                /* FALLTHRU */
        default:
                NV_ERROR(dev, "unknown parameter %lld\n", getparam->param);
                return -EINVAL;
        }

        return 0;
}

int
nouveau_ioctl_setparam(struct drm_device *dev, void *data,
                       struct drm_file *file_priv)
{
        struct drm_nouveau_setparam *setparam = data;

        NOUVEAU_CHECK_INITIALISED_WITH_RETURN;

        switch (setparam->param) {
        default:
                NV_ERROR(dev, "unknown parameter %lld\n", setparam->param);
                return -EINVAL;
        }

        return 0;
}

/* Wait until (value(reg) & mask) == val, or until the timeout expires */
bool nouveau_wait_until(struct drm_device *dev, uint64_t timeout,
                        uint32_t reg, uint32_t mask, uint32_t val)
{
        struct drm_nouveau_private *dev_priv = dev->dev_private;
        struct nouveau_timer_engine *ptimer = &dev_priv->engine.timer;
        uint64_t start = ptimer->read(dev);

        do {
                if ((nv_rd32(dev, reg) & mask) == val)
                        return true;
        } while (ptimer->read(dev) - start < timeout);

        return false;
}

/* Waits for PGRAPH to go completely idle */
bool nouveau_wait_for_idle(struct drm_device *dev)
{
        if (!nv_wait(NV04_PGRAPH_STATUS, 0xffffffff, 0x00000000)) {
                NV_ERROR(dev, "PGRAPH idle timed out with status 0x%08x\n",
                         nv_rd32(dev, NV04_PGRAPH_STATUS));
                return false;
        }

        return true;
}