UAPI: (Scripted) Convert #include "..." to #include <path/...> in drivers/gpu/
drivers/gpu/drm/nouveau/nouveau_display.c
/*
 * Copyright (C) 2008 Maarten Maathuis.
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining
 * a copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sublicense, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial
 * portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
 * IN NO EVENT SHALL THE COPYRIGHT OWNER(S) AND/OR ITS SUPPLIERS BE
 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 */

#include <drm/drmP.h>
#include <drm/drm_crtc_helper.h>
#include "nouveau_drv.h"
#include "nouveau_fb.h"
#include "nouveau_fbcon.h"
#include "nouveau_hw.h"
#include "nouveau_crtc.h"
#include "nouveau_dma.h"
#include "nouveau_connector.h"
#include "nouveau_software.h"
#include "nouveau_gpio.h"
#include "nouveau_fence.h"
#include "nv50_display.h"

static void
nouveau_user_framebuffer_destroy(struct drm_framebuffer *drm_fb)
{
	struct nouveau_framebuffer *fb = nouveau_framebuffer(drm_fb);

	if (fb->nvbo)
		drm_gem_object_unreference_unlocked(fb->nvbo->gem);

	drm_framebuffer_cleanup(drm_fb);
	kfree(fb);
}

static int
nouveau_user_framebuffer_create_handle(struct drm_framebuffer *drm_fb,
				       struct drm_file *file_priv,
				       unsigned int *handle)
{
	struct nouveau_framebuffer *fb = nouveau_framebuffer(drm_fb);

	return drm_gem_handle_create(file_priv, fb->nvbo->gem, handle);
}

static const struct drm_framebuffer_funcs nouveau_framebuffer_funcs = {
	.destroy = nouveau_user_framebuffer_destroy,
	.create_handle = nouveau_user_framebuffer_create_handle,
};
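
/* Fill in a nouveau_framebuffer for the given buffer object: initialise the
 * base DRM framebuffer from the mode_fb_cmd2, then, on NV50 and newer,
 * derive the EVO DMA object (r_dma), framebuffer format (r_format) and
 * pitch word (r_pitch) from the bo's tiling layout and the fb depth.
 */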
int
nouveau_framebuffer_init(struct drm_device *dev,
			 struct nouveau_framebuffer *nv_fb,
			 struct drm_mode_fb_cmd2 *mode_cmd,
			 struct nouveau_bo *nvbo)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct drm_framebuffer *fb = &nv_fb->base;
	int ret;

	ret = drm_framebuffer_init(dev, fb, &nouveau_framebuffer_funcs);
	if (ret)
		return ret;

	drm_helper_mode_fill_fb_struct(fb, mode_cmd);
	nv_fb->nvbo = nvbo;

	if (dev_priv->card_type >= NV_50) {
		u32 tile_flags = nouveau_bo_tile_layout(nvbo);
		if (tile_flags == 0x7a00 ||
		    tile_flags == 0xfe00)
			nv_fb->r_dma = NvEvoFB32;
		else
		if (tile_flags == 0x7000)
			nv_fb->r_dma = NvEvoFB16;
		else
			nv_fb->r_dma = NvEvoVRAM_LP;

		switch (fb->depth) {
		case  8: nv_fb->r_format = NV50_EVO_CRTC_FB_DEPTH_8; break;
		case 15: nv_fb->r_format = NV50_EVO_CRTC_FB_DEPTH_15; break;
		case 16: nv_fb->r_format = NV50_EVO_CRTC_FB_DEPTH_16; break;
		case 24:
		case 32: nv_fb->r_format = NV50_EVO_CRTC_FB_DEPTH_24; break;
		case 30: nv_fb->r_format = NV50_EVO_CRTC_FB_DEPTH_30; break;
		default:
			NV_ERROR(dev, "unknown depth %d\n", fb->depth);
			return -EINVAL;
		}

		if (dev_priv->chipset == 0x50)
			nv_fb->r_format |= (tile_flags << 8);

		if (!tile_flags) {
			if (dev_priv->card_type < NV_D0)
				nv_fb->r_pitch = 0x00100000 | fb->pitches[0];
			else
				nv_fb->r_pitch = 0x01000000 | fb->pitches[0];
		} else {
			u32 mode = nvbo->tile_mode;
			if (dev_priv->card_type >= NV_C0)
				mode >>= 4;
			nv_fb->r_pitch = ((fb->pitches[0] / 4) << 4) | mode;
		}
	}

	return 0;
}
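
/* .fb_create hook: look up the GEM object named by the first handle in the
 * mode_fb_cmd2 and wrap it in a newly allocated nouveau_framebuffer.
 */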
static struct drm_framebuffer *
nouveau_user_framebuffer_create(struct drm_device *dev,
				struct drm_file *file_priv,
				struct drm_mode_fb_cmd2 *mode_cmd)
{
	struct nouveau_framebuffer *nouveau_fb;
	struct drm_gem_object *gem;
	int ret;

	gem = drm_gem_object_lookup(dev, file_priv, mode_cmd->handles[0]);
	if (!gem)
		return ERR_PTR(-ENOENT);

	nouveau_fb = kzalloc(sizeof(struct nouveau_framebuffer), GFP_KERNEL);
	if (!nouveau_fb)
		return ERR_PTR(-ENOMEM);

	ret = nouveau_framebuffer_init(dev, nouveau_fb, mode_cmd, nouveau_gem_object(gem));
	if (ret) {
		drm_gem_object_unreference(gem);
		return ERR_PTR(ret);
	}

	return &nouveau_fb->base;
}

static const struct drm_mode_config_funcs nouveau_mode_config_funcs = {
	.fb_create = nouveau_user_framebuffer_create,
	.output_poll_changed = nouveau_fbcon_output_poll_changed,
};


struct nouveau_drm_prop_enum_list {
	u8 gen_mask;
	int type;
	char *name;
};

static struct nouveau_drm_prop_enum_list underscan[] = {
	{ 6, UNDERSCAN_AUTO, "auto" },
	{ 6, UNDERSCAN_OFF, "off" },
	{ 6, UNDERSCAN_ON, "on" },
	{}
};

static struct nouveau_drm_prop_enum_list dither_mode[] = {
	{ 7, DITHERING_MODE_AUTO, "auto" },
	{ 7, DITHERING_MODE_OFF, "off" },
	{ 1, DITHERING_MODE_ON, "on" },
	{ 6, DITHERING_MODE_STATIC2X2, "static 2x2" },
	{ 6, DITHERING_MODE_DYNAMIC2X2, "dynamic 2x2" },
	{ 4, DITHERING_MODE_TEMPORAL, "temporal" },
	{}
};

static struct nouveau_drm_prop_enum_list dither_depth[] = {
	{ 6, DITHERING_DEPTH_AUTO, "auto" },
	{ 6, DITHERING_DEPTH_6BPC, "6 bpc" },
	{ 6, DITHERING_DEPTH_8BPC, "8 bpc" },
	{}
};
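
/* Create a DRM enum property @p named @n containing only the entries of
 * @list whose gen_mask has the bit for the given display generation set.
 */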
#define PROP_ENUM(p,gen,n,list) do { \
	struct nouveau_drm_prop_enum_list *l = (list); \
	int c = 0; \
	while (l->gen_mask) { \
		if (l->gen_mask & (1 << (gen))) \
			c++; \
		l++; \
	} \
	if (c) { \
		p = drm_property_create(dev, DRM_MODE_PROP_ENUM, n, c); \
		l = (list); \
		c = 0; \
		while (p && l->gen_mask) { \
			if (l->gen_mask & (1 << (gen))) { \
				drm_property_add_enum(p, c, l->type, l->name); \
				c++; \
			} \
			l++; \
		} \
	} \
} while(0)
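
/* Bring the display engine up: run the per-chipset init, make sure any
 * internal panel is powered, then enable output polling and hotplug
 * interrupts for all connectors.
 */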
int
nouveau_display_init(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_display_engine *disp = &dev_priv->engine.display;
	struct drm_connector *connector;
	int ret;

	ret = disp->init(dev);
	if (ret)
		return ret;

	/* power on internal panel if it's not already.  the init tables of
	 * some vbios default this to off for some reason, causing the
	 * panel to not work after resume
	 */
	if (nouveau_gpio_func_get(dev, DCB_GPIO_PANEL_POWER) == 0) {
		nouveau_gpio_func_set(dev, DCB_GPIO_PANEL_POWER, true);
		msleep(300);
	}

	/* enable polling for external displays */
	drm_kms_helper_poll_enable(dev);

	/* enable hotplug interrupts */
	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
		struct nouveau_connector *conn = nouveau_connector(connector);
		nouveau_gpio_irq(dev, 0, conn->hpd, 0xff, true);
	}

	return ret;
}

void
nouveau_display_fini(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_display_engine *disp = &dev_priv->engine.display;
	struct drm_connector *connector;

	/* disable hotplug interrupts */
	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
		struct nouveau_connector *conn = nouveau_connector(connector);
		nouveau_gpio_irq(dev, 0, conn->hpd, 0xff, false);
	}

	drm_kms_helper_poll_disable(dev);
	disp->fini(dev);
}
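
/* One-time KMS setup: create the nouveau connector properties (dithering,
 * underscan, hue/vibrance on NV50-class parts), size the mode configuration
 * for the chipset, create the per-chipset display engine and allocate
 * vblank state for each CRTC it exposes.
 */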
int
nouveau_display_create(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_display_engine *disp = &dev_priv->engine.display;
	int ret, gen;

	drm_mode_config_init(dev);
	drm_mode_create_scaling_mode_property(dev);
	drm_mode_create_dvi_i_properties(dev);

	if (dev_priv->card_type < NV_50)
		gen = 0;
	else
	if (dev_priv->card_type < NV_D0)
		gen = 1;
	else
		gen = 2;

	PROP_ENUM(disp->dithering_mode, gen, "dithering mode", dither_mode);
	PROP_ENUM(disp->dithering_depth, gen, "dithering depth", dither_depth);
	PROP_ENUM(disp->underscan_property, gen, "underscan", underscan);

	disp->underscan_hborder_property =
		drm_property_create_range(dev, 0, "underscan hborder", 0, 128);

	disp->underscan_vborder_property =
		drm_property_create_range(dev, 0, "underscan vborder", 0, 128);

	if (gen == 1) {
		disp->vibrant_hue_property =
			drm_property_create(dev, DRM_MODE_PROP_RANGE,
					    "vibrant hue", 2);
		disp->vibrant_hue_property->values[0] = 0;
		disp->vibrant_hue_property->values[1] = 180; /* -90..+90 */

		disp->color_vibrance_property =
			drm_property_create(dev, DRM_MODE_PROP_RANGE,
					    "color vibrance", 2);
		disp->color_vibrance_property->values[0] = 0;
		disp->color_vibrance_property->values[1] = 200; /* -100..+100 */
	}

	dev->mode_config.funcs = &nouveau_mode_config_funcs;
	dev->mode_config.fb_base = pci_resource_start(dev->pdev, 1);

	dev->mode_config.min_width = 0;
	dev->mode_config.min_height = 0;
	if (dev_priv->card_type < NV_10) {
		dev->mode_config.max_width = 2048;
		dev->mode_config.max_height = 2048;
	} else
	if (dev_priv->card_type < NV_50) {
		dev->mode_config.max_width = 4096;
		dev->mode_config.max_height = 4096;
	} else {
		dev->mode_config.max_width = 8192;
		dev->mode_config.max_height = 8192;
	}

	dev->mode_config.preferred_depth = 24;
	dev->mode_config.prefer_shadow = 1;

	drm_kms_helper_poll_init(dev);
	drm_kms_helper_poll_disable(dev);

	ret = disp->create(dev);
	if (ret)
		goto disp_create_err;

	if (dev->mode_config.num_crtc) {
		ret = drm_vblank_init(dev, dev->mode_config.num_crtc);
		if (ret)
			goto vblank_err;
	}

	return 0;

vblank_err:
	disp->destroy(dev);
disp_create_err:
	drm_kms_helper_poll_fini(dev);
	drm_mode_config_cleanup(dev);
	return ret;
}

void
nouveau_display_destroy(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_display_engine *disp = &dev_priv->engine.display;

	drm_vblank_cleanup(dev);

	disp->destroy(dev);

	drm_kms_helper_poll_fini(dev);
	drm_mode_config_cleanup(dev);
}
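
/* Per-CRTC vblank interrupt control for the DRM vblank core: NV50 and later
 * use the PDISPLAY interrupt enable register, earlier parts the per-CRTC
 * PCRTC interrupt enable.
 */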
int
nouveau_vblank_enable(struct drm_device *dev, int crtc)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;

	if (dev_priv->card_type >= NV_50)
		nv_mask(dev, NV50_PDISPLAY_INTR_EN_1, 0,
			NV50_PDISPLAY_INTR_EN_1_VBLANK_CRTC_(crtc));
	else
		NVWriteCRTC(dev, crtc, NV_PCRTC_INTR_EN_0,
			    NV_PCRTC_INTR_0_VBLANK);

	return 0;
}

void
nouveau_vblank_disable(struct drm_device *dev, int crtc)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;

	if (dev_priv->card_type >= NV_50)
		nv_mask(dev, NV50_PDISPLAY_INTR_EN_1,
			NV50_PDISPLAY_INTR_EN_1_VBLANK_CRTC_(crtc), 0);
	else
		NVWriteCRTC(dev, crtc, NV_PCRTC_INTR_EN_0, 0);
}
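
/* Pin the new framebuffer's bo into VRAM and reserve both the old and new
 * buffer objects so neither can move while the flip is outstanding.
 */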
static int
nouveau_page_flip_reserve(struct nouveau_bo *old_bo,
			  struct nouveau_bo *new_bo)
{
	int ret;

	ret = nouveau_bo_pin(new_bo, TTM_PL_FLAG_VRAM);
	if (ret)
		return ret;

	ret = ttm_bo_reserve(&new_bo->bo, false, false, false, 0);
	if (ret)
		goto fail;

	ret = ttm_bo_reserve(&old_bo->bo, false, false, false, 0);
	if (ret)
		goto fail_unreserve;

	return 0;

fail_unreserve:
	ttm_bo_unreserve(&new_bo->bo);
fail:
	nouveau_bo_unpin(new_bo);
	return ret;
}
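
/* Undo nouveau_page_flip_reserve(): attach @fence (NULL on the error path)
 * to both buffer objects, drop their reservations and unpin the old
 * framebuffer.
 */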
static void
nouveau_page_flip_unreserve(struct nouveau_bo *old_bo,
			    struct nouveau_bo *new_bo,
			    struct nouveau_fence *fence)
{
	nouveau_bo_fence(new_bo, fence);
	ttm_bo_unreserve(&new_bo->bo);

	nouveau_bo_fence(old_bo, fence);
	ttm_bo_unreserve(&old_bo->bo);

	nouveau_bo_unpin(old_bo);
}
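
/* Queue the flip state on the channel's software-object flip list, make the
 * channel wait for outstanding rendering to the old framebuffer, then push
 * the page-flip method and a new fence onto the channel's ring.
 */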
static int
nouveau_page_flip_emit(struct nouveau_channel *chan,
		       struct nouveau_bo *old_bo,
		       struct nouveau_bo *new_bo,
		       struct nouveau_page_flip_state *s,
		       struct nouveau_fence **pfence)
{
	struct nouveau_software_chan *swch = chan->engctx[NVOBJ_ENGINE_SW];
	struct drm_nouveau_private *dev_priv = chan->dev->dev_private;
	struct drm_device *dev = chan->dev;
	unsigned long flags;
	int ret;

	/* Queue it to the pending list */
	spin_lock_irqsave(&dev->event_lock, flags);
	list_add_tail(&s->head, &swch->flip);
	spin_unlock_irqrestore(&dev->event_lock, flags);

	/* Synchronize with the old framebuffer */
	ret = nouveau_fence_sync(old_bo->bo.sync_obj, chan);
	if (ret)
		goto fail;

	/* Emit the pageflip */
	ret = RING_SPACE(chan, 3);
	if (ret)
		goto fail;

	if (dev_priv->card_type < NV_C0) {
		BEGIN_NV04(chan, NvSubSw, NV_SW_PAGE_FLIP, 1);
		OUT_RING  (chan, 0x00000000);
		OUT_RING  (chan, 0x00000000);
	} else {
		BEGIN_NVC0(chan, 0, NV10_SUBCHAN_REF_CNT, 1);
		OUT_RING  (chan, 0);
		BEGIN_IMC0(chan, 0, NVSW_SUBCHAN_PAGE_FLIP, 0x0000);
	}
	FIRE_RING (chan);

	ret = nouveau_fence_new(chan, pfence);
	if (ret)
		goto fail;

	return 0;
fail:
	spin_lock_irqsave(&dev->event_lock, flags);
	list_del(&s->head);
	spin_unlock_irqrestore(&dev->event_lock, flags);
	return ret;
}
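
/* DRM page_flip entry point: pin and reserve both framebuffers, pick a
 * channel to emit the flip on (preferring the channel that last rendered to
 * the new framebuffer), program the display on NV50+ hardware, emit the
 * flip, then fence the buffers and update crtc->fb.
 */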
int
nouveau_crtc_page_flip(struct drm_crtc *crtc, struct drm_framebuffer *fb,
		       struct drm_pending_vblank_event *event)
{
	struct drm_device *dev = crtc->dev;
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_bo *old_bo = nouveau_framebuffer(crtc->fb)->nvbo;
	struct nouveau_bo *new_bo = nouveau_framebuffer(fb)->nvbo;
	struct nouveau_page_flip_state *s;
	struct nouveau_channel *chan = NULL;
	struct nouveau_fence *fence;
	int ret;

	if (!dev_priv->channel)
		return -ENODEV;

	s = kzalloc(sizeof(*s), GFP_KERNEL);
	if (!s)
		return -ENOMEM;

	/* Don't let the buffers go away while we flip */
	ret = nouveau_page_flip_reserve(old_bo, new_bo);
	if (ret)
		goto fail_free;

	/* Initialize a page flip struct */
	*s = (struct nouveau_page_flip_state)
		{ { }, event, nouveau_crtc(crtc)->index,
		  fb->bits_per_pixel, fb->pitches[0], crtc->x, crtc->y,
		  new_bo->bo.offset };

	/* Choose the channel the flip will be handled in */
	fence = new_bo->bo.sync_obj;
	if (fence)
		chan = nouveau_channel_get_unlocked(fence->channel);
	if (!chan)
		chan = nouveau_channel_get_unlocked(dev_priv->channel);
	mutex_lock(&chan->mutex);

	/* Emit a page flip */
	if (dev_priv->card_type >= NV_50) {
		if (dev_priv->card_type >= NV_D0)
			ret = nvd0_display_flip_next(crtc, fb, chan, 0);
		else
			ret = nv50_display_flip_next(crtc, fb, chan);
		if (ret) {
			nouveau_channel_put(&chan);
			goto fail_unreserve;
		}
	}

	ret = nouveau_page_flip_emit(chan, old_bo, new_bo, s, &fence);
	nouveau_channel_put(&chan);
	if (ret)
		goto fail_unreserve;

	/* Update the crtc struct and cleanup */
	crtc->fb = fb;

	nouveau_page_flip_unreserve(old_bo, new_bo, fence);
	nouveau_fence_unref(&fence);
	return 0;

fail_unreserve:
	nouveau_page_flip_unreserve(old_bo, new_bo, NULL);
fail_free:
	kfree(s);
	return ret;
}
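
/* Completion handling for a previously emitted flip: pop the oldest pending
 * flip state off the channel's flip list, deliver the vblank event to
 * userspace if one was requested, and copy the state back to the caller.
 */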
int
nouveau_finish_page_flip(struct nouveau_channel *chan,
			 struct nouveau_page_flip_state *ps)
{
	struct nouveau_software_chan *swch = chan->engctx[NVOBJ_ENGINE_SW];
	struct drm_device *dev = chan->dev;
	struct nouveau_page_flip_state *s;
	unsigned long flags;

	spin_lock_irqsave(&dev->event_lock, flags);

	if (list_empty(&swch->flip)) {
		NV_ERROR(dev, "Unexpected pageflip in channel %d.\n", chan->id);
		spin_unlock_irqrestore(&dev->event_lock, flags);
		return -EINVAL;
	}

	s = list_first_entry(&swch->flip, struct nouveau_page_flip_state, head);
	if (s->event) {
		struct drm_pending_vblank_event *e = s->event;
		struct timeval now;

		do_gettimeofday(&now);
		e->event.sequence = 0;
		e->event.tv_sec = now.tv_sec;
		e->event.tv_usec = now.tv_usec;
		list_add_tail(&e->base.link, &e->base.file_priv->event_list);
		wake_up_interruptible(&e->base.file_priv->event_wait);
	}

	list_del(&s->head);
	if (ps)
		*ps = *s;
	kfree(s);

	spin_unlock_irqrestore(&dev->event_lock, flags);
	return 0;
}
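
/* Dumb buffer support: allocate a VRAM buffer object sized for the requested
 * dimensions (pitch rounded up to 256 bytes, size rounded up to a page) and
 * return a GEM handle to it.
 */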
int
nouveau_display_dumb_create(struct drm_file *file_priv, struct drm_device *dev,
			    struct drm_mode_create_dumb *args)
{
	struct nouveau_bo *bo;
	int ret;

	args->pitch = roundup(args->width * (args->bpp / 8), 256);
	args->size = args->pitch * args->height;
	args->size = roundup(args->size, PAGE_SIZE);

	ret = nouveau_gem_new(dev, args->size, 0, NOUVEAU_GEM_DOMAIN_VRAM, 0, 0, &bo);
	if (ret)
		return ret;

	ret = drm_gem_handle_create(file_priv, bo->gem, &args->handle);
	drm_gem_object_unreference_unlocked(bo->gem);
	return ret;
}

int
nouveau_display_dumb_destroy(struct drm_file *file_priv, struct drm_device *dev,
			     uint32_t handle)
{
	return drm_gem_handle_delete(file_priv, handle);
}
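
/* Report the mmap offset of a dumb buffer's bo so userspace can map it
 * through the DRM device node.
 */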
int
nouveau_display_dumb_map_offset(struct drm_file *file_priv,
				struct drm_device *dev,
				uint32_t handle, uint64_t *poffset)
{
	struct drm_gem_object *gem;

	gem = drm_gem_object_lookup(dev, file_priv, handle);
	if (gem) {
		struct nouveau_bo *bo = gem->driver_private;
		*poffset = bo->bo.addr_space_offset;
		drm_gem_object_unreference_unlocked(gem);
		return 0;
	}

	return -ENOENT;
}