UAPI: (Scripted) Convert #include "..." to #include <path/...> in drivers/gpu/
drivers/gpu/drm/nouveau/nv50_crtc.c
/*
 * Copyright (C) 2008 Maarten Maathuis.
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining
 * a copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sublicense, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial
 * portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
 * IN NO EVENT SHALL THE COPYRIGHT OWNER(S) AND/OR ITS SUPPLIERS BE
 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 */

#include <drm/drmP.h>
#include <drm/drm_crtc_helper.h>

#define NOUVEAU_DMA_DEBUG (nouveau_reg_debug & NOUVEAU_REG_DEBUG_EVO)
#include "nouveau_reg.h"
#include "nouveau_drv.h"
#include "nouveau_hw.h"
#include "nouveau_encoder.h"
#include "nouveau_crtc.h"
#include "nouveau_fb.h"
#include "nouveau_connector.h"
#include "nv50_display.h"

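/* Upload this CRTC's 256-entry gamma LUT into its LUT buffer object.
 * Each hardware entry is 8 bytes wide: red at +0, green at +2, blue at +4,
 * with the stored 16-bit values scaled down by two bits.  For 30-bit depth
 * an extra 257th entry is written, repeating the last colour.
 */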
static void
nv50_crtc_lut_load(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	void __iomem *lut = nvbo_kmap_obj_iovirtual(nv_crtc->lut.nvbo);
	int i;

	NV_DEBUG_KMS(crtc->dev, "\n");

	for (i = 0; i < 256; i++) {
		writew(nv_crtc->lut.r[i] >> 2, lut + 8*i + 0);
		writew(nv_crtc->lut.g[i] >> 2, lut + 8*i + 2);
		writew(nv_crtc->lut.b[i] >> 2, lut + 8*i + 4);
	}

	if (nv_crtc->lut.depth == 30) {
		writew(nv_crtc->lut.r[i - 1] >> 2, lut + 8*i + 0);
		writew(nv_crtc->lut.g[i - 1] >> 2, lut + 8*i + 2);
		writew(nv_crtc->lut.b[i - 1] >> 2, lut + 8*i + 4);
	}
}

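/* Blank or unblank a head.  Blanking points the CLUT and framebuffer DMA
 * objects at the "none" handles; unblanking points them back at the LUT
 * buffer and framebuffer offsets.  On chipsets other than the original
 * 0x50 the CLUT DMA object is programmed with a separate NV84 method.
 */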
int
nv50_crtc_blank(struct nouveau_crtc *nv_crtc, bool blanked)
{
	struct drm_device *dev = nv_crtc->base.dev;
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_channel *evo = nv50_display(dev)->master;
	int index = nv_crtc->index, ret;

	NV_DEBUG_KMS(dev, "index %d\n", nv_crtc->index);
	NV_DEBUG_KMS(dev, "%s\n", blanked ? "blanked" : "unblanked");

	if (blanked) {
		nv_crtc->cursor.hide(nv_crtc, false);

		ret = RING_SPACE(evo, dev_priv->chipset != 0x50 ? 7 : 5);
		if (ret) {
			NV_ERROR(dev, "no space while blanking crtc\n");
			return ret;
		}
		BEGIN_NV04(evo, 0, NV50_EVO_CRTC(index, CLUT_MODE), 2);
		OUT_RING(evo, NV50_EVO_CRTC_CLUT_MODE_BLANK);
		OUT_RING(evo, 0);
		if (dev_priv->chipset != 0x50) {
			BEGIN_NV04(evo, 0, NV84_EVO_CRTC(index, CLUT_DMA), 1);
			OUT_RING(evo, NV84_EVO_CRTC_CLUT_DMA_HANDLE_NONE);
		}

		BEGIN_NV04(evo, 0, NV50_EVO_CRTC(index, FB_DMA), 1);
		OUT_RING(evo, NV50_EVO_CRTC_FB_DMA_HANDLE_NONE);
	} else {
		if (nv_crtc->cursor.visible)
			nv_crtc->cursor.show(nv_crtc, false);
		else
			nv_crtc->cursor.hide(nv_crtc, false);

		ret = RING_SPACE(evo, dev_priv->chipset != 0x50 ? 10 : 8);
		if (ret) {
			NV_ERROR(dev, "no space while unblanking crtc\n");
			return ret;
		}
		BEGIN_NV04(evo, 0, NV50_EVO_CRTC(index, CLUT_MODE), 2);
		OUT_RING(evo, nv_crtc->lut.depth == 8 ?
			 NV50_EVO_CRTC_CLUT_MODE_OFF :
			 NV50_EVO_CRTC_CLUT_MODE_ON);
		OUT_RING(evo, nv_crtc->lut.nvbo->bo.offset >> 8);
		if (dev_priv->chipset != 0x50) {
			BEGIN_NV04(evo, 0, NV84_EVO_CRTC(index, CLUT_DMA), 1);
			OUT_RING(evo, NvEvoVRAM);
		}

		BEGIN_NV04(evo, 0, NV50_EVO_CRTC(index, FB_OFFSET), 2);
		OUT_RING(evo, nv_crtc->fb.offset >> 8);
		OUT_RING(evo, 0);
		BEGIN_NV04(evo, 0, NV50_EVO_CRTC(index, FB_DMA), 1);
		if (dev_priv->chipset != 0x50)
			if (nv_crtc->fb.tile_flags == 0x7a00 ||
			    nv_crtc->fb.tile_flags == 0xfe00)
				OUT_RING(evo, NvEvoFB32);
			else
			if (nv_crtc->fb.tile_flags == 0x7000)
				OUT_RING(evo, NvEvoFB16);
			else
				OUT_RING(evo, NvEvoVRAM_LP);
		else
			OUT_RING(evo, NvEvoVRAM_LP);
	}

	nv_crtc->fb.blanked = blanked;
	return 0;
}

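/* Resolve the connector's dithering mode/depth properties (the AUTO
 * settings compare framebuffer depth against the sink's bpc) into a
 * DITHER_CTRL value, and optionally fire an update.
 */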
static int
nv50_crtc_set_dither(struct nouveau_crtc *nv_crtc, bool update)
{
	struct nouveau_channel *evo = nv50_display(nv_crtc->base.dev)->master;
	struct nouveau_connector *nv_connector;
	struct drm_connector *connector;
	int head = nv_crtc->index, ret;
	u32 mode = 0x00;

	nv_connector = nouveau_crtc_connector_get(nv_crtc);
	connector = &nv_connector->base;
	if (nv_connector->dithering_mode == DITHERING_MODE_AUTO) {
		if (nv_crtc->base.fb->depth > connector->display_info.bpc * 3)
			mode = DITHERING_MODE_DYNAMIC2X2;
	} else {
		mode = nv_connector->dithering_mode;
	}

	if (nv_connector->dithering_depth == DITHERING_DEPTH_AUTO) {
		if (connector->display_info.bpc >= 8)
			mode |= DITHERING_DEPTH_8BPC;
	} else {
		mode |= nv_connector->dithering_depth;
	}

	ret = RING_SPACE(evo, 2 + (update ? 2 : 0));
	if (ret == 0) {
		BEGIN_NV04(evo, 0, NV50_EVO_CRTC(head, DITHER_CTRL), 1);
		OUT_RING (evo, mode);
		if (update) {
			BEGIN_NV04(evo, 0, NV50_EVO_UPDATE, 1);
			OUT_RING (evo, 0);
			FIRE_RING (evo);
		}
	}

	return ret;
}

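/* Convert the CRTC's colour vibrance and vibrant hue percentages into the
 * 12-bit fields of the COLOR_CTRL method (hue above bit 20, vibrance above
 * bit 8), and optionally fire an update.
 */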
static int
nv50_crtc_set_color_vibrance(struct nouveau_crtc *nv_crtc, bool update)
{
	struct drm_device *dev = nv_crtc->base.dev;
	struct nouveau_channel *evo = nv50_display(dev)->master;
	int ret;
	int adj;
	u32 hue, vib;

	NV_DEBUG_KMS(dev, "vibrance = %i, hue = %i\n",
		     nv_crtc->color_vibrance, nv_crtc->vibrant_hue);

	ret = RING_SPACE(evo, 2 + (update ? 2 : 0));
	if (ret) {
		NV_ERROR(dev, "no space while setting color vibrance\n");
		return ret;
	}

	adj = (nv_crtc->color_vibrance > 0) ? 50 : 0;
	vib = ((nv_crtc->color_vibrance * 2047 + adj) / 100) & 0xfff;

	hue = ((nv_crtc->vibrant_hue * 2047) / 100) & 0xfff;

	BEGIN_NV04(evo, 0, NV50_EVO_CRTC(nv_crtc->index, COLOR_CTRL), 1);
	OUT_RING (evo, (hue << 20) | (vib << 8));

	if (update) {
		BEGIN_NV04(evo, 0, NV50_EVO_UPDATE, 1);
		OUT_RING (evo, 0);
		FIRE_RING (evo);
	}

	return 0;
}

struct nouveau_connector *
nouveau_crtc_connector_get(struct nouveau_crtc *nv_crtc)
{
	struct drm_device *dev = nv_crtc->base.dev;
	struct drm_connector *connector;
	struct drm_crtc *crtc = to_drm_crtc(nv_crtc);

	/* The safest approach is to find an encoder with the right crtc, that
	 * is also linked to a connector. */
	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
		if (connector->encoder)
			if (connector->encoder->crtc == crtc)
				return nouveau_connector(connector);
	}

	return NULL;
}

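/* Program the head's scaler: pick the output mode (native panel mode unless
 * scaling is NONE), shrink it for underscan compensation when requested or
 * when an HDMI sink is detected, apply CENTER/ASPECT scaling, then write
 * the resulting output resolution via SCALE_CTRL/SCALE_RES.
 */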
static int
nv50_crtc_set_scale(struct nouveau_crtc *nv_crtc, bool update)
{
	struct nouveau_connector *nv_connector;
	struct drm_crtc *crtc = &nv_crtc->base;
	struct drm_device *dev = crtc->dev;
	struct nouveau_channel *evo = nv50_display(dev)->master;
	struct drm_display_mode *umode = &crtc->mode;
	struct drm_display_mode *omode;
	int scaling_mode, ret;
	u32 ctrl = 0, oX, oY;

	NV_DEBUG_KMS(dev, "\n");

	nv_connector = nouveau_crtc_connector_get(nv_crtc);
	if (!nv_connector || !nv_connector->native_mode) {
		NV_ERROR(dev, "no native mode, forcing panel scaling\n");
		scaling_mode = DRM_MODE_SCALE_NONE;
	} else {
		scaling_mode = nv_connector->scaling_mode;
	}

	/* start off at the resolution we programmed the crtc for, this
	 * effectively handles NONE/FULL scaling
	 */
	if (scaling_mode != DRM_MODE_SCALE_NONE)
		omode = nv_connector->native_mode;
	else
		omode = umode;

	oX = omode->hdisplay;
	oY = omode->vdisplay;
	if (omode->flags & DRM_MODE_FLAG_DBLSCAN)
		oY *= 2;

	/* add overscan compensation if necessary, will keep the aspect
	 * ratio the same as the backend mode unless overridden by the
	 * user setting both hborder and vborder properties.
	 */
	if (nv_connector && (nv_connector->underscan == UNDERSCAN_ON ||
			     (nv_connector->underscan == UNDERSCAN_AUTO &&
			      nv_connector->edid &&
			      drm_detect_hdmi_monitor(nv_connector->edid)))) {
		u32 bX = nv_connector->underscan_hborder;
		u32 bY = nv_connector->underscan_vborder;
		u32 aspect = (oY << 19) / oX;

		if (bX) {
			oX -= (bX * 2);
			if (bY) oY -= (bY * 2);
			else    oY  = ((oX * aspect) + (aspect / 2)) >> 19;
		} else {
			oX -= (oX >> 4) + 32;
			if (bY) oY -= (bY * 2);
			else    oY  = ((oX * aspect) + (aspect / 2)) >> 19;
		}
	}

	/* handle CENTER/ASPECT scaling, taking into account the areas
	 * removed already for overscan compensation
	 */
	switch (scaling_mode) {
	case DRM_MODE_SCALE_CENTER:
		oX = min((u32)umode->hdisplay, oX);
		oY = min((u32)umode->vdisplay, oY);
		/* fall-through */
	case DRM_MODE_SCALE_ASPECT:
		if (oY < oX) {
			u32 aspect = (umode->hdisplay << 19) / umode->vdisplay;
			oX = ((oY * aspect) + (aspect / 2)) >> 19;
		} else {
			u32 aspect = (umode->vdisplay << 19) / umode->hdisplay;
			oY = ((oX * aspect) + (aspect / 2)) >> 19;
		}
		break;
	default:
		break;
	}

	if (umode->hdisplay != oX || umode->vdisplay != oY ||
	    umode->flags & DRM_MODE_FLAG_INTERLACE ||
	    umode->flags & DRM_MODE_FLAG_DBLSCAN)
		ctrl |= NV50_EVO_CRTC_SCALE_CTRL_ACTIVE;

	ret = RING_SPACE(evo, 5);
	if (ret)
		return ret;

	BEGIN_NV04(evo, 0, NV50_EVO_CRTC(nv_crtc->index, SCALE_CTRL), 1);
	OUT_RING (evo, ctrl);
	BEGIN_NV04(evo, 0, NV50_EVO_CRTC(nv_crtc->index, SCALE_RES1), 2);
	OUT_RING (evo, oY << 16 | oX);
	OUT_RING (evo, oY << 16 | oX);

	if (update) {
		nv50_display_flip_stop(crtc);
		nv50_display_sync(dev);
		nv50_display_flip_next(crtc, crtc->fb, NULL);
	}

	return 0;
}

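/* Program the video PLL for a head.  Three register layouts are handled:
 * the original NV50 two-stage VPLL (when vco2 limits are present), an
 * nva3-style PLL on chipsets below NV_C0, and the NVC0-style layout
 * otherwise.
 */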
int
nv50_crtc_set_clock(struct drm_device *dev, int head, int pclk)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct pll_lims pll;
	uint32_t reg1, reg2;
	int ret, N1, M1, N2, M2, P;

	ret = get_pll_limits(dev, PLL_VPLL0 + head, &pll);
	if (ret)
		return ret;

	if (pll.vco2.maxfreq) {
		ret = nv50_calc_pll(dev, &pll, pclk, &N1, &M1, &N2, &M2, &P);
		if (ret <= 0)
			return 0;

		NV_DEBUG(dev, "pclk %d out %d NM1 %d %d NM2 %d %d P %d\n",
			 pclk, ret, N1, M1, N2, M2, P);

		reg1 = nv_rd32(dev, pll.reg + 4) & 0xff00ff00;
		reg2 = nv_rd32(dev, pll.reg + 8) & 0x8000ff00;
		nv_wr32(dev, pll.reg + 0, 0x10000611);
		nv_wr32(dev, pll.reg + 4, reg1 | (M1 << 16) | N1);
		nv_wr32(dev, pll.reg + 8, reg2 | (P << 28) | (M2 << 16) | N2);
	} else
	if (dev_priv->chipset < NV_C0) {
		ret = nva3_calc_pll(dev, &pll, pclk, &N1, &N2, &M1, &P);
		if (ret <= 0)
			return 0;

		NV_DEBUG(dev, "pclk %d out %d N %d fN 0x%04x M %d P %d\n",
			 pclk, ret, N1, N2, M1, P);

		reg1 = nv_rd32(dev, pll.reg + 4) & 0xffc00000;
		nv_wr32(dev, pll.reg + 0, 0x50000610);
		nv_wr32(dev, pll.reg + 4, reg1 | (P << 16) | (M1 << 8) | N1);
		nv_wr32(dev, pll.reg + 8, N2);
	} else {
		ret = nva3_calc_pll(dev, &pll, pclk, &N1, &N2, &M1, &P);
		if (ret <= 0)
			return 0;

		NV_DEBUG(dev, "pclk %d out %d N %d fN 0x%04x M %d P %d\n",
			 pclk, ret, N1, N2, M1, P);

		nv_mask(dev, pll.reg + 0x0c, 0x00000000, 0x00000100);
		nv_wr32(dev, pll.reg + 0x04, (P << 16) | (N1 << 8) | M1);
		nv_wr32(dev, pll.reg + 0x10, N2 << 16);
	}

	return 0;
}

static void
nv50_crtc_destroy(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);

	NV_DEBUG_KMS(crtc->dev, "\n");

	nouveau_bo_unmap(nv_crtc->lut.nvbo);
	nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);
	nouveau_bo_unmap(nv_crtc->cursor.nvbo);
	nouveau_bo_ref(NULL, &nv_crtc->cursor.nvbo);
	drm_crtc_cleanup(&nv_crtc->base);
	kfree(nv_crtc);
}

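/* Set (or, with a zero handle, hide) the hardware cursor.  The 64x64 cursor
 * image is copied word-by-word from the userspace GEM object into the
 * CRTC's own cursor buffer before being pointed at and shown.
 */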
int
nv50_crtc_cursor_set(struct drm_crtc *crtc, struct drm_file *file_priv,
		     uint32_t buffer_handle, uint32_t width, uint32_t height)
{
	struct drm_device *dev = crtc->dev;
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nouveau_bo *cursor = NULL;
	struct drm_gem_object *gem;
	int ret = 0, i;

	if (!buffer_handle) {
		nv_crtc->cursor.hide(nv_crtc, true);
		return 0;
	}

	if (width != 64 || height != 64)
		return -EINVAL;

	gem = drm_gem_object_lookup(dev, file_priv, buffer_handle);
	if (!gem)
		return -ENOENT;
	cursor = nouveau_gem_object(gem);

	ret = nouveau_bo_map(cursor);
	if (ret)
		goto out;

	/* The simple approach will do for now. */
	for (i = 0; i < 64 * 64; i++)
		nouveau_bo_wr32(nv_crtc->cursor.nvbo, i, nouveau_bo_rd32(cursor, i));

	nouveau_bo_unmap(cursor);

	nv_crtc->cursor.set_offset(nv_crtc, nv_crtc->cursor.nvbo->bo.offset);
	nv_crtc->cursor.show(nv_crtc, true);

out:
	drm_gem_object_unreference_unlocked(gem);
	return ret;
}

int
nv50_crtc_cursor_move(struct drm_crtc *crtc, int x, int y)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);

	nv_crtc->cursor.set_pos(nv_crtc, x, y);
	return 0;
}

static void
nv50_crtc_gamma_set(struct drm_crtc *crtc, u16 *r, u16 *g, u16 *b,
		    uint32_t start, uint32_t size)
{
	int end = (start + size > 256) ? 256 : start + size, i;
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);

	for (i = start; i < end; i++) {
		nv_crtc->lut.r[i] = r[i];
		nv_crtc->lut.g[i] = g[i];
		nv_crtc->lut.b[i] = b[i];
	}

	/* We need to know the depth before we upload, but it's possible to
	 * get called before a framebuffer is bound.  If this is the case,
	 * mark the lut values as dirty by setting depth==0, and it'll be
	 * uploaded on the first mode_set_base()
	 */
	if (!nv_crtc->base.fb) {
		nv_crtc->lut.depth = 0;
		return;
	}

	nv50_crtc_lut_load(crtc);
}

static void
nv50_crtc_save(struct drm_crtc *crtc)
{
	NV_ERROR(crtc->dev, "!!\n");
}

static void
nv50_crtc_restore(struct drm_crtc *crtc)
{
	NV_ERROR(crtc->dev, "!!\n");
}

static const struct drm_crtc_funcs nv50_crtc_funcs = {
	.save = nv50_crtc_save,
	.restore = nv50_crtc_restore,
	.cursor_set = nv50_crtc_cursor_set,
	.cursor_move = nv50_crtc_cursor_move,
	.gamma_set = nv50_crtc_gamma_set,
	.set_config = drm_crtc_helper_set_config,
	.page_flip = nouveau_crtc_page_flip,
	.destroy = nv50_crtc_destroy,
};

static void
nv50_crtc_dpms(struct drm_crtc *crtc, int mode)
{
}

static void
nv50_crtc_prepare(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct drm_device *dev = crtc->dev;

	NV_DEBUG_KMS(dev, "index %d\n", nv_crtc->index);

	nv50_display_flip_stop(crtc);
	drm_vblank_pre_modeset(dev, nv_crtc->index);
	nv50_crtc_blank(nv_crtc, true);
}

static void
nv50_crtc_commit(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);

	NV_DEBUG_KMS(dev, "index %d\n", nv_crtc->index);

	nv50_crtc_blank(nv_crtc, false);
	drm_vblank_post_modeset(dev, nv_crtc->index);
	nv50_display_sync(dev);
	nv50_display_flip_next(crtc, crtc->fb, NULL);
}

static bool
nv50_crtc_mode_fixup(struct drm_crtc *crtc, const struct drm_display_mode *mode,
		     struct drm_display_mode *adjusted_mode)
{
	return true;
}

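/* Point the head at a new framebuffer.  In the atomic path the new fb is
 * assumed to be pinned already and only the base address is updated; in
 * the normal path the new fb is pinned and the previous one unpinned.
 * The fb offset, pitch, format and scanout position are then written, and
 * the LUT is reloaded if the framebuffer depth changed.
 */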
static int
nv50_crtc_do_mode_set_base(struct drm_crtc *crtc,
			   struct drm_framebuffer *passed_fb,
			   int x, int y, bool atomic)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct drm_device *dev = nv_crtc->base.dev;
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_channel *evo = nv50_display(dev)->master;
	struct drm_framebuffer *drm_fb;
	struct nouveau_framebuffer *fb;
	int ret;

	NV_DEBUG_KMS(dev, "index %d\n", nv_crtc->index);

	/* no fb bound */
	if (!atomic && !crtc->fb) {
		NV_DEBUG_KMS(dev, "No FB bound\n");
		return 0;
	}

	/* If atomic, we want to switch to the fb we were passed, so
	 * now we update pointers to do that.  (We don't pin; just
	 * assume we're already pinned and update the base address.)
	 */
	if (atomic) {
		drm_fb = passed_fb;
		fb = nouveau_framebuffer(passed_fb);
	} else {
		drm_fb = crtc->fb;
		fb = nouveau_framebuffer(crtc->fb);
		/* If not atomic, we can go ahead and pin, and unpin the
		 * old fb we were passed.
		 */
		ret = nouveau_bo_pin(fb->nvbo, TTM_PL_FLAG_VRAM);
		if (ret)
			return ret;

		if (passed_fb) {
			struct nouveau_framebuffer *ofb = nouveau_framebuffer(passed_fb);
			nouveau_bo_unpin(ofb->nvbo);
		}
	}

	nv_crtc->fb.offset = fb->nvbo->bo.offset;
	nv_crtc->fb.tile_flags = nouveau_bo_tile_layout(fb->nvbo);
	nv_crtc->fb.cpp = drm_fb->bits_per_pixel / 8;
	if (!nv_crtc->fb.blanked && dev_priv->chipset != 0x50) {
		ret = RING_SPACE(evo, 2);
		if (ret)
			return ret;

		BEGIN_NV04(evo, 0, NV50_EVO_CRTC(nv_crtc->index, FB_DMA), 1);
		OUT_RING (evo, fb->r_dma);
	}

	ret = RING_SPACE(evo, 12);
	if (ret)
		return ret;

	BEGIN_NV04(evo, 0, NV50_EVO_CRTC(nv_crtc->index, FB_OFFSET), 5);
	OUT_RING (evo, nv_crtc->fb.offset >> 8);
	OUT_RING (evo, 0);
	OUT_RING (evo, (drm_fb->height << 16) | drm_fb->width);
	OUT_RING (evo, fb->r_pitch);
	OUT_RING (evo, fb->r_format);

	BEGIN_NV04(evo, 0, NV50_EVO_CRTC(nv_crtc->index, CLUT_MODE), 1);
	OUT_RING (evo, fb->base.depth == 8 ?
		  NV50_EVO_CRTC_CLUT_MODE_OFF : NV50_EVO_CRTC_CLUT_MODE_ON);

	BEGIN_NV04(evo, 0, NV50_EVO_CRTC(nv_crtc->index, FB_POS), 1);
	OUT_RING (evo, (y << 16) | x);

	if (nv_crtc->lut.depth != fb->base.depth) {
		nv_crtc->lut.depth = fb->base.depth;
		nv50_crtc_lut_load(crtc);
	}

	return 0;
}

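/* Full modeset for a head: derive the EVO timing parameters (sync width,
 * blanking start/end, and the second field's blanking values for interlaced
 * modes) from the DRM mode, write them, refresh dither/scale/vibrance
 * state, then point the head at the framebuffer via the non-atomic
 * base-set path.
 */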
static int
nv50_crtc_mode_set(struct drm_crtc *crtc, struct drm_display_mode *umode,
		   struct drm_display_mode *mode, int x, int y,
		   struct drm_framebuffer *old_fb)
{
	struct drm_device *dev = crtc->dev;
	struct nouveau_channel *evo = nv50_display(dev)->master;
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	u32 head = nv_crtc->index * 0x400;
	u32 ilace = (mode->flags & DRM_MODE_FLAG_INTERLACE) ? 2 : 1;
	u32 vscan = (mode->flags & DRM_MODE_FLAG_DBLSCAN) ? 2 : 1;
	u32 hactive, hsynce, hbackp, hfrontp, hblanke, hblanks;
	u32 vactive, vsynce, vbackp, vfrontp, vblanke, vblanks;
	u32 vblan2e = 0, vblan2s = 1;
	int ret;

	/* hw timing description looks like this:
	 *
	 * <sync> <back porch> <---------display---------> <front porch>
	 *  ______
	 * |____________|---------------------------|____________|
	 *
	 *       ^ synce       ^ blanke              ^ blanks     ^ active
	 *
	 * interlaced modes also have 2 additional values pointing at the end
	 * and start of the next field's blanking period.
	 */

	hactive = mode->htotal;
	hsynce  = mode->hsync_end - mode->hsync_start - 1;
	hbackp  = mode->htotal - mode->hsync_end;
	hblanke = hsynce + hbackp;
	hfrontp = mode->hsync_start - mode->hdisplay;
	hblanks = mode->htotal - hfrontp - 1;

	vactive = mode->vtotal * vscan / ilace;
	vsynce  = ((mode->vsync_end - mode->vsync_start) * vscan / ilace) - 1;
	vbackp  = (mode->vtotal - mode->vsync_end) * vscan / ilace;
	vblanke = vsynce + vbackp;
	vfrontp = (mode->vsync_start - mode->vdisplay) * vscan / ilace;
	vblanks = vactive - vfrontp - 1;
	if (mode->flags & DRM_MODE_FLAG_INTERLACE) {
		vblan2e = vactive + vsynce + vbackp;
		vblan2s = vblan2e + (mode->vdisplay * vscan / ilace);
		vactive = (vactive * 2) + 1;
	}

	ret = RING_SPACE(evo, 18);
	if (ret == 0) {
		BEGIN_NV04(evo, 0, 0x0804 + head, 2);
		OUT_RING (evo, 0x00800000 | mode->clock);
		OUT_RING (evo, (ilace == 2) ? 2 : 0);
		BEGIN_NV04(evo, 0, 0x0810 + head, 6);
		OUT_RING (evo, 0x00000000); /* border colour */
		OUT_RING (evo, (vactive << 16) | hactive);
		OUT_RING (evo, ( vsynce << 16) | hsynce);
		OUT_RING (evo, (vblanke << 16) | hblanke);
		OUT_RING (evo, (vblanks << 16) | hblanks);
		OUT_RING (evo, (vblan2e << 16) | vblan2s);
		BEGIN_NV04(evo, 0, 0x082c + head, 1);
		OUT_RING (evo, 0x00000000);
		BEGIN_NV04(evo, 0, 0x0900 + head, 1);
		OUT_RING (evo, 0x00000311); /* makes sync channel work */
		BEGIN_NV04(evo, 0, 0x08c8 + head, 1);
		OUT_RING (evo, (umode->vdisplay << 16) | umode->hdisplay);
		BEGIN_NV04(evo, 0, 0x08d4 + head, 1);
		OUT_RING (evo, 0x00000000); /* screen position */
	}

	nv_crtc->set_dither(nv_crtc, false);
	nv_crtc->set_scale(nv_crtc, false);
	nv_crtc->set_color_vibrance(nv_crtc, false);

	return nv50_crtc_do_mode_set_base(crtc, old_fb, x, y, false);
}

static int
nv50_crtc_mode_set_base(struct drm_crtc *crtc, int x, int y,
			struct drm_framebuffer *old_fb)
{
	int ret;

	nv50_display_flip_stop(crtc);
	ret = nv50_crtc_do_mode_set_base(crtc, old_fb, x, y, false);
	if (ret)
		return ret;

	ret = nv50_display_sync(crtc->dev);
	if (ret)
		return ret;

	return nv50_display_flip_next(crtc, crtc->fb, NULL);
}

static int
nv50_crtc_mode_set_base_atomic(struct drm_crtc *crtc,
			       struct drm_framebuffer *fb,
			       int x, int y, enum mode_set_atomic state)
{
	int ret;

	nv50_display_flip_stop(crtc);
	ret = nv50_crtc_do_mode_set_base(crtc, fb, x, y, true);
	if (ret)
		return ret;

	return nv50_display_sync(crtc->dev);
}

static const struct drm_crtc_helper_funcs nv50_crtc_helper_funcs = {
	.dpms = nv50_crtc_dpms,
	.prepare = nv50_crtc_prepare,
	.commit = nv50_crtc_commit,
	.mode_fixup = nv50_crtc_mode_fixup,
	.mode_set = nv50_crtc_mode_set,
	.mode_set_base = nv50_crtc_mode_set_base,
	.mode_set_base_atomic = nv50_crtc_mode_set_base_atomic,
	.load_lut = nv50_crtc_lut_load,
};

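/* Allocate and register one CRTC: set up the default (linear) gamma table,
 * hook up the dither/scale/vibrance callbacks, and allocate, pin and map
 * the VRAM buffer objects backing the LUT and the 64x64 cursor image.
 */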
int
nv50_crtc_create(struct drm_device *dev, int index)
{
	struct nouveau_crtc *nv_crtc = NULL;
	int ret, i;

	NV_DEBUG_KMS(dev, "\n");

	nv_crtc = kzalloc(sizeof(*nv_crtc), GFP_KERNEL);
	if (!nv_crtc)
		return -ENOMEM;

	nv_crtc->index = index;
	nv_crtc->set_dither = nv50_crtc_set_dither;
	nv_crtc->set_scale = nv50_crtc_set_scale;
	nv_crtc->set_color_vibrance = nv50_crtc_set_color_vibrance;
	nv_crtc->color_vibrance = 50;
	nv_crtc->vibrant_hue = 0;
	nv_crtc->lut.depth = 0;
	for (i = 0; i < 256; i++) {
		nv_crtc->lut.r[i] = i << 8;
		nv_crtc->lut.g[i] = i << 8;
		nv_crtc->lut.b[i] = i << 8;
	}

	drm_crtc_init(dev, &nv_crtc->base, &nv50_crtc_funcs);
	drm_crtc_helper_add(&nv_crtc->base, &nv50_crtc_helper_funcs);
	drm_mode_crtc_set_gamma_size(&nv_crtc->base, 256);

	ret = nouveau_bo_new(dev, 4096, 0x100, TTM_PL_FLAG_VRAM,
			     0, 0x0000, NULL, &nv_crtc->lut.nvbo);
	if (!ret) {
		ret = nouveau_bo_pin(nv_crtc->lut.nvbo, TTM_PL_FLAG_VRAM);
		if (!ret)
			ret = nouveau_bo_map(nv_crtc->lut.nvbo);
		if (ret)
			nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);
	}

	if (ret)
		goto out;

	ret = nouveau_bo_new(dev, 64*64*4, 0x100, TTM_PL_FLAG_VRAM,
			     0, 0x0000, NULL, &nv_crtc->cursor.nvbo);
	if (!ret) {
		ret = nouveau_bo_pin(nv_crtc->cursor.nvbo, TTM_PL_FLAG_VRAM);
		if (!ret)
			ret = nouveau_bo_map(nv_crtc->cursor.nvbo);
		if (ret)
			nouveau_bo_ref(NULL, &nv_crtc->cursor.nvbo);
	}

	if (ret)
		goto out;

	nv50_cursor_init(nv_crtc);
out:
	if (ret)
		nv50_crtc_destroy(&nv_crtc->base);
	return ret;
}