UAPI: (Scripted) Convert #include "..." to #include <path/...> in drivers/gpu/
[GitHub/mt8127/android_kernel_alcatel_ttab.git] / drivers / gpu / drm / nouveau / nv10_graph.c
1 /*
2 * Copyright 2007 Matthieu CASTET <castet.matthieu@free.fr>
3 * All Rights Reserved.
4 *
5 * Permission is hereby granted, free of charge, to any person obtaining a
6 * copy of this software and associated documentation files (the "Software"),
7 * to deal in the Software without restriction, including without limitation
8 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
9 * and/or sell copies of the Software, and to permit persons to whom the
10 * Software is furnished to do so, subject to the following conditions:
11 *
12 * The above copyright notice and this permission notice (including the next
13 * paragraph) shall be included in all copies or substantial portions of the
14 * Software.
15 *
16 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
17 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
18 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
19 * PRECISION INSIGHT AND/OR ITS SUPPLIERS BE LIABLE FOR ANY CLAIM, DAMAGES OR
20 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
21 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
22 * DEALINGS IN THE SOFTWARE.
23 */
24
25 #include <drm/drmP.h>
26 #include <drm/nouveau_drm.h>
27 #include "nouveau_drv.h"
28 #include "nouveau_util.h"
29
/* NV10 PGRAPH engine instance; wraps the common exec-engine interface. */
struct nv10_graph_engine {
	struct nouveau_exec_engine base;
};
33
/*
 * Software shadow of the PGRAPH 3D pipe state.  Each member holds the
 * words read from / written back through NV10_PGRAPH_PIPE_DATA starting
 * at the NV10_PGRAPH_PIPE_ADDRESS offset the member is named after;
 * array length = (byte span of that region) / 4 words.
 */
struct pipe_state {
	uint32_t pipe_0x0000[0x040/4];
	uint32_t pipe_0x0040[0x010/4];
	uint32_t pipe_0x0200[0x0c0/4];
	uint32_t pipe_0x4400[0x080/4];
	uint32_t pipe_0x6400[0x3b0/4];
	uint32_t pipe_0x6800[0x2f0/4];
	uint32_t pipe_0x6c00[0x030/4];
	uint32_t pipe_0x7000[0x130/4];
	uint32_t pipe_0x7400[0x0c0/4];
	uint32_t pipe_0x7800[0x0c0/4];
};
46
/*
 * List of every PGRAPH MMIO register saved/restored on an NV10 channel
 * context switch.  The index of a register in this table is also its
 * slot in graph_state::nv10[], so entries must not be reordered.
 * Entries given as raw hex addresses fill out multi-word register
 * blocks whose base has a named define (see the inline comments).
 */
static int nv10_graph_ctx_regs[] = {
	NV10_PGRAPH_CTX_SWITCH(0),
	NV10_PGRAPH_CTX_SWITCH(1),
	NV10_PGRAPH_CTX_SWITCH(2),
	NV10_PGRAPH_CTX_SWITCH(3),
	NV10_PGRAPH_CTX_SWITCH(4),
	NV10_PGRAPH_CTX_CACHE(0, 0),
	NV10_PGRAPH_CTX_CACHE(0, 1),
	NV10_PGRAPH_CTX_CACHE(0, 2),
	NV10_PGRAPH_CTX_CACHE(0, 3),
	NV10_PGRAPH_CTX_CACHE(0, 4),
	NV10_PGRAPH_CTX_CACHE(1, 0),
	NV10_PGRAPH_CTX_CACHE(1, 1),
	NV10_PGRAPH_CTX_CACHE(1, 2),
	NV10_PGRAPH_CTX_CACHE(1, 3),
	NV10_PGRAPH_CTX_CACHE(1, 4),
	NV10_PGRAPH_CTX_CACHE(2, 0),
	NV10_PGRAPH_CTX_CACHE(2, 1),
	NV10_PGRAPH_CTX_CACHE(2, 2),
	NV10_PGRAPH_CTX_CACHE(2, 3),
	NV10_PGRAPH_CTX_CACHE(2, 4),
	NV10_PGRAPH_CTX_CACHE(3, 0),
	NV10_PGRAPH_CTX_CACHE(3, 1),
	NV10_PGRAPH_CTX_CACHE(3, 2),
	NV10_PGRAPH_CTX_CACHE(3, 3),
	NV10_PGRAPH_CTX_CACHE(3, 4),
	NV10_PGRAPH_CTX_CACHE(4, 0),
	NV10_PGRAPH_CTX_CACHE(4, 1),
	NV10_PGRAPH_CTX_CACHE(4, 2),
	NV10_PGRAPH_CTX_CACHE(4, 3),
	NV10_PGRAPH_CTX_CACHE(4, 4),
	NV10_PGRAPH_CTX_CACHE(5, 0),
	NV10_PGRAPH_CTX_CACHE(5, 1),
	NV10_PGRAPH_CTX_CACHE(5, 2),
	NV10_PGRAPH_CTX_CACHE(5, 3),
	NV10_PGRAPH_CTX_CACHE(5, 4),
	NV10_PGRAPH_CTX_CACHE(6, 0),
	NV10_PGRAPH_CTX_CACHE(6, 1),
	NV10_PGRAPH_CTX_CACHE(6, 2),
	NV10_PGRAPH_CTX_CACHE(6, 3),
	NV10_PGRAPH_CTX_CACHE(6, 4),
	NV10_PGRAPH_CTX_CACHE(7, 0),
	NV10_PGRAPH_CTX_CACHE(7, 1),
	NV10_PGRAPH_CTX_CACHE(7, 2),
	NV10_PGRAPH_CTX_CACHE(7, 3),
	NV10_PGRAPH_CTX_CACHE(7, 4),
	NV10_PGRAPH_CTX_USER,
	NV04_PGRAPH_DMA_START_0,
	NV04_PGRAPH_DMA_START_1,
	NV04_PGRAPH_DMA_LENGTH,
	NV04_PGRAPH_DMA_MISC,
	NV10_PGRAPH_DMA_PITCH,
	NV04_PGRAPH_BOFFSET0,
	NV04_PGRAPH_BBASE0,
	NV04_PGRAPH_BLIMIT0,
	NV04_PGRAPH_BOFFSET1,
	NV04_PGRAPH_BBASE1,
	NV04_PGRAPH_BLIMIT1,
	NV04_PGRAPH_BOFFSET2,
	NV04_PGRAPH_BBASE2,
	NV04_PGRAPH_BLIMIT2,
	NV04_PGRAPH_BOFFSET3,
	NV04_PGRAPH_BBASE3,
	NV04_PGRAPH_BLIMIT3,
	NV04_PGRAPH_BOFFSET4,
	NV04_PGRAPH_BBASE4,
	NV04_PGRAPH_BLIMIT4,
	NV04_PGRAPH_BOFFSET5,
	NV04_PGRAPH_BBASE5,
	NV04_PGRAPH_BLIMIT5,
	NV04_PGRAPH_BPITCH0,
	NV04_PGRAPH_BPITCH1,
	NV04_PGRAPH_BPITCH2,
	NV04_PGRAPH_BPITCH3,
	NV04_PGRAPH_BPITCH4,
	NV10_PGRAPH_SURFACE,
	NV10_PGRAPH_STATE,
	NV04_PGRAPH_BSWIZZLE2,
	NV04_PGRAPH_BSWIZZLE5,
	NV04_PGRAPH_BPIXEL,
	NV10_PGRAPH_NOTIFY,
	NV04_PGRAPH_PATT_COLOR0,
	NV04_PGRAPH_PATT_COLOR1,
	NV04_PGRAPH_PATT_COLORRAM, /* 64 values from 0x400900 to 0x4009fc */
	0x00400904,
	0x00400908,
	0x0040090c,
	0x00400910,
	0x00400914,
	0x00400918,
	0x0040091c,
	0x00400920,
	0x00400924,
	0x00400928,
	0x0040092c,
	0x00400930,
	0x00400934,
	0x00400938,
	0x0040093c,
	0x00400940,
	0x00400944,
	0x00400948,
	0x0040094c,
	0x00400950,
	0x00400954,
	0x00400958,
	0x0040095c,
	0x00400960,
	0x00400964,
	0x00400968,
	0x0040096c,
	0x00400970,
	0x00400974,
	0x00400978,
	0x0040097c,
	0x00400980,
	0x00400984,
	0x00400988,
	0x0040098c,
	0x00400990,
	0x00400994,
	0x00400998,
	0x0040099c,
	0x004009a0,
	0x004009a4,
	0x004009a8,
	0x004009ac,
	0x004009b0,
	0x004009b4,
	0x004009b8,
	0x004009bc,
	0x004009c0,
	0x004009c4,
	0x004009c8,
	0x004009cc,
	0x004009d0,
	0x004009d4,
	0x004009d8,
	0x004009dc,
	0x004009e0,
	0x004009e4,
	0x004009e8,
	0x004009ec,
	0x004009f0,
	0x004009f4,
	0x004009f8,
	0x004009fc,
	NV04_PGRAPH_PATTERN,	/* 2 values from 0x400808 to 0x40080c */
	0x0040080c,
	NV04_PGRAPH_PATTERN_SHAPE,
	NV03_PGRAPH_MONO_COLOR0,
	NV04_PGRAPH_ROP3,
	NV04_PGRAPH_CHROMA,
	NV04_PGRAPH_BETA_AND,
	NV04_PGRAPH_BETA_PREMULT,
	0x00400e70,
	0x00400e74,
	0x00400e78,
	0x00400e7c,
	0x00400e80,
	0x00400e84,
	0x00400e88,
	0x00400e8c,
	0x00400ea0,
	0x00400ea4,
	0x00400ea8,
	0x00400e90,
	0x00400e94,
	0x00400e98,
	0x00400e9c,
	NV10_PGRAPH_WINDOWCLIP_HORIZONTAL, /* 8 values from 0x400f00-0x400f1c */
	NV10_PGRAPH_WINDOWCLIP_VERTICAL,   /* 8 values from 0x400f20-0x400f3c */
	0x00400f04,
	0x00400f24,
	0x00400f08,
	0x00400f28,
	0x00400f0c,
	0x00400f2c,
	0x00400f10,
	0x00400f30,
	0x00400f14,
	0x00400f34,
	0x00400f18,
	0x00400f38,
	0x00400f1c,
	0x00400f3c,
	NV10_PGRAPH_XFMODE0,
	NV10_PGRAPH_XFMODE1,
	NV10_PGRAPH_GLOBALSTATE0,
	NV10_PGRAPH_GLOBALSTATE1,
	NV04_PGRAPH_STORED_FMT,
	NV04_PGRAPH_SOURCE_COLOR,
	NV03_PGRAPH_ABS_X_RAM,	/* 32 values from 0x400400 to 0x40047c */
	NV03_PGRAPH_ABS_Y_RAM,	/* 32 values from 0x400480 to 0x4004fc */
	0x00400404,
	0x00400484,
	0x00400408,
	0x00400488,
	0x0040040c,
	0x0040048c,
	0x00400410,
	0x00400490,
	0x00400414,
	0x00400494,
	0x00400418,
	0x00400498,
	0x0040041c,
	0x0040049c,
	0x00400420,
	0x004004a0,
	0x00400424,
	0x004004a4,
	0x00400428,
	0x004004a8,
	0x0040042c,
	0x004004ac,
	0x00400430,
	0x004004b0,
	0x00400434,
	0x004004b4,
	0x00400438,
	0x004004b8,
	0x0040043c,
	0x004004bc,
	0x00400440,
	0x004004c0,
	0x00400444,
	0x004004c4,
	0x00400448,
	0x004004c8,
	0x0040044c,
	0x004004cc,
	0x00400450,
	0x004004d0,
	0x00400454,
	0x004004d4,
	0x00400458,
	0x004004d8,
	0x0040045c,
	0x004004dc,
	0x00400460,
	0x004004e0,
	0x00400464,
	0x004004e4,
	0x00400468,
	0x004004e8,
	0x0040046c,
	0x004004ec,
	0x00400470,
	0x004004f0,
	0x00400474,
	0x004004f4,
	0x00400478,
	0x004004f8,
	0x0040047c,
	0x004004fc,
	NV03_PGRAPH_ABS_UCLIP_XMIN,
	NV03_PGRAPH_ABS_UCLIP_XMAX,
	NV03_PGRAPH_ABS_UCLIP_YMIN,
	NV03_PGRAPH_ABS_UCLIP_YMAX,
	0x00400550,
	0x00400558,
	0x00400554,
	0x0040055c,
	NV03_PGRAPH_ABS_UCLIPA_XMIN,
	NV03_PGRAPH_ABS_UCLIPA_XMAX,
	NV03_PGRAPH_ABS_UCLIPA_YMIN,
	NV03_PGRAPH_ABS_UCLIPA_YMAX,
	NV03_PGRAPH_ABS_ICLIP_XMAX,
	NV03_PGRAPH_ABS_ICLIP_YMAX,
	NV03_PGRAPH_XY_LOGIC_MISC0,
	NV03_PGRAPH_XY_LOGIC_MISC1,
	NV03_PGRAPH_XY_LOGIC_MISC2,
	NV03_PGRAPH_XY_LOGIC_MISC3,
	NV03_PGRAPH_CLIPX_0,
	NV03_PGRAPH_CLIPX_1,
	NV03_PGRAPH_CLIPY_0,
	NV03_PGRAPH_CLIPY_1,
	NV10_PGRAPH_COMBINER0_IN_ALPHA,
	NV10_PGRAPH_COMBINER1_IN_ALPHA,
	NV10_PGRAPH_COMBINER0_IN_RGB,
	NV10_PGRAPH_COMBINER1_IN_RGB,
	NV10_PGRAPH_COMBINER_COLOR0,
	NV10_PGRAPH_COMBINER_COLOR1,
	NV10_PGRAPH_COMBINER0_OUT_ALPHA,
	NV10_PGRAPH_COMBINER1_OUT_ALPHA,
	NV10_PGRAPH_COMBINER0_OUT_RGB,
	NV10_PGRAPH_COMBINER1_OUT_RGB,
	NV10_PGRAPH_COMBINER_FINAL0,
	NV10_PGRAPH_COMBINER_FINAL1,
	0x00400e00,
	0x00400e04,
	0x00400e08,
	0x00400e0c,
	0x00400e10,
	0x00400e14,
	0x00400e18,
	0x00400e1c,
	0x00400e20,
	0x00400e24,
	0x00400e28,
	0x00400e2c,
	0x00400e30,
	0x00400e34,
	0x00400e38,
	0x00400e3c,
	NV04_PGRAPH_PASSTHRU_0,
	NV04_PGRAPH_PASSTHRU_1,
	NV04_PGRAPH_PASSTHRU_2,
	NV10_PGRAPH_DIMX_TEXTURE,
	NV10_PGRAPH_WDIMX_TEXTURE,
	NV10_PGRAPH_DVD_COLORFMT,
	NV10_PGRAPH_SCALED_FORMAT,
	NV04_PGRAPH_MISC24_0,
	NV04_PGRAPH_MISC24_1,
	NV04_PGRAPH_MISC24_2,
	NV03_PGRAPH_X_MISC,
	NV03_PGRAPH_Y_MISC,
	NV04_PGRAPH_VALID1,
	NV04_PGRAPH_VALID2,
};
368
/*
 * Additional PGRAPH registers saved/restored only on NV17+ chipsets
 * (chipset >= 0x17), indexed into graph_state::nv17[].  Order matters;
 * do not reorder entries.
 */
static int nv17_graph_ctx_regs[] = {
	NV10_PGRAPH_DEBUG_4,
	0x004006b0,
	0x00400eac,
	0x00400eb0,
	0x00400eb4,
	0x00400eb8,
	0x00400ebc,
	0x00400ec0,
	0x00400ec4,
	0x00400ec8,
	0x00400ecc,
	0x00400ed0,
	0x00400ed4,
	0x00400ed8,
	0x00400edc,
	0x00400ee0,
	0x00400a00,
	0x00400a04,
};
389
/*
 * Per-channel software PGRAPH context image.  nv10[]/nv17[] shadow the
 * register tables above (same indexing); pipe_state shadows the 3D
 * pipe; lma_window buffers the four NV17 LMA_WINDOW method parameters
 * (methods 0x1638-0x1644, see nv17_graph_mthd_lma_window).
 */
struct graph_state {
	int nv10[ARRAY_SIZE(nv10_graph_ctx_regs)];
	int nv17[ARRAY_SIZE(nv17_graph_ctx_regs)];
	struct pipe_state pipe_state;
	uint32_t lma_window[4];
};
396
/*
 * Read ARRAY_SIZE(state) pipe words starting at pipe offset 'addr'
 * into the array 'state'.  PIPE_ADDRESS is written once; repeated
 * PIPE_DATA reads walk forward from there.  'state' must be a real
 * array (not a pointer) for ARRAY_SIZE to work.
 */
#define PIPE_SAVE(dev, state, addr)					\
	do {								\
		int __i;						\
		nv_wr32(dev, NV10_PGRAPH_PIPE_ADDRESS, addr);		\
		for (__i = 0; __i < ARRAY_SIZE(state); __i++)		\
			state[__i] = nv_rd32(dev, NV10_PGRAPH_PIPE_DATA); \
	} while (0)
404
/*
 * Write ARRAY_SIZE(state) pipe words from the array 'state' to the
 * pipe starting at offset 'addr'.  Counterpart of PIPE_SAVE; same
 * real-array requirement.
 */
#define PIPE_RESTORE(dev, state, addr)					\
	do {								\
		int __i;						\
		nv_wr32(dev, NV10_PGRAPH_PIPE_ADDRESS, addr);		\
		for (__i = 0; __i < ARRAY_SIZE(state); __i++)		\
			nv_wr32(dev, NV10_PGRAPH_PIPE_DATA, state[__i]); \
	} while (0)
412
/*
 * Snapshot every 3D pipe region of the channel into its software
 * context image (graph_state::pipe_state).  The save order below is
 * preserved as-is; nv10_graph_load_pipe restores in a different,
 * deliberate order.
 */
static void nv10_graph_save_pipe(struct nouveau_channel *chan)
{
	struct graph_state *pgraph_ctx = chan->engctx[NVOBJ_ENGINE_GR];
	struct pipe_state *pipe = &pgraph_ctx->pipe_state;
	struct drm_device *dev = chan->dev;

	PIPE_SAVE(dev, pipe->pipe_0x4400, 0x4400);
	PIPE_SAVE(dev, pipe->pipe_0x0200, 0x0200);
	PIPE_SAVE(dev, pipe->pipe_0x6400, 0x6400);
	PIPE_SAVE(dev, pipe->pipe_0x6800, 0x6800);
	PIPE_SAVE(dev, pipe->pipe_0x6c00, 0x6c00);
	PIPE_SAVE(dev, pipe->pipe_0x7000, 0x7000);
	PIPE_SAVE(dev, pipe->pipe_0x7400, 0x7400);
	PIPE_SAVE(dev, pipe->pipe_0x7800, 0x7800);
	PIPE_SAVE(dev, pipe->pipe_0x0040, 0x0040);
	PIPE_SAVE(dev, pipe->pipe_0x0000, 0x0000);
}
430
/*
 * Restore the channel's saved 3D pipe state into the hardware.
 * XFMODE is forced to a known value while priming a few pipe regions
 * with fixed words, then restored before the bulk of the pipe is
 * written back.  The write ordering here is deliberate; do not
 * rearrange.
 */
static void nv10_graph_load_pipe(struct nouveau_channel *chan)
{
	struct graph_state *pgraph_ctx = chan->engctx[NVOBJ_ENGINE_GR];
	struct pipe_state *pipe = &pgraph_ctx->pipe_state;
	struct drm_device *dev = chan->dev;
	uint32_t xfmode0, xfmode1;
	int i;

	nouveau_wait_for_idle(dev);
	/* XXX check haiku comments */
	/* Save XFMODE and park it in a known state while priming the pipe. */
	xfmode0 = nv_rd32(dev, NV10_PGRAPH_XFMODE0);
	xfmode1 = nv_rd32(dev, NV10_PGRAPH_XFMODE1);
	nv_wr32(dev, NV10_PGRAPH_XFMODE0, 0x10000000);
	nv_wr32(dev, NV10_PGRAPH_XFMODE1, 0x00000000);
	/* Prime 0x64c0: four words of 1.0f followed by four zeros. */
	nv_wr32(dev, NV10_PGRAPH_PIPE_ADDRESS, 0x000064c0);
	for (i = 0; i < 4; i++)
		nv_wr32(dev, NV10_PGRAPH_PIPE_DATA, 0x3f800000);
	for (i = 0; i < 4; i++)
		nv_wr32(dev, NV10_PGRAPH_PIPE_DATA, 0x00000000);

	/* Prime 0x6ab0: three words of 1.0f. */
	nv_wr32(dev, NV10_PGRAPH_PIPE_ADDRESS, 0x00006ab0);
	for (i = 0; i < 3; i++)
		nv_wr32(dev, NV10_PGRAPH_PIPE_DATA, 0x3f800000);

	/* Prime 0x6a80: three zero words. */
	nv_wr32(dev, NV10_PGRAPH_PIPE_ADDRESS, 0x00006a80);
	for (i = 0; i < 3; i++)
		nv_wr32(dev, NV10_PGRAPH_PIPE_DATA, 0x00000000);

	nv_wr32(dev, NV10_PGRAPH_PIPE_ADDRESS, 0x00000040);
	nv_wr32(dev, NV10_PGRAPH_PIPE_DATA, 0x00000008);


	PIPE_RESTORE(dev, pipe->pipe_0x0200, 0x0200);
	nouveau_wait_for_idle(dev);

	/* restore XFMODE */
	nv_wr32(dev, NV10_PGRAPH_XFMODE0, xfmode0);
	nv_wr32(dev, NV10_PGRAPH_XFMODE1, xfmode1);
	PIPE_RESTORE(dev, pipe->pipe_0x6400, 0x6400);
	PIPE_RESTORE(dev, pipe->pipe_0x6800, 0x6800);
	PIPE_RESTORE(dev, pipe->pipe_0x6c00, 0x6c00);
	PIPE_RESTORE(dev, pipe->pipe_0x7000, 0x7000);
	PIPE_RESTORE(dev, pipe->pipe_0x7400, 0x7400);
	PIPE_RESTORE(dev, pipe->pipe_0x7800, 0x7800);
	PIPE_RESTORE(dev, pipe->pipe_0x4400, 0x4400);
	PIPE_RESTORE(dev, pipe->pipe_0x0000, 0x0000);
	PIPE_RESTORE(dev, pipe->pipe_0x0040, 0x0040);
	nouveau_wait_for_idle(dev);
}
480
/*
 * Fill the channel's software pipe image with the hardware default
 * values, so the first context load presents a sane 3D pipe.  The
 * constants are raw IEEE-754 bit patterns (0x3f800000 = 1.0f,
 * 0x3f000000 = 0.5f, 0xbf800000 = -1.0f); 0x7149f2ca appears once per
 * 4-word group in the 0x7000 region.  PIPE_INIT_END() sanity-checks
 * that exactly the declared number of words was written.
 */
static void nv10_graph_create_pipe(struct nouveau_channel *chan)
{
	struct graph_state *pgraph_ctx = chan->engctx[NVOBJ_ENGINE_GR];
	struct pipe_state *fifo_pipe_state = &pgraph_ctx->pipe_state;
	struct drm_device *dev = chan->dev;
	uint32_t *fifo_pipe_state_addr;
	int i;
#define PIPE_INIT(addr) \
	do { \
		fifo_pipe_state_addr = fifo_pipe_state->pipe_##addr; \
	} while (0)
#define PIPE_INIT_END(addr) \
	do { \
		uint32_t *__end_addr = fifo_pipe_state->pipe_##addr + \
				ARRAY_SIZE(fifo_pipe_state->pipe_##addr); \
		if (fifo_pipe_state_addr != __end_addr) \
			NV_ERROR(dev, "incomplete pipe init for 0x%x :  %p/%p\n", \
				addr, fifo_pipe_state_addr, __end_addr); \
	} while (0)
#define NV_WRITE_PIPE_INIT(value) *(fifo_pipe_state_addr++) = value

	PIPE_INIT(0x0200);
	for (i = 0; i < 48; i++)
		NV_WRITE_PIPE_INIT(0x00000000);
	PIPE_INIT_END(0x0200);

	PIPE_INIT(0x6400);
	for (i = 0; i < 211; i++)
		NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x3f800000);
	NV_WRITE_PIPE_INIT(0x40000000);
	NV_WRITE_PIPE_INIT(0x40000000);
	NV_WRITE_PIPE_INIT(0x40000000);
	NV_WRITE_PIPE_INIT(0x40000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x3f800000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x3f000000);
	NV_WRITE_PIPE_INIT(0x3f000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x3f800000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x3f800000);
	NV_WRITE_PIPE_INIT(0x3f800000);
	NV_WRITE_PIPE_INIT(0x3f800000);
	NV_WRITE_PIPE_INIT(0x3f800000);
	PIPE_INIT_END(0x6400);

	PIPE_INIT(0x6800);
	for (i = 0; i < 162; i++)
		NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x3f800000);
	for (i = 0; i < 25; i++)
		NV_WRITE_PIPE_INIT(0x00000000);
	PIPE_INIT_END(0x6800);

	PIPE_INIT(0x6c00);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0xbf800000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	PIPE_INIT_END(0x6c00);

	PIPE_INIT(0x7000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x7149f2ca);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x7149f2ca);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x7149f2ca);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x7149f2ca);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x7149f2ca);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x7149f2ca);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x7149f2ca);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x00000000);
	NV_WRITE_PIPE_INIT(0x7149f2ca);
	for (i = 0; i < 35; i++)
		NV_WRITE_PIPE_INIT(0x00000000);
	PIPE_INIT_END(0x7000);

	PIPE_INIT(0x7400);
	for (i = 0; i < 48; i++)
		NV_WRITE_PIPE_INIT(0x00000000);
	PIPE_INIT_END(0x7400);

	PIPE_INIT(0x7800);
	for (i = 0; i < 48; i++)
		NV_WRITE_PIPE_INIT(0x00000000);
	PIPE_INIT_END(0x7800);

	PIPE_INIT(0x4400);
	for (i = 0; i < 32; i++)
		NV_WRITE_PIPE_INIT(0x00000000);
	PIPE_INIT_END(0x4400);

	PIPE_INIT(0x0000);
	for (i = 0; i < 16; i++)
		NV_WRITE_PIPE_INIT(0x00000000);
	PIPE_INIT_END(0x0000);

	PIPE_INIT(0x0040);
	for (i = 0; i < 4; i++)
		NV_WRITE_PIPE_INIT(0x00000000);
	PIPE_INIT_END(0x0040);

#undef PIPE_INIT
#undef PIPE_INIT_END
#undef NV_WRITE_PIPE_INIT
}
635
636 static int nv10_graph_ctx_regs_find_offset(struct drm_device *dev, int reg)
637 {
638 int i;
639 for (i = 0; i < ARRAY_SIZE(nv10_graph_ctx_regs); i++) {
640 if (nv10_graph_ctx_regs[i] == reg)
641 return i;
642 }
643 NV_ERROR(dev, "unknow offset nv10_ctx_regs %d\n", reg);
644 return -1;
645 }
646
647 static int nv17_graph_ctx_regs_find_offset(struct drm_device *dev, int reg)
648 {
649 int i;
650 for (i = 0; i < ARRAY_SIZE(nv17_graph_ctx_regs); i++) {
651 if (nv17_graph_ctx_regs[i] == reg)
652 return i;
653 }
654 NV_ERROR(dev, "unknow offset nv17_ctx_regs %d\n", reg);
655 return -1;
656 }
657
/*
 * Re-apply the NV10TCL_DMA_VTXBUF binding ('inst') for 'chan' by
 * injecting the method through the PGRAPH FIFO interface, since the
 * state it programs is not reachable via MMIO.  The surrounding FIFO
 * and context-switch registers are saved up front and restored
 * afterwards so the injection is invisible to the channel.
 */
static void nv10_graph_load_dma_vtxbuf(struct nouveau_channel *chan,
				       uint32_t inst)
{
	struct drm_device *dev = chan->dev;
	uint32_t st2, st2_dl, st2_dh, fifo_ptr, fifo[0x60/4];
	uint32_t ctx_user, ctx_switch[5];
	int i, subchan = -1;

	/* NV10TCL_DMA_VTXBUF (method 0x18c) modifies hidden state
	 * that cannot be restored via MMIO. Do it through the FIFO
	 * instead.
	 */

	/* Look for a celsius object */
	for (i = 0; i < 8; i++) {
		int class = nv_rd32(dev, NV10_PGRAPH_CTX_CACHE(i, 0)) & 0xfff;

		/* 0x56/0x96/0x99 are the celsius 3D object classes. */
		if (class == 0x56 || class == 0x96 || class == 0x99) {
			subchan = i;
			break;
		}
	}

	/* Nothing to do without a celsius object and a vtxbuf instance. */
	if (subchan < 0 || !inst)
		return;

	/* Save the current ctx object */
	ctx_user = nv_rd32(dev, NV10_PGRAPH_CTX_USER);
	for (i = 0; i < 5; i++)
		ctx_switch[i] = nv_rd32(dev, NV10_PGRAPH_CTX_SWITCH(i));

	/* Save the FIFO state */
	st2 = nv_rd32(dev, NV10_PGRAPH_FFINTFC_ST2);
	st2_dl = nv_rd32(dev, NV10_PGRAPH_FFINTFC_ST2_DL);
	st2_dh = nv_rd32(dev, NV10_PGRAPH_FFINTFC_ST2_DH);
	fifo_ptr = nv_rd32(dev, NV10_PGRAPH_FFINTFC_FIFO_PTR);

	for (i = 0; i < ARRAY_SIZE(fifo); i++)
		fifo[i] = nv_rd32(dev, 0x4007a0 + 4 * i);

	/* Switch to the celsius subchannel */
	for (i = 0; i < 5; i++)
		nv_wr32(dev, NV10_PGRAPH_CTX_SWITCH(i),
			nv_rd32(dev, NV10_PGRAPH_CTX_CACHE(subchan, i)));
	nv_mask(dev, NV10_PGRAPH_CTX_USER, 0xe000, subchan << 13);

	/* Inject NV10TCL_DMA_VTXBUF */
	nv_wr32(dev, NV10_PGRAPH_FFINTFC_FIFO_PTR, 0);
	nv_wr32(dev, NV10_PGRAPH_FFINTFC_ST2,
		0x2c000000 | chan->id << 20 | subchan << 16 | 0x18c);
	nv_wr32(dev, NV10_PGRAPH_FFINTFC_ST2_DL, inst);
	nv_mask(dev, NV10_PGRAPH_CTX_CONTROL, 0, 0x10000);
	/* Pulse the FIFO enable so the injected method is consumed. */
	nv_mask(dev, NV04_PGRAPH_FIFO, 0x00000001, 0x00000001);
	nv_mask(dev, NV04_PGRAPH_FIFO, 0x00000001, 0x00000000);

	/* Restore the FIFO state */
	for (i = 0; i < ARRAY_SIZE(fifo); i++)
		nv_wr32(dev, 0x4007a0 + 4 * i, fifo[i]);

	nv_wr32(dev, NV10_PGRAPH_FFINTFC_FIFO_PTR, fifo_ptr);
	nv_wr32(dev, NV10_PGRAPH_FFINTFC_ST2, st2);
	nv_wr32(dev, NV10_PGRAPH_FFINTFC_ST2_DL, st2_dl);
	nv_wr32(dev, NV10_PGRAPH_FFINTFC_ST2_DH, st2_dh);

	/* Restore the current ctx object */
	for (i = 0; i < 5; i++)
		nv_wr32(dev, NV10_PGRAPH_CTX_SWITCH(i), ctx_switch[i]);
	nv_wr32(dev, NV10_PGRAPH_CTX_USER, ctx_user);
}
727
/*
 * Load 'chan's software context image into the PGRAPH hardware:
 * restore the saved MMIO registers (plus the NV17-only set on
 * chipset >= 0x17), the 3D pipe, and the hidden vtxbuf binding, then
 * mark the channel as the engine's current owner.
 *
 * Always returns 0.
 */
static int
nv10_graph_load_context(struct nouveau_channel *chan)
{
	struct drm_device *dev = chan->dev;
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct graph_state *pgraph_ctx = chan->engctx[NVOBJ_ENGINE_GR];
	uint32_t tmp;
	int i;

	for (i = 0; i < ARRAY_SIZE(nv10_graph_ctx_regs); i++)
		nv_wr32(dev, nv10_graph_ctx_regs[i], pgraph_ctx->nv10[i]);
	if (dev_priv->chipset >= 0x17) {
		for (i = 0; i < ARRAY_SIZE(nv17_graph_ctx_regs); i++)
			nv_wr32(dev, nv17_graph_ctx_regs[i],
						pgraph_ctx->nv17[i]);
	}

	nv10_graph_load_pipe(chan);
	/* vtxbuf instance lives in the low 16 bits of GLOBALSTATE1. */
	nv10_graph_load_dma_vtxbuf(chan, (nv_rd32(dev, NV10_PGRAPH_GLOBALSTATE1)
					  & 0xffff));

	nv_wr32(dev, NV10_PGRAPH_CTX_CONTROL, 0x10010100);
	/* Stamp the channel id into CTX_USER bits 24-31. */
	tmp = nv_rd32(dev, NV10_PGRAPH_CTX_USER);
	nv_wr32(dev, NV10_PGRAPH_CTX_USER, (tmp & 0xffffff) | chan->id << 24);
	tmp = nv_rd32(dev, NV10_PGRAPH_FFINTFC_ST2);
	nv_wr32(dev, NV10_PGRAPH_FFINTFC_ST2, tmp & 0xcfffffff);
	return 0;
}
756
/*
 * Save the currently-active PGRAPH context (if any) back into its
 * channel's software image and mark the engine as owned by no channel
 * (id 31 in CTX_USER).
 *
 * Returns 0 whether or not a context was loaded.
 */
static int
nv10_graph_unload_context(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_channel *chan;
	struct graph_state *ctx;
	uint32_t tmp;
	int i;

	chan = nv10_graph_channel(dev);
	if (!chan)
		return 0;
	ctx = chan->engctx[NVOBJ_ENGINE_GR];

	for (i = 0; i < ARRAY_SIZE(nv10_graph_ctx_regs); i++)
		ctx->nv10[i] = nv_rd32(dev, nv10_graph_ctx_regs[i]);

	if (dev_priv->chipset >= 0x17) {
		for (i = 0; i < ARRAY_SIZE(nv17_graph_ctx_regs); i++)
			ctx->nv17[i] = nv_rd32(dev, nv17_graph_ctx_regs[i]);
	}

	nv10_graph_save_pipe(chan);

	nv_wr32(dev, NV10_PGRAPH_CTX_CONTROL, 0x10000000);
	/* Channel id 31 means "no channel owns PGRAPH". */
	tmp  = nv_rd32(dev, NV10_PGRAPH_CTX_USER) & 0x00ffffff;
	tmp |= 31 << 24;
	nv_wr32(dev, NV10_PGRAPH_CTX_USER, tmp);
	return 0;
}
787
788 static void
789 nv10_graph_context_switch(struct drm_device *dev)
790 {
791 struct drm_nouveau_private *dev_priv = dev->dev_private;
792 struct nouveau_channel *chan = NULL;
793 int chid;
794
795 nouveau_wait_for_idle(dev);
796
797 /* If previous context is valid, we need to save it */
798 nv10_graph_unload_context(dev);
799
800 /* Load context for next channel */
801 chid = (nv_rd32(dev, NV04_PGRAPH_TRAPPED_ADDR) >> 20) & 0x1f;
802 chan = dev_priv->channels.ptr[chid];
803 if (chan && chan->engctx[NVOBJ_ENGINE_GR])
804 nv10_graph_load_context(chan);
805 }
806
/*
 * Store 'val' into the software context image slot for PGRAPH register
 * 'reg'; unknown registers are silently dropped (find_offset already
 * logged them).  The check is >= 0 rather than > 0: find_offset()
 * returns -1 as its only failure sentinel, so index 0 is a valid slot
 * and must not be rejected (latent off-by-one in the original).
 */
#define NV_WRITE_CTX(reg, val) do { \
	int offset = nv10_graph_ctx_regs_find_offset(dev, reg); \
	if (offset >= 0) \
		pgraph_ctx->nv10[offset] = val; \
	} while (0)

#define NV17_WRITE_CTX(reg, val) do { \
	int offset = nv17_graph_ctx_regs_find_offset(dev, reg); \
	if (offset >= 0) \
		pgraph_ctx->nv17[offset] = val; \
	} while (0)
818
819 struct nouveau_channel *
820 nv10_graph_channel(struct drm_device *dev)
821 {
822 struct drm_nouveau_private *dev_priv = dev->dev_private;
823 int chid = 31;
824
825 if (nv_rd32(dev, NV10_PGRAPH_CTX_CONTROL) & 0x00010000)
826 chid = nv_rd32(dev, NV10_PGRAPH_CTX_USER) >> 24;
827
828 if (chid >= 31)
829 return NULL;
830
831 return dev_priv->channels.ptr[chid];
832 }
833
/*
 * Allocate and initialise the per-channel PGRAPH software context.
 * Seeds the context image with hardware default values (plus the
 * NV17-only registers on chipset >= 0x17) and builds the default 3D
 * pipe image.
 *
 * Returns 0 on success, -ENOMEM if the context cannot be allocated.
 */
static int
nv10_graph_context_new(struct nouveau_channel *chan, int engine)
{
	struct drm_device *dev = chan->dev;
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct graph_state *pgraph_ctx;

	NV_DEBUG(dev, "nv10_graph_context_create %d\n", chan->id);

	pgraph_ctx = kzalloc(sizeof(*pgraph_ctx), GFP_KERNEL);
	if (pgraph_ctx == NULL)
		return -ENOMEM;
	chan->engctx[engine] = pgraph_ctx;

	NV_WRITE_CTX(0x00400e88, 0x08000000);
	NV_WRITE_CTX(0x00400e9c, 0x4b7fffff);
	NV_WRITE_CTX(NV03_PGRAPH_XY_LOGIC_MISC0, 0x0001ffff);
	NV_WRITE_CTX(0x00400e10, 0x00001000);
	NV_WRITE_CTX(0x00400e14, 0x00001000);
	NV_WRITE_CTX(0x00400e30, 0x00080008);
	NV_WRITE_CTX(0x00400e34, 0x00080008);
	if (dev_priv->chipset >= 0x17) {
		/* is it really needed ??? */
		/* Seed the NV17 extras from the live register values. */
		NV17_WRITE_CTX(NV10_PGRAPH_DEBUG_4,
					nv_rd32(dev, NV10_PGRAPH_DEBUG_4));
		NV17_WRITE_CTX(0x004006b0, nv_rd32(dev, 0x004006b0));
		NV17_WRITE_CTX(0x00400eac, 0x0fff0000);
		NV17_WRITE_CTX(0x00400eb0, 0x0fff0000);
		NV17_WRITE_CTX(0x00400ec0, 0x00000080);
		NV17_WRITE_CTX(0x00400ed0, 0x00000080);
	}
	/* Pre-stamp the channel id so CTX_USER is right after first load. */
	NV_WRITE_CTX(NV10_PGRAPH_CTX_USER, chan->id << 24);

	nv10_graph_create_pipe(chan);
	return 0;
}
870
871 static void
872 nv10_graph_context_del(struct nouveau_channel *chan, int engine)
873 {
874 struct drm_device *dev = chan->dev;
875 struct drm_nouveau_private *dev_priv = dev->dev_private;
876 struct graph_state *pgraph_ctx = chan->engctx[engine];
877 unsigned long flags;
878
879 spin_lock_irqsave(&dev_priv->context_switch_lock, flags);
880 nv_mask(dev, NV04_PGRAPH_FIFO, 0x00000001, 0x00000000);
881
882 /* Unload the context if it's the currently active one */
883 if (nv10_graph_channel(dev) == chan)
884 nv10_graph_unload_context(dev);
885
886 nv_mask(dev, NV04_PGRAPH_FIFO, 0x00000001, 0x00000001);
887 spin_unlock_irqrestore(&dev_priv->context_switch_lock, flags);
888
889 /* Free the context resources */
890 chan->engctx[engine] = NULL;
891 kfree(pgraph_ctx);
892 }
893
/*
 * Program PGRAPH tiling region 'i' from the device's tile table so the
 * graphics engine agrees with the framebuffer tiling setup.
 */
static void
nv10_graph_set_tile_region(struct drm_device *dev, int i)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_tile_reg *tile = &dev_priv->tile.reg[i];

	nv_wr32(dev, NV10_PGRAPH_TLIMIT(i), tile->limit);
	nv_wr32(dev, NV10_PGRAPH_TSIZE(i), tile->pitch);
	nv_wr32(dev, NV10_PGRAPH_TILE(i), tile->addr);
}
904
/*
 * Bring PGRAPH out of reset and into a usable idle state: reset the
 * unit via PMC_ENABLE, ack and unmask all interrupts, program the
 * DEBUG registers (with chipset-specific values on NV17+), clear the
 * tiling regions and context-switch state, and mark the engine as
 * owned by no channel (id 31).
 *
 * Always returns 0.
 */
static int
nv10_graph_init(struct drm_device *dev, int engine)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	u32 tmp;
	int i;

	/* Pulse PGRAPH through reset. */
	nv_wr32(dev, NV03_PMC_ENABLE, nv_rd32(dev, NV03_PMC_ENABLE) &
			~NV_PMC_ENABLE_PGRAPH);
	nv_wr32(dev, NV03_PMC_ENABLE, nv_rd32(dev, NV03_PMC_ENABLE) |
			 NV_PMC_ENABLE_PGRAPH);

	/* Ack anything pending, then enable all interrupt sources. */
	nv_wr32(dev, NV03_PGRAPH_INTR , 0xFFFFFFFF);
	nv_wr32(dev, NV03_PGRAPH_INTR_EN, 0xFFFFFFFF);

	nv_wr32(dev, NV04_PGRAPH_DEBUG_0, 0xFFFFFFFF);
	nv_wr32(dev, NV04_PGRAPH_DEBUG_0, 0x00000000);
	nv_wr32(dev, NV04_PGRAPH_DEBUG_1, 0x00118700);
	/* nv_wr32(dev, NV04_PGRAPH_DEBUG_2, 0x24E00810); */ /* 0x25f92ad9 */
	nv_wr32(dev, NV04_PGRAPH_DEBUG_2, 0x25f92ad9);
	nv_wr32(dev, NV04_PGRAPH_DEBUG_3, 0x55DE0830 |
		      (1<<29) |
		      (1<<31));
	if (dev_priv->chipset >= 0x17) {
		nv_wr32(dev, NV10_PGRAPH_DEBUG_4, 0x1f000000);
		nv_wr32(dev, 0x400a10, 0x3ff3fb6);
		nv_wr32(dev, 0x400838, 0x2f8684);
		nv_wr32(dev, 0x40083c, 0x115f3f);
		nv_wr32(dev, 0x004006b0, 0x40000020);
	} else
		nv_wr32(dev, NV10_PGRAPH_DEBUG_4, 0x00000000);

	/* Turn all the tiling regions off. */
	for (i = 0; i < NV10_PFB_TILE__SIZE; i++)
		nv10_graph_set_tile_region(dev, i);

	nv_wr32(dev, NV10_PGRAPH_CTX_SWITCH(0), 0x00000000);
	nv_wr32(dev, NV10_PGRAPH_CTX_SWITCH(1), 0x00000000);
	nv_wr32(dev, NV10_PGRAPH_CTX_SWITCH(2), 0x00000000);
	nv_wr32(dev, NV10_PGRAPH_CTX_SWITCH(3), 0x00000000);
	nv_wr32(dev, NV10_PGRAPH_CTX_SWITCH(4), 0x00000000);
	nv_wr32(dev, NV10_PGRAPH_STATE, 0xFFFFFFFF);

	/* No channel owns the engine yet: channel id 31 = "none". */
	tmp  = nv_rd32(dev, NV10_PGRAPH_CTX_USER) & 0x00ffffff;
	tmp |= 31 << 24;
	nv_wr32(dev, NV10_PGRAPH_CTX_USER, tmp);
	nv_wr32(dev, NV10_PGRAPH_CTX_CONTROL, 0x10000100);
	nv_wr32(dev, NV10_PGRAPH_FFINTFC_ST2, 0x08000000);

	return 0;
}
956
/*
 * Quiesce PGRAPH: stall its fifo, wait for the engine to go idle,
 * save off any loaded context and mask all interrupts.
 *
 * Returns 0 on success; -EBUSY (with the fifo re-enabled) if the
 * engine will not idle and this is a suspend, so suspend can abort.
 */
static int
nv10_graph_fini(struct drm_device *dev, int engine, bool suspend)
{
	nv_mask(dev, NV04_PGRAPH_FIFO, 0x00000001, 0x00000000);
	if (!nv_wait(dev, NV04_PGRAPH_STATUS, ~0, 0) && suspend) {
		nv_mask(dev, NV04_PGRAPH_FIFO, 0x00000001, 0x00000001);
		return -EBUSY;
	}
	nv10_graph_unload_context(dev);
	nv_wr32(dev, NV03_PGRAPH_INTR_EN, 0x00000000);
	return 0;
}
969
/*
 * Software implementation of the NV17 LMA_WINDOW methods
 * (0x1638-0x1644).  The first three methods only buffer their data in
 * ctx->lma_window[]; the final one (0x1644) commits all four words to
 * the pipe at offset 0x6790, bracketing the write with the same
 * save/prime/restore dance as nv10_graph_load_pipe so surrounding
 * pipe state is preserved.
 *
 * Always returns 0.
 */
static int
nv17_graph_mthd_lma_window(struct nouveau_channel *chan,
			   u32 class, u32 mthd, u32 data)
{
	struct graph_state *ctx = chan->engctx[NVOBJ_ENGINE_GR];
	struct drm_device *dev = chan->dev;
	struct pipe_state *pipe = &ctx->pipe_state;
	uint32_t pipe_0x0040[1], pipe_0x64c0[8], pipe_0x6a80[3], pipe_0x6ab0[3];
	uint32_t xfmode0, xfmode1;
	int i;

	/* Buffer the parameter; methods are 4 bytes apart from 0x1638. */
	ctx->lma_window[(mthd - 0x1638) / 4] = data;

	/* Only the last method of the group triggers the commit. */
	if (mthd != 0x1644)
		return 0;

	nouveau_wait_for_idle(dev);

	PIPE_SAVE(dev, pipe_0x0040, 0x0040);
	PIPE_SAVE(dev, pipe->pipe_0x0200, 0x0200);

	/* Commit the four buffered window words. */
	PIPE_RESTORE(dev, ctx->lma_window, 0x6790);

	nouveau_wait_for_idle(dev);

	/* Park XFMODE while re-priming the pipe (cf. load_pipe). */
	xfmode0 = nv_rd32(dev, NV10_PGRAPH_XFMODE0);
	xfmode1 = nv_rd32(dev, NV10_PGRAPH_XFMODE1);

	PIPE_SAVE(dev, pipe->pipe_0x4400, 0x4400);
	PIPE_SAVE(dev, pipe_0x64c0, 0x64c0);
	PIPE_SAVE(dev, pipe_0x6ab0, 0x6ab0);
	PIPE_SAVE(dev, pipe_0x6a80, 0x6a80);

	nouveau_wait_for_idle(dev);

	nv_wr32(dev, NV10_PGRAPH_XFMODE0, 0x10000000);
	nv_wr32(dev, NV10_PGRAPH_XFMODE1, 0x00000000);
	nv_wr32(dev, NV10_PGRAPH_PIPE_ADDRESS, 0x000064c0);
	for (i = 0; i < 4; i++)
		nv_wr32(dev, NV10_PGRAPH_PIPE_DATA, 0x3f800000);
	for (i = 0; i < 4; i++)
		nv_wr32(dev, NV10_PGRAPH_PIPE_DATA, 0x00000000);

	nv_wr32(dev, NV10_PGRAPH_PIPE_ADDRESS, 0x00006ab0);
	for (i = 0; i < 3; i++)
		nv_wr32(dev, NV10_PGRAPH_PIPE_DATA, 0x3f800000);

	nv_wr32(dev, NV10_PGRAPH_PIPE_ADDRESS, 0x00006a80);
	for (i = 0; i < 3; i++)
		nv_wr32(dev, NV10_PGRAPH_PIPE_DATA, 0x00000000);

	nv_wr32(dev, NV10_PGRAPH_PIPE_ADDRESS, 0x00000040);
	nv_wr32(dev, NV10_PGRAPH_PIPE_DATA, 0x00000008);

	PIPE_RESTORE(dev, pipe->pipe_0x0200, 0x0200);

	nouveau_wait_for_idle(dev);

	PIPE_RESTORE(dev, pipe_0x0040, 0x0040);

	nv_wr32(dev, NV10_PGRAPH_XFMODE0, xfmode0);
	nv_wr32(dev, NV10_PGRAPH_XFMODE1, xfmode1);

	PIPE_RESTORE(dev, pipe_0x64c0, 0x64c0);
	PIPE_RESTORE(dev, pipe_0x6ab0, 0x6ab0);
	PIPE_RESTORE(dev, pipe_0x6a80, 0x6a80);
	PIPE_RESTORE(dev, pipe->pipe_0x4400, 0x4400);

	nv_wr32(dev, NV10_PGRAPH_PIPE_ADDRESS, 0x000000c0);
	nv_wr32(dev, NV10_PGRAPH_PIPE_DATA, 0x00000000);

	nouveau_wait_for_idle(dev);

	return 0;
}
1045
1046 static int
1047 nv17_graph_mthd_lma_enable(struct nouveau_channel *chan,
1048 u32 class, u32 mthd, u32 data)
1049 {
1050 struct drm_device *dev = chan->dev;
1051
1052 nouveau_wait_for_idle(dev);
1053
1054 nv_wr32(dev, NV10_PGRAPH_DEBUG_4,
1055 nv_rd32(dev, NV10_PGRAPH_DEBUG_4) | 0x1 << 8);
1056 nv_wr32(dev, 0x004006b0,
1057 nv_rd32(dev, 0x004006b0) | 0x8 << 24);
1058
1059 return 0;
1060 }
1061
/* Names for the PGRAPH_INTR bits printed by the interrupt handler. */
struct nouveau_bitfield nv10_graph_intr[] = {
	{ NV_PGRAPH_INTR_NOTIFY, "NOTIFY" },
	{ NV_PGRAPH_INTR_ERROR,  "ERROR"  },
	{}
};
1067
/* Names for the PGRAPH_NSTATUS bits printed by the interrupt handler. */
struct nouveau_bitfield nv10_graph_nstatus[] = {
	{ NV10_PGRAPH_NSTATUS_STATE_IN_USE,       "STATE_IN_USE" },
	{ NV10_PGRAPH_NSTATUS_INVALID_STATE,      "INVALID_STATE" },
	{ NV10_PGRAPH_NSTATUS_BAD_ARGUMENT,       "BAD_ARGUMENT" },
	{ NV10_PGRAPH_NSTATUS_PROTECTION_FAULT,   "PROTECTION_FAULT" },
	{}
};
1075
/* PGRAPH interrupt service routine for NV10-family chips.  Loops until
 * the interrupt status register reads back zero, handling each pending
 * condition in turn.  Registered for IRQ line 12 by nv10_graph_create().
 *
 * NOTE(review): the read order of the trap registers below follows the
 * hardware's latched-trap model — snapshot nsource/nstatus/addr/data
 * before acknowledging, since the ack releases the latch. */
static void
nv10_graph_isr(struct drm_device *dev)
{
	u32 stat;

	while ((stat = nv_rd32(dev, NV03_PGRAPH_INTR))) {
		u32 nsource = nv_rd32(dev, NV03_PGRAPH_NSOURCE);
		u32 nstatus = nv_rd32(dev, NV03_PGRAPH_NSTATUS);
		u32 addr = nv_rd32(dev, NV04_PGRAPH_TRAPPED_ADDR);
		/* TRAPPED_ADDR packs channel id, subchannel and method
		 * offset into one register; unpack the fields. */
		u32 chid = (addr & 0x01f00000) >> 20;
		u32 subc = (addr & 0x00070000) >> 16;
		u32 mthd = (addr & 0x00001ffc);
		u32 data = nv_rd32(dev, NV04_PGRAPH_TRAPPED_DATA);
		u32 class = nv_rd32(dev, 0x400160 + subc * 4) & 0xfff;
		u32 show = stat;	/* bits left to report to the log */

		if (stat & NV_PGRAPH_INTR_ERROR) {
			if (nsource & NV03_PGRAPH_NSOURCE_ILLEGAL_MTHD) {
				/* Methods the hardware rejects may be
				 * implemented in software (e.g. the LMA
				 * mthds registered in nv10_graph_create);
				 * if the call succeeds, suppress the
				 * error from the log. */
				if (!nouveau_gpuobj_mthd_call2(dev, chid, class, mthd, data))
					show &= ~NV_PGRAPH_INTR_ERROR;
			}
		}

		if (stat & NV_PGRAPH_INTR_CONTEXT_SWITCH) {
			/* Ack the context-switch bit individually before
			 * performing the switch, and drop it from both
			 * the pending mask and the log mask. */
			nv_wr32(dev, NV03_PGRAPH_INTR, NV_PGRAPH_INTR_CONTEXT_SWITCH);
			stat &= ~NV_PGRAPH_INTR_CONTEXT_SWITCH;
			show &= ~NV_PGRAPH_INTR_CONTEXT_SWITCH;
			nv10_graph_context_switch(dev);
		}

		/* Ack all remaining bits and re-enable PGRAPH fifo access. */
		nv_wr32(dev, NV03_PGRAPH_INTR, stat);
		nv_wr32(dev, NV04_PGRAPH_FIFO, 0x00000001);

		/* Rate-limited diagnostic dump of anything not handled. */
		if (show && nouveau_ratelimit()) {
			NV_INFO(dev, "PGRAPH -");
			nouveau_bitfield_print(nv10_graph_intr, show);
			printk(" nsource:");
			nouveau_bitfield_print(nv04_graph_nsource, nsource);
			printk(" nstatus:");
			nouveau_bitfield_print(nv10_graph_nstatus, nstatus);
			printk("\n");
			NV_INFO(dev, "PGRAPH - ch %d/%d class 0x%04x "
				     "mthd 0x%04x data 0x%08x\n",
				chid, subc, class, mthd, data);
		}
	}
}
1123
/* Tear down the PGRAPH engine object created by nv10_graph_create().
 * Unhooks the interrupt handler first so the ISR can never run against
 * a freed engine structure. */
static void
nv10_graph_destroy(struct drm_device *dev, int engine)
{
	struct nv10_graph_engine *gr = nv_engine(dev, engine);

	nouveau_irq_unregister(dev, 12);
	kfree(gr);
}
1132
/* Allocate and register the NV10-family PGRAPH engine: hooks up the
 * engine vtable, the IRQ handler, and the set of graphics object
 * classes this generation supports.  Returns 0 on success or -ENOMEM.
 * Caller tears down via the ->destroy hook (nv10_graph_destroy). */
int
nv10_graph_create(struct drm_device *dev)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nv10_graph_engine *pgraph;

	pgraph = kzalloc(sizeof(*pgraph), GFP_KERNEL);
	if (!pgraph)
		return -ENOMEM;

	/* Engine vtable: context/object management is shared with the
	 * nv04/nv10 helpers defined elsewhere in this driver. */
	pgraph->base.destroy = nv10_graph_destroy;
	pgraph->base.init = nv10_graph_init;
	pgraph->base.fini = nv10_graph_fini;
	pgraph->base.context_new = nv10_graph_context_new;
	pgraph->base.context_del = nv10_graph_context_del;
	pgraph->base.object_new = nv04_graph_object_new;
	pgraph->base.set_tile_region = nv10_graph_set_tile_region;

	NVOBJ_ENGINE_ADD(dev, GR, &pgraph->base);
	/* PGRAPH interrupts arrive on line 12. */
	nouveau_irq_register(dev, 12, nv10_graph_isr);

	/* 2D / memory-transfer object classes common to the family. */
	NVOBJ_CLASS(dev, 0x0030, GR); /* null */
	NVOBJ_CLASS(dev, 0x0039, GR); /* m2mf */
	NVOBJ_CLASS(dev, 0x004a, GR); /* gdirect */
	NVOBJ_CLASS(dev, 0x005f, GR); /* imageblit */
	NVOBJ_CLASS(dev, 0x009f, GR); /* imageblit (nv12) */
	NVOBJ_CLASS(dev, 0x008a, GR); /* ifc */
	NVOBJ_CLASS(dev, 0x0089, GR); /* sifm */
	NVOBJ_CLASS(dev, 0x0062, GR); /* surf2d */
	NVOBJ_CLASS(dev, 0x0043, GR); /* rop */
	NVOBJ_CLASS(dev, 0x0012, GR); /* beta1 */
	NVOBJ_CLASS(dev, 0x0072, GR); /* beta4 */
	NVOBJ_CLASS(dev, 0x0019, GR); /* cliprect */
	NVOBJ_CLASS(dev, 0x0044, GR); /* pattern */
	NVOBJ_CLASS(dev, 0x0052, GR); /* swzsurf */
	NVOBJ_CLASS(dev, 0x0093, GR); /* surf3d */
	NVOBJ_CLASS(dev, 0x0094, GR); /* tex_tri */
	NVOBJ_CLASS(dev, 0x0095, GR); /* multitex_tri */

	/* celcius: the 3D class number depends on the exact chipset
	 * revision; NV17+ (except NV1A) additionally gets software
	 * implementations of the LMA window/enable methods, which the
	 * hardware class does not handle itself. */
	if (dev_priv->chipset <= 0x10) {
		NVOBJ_CLASS(dev, 0x0056, GR);
	} else
	if (dev_priv->chipset < 0x17 || dev_priv->chipset == 0x1a) {
		NVOBJ_CLASS(dev, 0x0096, GR);
	} else {
		NVOBJ_CLASS(dev, 0x0099, GR);
		NVOBJ_MTHD (dev, 0x0099, 0x1638, nv17_graph_mthd_lma_window);
		NVOBJ_MTHD (dev, 0x0099, 0x163c, nv17_graph_mthd_lma_window);
		NVOBJ_MTHD (dev, 0x0099, 0x1640, nv17_graph_mthd_lma_window);
		NVOBJ_MTHD (dev, 0x0099, 0x1644, nv17_graph_mthd_lma_window);
		NVOBJ_MTHD (dev, 0x0099, 0x1658, nv17_graph_mthd_lma_enable);
	}

	return 0;
}