drm/nvd0/disp: support creation of fb dma objects on older chipsets
[firefly-linux-kernel-4.4.55.git] drivers/gpu/drm/nouveau/nvd0_display.c
1 /*
2  * Copyright 2011 Red Hat Inc.
3  *
4  * Permission is hereby granted, free of charge, to any person obtaining a
5  * copy of this software and associated documentation files (the "Software"),
6  * to deal in the Software without restriction, including without limitation
7  * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8  * and/or sell copies of the Software, and to permit persons to whom the
9  * Software is furnished to do so, subject to the following conditions:
10  *
11  * The above copyright notice and this permission notice shall be included in
12  * all copies or substantial portions of the Software.
13  *
14  * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15  * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16  * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
17  * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18  * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19  * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20  * OTHER DEALINGS IN THE SOFTWARE.
21  *
22  * Authors: Ben Skeggs
23  */
24
25 #include <linux/dma-mapping.h>
26
27 #include <drm/drmP.h>
28 #include <drm/drm_crtc_helper.h>
29
30 #include "nouveau_drm.h"
31 #include "nouveau_dma.h"
32 #include "nouveau_gem.h"
33 #include "nouveau_connector.h"
34 #include "nouveau_encoder.h"
35 #include "nouveau_crtc.h"
36 #include "nouveau_fence.h"
37 #include "nv50_display.h"
38
39 #include <core/client.h>
40 #include <core/gpuobj.h>
41 #include <core/class.h>
42
43 #include <subdev/timer.h>
44 #include <subdev/bar.h>
45 #include <subdev/fb.h>
46
47 #define EVO_DMA_NR 9
48
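/* EVO channel indices used by this file: the core (master) channel,
 * then per-head base/flip, overlay, overlay-immediate and cursor
 * channels.  EVO_DMA_NR appears to count only the DMA-mode channels
 * (master plus the per-head flip and overlay channels).
 */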
49 #define EVO_MASTER  (0x00)
50 #define EVO_FLIP(c) (0x01 + (c))
51 #define EVO_OVLY(c) (0x05 + (c))
52 #define EVO_OIMM(c) (0x09 + (c))
53 #define EVO_CURS(c) (0x0d + (c))
54
55 /* offsets in shared sync bo of various structures */
56 #define EVO_SYNC(c, o) ((c) * 0x0100 + (o))
57 #define EVO_MAST_NTFY     EVO_SYNC(  0, 0x00)
58 #define EVO_FLIP_SEM0(c)  EVO_SYNC((c), 0x00)
59 #define EVO_FLIP_SEM1(c)  EVO_SYNC((c), 0x10)
60
61 #define EVO_CORE_HANDLE      (0xd1500000)
62 #define EVO_CHAN_HANDLE(t,i) (0xd15c0000 | (((t) & 0x00ff) << 8) | (i))
63 #define EVO_CHAN_OCLASS(t,c) ((nv_hclass(c) & 0xff00) | ((t) & 0x00ff))
64 #define EVO_PUSH_HANDLE(t,i) (0xd15b0000 | (i) |                               \
65                               (((NV50_DISP_##t##_CLASS) & 0x00ff) << 8))
66
67 /******************************************************************************
68  * EVO channel
69  *****************************************************************************/
70
71 struct nvd0_chan {
72         struct nouveau_object *user;
73         u32 handle;
74 };
75
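/* allocate an EVO channel object of the requested class from the core
 * display engine, remembering its handle for later destruction.
 */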
76 static int
77 nvd0_chan_create(struct nouveau_object *core, u32 bclass, u8 head,
78                  void *data, u32 size, struct nvd0_chan *chan)
79 {
80         struct nouveau_object *client = nv_pclass(core, NV_CLIENT_CLASS);
81         const u32 oclass = EVO_CHAN_OCLASS(bclass, core);
82         const u32 handle = EVO_CHAN_HANDLE(bclass, head);
83         int ret;
84
85         ret = nouveau_object_new(client, EVO_CORE_HANDLE, handle,
86                                  oclass, data, size, &chan->user);
87         if (ret)
88                 return ret;
89
90         chan->handle = handle;
91         return 0;
92 }
93
94 static void
95 nvd0_chan_destroy(struct nouveau_object *core, struct nvd0_chan *chan)
96 {
97         struct nouveau_object *client = nv_pclass(core, NV_CLIENT_CLASS);
98         if (chan->handle)
99                 nouveau_object_del(client, EVO_CORE_HANDLE, chan->handle);
100 }
101
102 /******************************************************************************
103  * PIO EVO channel
104  *****************************************************************************/
105
106 struct nvd0_pioc {
107         struct nvd0_chan base;
108 };
109
110 static void
111 nvd0_pioc_destroy(struct nouveau_object *core, struct nvd0_pioc *pioc)
112 {
113         nvd0_chan_destroy(core, &pioc->base);
114 }
115
116 static int
117 nvd0_pioc_create(struct nouveau_object *core, u32 bclass, u8 head,
118                  void *data, u32 size, struct nvd0_pioc *pioc)
119 {
120         return nvd0_chan_create(core, bclass, head, data, size, &pioc->base);
121 }
122
123 /******************************************************************************
124  * DMA EVO channel
125  *****************************************************************************/
126
127 struct nvd0_dmac {
128         struct nvd0_chan base;
129         dma_addr_t handle;
130         u32 *ptr;
131 };
132
133 static void
134 nvd0_dmac_destroy(struct nouveau_object *core, struct nvd0_dmac *dmac)
135 {
136         if (dmac->ptr) {
137                 struct pci_dev *pdev = nv_device(core)->pdev;
138                 pci_free_consistent(pdev, PAGE_SIZE, dmac->ptr, dmac->handle);
139         }
140
141         nvd0_chan_destroy(core, &dmac->base);
142 }
143
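/* Framebuffer DMA objects.  Each DMA channel gets VRAM context DMAs
 * (NvEvoVRAM_LP, and NvEvoFB16/NvEvoFB32 where applicable) whose conf0
 * values carry the chipset-specific page/compression settings, so
 * framebuffers can be displayed on NV50-, NVC0- and NVD0-class
 * hardware alike.
 */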
144 static int
145 nv50_dmac_create_fbdma(struct nouveau_object *core, u32 parent)
146 {
147         struct nouveau_fb *pfb = nouveau_fb(core);
148         struct nouveau_object *client = nv_pclass(core, NV_CLIENT_CLASS);
149         struct nouveau_object *object;
150         int ret = nouveau_object_new(client, parent, NvEvoVRAM_LP,
151                                      NV_DMA_IN_MEMORY_CLASS,
152                                      &(struct nv_dma_class) {
153                                         .flags = NV_DMA_TARGET_VRAM |
154                                                  NV_DMA_ACCESS_RDWR,
155                                         .start = 0,
156                                         .limit = pfb->ram.size - 1,
157                                         .conf0 = NV50_DMA_CONF0_ENABLE |
158                                                  NV50_DMA_CONF0_PART_256,
159                                      }, sizeof(struct nv_dma_class), &object);
160         if (ret)
161                 return ret;
162
163         ret = nouveau_object_new(client, parent, NvEvoFB16,
164                                  NV_DMA_IN_MEMORY_CLASS,
165                                  &(struct nv_dma_class) {
166                                         .flags = NV_DMA_TARGET_VRAM |
167                                                  NV_DMA_ACCESS_RDWR,
168                                         .start = 0,
169                                         .limit = pfb->ram.size - 1,
170                                         .conf0 = NV50_DMA_CONF0_ENABLE | 0x70 |
171                                                  NV50_DMA_CONF0_PART_256,
172                                  }, sizeof(struct nv_dma_class), &object);
173         if (ret)
174                 return ret;
175
176         ret = nouveau_object_new(client, parent, NvEvoFB32,
177                                  NV_DMA_IN_MEMORY_CLASS,
178                                  &(struct nv_dma_class) {
179                                         .flags = NV_DMA_TARGET_VRAM |
180                                                  NV_DMA_ACCESS_RDWR,
181                                         .start = 0,
182                                         .limit = pfb->ram.size - 1,
183                                         .conf0 = NV50_DMA_CONF0_ENABLE | 0x7a |
184                                                  NV50_DMA_CONF0_PART_256,
185                                  }, sizeof(struct nv_dma_class), &object);
186         return ret;
187 }
188
189 static int
190 nvc0_dmac_create_fbdma(struct nouveau_object *core, u32 parent)
191 {
192         struct nouveau_fb *pfb = nouveau_fb(core);
193         struct nouveau_object *client = nv_pclass(core, NV_CLIENT_CLASS);
194         struct nouveau_object *object;
195         int ret = nouveau_object_new(client, parent, NvEvoVRAM_LP,
196                                      NV_DMA_IN_MEMORY_CLASS,
197                                      &(struct nv_dma_class) {
198                                         .flags = NV_DMA_TARGET_VRAM |
199                                                  NV_DMA_ACCESS_RDWR,
200                                         .start = 0,
201                                         .limit = pfb->ram.size - 1,
202                                         .conf0 = NVC0_DMA_CONF0_ENABLE,
203                                      }, sizeof(struct nv_dma_class), &object);
204         if (ret)
205                 return ret;
206
207         ret = nouveau_object_new(client, parent, NvEvoFB16,
208                                  NV_DMA_IN_MEMORY_CLASS,
209                                  &(struct nv_dma_class) {
210                                         .flags = NV_DMA_TARGET_VRAM |
211                                                  NV_DMA_ACCESS_RDWR,
212                                         .start = 0,
213                                         .limit = pfb->ram.size - 1,
214                                         .conf0 = NVC0_DMA_CONF0_ENABLE | 0xfe,
215                                  }, sizeof(struct nv_dma_class), &object);
216         if (ret)
217                 return ret;
218
219         ret = nouveau_object_new(client, parent, NvEvoFB32,
220                                  NV_DMA_IN_MEMORY_CLASS,
221                                  &(struct nv_dma_class) {
222                                         .flags = NV_DMA_TARGET_VRAM |
223                                                  NV_DMA_ACCESS_RDWR,
224                                         .start = 0,
225                                         .limit = pfb->ram.size - 1,
226                                         .conf0 = NVC0_DMA_CONF0_ENABLE | 0xfe,
227                                  }, sizeof(struct nv_dma_class), &object);
228         return ret;
229 }
230
231 static int
232 nvd0_dmac_create_fbdma(struct nouveau_object *core, u32 parent)
233 {
234         struct nouveau_fb *pfb = nouveau_fb(core);
235         struct nouveau_object *client = nv_pclass(core, NV_CLIENT_CLASS);
236         struct nouveau_object *object;
237         int ret = nouveau_object_new(client, parent, NvEvoVRAM_LP,
238                                      NV_DMA_IN_MEMORY_CLASS,
239                                      &(struct nv_dma_class) {
240                                         .flags = NV_DMA_TARGET_VRAM |
241                                                  NV_DMA_ACCESS_RDWR,
242                                         .start = 0,
243                                         .limit = pfb->ram.size - 1,
244                                         .conf0 = NVD0_DMA_CONF0_ENABLE |
245                                                  NVD0_DMA_CONF0_PAGE_LP,
246                                      }, sizeof(struct nv_dma_class), &object);
247         if (ret)
248                 return ret;
249
250         ret = nouveau_object_new(client, parent, NvEvoFB32,
251                                  NV_DMA_IN_MEMORY_CLASS,
252                                  &(struct nv_dma_class) {
253                                         .flags = NV_DMA_TARGET_VRAM |
254                                                  NV_DMA_ACCESS_RDWR,
255                                         .start = 0,
256                                         .limit = pfb->ram.size - 1,
257                                         .conf0 = NVD0_DMA_CONF0_ENABLE | 0xfe |
258                                                  NVD0_DMA_CONF0_PAGE_LP,
259                                  }, sizeof(struct nv_dma_class), &object);
260         return ret;
261 }
262
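/* create a DMA-mode EVO channel: allocate a page of system memory for
 * the push buffer and expose it through a DMA object, create the
 * channel itself, then add the sync-buffer and VRAM context DMAs plus
 * the per-chipset framebuffer DMAs.
 */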
263 static int
264 nvd0_dmac_create(struct nouveau_object *core, u32 bclass, u8 head,
265                  void *data, u32 size, u64 syncbuf,
266                  struct nvd0_dmac *dmac)
267 {
268         struct nouveau_fb *pfb = nouveau_fb(core);
269         struct nouveau_object *client = nv_pclass(core, NV_CLIENT_CLASS);
270         struct nouveau_object *object;
271         u32 pushbuf = *(u32 *)data;
272         int ret;
273
274         dmac->ptr = pci_alloc_consistent(nv_device(core)->pdev, PAGE_SIZE,
275                                         &dmac->handle);
276         if (!dmac->ptr)
277                 return -ENOMEM;
278
279         ret = nouveau_object_new(client, NVDRM_DEVICE, pushbuf,
280                                  NV_DMA_FROM_MEMORY_CLASS,
281                                  &(struct nv_dma_class) {
282                                         .flags = NV_DMA_TARGET_PCI_US |
283                                                  NV_DMA_ACCESS_RD,
284                                         .start = dmac->handle + 0x0000,
285                                         .limit = dmac->handle + 0x0fff,
286                                  }, sizeof(struct nv_dma_class), &object);
287         if (ret)
288                 return ret;
289
290         ret = nvd0_chan_create(core, bclass, head, data, size, &dmac->base);
291         if (ret)
292                 return ret;
293
294         ret = nouveau_object_new(client, dmac->base.handle, NvEvoSync,
295                                  NV_DMA_IN_MEMORY_CLASS,
296                                  &(struct nv_dma_class) {
297                                         .flags = NV_DMA_TARGET_VRAM |
298                                                  NV_DMA_ACCESS_RDWR,
299                                         .start = syncbuf + 0x0000,
300                                         .limit = syncbuf + 0x0fff,
301                                  }, sizeof(struct nv_dma_class), &object);
302         if (ret)
303                 return ret;
304
305         ret = nouveau_object_new(client, dmac->base.handle, NvEvoVRAM,
306                                  NV_DMA_IN_MEMORY_CLASS,
307                                  &(struct nv_dma_class) {
308                                         .flags = NV_DMA_TARGET_VRAM |
309                                                  NV_DMA_ACCESS_RDWR,
310                                         .start = 0,
311                                         .limit = pfb->ram.size - 1,
312                                  }, sizeof(struct nv_dma_class), &object);
313         if (ret)
314                 return ret;
315
316         if (nv_device(core)->card_type < NV_C0)
317                 ret = nv50_dmac_create_fbdma(core, dmac->base.handle);
318         else
319         if (nv_device(core)->card_type < NV_D0)
320                 ret = nvc0_dmac_create_fbdma(core, dmac->base.handle);
321         else
322                 ret = nvd0_dmac_create_fbdma(core, dmac->base.handle);
323         return ret;
324 }
325
326 struct nvd0_mast {
327         struct nvd0_dmac base;
328 };
329
330 struct nvd0_curs {
331         struct nvd0_pioc base;
332 };
333
334 struct nvd0_sync {
335         struct nvd0_dmac base;
336         struct {
337                 u32 offset;
338                 u16 value;
339         } sem;
340 };
341
342 struct nvd0_ovly {
343         struct nvd0_dmac base;
344 };
345
346 struct nvd0_oimm {
347         struct nvd0_pioc base;
348 };
349
350 struct nvd0_head {
351         struct nouveau_crtc base;
352         struct nvd0_curs curs;
353         struct nvd0_sync sync;
354         struct nvd0_ovly ovly;
355         struct nvd0_oimm oimm;
356 };
357
358 #define nvd0_head(c) ((struct nvd0_head *)nouveau_crtc(c))
359 #define nvd0_curs(c) (&nvd0_head(c)->curs)
360 #define nvd0_sync(c) (&nvd0_head(c)->sync)
361 #define nvd0_ovly(c) (&nvd0_head(c)->ovly)
362 #define nvd0_oimm(c) (&nvd0_head(c)->oimm)
363 #define nvd0_chan(c) (&(c)->base.base)
364 #define nvd0_vers(c) nv_mclass(nvd0_chan(c)->user)
365
366 struct nvd0_disp {
367         struct nouveau_object *core;
368         struct nvd0_mast mast;
369
370         u32 modeset;
371
372         struct nouveau_bo *sync;
373 };
374
375 static struct nvd0_disp *
376 nvd0_disp(struct drm_device *dev)
377 {
378         return nouveau_display(dev)->priv;
379 }
380
381 #define nvd0_mast(d) (&nvd0_disp(d)->mast)
382
383 static struct drm_crtc *
384 nvd0_display_crtc_get(struct drm_encoder *encoder)
385 {
386         return nouveau_encoder(encoder)->crtc;
387 }
388
389 /******************************************************************************
390  * EVO channel helpers
391  *****************************************************************************/
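/* ensure at least 'nr' words are free in the channel's push buffer,
 * wrapping back to the start of the buffer when close to the end;
 * returns NULL if the channel stops responding.
 */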
392 static u32 *
393 evo_wait(void *evoc, int nr)
394 {
395         struct nvd0_dmac *dmac = evoc;
396         u32 put = nv_ro32(dmac->base.user, 0x0000) / 4;
397
398         if (put + nr >= (PAGE_SIZE / 4) - 8) {
399                 dmac->ptr[put] = 0x20000000;
400
401                 nv_wo32(dmac->base.user, 0x0000, 0x00000000);
402                 if (!nv_wait(dmac->base.user, 0x0004, ~0, 0x00000000)) {
403                         NV_ERROR(dmac->base.user, "channel stalled\n");
404                         return NULL;
405                 }
406
407                 put = 0;
408         }
409
410         return dmac->ptr + put;
411 }
412
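/* publish the methods written since evo_wait() by updating the
 * channel's PUT offset.
 */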
413 static void
414 evo_kick(u32 *push, void *evoc)
415 {
416         struct nvd0_dmac *dmac = evoc;
417         nv_wo32(dmac->base.user, 0x0000, (push - dmac->ptr) << 2);
418 }
419
420 #define evo_mthd(p,m,s) *((p)++) = (((s) << 18) | (m))
421 #define evo_data(p,d)   *((p)++) = (d)
422
423 static bool
424 evo_sync_wait(void *data)
425 {
426         return nouveau_bo_rd32(data, EVO_MAST_NTFY) != 0x00000000;
427 }
428
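/* flush the core channel: request a notifier write into the shared
 * sync buffer and wait for it to appear, so that previously submitted
 * methods are known to have been processed.
 */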
429 static int
430 evo_sync(struct drm_device *dev)
431 {
432         struct nouveau_device *device = nouveau_dev(dev);
433         struct nvd0_disp *disp = nvd0_disp(dev);
434         struct nvd0_mast *mast = nvd0_mast(dev);
435         u32 *push = evo_wait(mast, 8);
436         if (push) {
437                 nouveau_bo_wr32(disp->sync, EVO_MAST_NTFY, 0x00000000);
438                 evo_mthd(push, 0x0084, 1);
439                 evo_data(push, 0x80000000 | EVO_MAST_NTFY);
440                 evo_mthd(push, 0x0080, 2);
441                 evo_data(push, 0x00000000);
442                 evo_data(push, 0x00000000);
443                 evo_kick(push, mast);
444                 if (nv_wait_cb(device, evo_sync_wait, disp->sync))
445                         return 0;
446         }
447
448         return -EBUSY;
449 }
450
451 /******************************************************************************
452  * Page flipping channel
453  *****************************************************************************/
454 struct nouveau_bo *
455 nvd0_display_crtc_sema(struct drm_device *dev, int crtc)
456 {
457         return nvd0_disp(dev)->sync;
458 }
459
460 void
461 nvd0_display_flip_stop(struct drm_crtc *crtc)
462 {
463         struct nvd0_sync *sync = nvd0_sync(crtc);
464         u32 *push;
465
466         push = evo_wait(sync, 8);
467         if (push) {
468                 evo_mthd(push, 0x0084, 1);
469                 evo_data(push, 0x00000000);
470                 evo_mthd(push, 0x0094, 1);
471                 evo_data(push, 0x00000000);
472                 evo_mthd(push, 0x00c0, 1);
473                 evo_data(push, 0x00000000);
474                 evo_mthd(push, 0x0080, 1);
475                 evo_data(push, 0x00000000);
476                 evo_kick(push, sync);
477         }
478 }
479
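/* queue a page flip on this CRTC's sync (base) channel.  When a
 * rendering channel is given, semaphores in the shared sync buffer
 * make the display wait until rendering has finished before scanning
 * out the new framebuffer; otherwise the semaphore is released
 * immediately from the CPU.
 */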
480 int
481 nvd0_display_flip_next(struct drm_crtc *crtc, struct drm_framebuffer *fb,
482                        struct nouveau_channel *chan, u32 swap_interval)
483 {
484         struct nouveau_framebuffer *nv_fb = nouveau_framebuffer(fb);
485         struct nvd0_disp *disp = nvd0_disp(crtc->dev);
486         struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
487         struct nvd0_sync *sync = nvd0_sync(crtc);
488         u32 *push;
489         int ret;
490
491         swap_interval <<= 4;
492         if (swap_interval == 0)
493                 swap_interval |= 0x100;
494
495         push = evo_wait(sync, 128);
496         if (unlikely(push == NULL))
497                 return -EBUSY;
498
499         /* synchronise with the rendering channel, if necessary */
500         if (likely(chan)) {
501                 ret = RING_SPACE(chan, 10);
502                 if (ret)
503                         return ret;
504
505                 if (nv_mclass(chan->object) < NVC0_CHANNEL_IND_CLASS) {
506                         BEGIN_NV04(chan, 0, NV11_SUBCHAN_DMA_SEMAPHORE, 2);
507                         OUT_RING  (chan, NvEvoSema0 + nv_crtc->index);
508                         OUT_RING  (chan, sync->sem.offset);
509                         BEGIN_NV04(chan, 0, NV11_SUBCHAN_SEMAPHORE_RELEASE, 1);
510                         OUT_RING  (chan, 0xf00d0000 | sync->sem.value);
511                         BEGIN_NV04(chan, 0, NV11_SUBCHAN_SEMAPHORE_OFFSET, 2);
512                         OUT_RING  (chan, sync->sem.offset ^ 0x10);
513                         OUT_RING  (chan, 0x74b1e000);
514                         BEGIN_NV04(chan, 0, NV11_SUBCHAN_DMA_SEMAPHORE, 1);
515                         if (nv_mclass(chan->object) < NV84_CHANNEL_DMA_CLASS)
516                                 OUT_RING  (chan, NvSema);
517                         else
518                                 OUT_RING  (chan, chan->vram);
519                 } else {
520                         u64 offset = nvc0_fence_crtc(chan, nv_crtc->index);
521                         offset += sync->sem.offset;
522
523                         BEGIN_NVC0(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4);
524                         OUT_RING  (chan, upper_32_bits(offset));
525                         OUT_RING  (chan, lower_32_bits(offset));
526                         OUT_RING  (chan, 0xf00d0000 | sync->sem.value);
527                         OUT_RING  (chan, 0x1002);
528                         BEGIN_NVC0(chan, 0, NV84_SUBCHAN_SEMAPHORE_ADDRESS_HIGH, 4);
529                         OUT_RING  (chan, upper_32_bits(offset));
530                         OUT_RING  (chan, lower_32_bits(offset ^ 0x10));
531                         OUT_RING  (chan, 0x74b1e000);
532                         OUT_RING  (chan, 0x1001);
533                 }
534
535                 FIRE_RING (chan);
536         } else {
537                 nouveau_bo_wr32(disp->sync, sync->sem.offset / 4,
538                                 0xf00d0000 | sync->sem.value);
539                 evo_sync(crtc->dev);
540         }
541
542         /* queue the flip */
543         evo_mthd(push, 0x0100, 1);
544         evo_data(push, 0xfffe0000);
545         evo_mthd(push, 0x0084, 1);
546         evo_data(push, swap_interval);
547         if (!(swap_interval & 0x00000100)) {
548                 evo_mthd(push, 0x00e0, 1);
549                 evo_data(push, 0x40000000);
550         }
551         evo_mthd(push, 0x0088, 4);
552         evo_data(push, sync->sem.offset);
553         evo_data(push, 0xf00d0000 | sync->sem.value);
554         evo_data(push, 0x74b1e000);
555         evo_data(push, NvEvoSync);
556         evo_mthd(push, 0x00a0, 2);
557         evo_data(push, 0x00000000);
558         evo_data(push, 0x00000000);
559         evo_mthd(push, 0x00c0, 1);
560         evo_data(push, nv_fb->r_dma);
561         evo_mthd(push, 0x0110, 2);
562         evo_data(push, 0x00000000);
563         evo_data(push, 0x00000000);
564         if (nvd0_vers(sync) < NVD0_DISP_SYNC_CLASS) {
565                 evo_mthd(push, 0x0800, 5);
566                 evo_data(push, nv_fb->nvbo->bo.offset >> 8);
567                 evo_data(push, 0);
568                 evo_data(push, (fb->height << 16) | fb->width);
569                 evo_data(push, nv_fb->r_pitch);
570                 evo_data(push, nv_fb->r_format);
571         } else {
572                 evo_mthd(push, 0x0400, 5);
573                 evo_data(push, nv_fb->nvbo->bo.offset >> 8);
574                 evo_data(push, 0);
575                 evo_data(push, (fb->height << 16) | fb->width);
576                 evo_data(push, nv_fb->r_pitch);
577                 evo_data(push, nv_fb->r_format);
578         }
579         evo_mthd(push, 0x0080, 1);
580         evo_data(push, 0x00000000);
581         evo_kick(push, sync);
582
583         sync->sem.offset ^= 0x10;
584         sync->sem.value++;
585         return 0;
586 }
587
588 /******************************************************************************
589  * CRTC
590  *****************************************************************************/
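/* program the head's dithering mode from the connector properties,
 * picking a default when the mode/depth are set to AUTO, and
 * optionally fire an update.
 */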
591 static int
592 nvd0_crtc_set_dither(struct nouveau_crtc *nv_crtc, bool update)
593 {
594         struct nvd0_mast *mast = nvd0_mast(nv_crtc->base.dev);
595         struct nouveau_connector *nv_connector;
596         struct drm_connector *connector;
597         u32 *push, mode = 0x00;
598
599         nv_connector = nouveau_crtc_connector_get(nv_crtc);
600         connector = &nv_connector->base;
601         if (nv_connector->dithering_mode == DITHERING_MODE_AUTO) {
602                 if (nv_crtc->base.fb->depth > connector->display_info.bpc * 3)
603                         mode = DITHERING_MODE_DYNAMIC2X2;
604         } else {
605                 mode = nv_connector->dithering_mode;
606         }
607
608         if (nv_connector->dithering_depth == DITHERING_DEPTH_AUTO) {
609                 if (connector->display_info.bpc >= 8)
610                         mode |= DITHERING_DEPTH_8BPC;
611         } else {
612                 mode |= nv_connector->dithering_depth;
613         }
614
615         push = evo_wait(mast, 4);
616         if (push) {
617                 if (nvd0_vers(mast) < NVD0_DISP_MAST_CLASS) {
618                         evo_mthd(push, 0x08a0 + (nv_crtc->index * 0x0400), 1);
619                         evo_data(push, mode);
620                 } else
621                 if (nvd0_vers(mast) < NVE0_DISP_MAST_CLASS) {
622                         evo_mthd(push, 0x0490 + (nv_crtc->index * 0x0300), 1);
623                         evo_data(push, mode);
624                 } else {
625                         evo_mthd(push, 0x04a0 + (nv_crtc->index * 0x0300), 1);
626                         evo_data(push, mode);
627                 }
628
629                 if (update) {
630                         evo_mthd(push, 0x0080, 1);
631                         evo_data(push, 0x00000000);
632                 }
633                 evo_kick(push, mast);
634         }
635
636         return 0;
637 }
638
639 static int
640 nvd0_crtc_set_scale(struct nouveau_crtc *nv_crtc, bool update)
641 {
642         struct nvd0_mast *mast = nvd0_mast(nv_crtc->base.dev);
643         struct drm_display_mode *omode, *umode = &nv_crtc->base.mode;
644         struct drm_crtc *crtc = &nv_crtc->base;
645         struct nouveau_connector *nv_connector;
646         int mode = DRM_MODE_SCALE_NONE;
647         u32 oX, oY, *push;
648
649         /* start off at the resolution we programmed the crtc for, this
650          * effectively handles NONE/FULL scaling
651          */
652         nv_connector = nouveau_crtc_connector_get(nv_crtc);
653         if (nv_connector && nv_connector->native_mode)
654                 mode = nv_connector->scaling_mode;
655
656         if (mode != DRM_MODE_SCALE_NONE)
657                 omode = nv_connector->native_mode;
658         else
659                 omode = umode;
660
661         oX = omode->hdisplay;
662         oY = omode->vdisplay;
663         if (omode->flags & DRM_MODE_FLAG_DBLSCAN)
664                 oY *= 2;
665
666         /* add overscan compensation if necessary, will keep the aspect
667          * ratio the same as the backend mode unless overridden by the
668          * user setting both hborder and vborder properties.
669          */
670         if (nv_connector && ( nv_connector->underscan == UNDERSCAN_ON ||
671                              (nv_connector->underscan == UNDERSCAN_AUTO &&
672                               nv_connector->edid &&
673                               drm_detect_hdmi_monitor(nv_connector->edid)))) {
674                 u32 bX = nv_connector->underscan_hborder;
675                 u32 bY = nv_connector->underscan_vborder;
676                 u32 aspect = (oY << 19) / oX;
677
678                 if (bX) {
679                         oX -= (bX * 2);
680                         if (bY) oY -= (bY * 2);
681                         else    oY  = ((oX * aspect) + (aspect / 2)) >> 19;
682                 } else {
683                         oX -= (oX >> 4) + 32;
684                         if (bY) oY -= (bY * 2);
685                         else    oY  = ((oX * aspect) + (aspect / 2)) >> 19;
686                 }
687         }
688
689         /* handle CENTER/ASPECT scaling, taking into account the areas
690          * removed already for overscan compensation
691          */
692         switch (mode) {
693         case DRM_MODE_SCALE_CENTER:
694                 oX = min((u32)umode->hdisplay, oX);
695                 oY = min((u32)umode->vdisplay, oY);
696                 /* fall-through */
697         case DRM_MODE_SCALE_ASPECT:
698                 if (oY < oX) {
699                         u32 aspect = (umode->hdisplay << 19) / umode->vdisplay;
700                         oX = ((oY * aspect) + (aspect / 2)) >> 19;
701                 } else {
702                         u32 aspect = (umode->vdisplay << 19) / umode->hdisplay;
703                         oY = ((oX * aspect) + (aspect / 2)) >> 19;
704                 }
705                 break;
706         default:
707                 break;
708         }
709
710         push = evo_wait(mast, 8);
711         if (push) {
712                 if (nvd0_vers(mast) < NVD0_DISP_MAST_CLASS) {
713                         /*XXX: SCALE_CTRL_ACTIVE??? */
714                         evo_mthd(push, 0x08d8 + (nv_crtc->index * 0x400), 2);
715                         evo_data(push, (oY << 16) | oX);
716                         evo_data(push, (oY << 16) | oX);
717                         evo_mthd(push, 0x08a4 + (nv_crtc->index * 0x400), 1);
718                         evo_data(push, 0x00000000);
719                         evo_mthd(push, 0x08c8 + (nv_crtc->index * 0x400), 1);
720                         evo_data(push, umode->vdisplay << 16 | umode->hdisplay);
721                 } else {
722                         evo_mthd(push, 0x04c0 + (nv_crtc->index * 0x300), 3);
723                         evo_data(push, (oY << 16) | oX);
724                         evo_data(push, (oY << 16) | oX);
725                         evo_data(push, (oY << 16) | oX);
726                         evo_mthd(push, 0x0494 + (nv_crtc->index * 0x300), 1);
727                         evo_data(push, 0x00000000);
728                         evo_mthd(push, 0x04b8 + (nv_crtc->index * 0x300), 1);
729                         evo_data(push, umode->vdisplay << 16 | umode->hdisplay);
730                 }
731
732                 evo_kick(push, mast);
733
734                 if (update) {
735                         nvd0_display_flip_stop(crtc);
736                         nvd0_display_flip_next(crtc, crtc->fb, NULL, 1);
737                 }
738         }
739
740         return 0;
741 }
742
743 static int
744 nvd0_crtc_set_image(struct nouveau_crtc *nv_crtc, struct drm_framebuffer *fb,
745                     int x, int y, bool update)
746 {
747         struct nouveau_framebuffer *nvfb = nouveau_framebuffer(fb);
748         struct nvd0_mast *mast = nvd0_mast(nv_crtc->base.dev);
749         u32 *push;
750
751         push = evo_wait(mast, 16);
752         if (push) {
753                 if (nvd0_vers(mast) < NVD0_DISP_MAST_CLASS) {
754                         evo_mthd(push, 0x0860 + (nv_crtc->index * 0x400), 1);
755                         evo_data(push, nvfb->nvbo->bo.offset >> 8);
756                         evo_mthd(push, 0x0868 + (nv_crtc->index * 0x400), 3);
757                         evo_data(push, (fb->height << 16) | fb->width);
758                         evo_data(push, nvfb->r_pitch);
759                         evo_data(push, nvfb->r_format);
760                         evo_mthd(push, 0x08c0 + (nv_crtc->index * 0x400), 1);
761                         evo_data(push, (y << 16) | x);
762                         if (nvd0_vers(mast) > NV50_DISP_MAST_CLASS) {
763                                 evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
764                                 evo_data(push, nvfb->r_dma);
765                         }
766                 } else {
767                         evo_mthd(push, 0x0460 + (nv_crtc->index * 0x300), 1);
768                         evo_data(push, nvfb->nvbo->bo.offset >> 8);
769                         evo_mthd(push, 0x0468 + (nv_crtc->index * 0x300), 4);
770                         evo_data(push, (fb->height << 16) | fb->width);
771                         evo_data(push, nvfb->r_pitch);
772                         evo_data(push, nvfb->r_format);
773                         evo_data(push, nvfb->r_dma);
774                         evo_mthd(push, 0x04b0 + (nv_crtc->index * 0x300), 1);
775                         evo_data(push, (y << 16) | x);
776                 }
777
778                 if (update) {
779                         evo_mthd(push, 0x0080, 1);
780                         evo_data(push, 0x00000000);
781                 }
782                 evo_kick(push, mast);
783         }
784
785         nv_crtc->fb.tile_flags = nvfb->r_dma;
786         return 0;
787 }
788
789 static void
790 nvd0_crtc_cursor_show(struct nouveau_crtc *nv_crtc)
791 {
792         struct nvd0_mast *mast = nvd0_mast(nv_crtc->base.dev);
793         u32 *push = evo_wait(mast, 16);
794         if (push) {
795                 if (nvd0_vers(mast) < NV84_DISP_MAST_CLASS) {
796                         evo_mthd(push, 0x0880 + (nv_crtc->index * 0x400), 2);
797                         evo_data(push, 0x85000000);
798                         evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8);
799                 } else
800                 if (nvd0_vers(mast) < NVD0_DISP_MAST_CLASS) {
801                         evo_mthd(push, 0x0880 + (nv_crtc->index * 0x400), 2);
802                         evo_data(push, 0x85000000);
803                         evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8);
804                         evo_mthd(push, 0x089c + (nv_crtc->index * 0x400), 1);
805                         evo_data(push, NvEvoVRAM);
806                 } else {
807                         evo_mthd(push, 0x0480 + (nv_crtc->index * 0x300), 2);
808                         evo_data(push, 0x85000000);
809                         evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8);
810                         evo_mthd(push, 0x048c + (nv_crtc->index * 0x300), 1);
811                         evo_data(push, NvEvoVRAM);
812                 }
813                 evo_kick(push, mast);
814         }
815 }
816
817 static void
818 nvd0_crtc_cursor_hide(struct nouveau_crtc *nv_crtc)
819 {
820         struct nvd0_mast *mast = nvd0_mast(nv_crtc->base.dev);
821         u32 *push = evo_wait(mast, 16);
822         if (push) {
823                 if (nvd0_vers(mast) < NV84_DISP_MAST_CLASS) {
824                         evo_mthd(push, 0x0880 + (nv_crtc->index * 0x400), 1);
825                         evo_data(push, 0x05000000);
826                 } else
827                 if (nvd0_vers(mast) < NVD0_DISP_MAST_CLASS) {
828                         evo_mthd(push, 0x0880 + (nv_crtc->index * 0x400), 1);
829                         evo_data(push, 0x05000000);
830                         evo_mthd(push, 0x089c + (nv_crtc->index * 0x400), 1);
831                         evo_data(push, 0x00000000);
832                 } else {
833                         evo_mthd(push, 0x0480 + (nv_crtc->index * 0x300), 1);
834                         evo_data(push, 0x05000000);
835                         evo_mthd(push, 0x048c + (nv_crtc->index * 0x300), 1);
836                         evo_data(push, 0x00000000);
837                 }
838                 evo_kick(push, mast);
839         }
840 }
841
842 static void
843 nvd0_crtc_cursor_show_hide(struct nouveau_crtc *nv_crtc, bool show, bool update)
844 {
845         struct nvd0_mast *mast = nvd0_mast(nv_crtc->base.dev);
846
847         if (show)
848                 nvd0_crtc_cursor_show(nv_crtc);
849         else
850                 nvd0_crtc_cursor_hide(nv_crtc);
851
852         if (update) {
853                 u32 *push = evo_wait(mast, 2);
854                 if (push) {
855                         evo_mthd(push, 0x0080, 1);
856                         evo_data(push, 0x00000000);
857                         evo_kick(push, mast);
858                 }
859         }
860 }
861
862 static void
863 nvd0_crtc_dpms(struct drm_crtc *crtc, int mode)
864 {
865 }
866
867 static void
868 nvd0_crtc_prepare(struct drm_crtc *crtc)
869 {
870         struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
871         struct nvd0_mast *mast = nvd0_mast(crtc->dev);
872         u32 *push;
873
874         nvd0_display_flip_stop(crtc);
875
876         push = evo_wait(mast, 2);
877         if (push) {
878                 if (nvd0_vers(mast) < NV84_DISP_MAST_CLASS) {
879                         evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
880                         evo_data(push, 0x00000000);
881                         evo_mthd(push, 0x0840 + (nv_crtc->index * 0x400), 1);
882                         evo_data(push, 0x40000000);
883                 } else
884                 if (nvd0_vers(mast) <  NVD0_DISP_MAST_CLASS) {
885                         evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
886                         evo_data(push, 0x00000000);
887                         evo_mthd(push, 0x0840 + (nv_crtc->index * 0x400), 1);
888                         evo_data(push, 0x40000000);
889                         evo_mthd(push, 0x085c + (nv_crtc->index * 0x400), 1);
890                         evo_data(push, 0x00000000);
891                 } else {
892                         evo_mthd(push, 0x0474 + (nv_crtc->index * 0x300), 1);
893                         evo_data(push, 0x00000000);
894                         evo_mthd(push, 0x0440 + (nv_crtc->index * 0x300), 1);
895                         evo_data(push, 0x03000000);
896                         evo_mthd(push, 0x045c + (nv_crtc->index * 0x300), 1);
897                         evo_data(push, 0x00000000);
898                 }
899
900                 evo_kick(push, mast);
901         }
902
903         nvd0_crtc_cursor_show_hide(nv_crtc, false, false);
904 }
905
906 static void
907 nvd0_crtc_commit(struct drm_crtc *crtc)
908 {
909         struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
910         struct nvd0_mast *mast = nvd0_mast(crtc->dev);
911         u32 *push;
912
913         push = evo_wait(mast, 32);
914         if (push) {
915                 if (nvd0_vers(mast) < NV84_DISP_MAST_CLASS) {
916                         evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
917                         evo_data(push, NvEvoVRAM_LP);
918                         evo_mthd(push, 0x0840 + (nv_crtc->index * 0x400), 2);
919                         evo_data(push, 0xc0000000);
920                         evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8);
921                 } else
922                 if (nvd0_vers(mast) < NVD0_DISP_MAST_CLASS) {
923                         evo_mthd(push, 0x0874 + (nv_crtc->index * 0x400), 1);
924                         evo_data(push, nv_crtc->fb.tile_flags);
925                         evo_mthd(push, 0x0840 + (nv_crtc->index * 0x400), 2);
926                         evo_data(push, 0xc0000000);
927                         evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8);
928                         evo_mthd(push, 0x085c + (nv_crtc->index * 0x400), 1);
929                         evo_data(push, NvEvoVRAM);
930                 } else {
931                         evo_mthd(push, 0x0474 + (nv_crtc->index * 0x300), 1);
932                         evo_data(push, nv_crtc->fb.tile_flags);
933                         evo_mthd(push, 0x0440 + (nv_crtc->index * 0x300), 4);
934                         evo_data(push, 0x83000000);
935                         evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8);
936                         evo_data(push, 0x00000000);
937                         evo_data(push, 0x00000000);
938                         evo_mthd(push, 0x045c + (nv_crtc->index * 0x300), 1);
939                         evo_data(push, NvEvoVRAM);
940                         evo_mthd(push, 0x0430 + (nv_crtc->index * 0x300), 1);
941                         evo_data(push, 0xffffff00);
942                 }
943
944                 evo_kick(push, mast);
945         }
946
947         nvd0_crtc_cursor_show_hide(nv_crtc, nv_crtc->cursor.visible, true);
948         nvd0_display_flip_next(crtc, crtc->fb, NULL, 1);
949 }
950
951 static bool
952 nvd0_crtc_mode_fixup(struct drm_crtc *crtc, const struct drm_display_mode *mode,
953                      struct drm_display_mode *adjusted_mode)
954 {
955         return true;
956 }
957
958 static int
959 nvd0_crtc_swap_fbs(struct drm_crtc *crtc, struct drm_framebuffer *old_fb)
960 {
961         struct nouveau_framebuffer *nvfb = nouveau_framebuffer(crtc->fb);
962         int ret;
963
964         ret = nouveau_bo_pin(nvfb->nvbo, TTM_PL_FLAG_VRAM);
965         if (ret)
966                 return ret;
967
968         if (old_fb) {
969                 nvfb = nouveau_framebuffer(old_fb);
970                 nouveau_bo_unpin(nvfb->nvbo);
971         }
972
973         return 0;
974 }
975
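/* derive the EVO timing parameters (sync widths, blanking start/end,
 * interlace adjustments) from the DRM mode, program them, and apply
 * the dither/scale/image state without triggering an update yet.
 */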
976 static int
977 nvd0_crtc_mode_set(struct drm_crtc *crtc, struct drm_display_mode *umode,
978                    struct drm_display_mode *mode, int x, int y,
979                    struct drm_framebuffer *old_fb)
980 {
981         struct nvd0_mast *mast = nvd0_mast(crtc->dev);
982         struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
983         struct nouveau_connector *nv_connector;
984         u32 ilace = (mode->flags & DRM_MODE_FLAG_INTERLACE) ? 2 : 1;
985         u32 vscan = (mode->flags & DRM_MODE_FLAG_DBLSCAN) ? 2 : 1;
986         u32 hactive, hsynce, hbackp, hfrontp, hblanke, hblanks;
987         u32 vactive, vsynce, vbackp, vfrontp, vblanke, vblanks;
988         u32 vblan2e = 0, vblan2s = 1;
989         u32 *push;
990         int ret;
991
992         hactive = mode->htotal;
993         hsynce  = mode->hsync_end - mode->hsync_start - 1;
994         hbackp  = mode->htotal - mode->hsync_end;
995         hblanke = hsynce + hbackp;
996         hfrontp = mode->hsync_start - mode->hdisplay;
997         hblanks = mode->htotal - hfrontp - 1;
998
999         vactive = mode->vtotal * vscan / ilace;
1000         vsynce  = ((mode->vsync_end - mode->vsync_start) * vscan / ilace) - 1;
1001         vbackp  = (mode->vtotal - mode->vsync_end) * vscan / ilace;
1002         vblanke = vsynce + vbackp;
1003         vfrontp = (mode->vsync_start - mode->vdisplay) * vscan / ilace;
1004         vblanks = vactive - vfrontp - 1;
1005         if (mode->flags & DRM_MODE_FLAG_INTERLACE) {
1006                 vblan2e = vactive + vsynce + vbackp;
1007                 vblan2s = vblan2e + (mode->vdisplay * vscan / ilace);
1008                 vactive = (vactive * 2) + 1;
1009         }
1010
1011         ret = nvd0_crtc_swap_fbs(crtc, old_fb);
1012         if (ret)
1013                 return ret;
1014
1015         push = evo_wait(mast, 64);
1016         if (push) {
1017                 if (nvd0_vers(mast) < NVD0_DISP_MAST_CLASS) {
1018                         evo_mthd(push, 0x0804 + (nv_crtc->index * 0x400), 2);
1019                         evo_data(push, 0x00800000 | mode->clock);
1020                         evo_data(push, (ilace == 2) ? 2 : 0);
1021                         evo_mthd(push, 0x0810 + (nv_crtc->index * 0x400), 6);
1022                         evo_data(push, 0x00000000);
1023                         evo_data(push, (vactive << 16) | hactive);
1024                         evo_data(push, ( vsynce << 16) | hsynce);
1025                         evo_data(push, (vblanke << 16) | hblanke);
1026                         evo_data(push, (vblanks << 16) | hblanks);
1027                         evo_data(push, (vblan2e << 16) | vblan2s);
1028                         evo_mthd(push, 0x082c + (nv_crtc->index * 0x400), 1);
1029                         evo_data(push, 0x00000000);
1030                         evo_mthd(push, 0x0900 + (nv_crtc->index * 0x400), 2);
1031                         evo_data(push, 0x00000311);
1032                         evo_data(push, 0x00000100);
1033                 } else {
1034                         evo_mthd(push, 0x0410 + (nv_crtc->index * 0x300), 6);
1035                         evo_data(push, 0x00000000);
1036                         evo_data(push, (vactive << 16) | hactive);
1037                         evo_data(push, ( vsynce << 16) | hsynce);
1038                         evo_data(push, (vblanke << 16) | hblanke);
1039                         evo_data(push, (vblanks << 16) | hblanks);
1040                         evo_data(push, (vblan2e << 16) | vblan2s);
1041                         evo_mthd(push, 0x042c + (nv_crtc->index * 0x300), 1);
1042                         evo_data(push, 0x00000000); /* ??? */
1043                         evo_mthd(push, 0x0450 + (nv_crtc->index * 0x300), 3);
1044                         evo_data(push, mode->clock * 1000);
1045                         evo_data(push, 0x00200000); /* ??? */
1046                         evo_data(push, mode->clock * 1000);
1047                         evo_mthd(push, 0x04d0 + (nv_crtc->index * 0x300), 2);
1048                         evo_data(push, 0x00000311);
1049                         evo_data(push, 0x00000100);
1050                 }
1051
1052                 evo_kick(push, mast);
1053         }
1054
1055         nv_connector = nouveau_crtc_connector_get(nv_crtc);
1056         nvd0_crtc_set_dither(nv_crtc, false);
1057         nvd0_crtc_set_scale(nv_crtc, false);
1058         nvd0_crtc_set_image(nv_crtc, crtc->fb, x, y, false);
1059         return 0;
1060 }
1061
1062 static int
1063 nvd0_crtc_mode_set_base(struct drm_crtc *crtc, int x, int y,
1064                         struct drm_framebuffer *old_fb)
1065 {
1066         struct nouveau_drm *drm = nouveau_drm(crtc->dev);
1067         struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
1068         int ret;
1069
1070         if (!crtc->fb) {
1071                 NV_DEBUG(drm, "No FB bound\n");
1072                 return 0;
1073         }
1074
1075         ret = nvd0_crtc_swap_fbs(crtc, old_fb);
1076         if (ret)
1077                 return ret;
1078
1079         nvd0_display_flip_stop(crtc);
1080         nvd0_crtc_set_image(nv_crtc, crtc->fb, x, y, true);
1081         nvd0_display_flip_next(crtc, crtc->fb, NULL, 1);
1082         return 0;
1083 }
1084
1085 static int
1086 nvd0_crtc_mode_set_base_atomic(struct drm_crtc *crtc,
1087                                struct drm_framebuffer *fb, int x, int y,
1088                                enum mode_set_atomic state)
1089 {
1090         struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
1091         nvd0_display_flip_stop(crtc);
1092         nvd0_crtc_set_image(nv_crtc, fb, x, y, true);
1093         return 0;
1094 }
1095
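/* upload the software gamma table into the hardware LUT buffer;
 * pre-NVD0 cores use 8-byte entries, NVD0 and later use a 0x20-byte
 * stride with a 0x6000 offset added to each component.
 */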
1096 static void
1097 nvd0_crtc_lut_load(struct drm_crtc *crtc)
1098 {
1099         struct nvd0_disp *disp = nvd0_disp(crtc->dev);
1100         struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
1101         void __iomem *lut = nvbo_kmap_obj_iovirtual(nv_crtc->lut.nvbo);
1102         int i;
1103
1104         for (i = 0; i < 256; i++) {
1105                 u16 r = nv_crtc->lut.r[i] >> 2;
1106                 u16 g = nv_crtc->lut.g[i] >> 2;
1107                 u16 b = nv_crtc->lut.b[i] >> 2;
1108
1109                 if (nv_mclass(disp->core) < NVD0_DISP_CLASS) {
1110                         writew(r + 0x0000, lut + (i * 0x08) + 0);
1111                         writew(g + 0x0000, lut + (i * 0x08) + 2);
1112                         writew(b + 0x0000, lut + (i * 0x08) + 4);
1113                 } else {
1114                         writew(r + 0x6000, lut + (i * 0x20) + 0);
1115                         writew(g + 0x6000, lut + (i * 0x20) + 2);
1116                         writew(b + 0x6000, lut + (i * 0x20) + 4);
1117                 }
1118         }
1119 }
1120
1121 static int
1122 nvd0_crtc_cursor_set(struct drm_crtc *crtc, struct drm_file *file_priv,
1123                      uint32_t handle, uint32_t width, uint32_t height)
1124 {
1125         struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
1126         struct drm_device *dev = crtc->dev;
1127         struct drm_gem_object *gem;
1128         struct nouveau_bo *nvbo;
1129         bool visible = (handle != 0);
1130         int i, ret = 0;
1131
1132         if (visible) {
1133                 if (width != 64 || height != 64)
1134                         return -EINVAL;
1135
1136                 gem = drm_gem_object_lookup(dev, file_priv, handle);
1137                 if (unlikely(!gem))
1138                         return -ENOENT;
1139                 nvbo = nouveau_gem_object(gem);
1140
1141                 ret = nouveau_bo_map(nvbo);
1142                 if (ret == 0) {
1143                         for (i = 0; i < 64 * 64; i++) {
1144                                 u32 v = nouveau_bo_rd32(nvbo, i);
1145                                 nouveau_bo_wr32(nv_crtc->cursor.nvbo, i, v);
1146                         }
1147                         nouveau_bo_unmap(nvbo);
1148                 }
1149
1150                 drm_gem_object_unreference_unlocked(gem);
1151         }
1152
1153         if (visible != nv_crtc->cursor.visible) {
1154                 nvd0_crtc_cursor_show_hide(nv_crtc, visible, true);
1155                 nv_crtc->cursor.visible = visible;
1156         }
1157
1158         return ret;
1159 }
1160
1161 static int
1162 nvd0_crtc_cursor_move(struct drm_crtc *crtc, int x, int y)
1163 {
1164         struct nvd0_curs *curs = nvd0_curs(crtc);
1165         struct nvd0_chan *chan = nvd0_chan(curs);
1166         nv_wo32(chan->user, 0x0084, (y << 16) | (x & 0xffff));
1167         nv_wo32(chan->user, 0x0080, 0x00000000);
1168         return 0;
1169 }
1170
1171 static void
1172 nvd0_crtc_gamma_set(struct drm_crtc *crtc, u16 *r, u16 *g, u16 *b,
1173                     uint32_t start, uint32_t size)
1174 {
1175         struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
1176         u32 end = min_t(u32, start + size, 256); /* clamp to the 256-entry LUT */
1177         u32 i;
1178
1179         for (i = start; i < end; i++) {
1180                 nv_crtc->lut.r[i] = r[i];
1181                 nv_crtc->lut.g[i] = g[i];
1182                 nv_crtc->lut.b[i] = b[i];
1183         }
1184
1185         nvd0_crtc_lut_load(crtc);
1186 }
1187
1188 static void
1189 nvd0_crtc_destroy(struct drm_crtc *crtc)
1190 {
1191         struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
1192         struct nvd0_disp *disp = nvd0_disp(crtc->dev);
1193         struct nvd0_head *head = nvd0_head(crtc);
1194         nvd0_dmac_destroy(disp->core, &head->ovly.base);
1195         nvd0_pioc_destroy(disp->core, &head->oimm.base);
1196         nvd0_dmac_destroy(disp->core, &head->sync.base);
1197         nvd0_pioc_destroy(disp->core, &head->curs.base);
1198         nouveau_bo_unmap(nv_crtc->cursor.nvbo);
1199         nouveau_bo_ref(NULL, &nv_crtc->cursor.nvbo);
1200         nouveau_bo_unmap(nv_crtc->lut.nvbo);
1201         nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);
1202         drm_crtc_cleanup(crtc);
1203         kfree(crtc);
1204 }
1205
1206 static const struct drm_crtc_helper_funcs nvd0_crtc_hfunc = {
1207         .dpms = nvd0_crtc_dpms,
1208         .prepare = nvd0_crtc_prepare,
1209         .commit = nvd0_crtc_commit,
1210         .mode_fixup = nvd0_crtc_mode_fixup,
1211         .mode_set = nvd0_crtc_mode_set,
1212         .mode_set_base = nvd0_crtc_mode_set_base,
1213         .mode_set_base_atomic = nvd0_crtc_mode_set_base_atomic,
1214         .load_lut = nvd0_crtc_lut_load,
1215 };
1216
1217 static const struct drm_crtc_funcs nvd0_crtc_func = {
1218         .cursor_set = nvd0_crtc_cursor_set,
1219         .cursor_move = nvd0_crtc_cursor_move,
1220         .gamma_set = nvd0_crtc_gamma_set,
1221         .set_config = drm_crtc_helper_set_config,
1222         .destroy = nvd0_crtc_destroy,
1223         .page_flip = nouveau_crtc_page_flip,
1224 };
1225
1226 static void
1227 nvd0_cursor_set_pos(struct nouveau_crtc *nv_crtc, int x, int y)
1228 {
1229 }
1230
1231 static void
1232 nvd0_cursor_set_offset(struct nouveau_crtc *nv_crtc, uint32_t offset)
1233 {
1234 }
1235
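/* create a CRTC (head): set up the LUT and cursor buffer objects and
 * allocate the cursor, sync (base/flip), overlay-immediate and overlay
 * EVO channels for this head.
 */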
1236 static int
1237 nvd0_crtc_create(struct drm_device *dev, struct nouveau_object *core, int index)
1238 {
1239         struct nvd0_disp *disp = nvd0_disp(dev);
1240         struct nvd0_head *head;
1241         struct drm_crtc *crtc;
1242         int ret, i;
1243
1244         head = kzalloc(sizeof(*head), GFP_KERNEL);
1245         if (!head)
1246                 return -ENOMEM;
1247
1248         head->base.index = index;
1249         head->base.set_dither = nvd0_crtc_set_dither;
1250         head->base.set_scale = nvd0_crtc_set_scale;
1251         head->base.cursor.set_offset = nvd0_cursor_set_offset;
1252         head->base.cursor.set_pos = nvd0_cursor_set_pos;
1253         for (i = 0; i < 256; i++) {
1254                 head->base.lut.r[i] = i << 8;
1255                 head->base.lut.g[i] = i << 8;
1256                 head->base.lut.b[i] = i << 8;
1257         }
1258
1259         crtc = &head->base.base;
1260         drm_crtc_init(dev, crtc, &nvd0_crtc_func);
1261         drm_crtc_helper_add(crtc, &nvd0_crtc_hfunc);
1262         drm_mode_crtc_set_gamma_size(crtc, 256);
1263
1264         ret = nouveau_bo_new(dev, 8192, 0x100, TTM_PL_FLAG_VRAM,
1265                              0, 0x0000, NULL, &head->base.lut.nvbo);
1266         if (!ret) {
1267                 ret = nouveau_bo_pin(head->base.lut.nvbo, TTM_PL_FLAG_VRAM);
1268                 if (!ret)
1269                         ret = nouveau_bo_map(head->base.lut.nvbo);
1270                 if (ret)
1271                         nouveau_bo_ref(NULL, &head->base.lut.nvbo);
1272         }
1273
1274         if (ret)
1275                 goto out;
1276
1277         nvd0_crtc_lut_load(crtc);
1278
1279         /* allocate cursor resources */
1280         ret = nvd0_pioc_create(disp->core, NV50_DISP_CURS_CLASS, index,
1281                               &(struct nv50_display_curs_class) {
1282                                         .head = index,
1283                               }, sizeof(struct nv50_display_curs_class),
1284                               &head->curs.base);
1285         if (ret)
1286                 goto out;
1287
1288         ret = nouveau_bo_new(dev, 64 * 64 * 4, 0x100, TTM_PL_FLAG_VRAM,
1289                              0, 0x0000, NULL, &head->base.cursor.nvbo);
1290         if (!ret) {
1291                 ret = nouveau_bo_pin(head->base.cursor.nvbo, TTM_PL_FLAG_VRAM);
1292                 if (!ret)
1293                         ret = nouveau_bo_map(head->base.cursor.nvbo);
1294                 if (ret)
1295                         nouveau_bo_ref(NULL, &head->base.cursor.nvbo);
1296         }
1297
1298         if (ret)
1299                 goto out;
1300
1301         /* allocate page flip / sync resources */
1302         ret = nvd0_dmac_create(disp->core, NV50_DISP_SYNC_CLASS, index,
1303                               &(struct nv50_display_sync_class) {
1304                                         .pushbuf = EVO_PUSH_HANDLE(SYNC, index),
1305                                         .head = index,
1306                               }, sizeof(struct nv50_display_sync_class),
1307                               disp->sync->bo.offset, &head->sync.base);
1308         if (ret)
1309                 goto out;
1310
1311         head->sync.sem.offset = EVO_SYNC(1 + index, 0x00);
1312
1313         /* allocate overlay resources */
1314         ret = nvd0_pioc_create(disp->core, NV50_DISP_OIMM_CLASS, index,
1315                               &(struct nv50_display_oimm_class) {
1316                                         .head = index,
1317                               }, sizeof(struct nv50_display_oimm_class),
1318                               &head->oimm.base);
1319         if (ret)
1320                 goto out;
1321
1322         ret = nvd0_dmac_create(disp->core, NV50_DISP_OVLY_CLASS, index,
1323                               &(struct nv50_display_ovly_class) {
1324                                         .pushbuf = EVO_PUSH_HANDLE(OVLY, index),
1325                                         .head = index,
1326                               }, sizeof(struct nv50_display_ovly_class),
1327                               disp->sync->bo.offset, &head->ovly.base);
1328         if (ret)
1329                 goto out;
1330
1331 out:
1332         if (ret)
1333                 nvd0_crtc_destroy(crtc);
1334         return ret;
1335 }
1336
1337 /******************************************************************************
1338  * DAC
1339  *****************************************************************************/
1340 static void
1341 nvd0_dac_dpms(struct drm_encoder *encoder, int mode)
1342 {
1343         struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1344         struct nvd0_disp *disp = nvd0_disp(encoder->dev);
1345         int or = nv_encoder->or;
1346         u32 dpms_ctrl;
1347
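             /* bits 0 and 2 presumably blank hsync and vsync respectively,
              * mirroring the classic VESA DPMS states */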
1348         dpms_ctrl = 0x00000000;
1349         if (mode == DRM_MODE_DPMS_STANDBY || mode == DRM_MODE_DPMS_OFF)
1350                 dpms_ctrl |= 0x00000001;
1351         if (mode == DRM_MODE_DPMS_SUSPEND || mode == DRM_MODE_DPMS_OFF)
1352                 dpms_ctrl |= 0x00000004;
1353
1354         nv_call(disp->core, NV50_DISP_DAC_PWR + or, dpms_ctrl);
1355 }
1356
1357 static bool
1358 nvd0_dac_mode_fixup(struct drm_encoder *encoder,
1359                     const struct drm_display_mode *mode,
1360                     struct drm_display_mode *adjusted_mode)
1361 {
1362         struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1363         struct nouveau_connector *nv_connector;
1364
1365         nv_connector = nouveau_encoder_connector_get(nv_encoder);
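             /* if the connector is configured to scale, drive the panel's native
              * mode and let the head scale the requested mode into it */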
1366         if (nv_connector && nv_connector->native_mode) {
1367                 if (nv_connector->scaling_mode != DRM_MODE_SCALE_NONE) {
1368                         int id = adjusted_mode->base.id;
1369                         *adjusted_mode = *nv_connector->native_mode;
1370                         adjusted_mode->base.id = id;
1371                 }
1372         }
1373
1374         return true;
1375 }
1376
1377 static void
1378 nvd0_dac_commit(struct drm_encoder *encoder)
1379 {
1380 }
1381
1382 static void
1383 nvd0_dac_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
1384                   struct drm_display_mode *adjusted_mode)
1385 {
1386         struct nvd0_mast *mast = nvd0_mast(encoder->dev);
1387         struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1388         struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
1389         u32 *push;
1390
1391         nvd0_dac_dpms(encoder, DRM_MODE_DPMS_ON);
1392
1393         push = evo_wait(mast, 8);
1394         if (push) {
1395                 if (nvd0_vers(mast) < NVD0_DISP_MAST_CLASS) {
1396                         u32 syncs = 0x00000000;
1397
1398                         if (mode->flags & DRM_MODE_FLAG_NHSYNC)
1399                                 syncs |= 0x00000001;
1400                         if (mode->flags & DRM_MODE_FLAG_NVSYNC)
1401                                 syncs |= 0x00000002;
1402
1403                         evo_mthd(push, 0x0400 + (nv_encoder->or * 0x080), 2);
1404                         evo_data(push, 1 << nv_crtc->index);
1405                         evo_data(push, syncs);
1406                 } else {
1407                         u32 magic = 0x31ec6000 | (nv_crtc->index << 25);
1408                         u32 syncs = 0x00000001;
1409
1410                         if (mode->flags & DRM_MODE_FLAG_NHSYNC)
1411                                 syncs |= 0x00000008;
1412                         if (mode->flags & DRM_MODE_FLAG_NVSYNC)
1413                                 syncs |= 0x00000010;
1414
1415                         if (mode->flags & DRM_MODE_FLAG_INTERLACE)
1416                                 magic |= 0x00000001;
1417
1418                         evo_mthd(push, 0x0404 + (nv_crtc->index * 0x300), 2);
1419                         evo_data(push, syncs);
1420                         evo_data(push, magic);
1421                         evo_mthd(push, 0x0180 + (nv_encoder->or * 0x020), 1);
1422                         evo_data(push, 1 << nv_crtc->index);
1423                 }
1424
1425                 evo_kick(push, mast);
1426         }
1427
1428         nv_encoder->crtc = encoder->crtc;
1429 }
1430
1431 static void
1432 nvd0_dac_disconnect(struct drm_encoder *encoder)
1433 {
1434         struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1435         struct nvd0_mast *mast = nvd0_mast(encoder->dev);
1436         const int or = nv_encoder->or;
1437         u32 *push;
1438
1439         if (nv_encoder->crtc) {
1440                 nvd0_crtc_prepare(nv_encoder->crtc);
1441
1442                 push = evo_wait(mast, 4);
1443                 if (push) {
1444                         if (nvd0_vers(mast) < NVD0_DISP_MAST_CLASS) {
1445                                 evo_mthd(push, 0x0400 + (or * 0x080), 1);
1446                                 evo_data(push, 0x00000000);
1447                         } else {
1448                                 evo_mthd(push, 0x0180 + (or * 0x020), 1);
1449                                 evo_data(push, 0x00000000);
1450                         }
1451
1452                         evo_mthd(push, 0x0080, 1);
1453                         evo_data(push, 0x00000000);
1454                         evo_kick(push, mast);
1455                 }
1456         }
1457
1458         nv_encoder->crtc = NULL;
1459 }
1460
1461 static enum drm_connector_status
1462 nvd0_dac_detect(struct drm_encoder *encoder, struct drm_connector *connector)
1463 {
1464         struct nvd0_disp *disp = nvd0_disp(encoder->dev);
1465         int ret, or = nouveau_encoder(encoder)->or;
1466         u32 load = 0;
1467
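             /* ask the display engine to perform DAC load detection; a result of 7
              * presumably means a load was sensed on all three (RGB) lines */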
1468         ret = nv_exec(disp->core, NV50_DISP_DAC_LOAD + or, &load, sizeof(load));
1469         if (ret || load != 7)
1470                 return connector_status_disconnected;
1471
1472         return connector_status_connected;
1473 }
1474
1475 static void
1476 nvd0_dac_destroy(struct drm_encoder *encoder)
1477 {
1478         drm_encoder_cleanup(encoder);
1479         kfree(encoder);
1480 }
1481
1482 static const struct drm_encoder_helper_funcs nvd0_dac_hfunc = {
1483         .dpms = nvd0_dac_dpms,
1484         .mode_fixup = nvd0_dac_mode_fixup,
1485         .prepare = nvd0_dac_disconnect,
1486         .commit = nvd0_dac_commit,
1487         .mode_set = nvd0_dac_mode_set,
1488         .disable = nvd0_dac_disconnect,
1489         .get_crtc = nvd0_display_crtc_get,
1490         .detect = nvd0_dac_detect
1491 };
1492
1493 static const struct drm_encoder_funcs nvd0_dac_func = {
1494         .destroy = nvd0_dac_destroy,
1495 };
1496
1497 static int
1498 nvd0_dac_create(struct drm_connector *connector, struct dcb_output *dcbe)
1499 {
1500         struct drm_device *dev = connector->dev;
1501         struct nouveau_encoder *nv_encoder;
1502         struct drm_encoder *encoder;
1503
1504         nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
1505         if (!nv_encoder)
1506                 return -ENOMEM;
1507         nv_encoder->dcb = dcbe;
1508         nv_encoder->or = ffs(dcbe->or) - 1;
1509
1510         encoder = to_drm_encoder(nv_encoder);
1511         encoder->possible_crtcs = dcbe->heads;
1512         encoder->possible_clones = 0;
1513         drm_encoder_init(dev, encoder, &nvd0_dac_func, DRM_MODE_ENCODER_DAC);
1514         drm_encoder_helper_add(encoder, &nvd0_dac_hfunc);
1515
1516         drm_mode_connector_attach_encoder(connector, encoder);
1517         return 0;
1518 }
1519
1520 /******************************************************************************
1521  * Audio
1522  *****************************************************************************/
1523 static void
1524 nvd0_audio_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode)
1525 {
1526         struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1527         struct nouveau_connector *nv_connector;
1528         struct nvd0_disp *disp = nvd0_disp(encoder->dev);
1529
1530         nv_connector = nouveau_encoder_connector_get(nv_encoder);
1531         if (!drm_detect_monitor_audio(nv_connector->edid))
1532                 return;
1533
1534         drm_edid_to_eld(&nv_connector->base, nv_connector->edid);
1535
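             /* upload the ELD; byte 2 holds its baseline length in 4-byte units */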
1536         nv_exec(disp->core, NVA3_DISP_SOR_HDA_ELD + nv_encoder->or,
1537                             nv_connector->base.eld,
1538                             nv_connector->base.eld[2] * 4);
1539 }
1540
1541 static void
1542 nvd0_audio_disconnect(struct drm_encoder *encoder)
1543 {
1544         struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1545         struct nvd0_disp *disp = nvd0_disp(encoder->dev);
1546
1547         nv_exec(disp->core, NVA3_DISP_SOR_HDA_ELD + nv_encoder->or, NULL, 0);
1548 }
1549
1550 /******************************************************************************
1551  * HDMI
1552  *****************************************************************************/
1553 static void
1554 nvd0_hdmi_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode)
1555 {
1556         struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1557         struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
1558         struct nouveau_connector *nv_connector;
1559         struct nvd0_disp *disp = nvd0_disp(encoder->dev);
1560         const u32 moff = (nv_crtc->index << 3) | nv_encoder->or;
1561         u32 rekey = 56; /* value used by both the binary driver and tegra */
1562         u32 max_ac_packet;
1563
1564         nv_connector = nouveau_encoder_connector_get(nv_encoder);
1565         if (!drm_detect_hdmi_monitor(nv_connector->edid))
1566                 return;
1567
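             /* audio/aux packets must fit into the horizontal blanking period,
              * less the rekey window and a fixed overhead; the result is
              * programmed in units of 32 pixels */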
1568         max_ac_packet  = mode->htotal - mode->hdisplay;
1569         max_ac_packet -= rekey;
1570         max_ac_packet -= 18; /* constant from tegra */
1571         max_ac_packet /= 32;
1572
1573         nv_call(disp->core, NV84_DISP_SOR_HDMI_PWR + moff,
1574                             NV84_DISP_SOR_HDMI_PWR_STATE_ON |
1575                             (max_ac_packet << 16) | rekey);
1576
1577         nvd0_audio_mode_set(encoder, mode);
1578 }
1579
1580 static void
1581 nvd0_hdmi_disconnect(struct drm_encoder *encoder)
1582 {
1583         struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1584         struct nouveau_crtc *nv_crtc = nouveau_crtc(nv_encoder->crtc);
1585         struct nvd0_disp *disp = nvd0_disp(encoder->dev);
1586         const u32 moff = (nv_crtc->index << 3) | nv_encoder->or;
1587
1588         nvd0_audio_disconnect(encoder);
1589
1590         nv_call(disp->core, NV84_DISP_SOR_HDMI_PWR + moff, 0x00000000);
1591 }
1592
1593 /******************************************************************************
1594  * SOR
1595  *****************************************************************************/
1596 static void
1597 nvd0_sor_dpms(struct drm_encoder *encoder, int mode)
1598 {
1599         struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1600         struct drm_device *dev = encoder->dev;
1601         struct nvd0_disp *disp = nvd0_disp(dev);
1602         struct drm_encoder *partner;
1603         int or = nv_encoder->or;
1604
1605         nv_encoder->last_dpms = mode;
1606
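             /* if another TMDS encoder shares this OR and is still powered on,
              * leave the SOR alone */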
1607         list_for_each_entry(partner, &dev->mode_config.encoder_list, head) {
1608                 struct nouveau_encoder *nv_partner = nouveau_encoder(partner);
1609
1610                 if (partner->encoder_type != DRM_MODE_ENCODER_TMDS)
1611                         continue;
1612
1613                 if (nv_partner != nv_encoder &&
1614                     nv_partner->dcb->or == nv_encoder->dcb->or) {
1615                         if (nv_partner->last_dpms == DRM_MODE_DPMS_ON)
1616                                 return;
1617                         break;
1618                 }
1619         }
1620
1621         nv_call(disp->core, NV50_DISP_SOR_PWR + or, (mode == DRM_MODE_DPMS_ON));
1622
1623         if (nv_encoder->dcb->type == DCB_OUTPUT_DP)
1624                 nouveau_dp_dpms(encoder, mode, nv_encoder->dp.datarate, disp->core);
1625 }
1626
1627 static bool
1628 nvd0_sor_mode_fixup(struct drm_encoder *encoder,
1629                     const struct drm_display_mode *mode,
1630                     struct drm_display_mode *adjusted_mode)
1631 {
1632         struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1633         struct nouveau_connector *nv_connector;
1634
1635         nv_connector = nouveau_encoder_connector_get(nv_encoder);
1636         if (nv_connector && nv_connector->native_mode) {
1637                 if (nv_connector->scaling_mode != DRM_MODE_SCALE_NONE) {
1638                         int id = adjusted_mode->base.id;
1639                         *adjusted_mode = *nv_connector->native_mode;
1640                         adjusted_mode->base.id = id;
1641                 }
1642         }
1643
1644         return true;
1645 }
1646
1647 static void
1648 nvd0_sor_disconnect(struct drm_encoder *encoder)
1649 {
1650         struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1651         struct nvd0_mast *mast = nvd0_mast(encoder->dev);
1652         const int or = nv_encoder->or;
1653         u32 *push;
1654
1655         if (nv_encoder->crtc) {
1656                 nvd0_crtc_prepare(nv_encoder->crtc);
1657
1658                 push = evo_wait(mast, 4);
1659                 if (push) {
1660                         if (nvd0_vers(mast) < NVD0_DISP_MAST_CLASS) {
1661                                 evo_mthd(push, 0x0600 + (or * 0x40), 1);
1662                                 evo_data(push, 0x00000000);
1663                         } else {
1664                                 evo_mthd(push, 0x0200 + (or * 0x20), 1);
1665                                 evo_data(push, 0x00000000);
1666                         }
1667
1668                         evo_mthd(push, 0x0080, 1);
1669                         evo_data(push, 0x00000000);
1670                         evo_kick(push, mast);
1671                 }
1672
1673                 nvd0_hdmi_disconnect(encoder);
1674         }
1675
1676         nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;
1677         nv_encoder->crtc = NULL;
1678 }
1679
1680 static void
1681 nvd0_sor_prepare(struct drm_encoder *encoder)
1682 {
1683         nvd0_sor_disconnect(encoder);
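             /* for DP, wait for the channel to idle, presumably so the disconnect
              * has taken effect before the link is retrained */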
1684         if (nouveau_encoder(encoder)->dcb->type == DCB_OUTPUT_DP)
1685                 evo_sync(encoder->dev);
1686 }
1687
1688 static void
1689 nvd0_sor_commit(struct drm_encoder *encoder)
1690 {
1691 }
1692
1693 static void
1694 nvd0_sor_mode_set(struct drm_encoder *encoder, struct drm_display_mode *umode,
1695                   struct drm_display_mode *mode)
1696 {
1697         struct nvd0_disp *disp = nvd0_disp(encoder->dev);
1698         struct nvd0_mast *mast = nvd0_mast(encoder->dev);
1699         struct drm_device *dev = encoder->dev;
1700         struct nouveau_drm *drm = nouveau_drm(dev);
1701         struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
1702         struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
1703         struct nouveau_connector *nv_connector;
1704         struct nvbios *bios = &drm->vbios;
1705         u32 *push, lvds = 0;
1706         u8 owner = 1 << nv_crtc->index;
1707         u8 proto = 0xf;
1708         u8 depth = 0x0;
1709
1710         nv_connector = nouveau_encoder_connector_get(nv_encoder);
1711         switch (nv_encoder->dcb->type) {
1712         case DCB_OUTPUT_TMDS:
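                     /* single-link TMDS tops out at 165MHz; above that the
                      * dual-link protocol is selected */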
1713                 if (nv_encoder->dcb->sorconf.link & 1) {
1714                         if (mode->clock < 165000)
1715                                 proto = 0x1;
1716                         else
1717                                 proto = 0x5;
1718                 } else {
1719                         proto = 0x2;
1720                 }
1721
1722                 nvd0_hdmi_mode_set(encoder, mode);
1723                 break;
1724         case DCB_OUTPUT_LVDS:
1725                 proto = 0x0;
1726
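                     /* lvds script flags: 0x0100 selects dual-link, 0x0200 a 24-bit panel */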
1727                 if (bios->fp_no_ddc) {
1728                         if (bios->fp.dual_link)
1729                                 lvds |= 0x0100;
1730                         if (bios->fp.if_is_24bit)
1731                                 lvds |= 0x0200;
1732                 } else {
1733                         if (nv_connector->type == DCB_CONNECTOR_LVDS_SPWG) {
1734                                 if (((u8 *)nv_connector->edid)[121] == 2)
1735                                         lvds |= 0x0100;
1736                         } else
1737                         if (mode->clock >= bios->fp.duallink_transition_clk) {
1738                                 lvds |= 0x0100;
1739                         }
1740
1741                         if (lvds & 0x0100) {
1742                                 if (bios->fp.strapless_is_24bit & 2)
1743                                         lvds |= 0x0200;
1744                         } else {
1745                                 if (bios->fp.strapless_is_24bit & 1)
1746                                         lvds |= 0x0200;
1747                         }
1748
1749                         if (nv_connector->base.display_info.bpc == 8)
1750                                 lvds |= 0x0200;
1751                 }
1752
1753                 nv_call(disp->core, NV50_DISP_SOR_LVDS_SCRIPT + nv_encoder->or, lvds);
1754                 break;
1755         case DCB_OUTPUT_DP:
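                     /* link bandwidth required for this mode: pixel clock
                      * multiplied by bits-per-pixel, divided by 8 */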
1756                 if (nv_connector->base.display_info.bpc == 6) {
1757                         nv_encoder->dp.datarate = mode->clock * 18 / 8;
1758                         depth = 0x2;
1759                 } else {
1760                         nv_encoder->dp.datarate = mode->clock * 24 / 8;
1761                         depth = 0x5;
1762                 }
1763
1764                 if (nv_encoder->dcb->sorconf.link & 1)
1765                         proto = 0x8;
1766                 else
1767                         proto = 0x9;
1768                 break;
1769         default:
1770                 BUG_ON(1);
1771                 break;
1772         }
1773
1774         nvd0_sor_dpms(encoder, DRM_MODE_DPMS_ON);
1775
1776         push = evo_wait(nvd0_mast(dev), 8);
1777         if (push) {
1778                 if (nvd0_vers(mast) < NVD0_DISP_CLASS) {
1779                         evo_mthd(push, 0x0600 + (nv_encoder->or * 0x040), 1);
1780                         evo_data(push, (depth << 16) | (proto << 8) | owner);
1781                 } else {
1782                         u32 magic = 0x31ec6000 | (nv_crtc->index << 25);
1783                         u32 syncs = 0x00000001;
1784
1785                         if (mode->flags & DRM_MODE_FLAG_NHSYNC)
1786                                 syncs |= 0x00000008;
1787                         if (mode->flags & DRM_MODE_FLAG_NVSYNC)
1788                                 syncs |= 0x00000010;
1789
1790                         if (mode->flags & DRM_MODE_FLAG_INTERLACE)
1791                                 magic |= 0x00000001;
1792
1793                         evo_mthd(push, 0x0404 + (nv_crtc->index * 0x300), 2);
1794                         evo_data(push, syncs | (depth << 6));
1795                         evo_data(push, magic);
1796                         evo_mthd(push, 0x0200 + (nv_encoder->or * 0x020), 1);
1797                         evo_data(push, owner | (proto << 8));
1798                 }
1799
1800                 evo_kick(push, mast);
1801         }
1802
1803         nv_encoder->crtc = encoder->crtc;
1804 }
1805
1806 static void
1807 nvd0_sor_destroy(struct drm_encoder *encoder)
1808 {
1809         drm_encoder_cleanup(encoder);
1810         kfree(encoder);
1811 }
1812
1813 static const struct drm_encoder_helper_funcs nvd0_sor_hfunc = {
1814         .dpms = nvd0_sor_dpms,
1815         .mode_fixup = nvd0_sor_mode_fixup,
1816         .prepare = nvd0_sor_prepare,
1817         .commit = nvd0_sor_commit,
1818         .mode_set = nvd0_sor_mode_set,
1819         .disable = nvd0_sor_disconnect,
1820         .get_crtc = nvd0_display_crtc_get,
1821 };
1822
1823 static const struct drm_encoder_funcs nvd0_sor_func = {
1824         .destroy = nvd0_sor_destroy,
1825 };
1826
1827 static int
1828 nvd0_sor_create(struct drm_connector *connector, struct dcb_output *dcbe)
1829 {
1830         struct drm_device *dev = connector->dev;
1831         struct nouveau_encoder *nv_encoder;
1832         struct drm_encoder *encoder;
1833
1834         nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
1835         if (!nv_encoder)
1836                 return -ENOMEM;
1837         nv_encoder->dcb = dcbe;
1838         nv_encoder->or = ffs(dcbe->or) - 1;
1839         nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;
1840
1841         encoder = to_drm_encoder(nv_encoder);
1842         encoder->possible_crtcs = dcbe->heads;
1843         encoder->possible_clones = 0;
1844         drm_encoder_init(dev, encoder, &nvd0_sor_func, DRM_MODE_ENCODER_TMDS);
1845         drm_encoder_helper_add(encoder, &nvd0_sor_hfunc);
1846
1847         drm_mode_connector_attach_encoder(connector, encoder);
1848         return 0;
1849 }
1850
1851 /******************************************************************************
1852  * Init
1853  *****************************************************************************/
1854 void
1855 nvd0_display_fini(struct drm_device *dev)
1856 {
1857 }
1858
1859 int
1860 nvd0_display_init(struct drm_device *dev)
1861 {
1862         u32 *push = evo_wait(nvd0_mast(dev), 32);
1863         if (push) {
1864                 evo_mthd(push, 0x0088, 1);
1865                 evo_data(push, NvEvoSync);
1866                 evo_mthd(push, 0x0084, 1);
1867                 evo_data(push, 0x00000000);
1868                 evo_mthd(push, 0x0084, 1);
1869                 evo_data(push, 0x80000000);
1870                 evo_mthd(push, 0x008c, 1);
1871                 evo_data(push, 0x00000000);
1872                 evo_kick(push, nvd0_mast(dev));
1873                 return 0;
1874         }
1875
1876         return -EBUSY;
1877 }
1878
1879 void
1880 nvd0_display_destroy(struct drm_device *dev)
1881 {
1882         struct nvd0_disp *disp = nvd0_disp(dev);
1883
1884         nvd0_dmac_destroy(disp->core, &disp->mast.base);
1885
1886         nouveau_bo_unmap(disp->sync);
1887         nouveau_bo_ref(NULL, &disp->sync);
1888
1889         nouveau_display(dev)->priv = NULL;
1890         kfree(disp);
1891 }
1892
1893 int
1894 nvd0_display_create(struct drm_device *dev)
1895 {
1896         static const u16 oclass[] = {
1897                 NVE0_DISP_CLASS,
1898                 NVD0_DISP_CLASS,
1899         };
1900         struct nouveau_device *device = nouveau_dev(dev);
1901         struct nouveau_drm *drm = nouveau_drm(dev);
1902         struct dcb_table *dcb = &drm->vbios.dcb;
1903         struct drm_connector *connector, *tmp;
1904         struct nvd0_disp *disp;
1905         struct dcb_output *dcbe;
1906         int crtcs, ret, i;
1907
1908         disp = kzalloc(sizeof(*disp), GFP_KERNEL);
1909         if (!disp)
1910                 return -ENOMEM;
1911
1912         nouveau_display(dev)->priv = disp;
1913         nouveau_display(dev)->dtor = nvd0_display_destroy;
1914         nouveau_display(dev)->init = nvd0_display_init;
1915         nouveau_display(dev)->fini = nvd0_display_fini;
1916
1917         /* small shared memory area we use for notifiers and semaphores */
1918         ret = nouveau_bo_new(dev, 4096, 0x1000, TTM_PL_FLAG_VRAM,
1919                              0, 0x0000, NULL, &disp->sync);
1920         if (!ret) {
1921                 ret = nouveau_bo_pin(disp->sync, TTM_PL_FLAG_VRAM);
1922                 if (!ret)
1923                         ret = nouveau_bo_map(disp->sync);
1924                 if (ret)
1925                         nouveau_bo_ref(NULL, &disp->sync);
1926         }
1927
1928         if (ret)
1929                 goto out;
1930
1931         /* attempt to allocate a supported evo display class */
1932         ret = -ENODEV;
1933         for (i = 0; ret && i < ARRAY_SIZE(oclass); i++) {
1934                 ret = nouveau_object_new(nv_object(drm), NVDRM_DEVICE,
1935                                          0xd1500000, oclass[i], NULL, 0,
1936                                          &disp->core);
1937         }
1938
1939         if (ret)
1940                 goto out;
1941
1942         /* allocate master evo channel */
1943         ret = nvd0_dmac_create(disp->core, NV50_DISP_MAST_CLASS, 0,
1944                               &(struct nv50_display_mast_class) {
1945                                         .pushbuf = EVO_PUSH_HANDLE(MAST, 0),
1946                               }, sizeof(struct nv50_display_mast_class),
1947                               disp->sync->bo.offset, &disp->mast.base);
1948         if (ret)
1949                 goto out;
1950
1951         /* create crtc objects to represent the hw heads */
1952         crtcs = nv_rd32(device, 0x022448);
1953         for (i = 0; i < crtcs; i++) {
1954                 ret = nvd0_crtc_create(dev, disp->core, i);
1955                 if (ret)
1956                         goto out;
1957         }
1958
1959         /* create encoder/connector objects based on VBIOS DCB table */
1960         for (i = 0, dcbe = &dcb->entry[0]; i < dcb->entries; i++, dcbe++) {
1961                 connector = nouveau_connector_create(dev, dcbe->connector);
1962                 if (IS_ERR(connector))
1963                         continue;
1964
1965                 if (dcbe->location != DCB_LOC_ON_CHIP) {
1966                         NV_WARN(drm, "skipping off-chip encoder %d/%d\n",
1967                                 dcbe->type, ffs(dcbe->or) - 1);
1968                         continue;
1969                 }
1970
1971                 switch (dcbe->type) {
1972                 case DCB_OUTPUT_TMDS:
1973                 case DCB_OUTPUT_LVDS:
1974                 case DCB_OUTPUT_DP:
1975                         nvd0_sor_create(connector, dcbe);
1976                         break;
1977                 case DCB_OUTPUT_ANALOG:
1978                         nvd0_dac_create(connector, dcbe);
1979                         break;
1980                 default:
1981                         NV_WARN(drm, "skipping unsupported encoder %d/%d\n",
1982                                 dcbe->type, ffs(dcbe->or) - 1);
1983                         continue;
1984                 }
1985         }
1986
1987         /* cull any connectors we created that don't have an encoder */
1988         list_for_each_entry_safe(connector, tmp, &dev->mode_config.connector_list, head) {
1989                 if (connector->encoder_ids[0])
1990                         continue;
1991
1992                 NV_WARN(drm, "%s has no encoders, removing\n",
1993                         drm_get_connector_name(connector));
1994                 connector->funcs->destroy(connector);
1995         }
1996
1997 out:
1998         if (ret)
1999                 nvd0_display_destroy(dev);
2000         return ret;
2001 }