/*
 * Copyright 2011 Red Hat Inc.
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice shall be included in
 * all copies or substantial portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
 * OTHER DEALINGS IN THE SOFTWARE.
 *
 * Authors: Ben Skeggs
 */

#include <linux/dma-mapping.h>

#include "drmP.h"
#include "drm_crtc_helper.h"

#include "nouveau_drv.h"
#include "nouveau_connector.h"
#include "nouveau_encoder.h"
#include "nouveau_crtc.h"

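/* Handles of the DMA objects set up in nvd0_display_create(): MEM_SYNC
 * describes a 4KiB area inside the display's instance block (used for
 * synchronisation), MEM_VRAM describes all of VRAM.
 */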
#define MEM_SYNC 0xe0000001
#define MEM_VRAM 0xe0010000

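/* Per-device display state: the instance block holding the hash table and
 * DMA objects above, plus the DMA handle and CPU mapping of the single EVO
 * (master) channel push buffer.
 */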
struct nvd0_display {
        struct nouveau_gpuobj *mem;
        struct {
                dma_addr_t handle;
                u32 *ptr;
        } evo[1];
};

static struct nvd0_display *
nvd0_display(struct drm_device *dev)
{
        struct drm_nouveau_private *dev_priv = dev->dev_private;
        return dev_priv->engine.display.priv;
}

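/* Submit a single method/data pair through the channel's immediate-command
 * interface: write the data, then the method with the busy bit set, and
 * poll for the hardware to clear it.  Not referenced elsewhere in this file
 * yet.
 */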
static int
evo_icmd(struct drm_device *dev, int id, u32 mthd, u32 data)
{
        int ret = 0;
        nv_mask(dev, 0x610700 + (id * 0x10), 0x00000001, 0x00000001);
        nv_wr32(dev, 0x610704 + (id * 0x10), data);
        nv_mask(dev, 0x610704 + (id * 0x10), 0x80000ffc, 0x80000000 | mthd);
        if (!nv_wait(dev, 0x610704 + (id * 0x10), 0x80000000, 0x00000000))
                ret = -EBUSY;
        nv_mask(dev, 0x610700 + (id * 0x10), 0x00000001, 0x00000000);
        return ret;
}

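/* Reserve room for 'nr' dwords in the EVO push buffer and return a pointer
 * to it.  If the request would run past the end of the page, a jump command
 * (0x20000000) is written, PUT is reset to the start and we wait for the
 * hardware read pointer (0x640004) to follow before reusing the buffer.
 */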
static u32 *
evo_wait(struct drm_device *dev, int id, int nr)
{
        struct nvd0_display *disp = nvd0_display(dev);
        u32 put = nv_rd32(dev, 0x640000 + (id * 0x1000)) / 4;

        if (put + nr >= (PAGE_SIZE / 4)) {
                disp->evo[id].ptr[put] = 0x20000000;

                nv_wr32(dev, 0x640000 + (id * 0x1000), 0x00000000);
                if (!nv_wait(dev, 0x640004 + (id * 0x1000), ~0, 0x00000000)) {
                        NV_ERROR(dev, "evo %d dma stalled\n", id);
                        return NULL;
                }

                put = 0;
        }

        return disp->evo[id].ptr + put;
}

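/* Submit everything queued since the last evo_wait() by moving the
 * channel's PUT pointer to the current position.
 */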
static void
evo_kick(u32 *push, struct drm_device *dev, int id)
{
        struct nvd0_display *disp = nvd0_display(dev);
        nv_wr32(dev, 0x640000 + (id * 0x1000), (push - disp->evo[id].ptr) << 2);
}

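/* Push-buffer method header: dword count in bits 18 and up, method offset
 * in the low bits; the data words follow immediately after the header.
 */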
#define evo_mthd(p,m,s) *((p)++) = (((s) << 18) | (m))
#define evo_data(p,d)   *((p)++) = (d)

static struct drm_crtc *
nvd0_display_crtc_get(struct drm_encoder *encoder)
{
        return nouveau_encoder(encoder)->crtc;
}

/******************************************************************************
 * DAC
 *****************************************************************************/

/******************************************************************************
 * SOR
 *****************************************************************************/
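/* Set the SOR's power state.  If another TMDS encoder shares this OR and is
 * still on, leave the hardware alone; otherwise wait for the OR to go idle,
 * write the new power state and wait for it to take effect.
 */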
static void
nvd0_sor_dpms(struct drm_encoder *encoder, int mode)
{
        struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
        struct drm_device *dev = encoder->dev;
        struct drm_encoder *partner;
        int or = nv_encoder->or;
        u32 dpms_ctrl;

        nv_encoder->last_dpms = mode;

        list_for_each_entry(partner, &dev->mode_config.encoder_list, head) {
                struct nouveau_encoder *nv_partner = nouveau_encoder(partner);

                if (partner->encoder_type != DRM_MODE_ENCODER_TMDS)
                        continue;

                if (nv_partner != nv_encoder &&
                    nv_partner->dcb->or == nv_encoder->or) {
                        if (nv_partner->last_dpms == DRM_MODE_DPMS_ON)
                                return;
                        break;
                }
        }

        dpms_ctrl  = (mode == DRM_MODE_DPMS_ON);
        dpms_ctrl |= 0x80000000;

        nv_wait(dev, 0x61c004 + (or * 0x0800), 0x80000000, 0x00000000);
        nv_mask(dev, 0x61c004 + (or * 0x0800), 0x80000001, dpms_ctrl);
        nv_wait(dev, 0x61c004 + (or * 0x0800), 0x80000000, 0x00000000);
        nv_wait(dev, 0x61c030 + (or * 0x0800), 0x10000000, 0x00000000);
}

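/* If the connector has a native mode and scaling is enabled, substitute the
 * native mode for the requested one; the difference is handled by the
 * scaler.
 */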
static bool
nvd0_sor_mode_fixup(struct drm_encoder *encoder, struct drm_display_mode *mode,
                    struct drm_display_mode *adjusted_mode)
{
        struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
        struct nouveau_connector *nv_connector;

        nv_connector = nouveau_encoder_connector_get(nv_encoder);
        if (nv_connector && nv_connector->native_mode) {
                if (nv_connector->scaling_mode != DRM_MODE_SCALE_NONE) {
                        int id = adjusted_mode->base.id;
                        *adjusted_mode = *nv_connector->native_mode;
                        adjusted_mode->base.id = id;
                }
        }

        return true;
}

static void
nvd0_sor_prepare(struct drm_encoder *encoder)
{
}

static void
nvd0_sor_commit(struct drm_encoder *encoder)
{
}

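/* Program the SOR control method for this output: the low bits select the
 * owning CRTC, while the remaining bits pick link/protocol based on which
 * sublink is used and whether the pixel clock needs dual-link TMDS
 * (above 165MHz).
 */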
static void
nvd0_sor_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
                  struct drm_display_mode *adjusted_mode)
{
        struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
        struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
        u32 mode_ctrl = (1 << nv_crtc->index);
        u32 *push;

        if (nv_encoder->dcb->sorconf.link & 1) {
                if (adjusted_mode->clock < 165000)
                        mode_ctrl |= 0x00000100;
                else
                        mode_ctrl |= 0x00000500;
        } else {
                mode_ctrl |= 0x00000200;
        }

        nvd0_sor_dpms(encoder, DRM_MODE_DPMS_ON);

        push = evo_wait(encoder->dev, 0, 2);
        if (push) {
                evo_mthd(push, 0x0200 + (nv_encoder->or * 0x20), 1);
                evo_data(push, mode_ctrl);
                /* submit the queued method by advancing PUT */
                evo_kick(push, encoder->dev, 0);
        }

        nv_encoder->crtc = encoder->crtc;
}

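/* Detach the SOR from its CRTC: zero its control method, follow up with an
 * update (method 0x0080) and mark the encoder as powered down.
 */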
static void
nvd0_sor_disconnect(struct drm_encoder *encoder)
{
        struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
        struct drm_device *dev = encoder->dev;

        if (nv_encoder->crtc) {
                u32 *push = evo_wait(dev, 0, 4);
                if (push) {
                        evo_mthd(push, 0x0200 + (nv_encoder->or * 0x20), 1);
                        evo_data(push, 0x00000000);
                        evo_mthd(push, 0x0080, 1);
                        evo_data(push, 0x00000000);
                        evo_kick(push, dev, 0);
                }

                nv_encoder->crtc = NULL;
                nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;
        }
}

static void
nvd0_sor_destroy(struct drm_encoder *encoder)
{
        drm_encoder_cleanup(encoder);
        kfree(encoder);
}

static const struct drm_encoder_helper_funcs nvd0_sor_hfunc = {
        .dpms = nvd0_sor_dpms,
        .mode_fixup = nvd0_sor_mode_fixup,
        .prepare = nvd0_sor_prepare,
        .commit = nvd0_sor_commit,
        .mode_set = nvd0_sor_mode_set,
        .disable = nvd0_sor_disconnect,
        .get_crtc = nvd0_display_crtc_get,
};

static const struct drm_encoder_funcs nvd0_sor_func = {
        .destroy = nvd0_sor_destroy,
};

static int
nvd0_sor_create(struct drm_connector *connector, struct dcb_entry *dcbe)
{
        struct drm_device *dev = connector->dev;
        struct nouveau_encoder *nv_encoder;
        struct drm_encoder *encoder;

        nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
        if (!nv_encoder)
                return -ENOMEM;
        nv_encoder->dcb = dcbe;
        nv_encoder->or = ffs(dcbe->or) - 1;
        nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;

        encoder = to_drm_encoder(nv_encoder);
        encoder->possible_crtcs = dcbe->heads;
        encoder->possible_clones = 0;
        drm_encoder_init(dev, encoder, &nvd0_sor_func, DRM_MODE_ENCODER_TMDS);
        drm_encoder_helper_add(encoder, &nvd0_sor_hfunc);

        drm_mode_connector_attach_encoder(connector, encoder);
        return 0;
}

/******************************************************************************
 * IRQ
 *****************************************************************************/
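/* PDISP interrupt handler.  Bit 1 signals an EVO channel exception; the
 * offending channel, method and data are logged and the exception is
 * acknowledged.  Bits 24 and 25 are acknowledged but otherwise ignored in
 * this early code.
 */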
static void
nvd0_display_intr(struct drm_device *dev)
{
        u32 intr = nv_rd32(dev, 0x610088);

        if (intr & 0x00000002) {
                u32 stat = nv_rd32(dev, 0x61009c);
                int chid = ffs(stat) - 1;
                if (chid >= 0) {
                        u32 mthd = nv_rd32(dev, 0x6101f0 + (chid * 12));
                        u32 data = nv_rd32(dev, 0x6101f4 + (chid * 12));
                        u32 unkn = nv_rd32(dev, 0x6101f8 + (chid * 12));

                        NV_INFO(dev, "EvoCh: chid %d mthd 0x%04x data 0x%08x "
                                     "0x%08x 0x%08x\n",
                                chid, (mthd & 0x0000ffc), data, mthd, unkn);
                        nv_wr32(dev, 0x61009c, (1 << chid));
                        nv_wr32(dev, 0x6101f0 + (chid * 12), 0x90000000);
                }

                intr &= ~0x00000002;
        }

        if (intr & 0x01000000) {
                u32 stat = nv_rd32(dev, 0x6100bc);
                nv_wr32(dev, 0x6100bc, stat);
                intr &= ~0x01000000;
        }

        if (intr & 0x02000000) {
                u32 stat = nv_rd32(dev, 0x6108bc);
                nv_wr32(dev, 0x6108bc, stat);
                intr &= ~0x02000000;
        }

        if (intr)
                NV_INFO(dev, "PDISP: unknown intr 0x%08x\n", intr);
}

/******************************************************************************
 * Init
 *****************************************************************************/
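/* Tear the display channels down: stop the two cursor channels (13 and 14)
 * if they are active, then the core/master channel, and mask their
 * interrupts.
 */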
static void
nvd0_display_fini(struct drm_device *dev)
{
        int i;

        /* fini cursors */
        for (i = 14; i >= 13; i--) {
                if (!(nv_rd32(dev, 0x610490 + (i * 0x10)) & 0x00000001))
                        continue;

                nv_mask(dev, 0x610490 + (i * 0x10), 0x00000001, 0x00000000);
                nv_wait(dev, 0x610490 + (i * 0x10), 0x00010000, 0x00000000);
                nv_mask(dev, 0x610090, 1 << i, 0x00000000);
                nv_mask(dev, 0x6100a0, 1 << i, 0x00000000);
        }

        /* fini master */
        if (nv_rd32(dev, 0x610490) & 0x00000010) {
                nv_mask(dev, 0x610490, 0x00000010, 0x00000000);
                nv_mask(dev, 0x610490, 0x00000003, 0x00000000);
                nv_wait(dev, 0x610490, 0x80000000, 0x00000000);
                nv_mask(dev, 0x610090, 0x00000001, 0x00000000);
                nv_mask(dev, 0x6100a0, 0x00000001, 0x00000000);
        }
}

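/* Bring the display engine up: complete what appears to be a hand-off from
 * the VBIOS/firmware if one is pending, point the hardware at our instance
 * block, start the core (master) channel and both cursor channels, then
 * submit the initial methods binding the MEM_SYNC object.
 */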
int
nvd0_display_init(struct drm_device *dev)
{
        struct nvd0_display *disp = nvd0_display(dev);
        u32 *push;
        int i;

        if (nv_rd32(dev, 0x6100ac) & 0x00000100) {
                nv_wr32(dev, 0x6100ac, 0x00000100);
                nv_mask(dev, 0x6194e8, 0x00000001, 0x00000000);
                if (!nv_wait(dev, 0x6194e8, 0x00000002, 0x00000000)) {
                        NV_ERROR(dev, "PDISP: 0x6194e8 0x%08x\n",
                                 nv_rd32(dev, 0x6194e8));
                        return -EBUSY;
                }
        }

        nv_wr32(dev, 0x610010, (disp->mem->vinst >> 8) | 9);

        /* init master */
        nv_wr32(dev, 0x610494, (disp->evo[0].handle >> 8) | 3);
        nv_wr32(dev, 0x610498, 0x00010000);
        nv_wr32(dev, 0x61049c, 0x00000001);
        nv_mask(dev, 0x610490, 0x00000010, 0x00000010);
        nv_wr32(dev, 0x640000, 0x00000000);
        nv_wr32(dev, 0x610490, 0x01000013);
        if (!nv_wait(dev, 0x610490, 0x80000000, 0x00000000)) {
                NV_ERROR(dev, "PDISP: master 0x%08x\n",
                         nv_rd32(dev, 0x610490));
                return -EBUSY;
        }
        nv_mask(dev, 0x610090, 0x00000001, 0x00000001);
        nv_mask(dev, 0x6100a0, 0x00000001, 0x00000001);

        /* init cursors */
        for (i = 13; i <= 14; i++) {
                nv_wr32(dev, 0x610490 + (i * 0x10), 0x00000001);
                if (!nv_wait(dev, 0x610490 + (i * 0x10), 0x00010000, 0x00010000)) {
                        NV_ERROR(dev, "PDISP: curs%d 0x%08x\n", i,
                                 nv_rd32(dev, 0x610490 + (i * 0x10)));
                        return -EBUSY;
                }

                nv_mask(dev, 0x610090, 1 << i, 1 << i);
                nv_mask(dev, 0x6100a0, 1 << i, 1 << i);
        }

        push = evo_wait(dev, 0, 32);
        if (!push)
                return -EBUSY;
        evo_mthd(push, 0x0088, 1);
        evo_data(push, MEM_SYNC);
        evo_mthd(push, 0x0084, 1);
        evo_data(push, 0x00000000);
        evo_mthd(push, 0x0084, 1);
        evo_data(push, 0x80000000);
        evo_mthd(push, 0x008c, 1);
        evo_data(push, 0x00000000);
        evo_kick(push, dev, 0);

        return 0;
}

void
nvd0_display_destroy(struct drm_device *dev)
{
        struct drm_nouveau_private *dev_priv = dev->dev_private;
        struct nvd0_display *disp = nvd0_display(dev);
        struct pci_dev *pdev = dev->pdev;

        nvd0_display_fini(dev);

        pci_free_consistent(pdev, PAGE_SIZE, disp->evo[0].ptr, disp->evo[0].handle);
        nouveau_gpuobj_ref(NULL, &disp->mem);
        nouveau_irq_unregister(dev, 26);

        dev_priv->engine.display.priv = NULL;
        kfree(disp);
}

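/* Create connector/encoder objects from the VBIOS DCB table (only on-chip
 * TMDS outputs are handled so far), register the PDISP interrupt handler,
 * build the hash table and DMA objects for the sync area and VRAM, allocate
 * the EVO push buffer, and initialise the hardware.
 */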
int
nvd0_display_create(struct drm_device *dev)
{
        struct drm_nouveau_private *dev_priv = dev->dev_private;
        struct nouveau_instmem_engine *pinstmem = &dev_priv->engine.instmem;
        struct dcb_table *dcb = &dev_priv->vbios.dcb;
        struct drm_connector *connector, *tmp;
        struct pci_dev *pdev = dev->pdev;
        struct nvd0_display *disp;
        struct dcb_entry *dcbe;
        int ret, i;

        disp = kzalloc(sizeof(*disp), GFP_KERNEL);
        if (!disp)
                return -ENOMEM;
        dev_priv->engine.display.priv = disp;

        /* create encoder/connector objects based on VBIOS DCB table */
        for (i = 0, dcbe = &dcb->entry[0]; i < dcb->entries; i++, dcbe++) {
                connector = nouveau_connector_create(dev, dcbe->connector);
                if (IS_ERR(connector))
                        continue;

                if (dcbe->location != DCB_LOC_ON_CHIP) {
                        NV_WARN(dev, "skipping off-chip encoder %d/%d\n",
                                dcbe->type, ffs(dcbe->or) - 1);
                        continue;
                }

                switch (dcbe->type) {
                case OUTPUT_TMDS:
                        nvd0_sor_create(connector, dcbe);
                        break;
                default:
                        NV_WARN(dev, "skipping unsupported encoder %d/%d\n",
                                dcbe->type, ffs(dcbe->or) - 1);
                        continue;
                }
        }

        /* cull any connectors we created that don't have an encoder */
        list_for_each_entry_safe(connector, tmp, &dev->mode_config.connector_list, head) {
                if (connector->encoder_ids[0])
                        continue;

                NV_WARN(dev, "%s has no encoders, removing\n",
                        drm_get_connector_name(connector));
                connector->funcs->destroy(connector);
        }

        /* setup interrupt handling */
        nouveau_irq_register(dev, 26, nvd0_display_intr);

        /* hash table and dma objects for the memory areas we care about */
        ret = nouveau_gpuobj_new(dev, NULL, 0x4000, 0x10000,
                                 NVOBJ_FLAG_ZERO_ALLOC, &disp->mem);
        if (ret)
                goto out;

        nv_wo32(disp->mem, 0x1000, 0x00000049);
        nv_wo32(disp->mem, 0x1004, (disp->mem->vinst + 0x2000) >> 8);
        nv_wo32(disp->mem, 0x1008, (disp->mem->vinst + 0x2fff) >> 8);
        nv_wo32(disp->mem, 0x100c, 0x00000000);
        nv_wo32(disp->mem, 0x1010, 0x00000000);
        nv_wo32(disp->mem, 0x1014, 0x00000000);
        nv_wo32(disp->mem, 0x0000, MEM_SYNC);
        nv_wo32(disp->mem, 0x0004, (0x1000 << 9) | 0x00000001);

        nv_wo32(disp->mem, 0x1020, 0x00000009);
        nv_wo32(disp->mem, 0x1024, 0x00000000);
        nv_wo32(disp->mem, 0x1028, (dev_priv->vram_size - 1) >> 8);
        nv_wo32(disp->mem, 0x102c, 0x00000000);
        nv_wo32(disp->mem, 0x1030, 0x00000000);
        nv_wo32(disp->mem, 0x1034, 0x00000000);
        nv_wo32(disp->mem, 0x0008, MEM_VRAM);
        nv_wo32(disp->mem, 0x000c, (0x1020 << 9) | 0x00000001);

        pinstmem->flush(dev);

        /* push buffers for evo channels */
        disp->evo[0].ptr =
                pci_alloc_consistent(pdev, PAGE_SIZE, &disp->evo[0].handle);
        if (!disp->evo[0].ptr) {
                ret = -ENOMEM;
                goto out;
        }

        ret = nvd0_display_init(dev);
        if (ret)
                goto out;

out:
        if (ret)
                nvd0_display_destroy(dev);
        return ret;
}