2 * Copyright 2011 Red Hat Inc.
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
11 * The above copyright notice and this permission notice shall be included in
12 * all copies or substantial portions of the Software.
14 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
17 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20 * OTHER DEALINGS IN THE SOFTWARE.
25 #include <linux/dma-mapping.h>
28 #include "drm_crtc_helper.h"
30 #include "nouveau_drv.h"
31 #include "nouveau_connector.h"
32 #include "nouveau_encoder.h"
33 #include "nouveau_crtc.h"
/* Handles of the DMA objects entered into the display hash table below:
 * MEM_SYNC covers the sync/notifier area, MEM_VRAM covers all of VRAM.
 * NOTE(review): names/roles inferred from how nvd0_display_create() fills
 * disp->mem — confirm against PDISP documentation. */
35 #define MEM_SYNC 0xe0000001
36 #define MEM_VRAM 0xe0010000
39 struct nouveau_gpuobj *mem;
/* Return the nvd0 display state stashed in the per-device private struct
 * by nvd0_display_create(). */
46 static struct nvd0_display *
47 nvd0_display(struct drm_device *dev)
49 struct drm_nouveau_private *dev_priv = dev->dev_private;
50 return dev_priv->engine.display.priv;
/* Submit a single "immediate" method to EVO channel 'id' through the PDISP
 * debug-control registers at 0x610700 + id*0x10, bypassing the push buffer.
 * NOTE(review): register semantics inferred from usage here — bit 0 of
 * ...0700 appears to gate immediate mode, ...0704 takes data first and then
 * the method with bit 31 as a "busy/kick" flag; confirm against PDISP docs. */
54 evo_icmd(struct drm_device *dev, int id, u32 mthd, u32 data)
/* Enter immediate-command mode for this channel. */
57 nv_mask(dev, 0x610700 + (id * 0x10), 0x00000001, 0x00000001);
58 nv_wr32(dev, 0x610704 + (id * 0x10), data);
/* Latch the method and set the kick bit; wait for hardware to clear it. */
59 nv_mask(dev, 0x610704 + (id * 0x10), 0x80000ffc, 0x80000000 | mthd);
60 if (!nv_wait(dev, 0x610704 + (id * 0x10), 0x80000000, 0x00000000))
/* Leave immediate-command mode. */
62 nv_mask(dev, 0x610700 + (id * 0x10), 0x00000001, 0x00000000);
/* Reserve room for 'nr' 32-bit words in EVO channel 'id's push buffer and
 * return a CPU pointer at the current PUT position.  The register at
 * 0x640000 + id*0x1000 holds PUT as a byte offset (hence the /4 to get a
 * u32 index).  If the request would run past the one-page buffer, a
 * jump-to-start command (0x20000000) is written, PUT is reset to 0, and we
 * wait for GET (0x640004...) to drain back to the start.
 * NOTE(review): on the DMA-stall timeout the error path is elided in this
 * view — presumably returns NULL; callers must tolerate that. */
67 evo_wait(struct drm_device *dev, int id, int nr)
69 struct nvd0_display *disp = nvd0_display(dev);
70 u32 put = nv_rd32(dev, 0x640000 + (id * 0x1000)) / 4;
/* Not enough room before the end of the page: wrap to the start. */
72 if (put + nr >= (PAGE_SIZE / 4)) {
73 disp->evo[id].ptr[put] = 0x20000000;
75 nv_wr32(dev, 0x640000 + (id * 0x1000), 0x00000000);
76 if (!nv_wait(dev, 0x640004 + (id * 0x1000), ~0, 0x00000000)) {
77 NV_ERROR(dev, "evo %d dma stalled\n", id);
84 return disp->evo[id].ptr + put;
/* Publish commands written since evo_wait(): advance the channel's PUT
 * register to 'push' (converted from a u32 index back to a byte offset). */
88 evo_kick(u32 *push, struct drm_device *dev, int id)
90 struct nvd0_display *disp = nvd0_display(dev);
91 nv_wr32(dev, 0x640000 + (id * 0x1000), (push - disp->evo[id].ptr) << 2);
/* Append an EVO method header (data-word count 's' in bits 18+, method 'm'
 * in the low bits) or a data word to the push buffer, advancing 'p'. */
94 #define evo_mthd(p,m,s) *((p)++) = (((s) << 18) | (m))
95 #define evo_data(p,d) *((p)++) = (d)
/* drm_encoder_helper_funcs.get_crtc: report the CRTC this encoder was last
 * bound to by nvd0_sor_mode_set() (NULL after disconnect). */
97 static struct drm_crtc *
98 nvd0_display_crtc_get(struct drm_encoder *encoder)
100 return nouveau_encoder(encoder)->crtc;
103 /******************************************************************************
105 *****************************************************************************/
107 /******************************************************************************
109 *****************************************************************************/
/* DPMS hook for SOR encoders: power the output resource (OR) up or down.
 * If another TMDS encoder shares the same OR and was left on, the power
 * transition is skipped (the elided branch presumably returns early —
 * confirm).  dpms_ctrl bit 0 selects on/off; bit 31 looks like an
 * update/pending flag — NOTE(review): verify against PDISP docs. */
111 nvd0_sor_dpms(struct drm_encoder *encoder, int mode)
113 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
114 struct drm_device *dev = encoder->dev;
115 struct drm_encoder *partner;
116 int or = nv_encoder->or;
119 nv_encoder->last_dpms = mode;
/* Scan for another TMDS encoder driving the same OR that is still on. */
121 list_for_each_entry(partner, &dev->mode_config.encoder_list, head) {
122 struct nouveau_encoder *nv_partner = nouveau_encoder(partner);
124 if (partner->encoder_type != DRM_MODE_ENCODER_TMDS)
127 if (nv_partner != nv_encoder &&
128 nv_partner->dcb->or == nv_encoder->or) {
129 if (nv_partner->last_dpms == DRM_MODE_DPMS_ON)
135 dpms_ctrl = (mode == DRM_MODE_DPMS_ON);
136 dpms_ctrl |= 0x80000000;
/* Wait idle, apply the new power state, then wait for it to take effect. */
138 nv_wait(dev, 0x61c004 + (or * 0x0800), 0x80000000, 0x00000000);
139 nv_mask(dev, 0x61c004 + (or * 0x0800), 0x80000001, dpms_ctrl);
140 nv_wait(dev, 0x61c004 + (or * 0x0800), 0x80000000, 0x00000000);
141 nv_wait(dev, 0x61c030 + (or * 0x0800), 0x10000000, 0x00000000);
/* mode_fixup hook: when the connector has a fixed native mode (e.g. a
 * panel) and scaling is enabled, substitute the native mode for the
 * requested one so the scaler can do the fitting.  The adjusted mode's
 * base.id is preserved across the struct copy. */
145 nvd0_sor_mode_fixup(struct drm_encoder *encoder, struct drm_display_mode *mode,
146 struct drm_display_mode *adjusted_mode)
148 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
149 struct nouveau_connector *nv_connector;
151 nv_connector = nouveau_encoder_connector_get(nv_encoder);
152 if (nv_connector && nv_connector->native_mode) {
153 if (nv_connector->scaling_mode != DRM_MODE_SCALE_NONE) {
/* Keep the drm mode object identity while replacing its timings. */
154 int id = adjusted_mode->base.id;
155 *adjusted_mode = *nv_connector->native_mode;
156 adjusted_mode->base.id = id;
/* prepare hook required by the encoder helper framework; body not visible
 * here — presumably empty (no hardware setup needed before mode set). */
164 nvd0_sor_prepare(struct drm_encoder *encoder)
/* commit hook required by the encoder helper framework; body not visible
 * here — presumably empty (mode set already applied in mode_set). */
169 nvd0_sor_commit(struct drm_encoder *encoder)
/* mode_set hook: program the SOR's control method (0x0200 + or*0x20) on the
 * core EVO channel to bind it to the owning CRTC and select a link
 * configuration.  mode_ctrl bit (1 << crtc->index) selects the CRTC;
 * 0x100/0x500/0x200 select the link mode — NOTE(review): the clock<165000
 * threshold suggests single- vs dual-link TMDS on link A, 0x200 link B;
 * confirm against PDISP docs. */
174 nvd0_sor_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
175 struct drm_display_mode *adjusted_mode)
177 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
178 struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
179 u32 mode_ctrl = (1 << nv_crtc->index);
182 if (nv_encoder->dcb->sorconf.link & 1) {
183 if (adjusted_mode->clock < 165000)
184 mode_ctrl |= 0x00000100;
186 mode_ctrl |= 0x00000500;
188 mode_ctrl |= 0x00000200;
/* Power the OR up before attaching it to the CRTC. */
191 nvd0_sor_dpms(encoder, DRM_MODE_DPMS_ON);
193 push = evo_wait(encoder->dev, 0, 2);
195 evo_mthd(push, 0x0200 + (nv_encoder->or * 0x20), 1);
196 evo_data(push, mode_ctrl);
/* Remember the binding so crtc_get()/disconnect() can find it. */
199 nv_encoder->crtc = encoder->crtc;
/* disable hook: detach the SOR from its CRTC by writing 0 to its control
 * method, followed by method 0x0080 (an update/commit trigger, judging by
 * its use after state changes — confirm), then clear the cached binding. */
203 nvd0_sor_disconnect(struct drm_encoder *encoder)
205 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
206 struct drm_device *dev = encoder->dev;
/* Only touch hardware if we are actually bound to a CRTC. */
208 if (nv_encoder->crtc) {
209 u32 *push = evo_wait(dev, 0, 4);
211 evo_mthd(push, 0x0200 + (nv_encoder->or * 0x20), 1);
212 evo_data(push, 0x00000000);
213 evo_mthd(push, 0x0080, 1);
214 evo_data(push, 0x00000000);
215 evo_kick(push, dev, 0);
218 nv_encoder->crtc = NULL;
219 nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;
/* destroy hook: release DRM encoder resources (the nouveau_encoder kfree,
 * if any, is not visible in this view). */
224 nvd0_sor_destroy(struct drm_encoder *encoder)
226 drm_encoder_cleanup(encoder);
/* Encoder helper vtable wiring the SOR hooks above into the DRM mode-set
 * helper framework. */
230 static const struct drm_encoder_helper_funcs nvd0_sor_hfunc = {
231 .dpms = nvd0_sor_dpms,
232 .mode_fixup = nvd0_sor_mode_fixup,
233 .prepare = nvd0_sor_prepare,
234 .commit = nvd0_sor_commit,
235 .mode_set = nvd0_sor_mode_set,
236 .disable = nvd0_sor_disconnect,
237 .get_crtc = nvd0_display_crtc_get,
/* Base encoder vtable; only destruction is needed here. */
240 static const struct drm_encoder_funcs nvd0_sor_func = {
241 .destroy = nvd0_sor_destroy,
/* Allocate and register a SOR encoder for DCB entry 'dcbe' and attach it to
 * 'connector'.  The OR index is the bit position of dcbe->or.
 * NOTE(review): the kzalloc failure check (original lines 252-253) is not
 * visible in this view — confirm the NULL return is handled. */
245 nvd0_sor_create(struct drm_connector *connector, struct dcb_entry *dcbe)
247 struct drm_device *dev = connector->dev;
248 struct nouveau_encoder *nv_encoder;
249 struct drm_encoder *encoder;
251 nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
254 nv_encoder->dcb = dcbe;
255 nv_encoder->or = ffs(dcbe->or) - 1;
256 nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;
258 encoder = to_drm_encoder(nv_encoder);
/* Restrict the encoder to the CRTCs the DCB says it can reach. */
259 encoder->possible_crtcs = dcbe->heads;
260 encoder->possible_clones = 0;
261 drm_encoder_init(dev, encoder, &nvd0_sor_func, DRM_MODE_ENCODER_TMDS);
262 drm_encoder_helper_add(encoder, &nvd0_sor_hfunc);
264 drm_mode_connector_attach_encoder(connector, encoder);
268 /******************************************************************************
270 *****************************************************************************/
/* PDISP interrupt handler (registered on irq line 26).  Reads the top-level
 * status at 0x610088 and handles:
 *   bit 1  - EVO channel exception: log the offending channel/method/data
 *            and acknowledge it;
 *   bits 24/25 - per-head status at 0x6100bc/0x6108bc, acknowledged by
 *            writing the value back (presumably vblank/display events —
 *            NOTE(review): confirm against PDISP docs).
 * Anything else falls through to the "unknown intr" message. */
272 nvd0_display_intr(struct drm_device *dev)
274 u32 intr = nv_rd32(dev, 0x610088);
276 if (intr & 0x00000002) {
277 u32 stat = nv_rd32(dev, 0x61009c);
/* Lowest set bit identifies the faulting EVO channel. */
278 int chid = ffs(stat) - 1;
280 u32 mthd = nv_rd32(dev, 0x6101f0 + (chid * 12));
281 u32 data = nv_rd32(dev, 0x6101f4 + (chid * 12));
282 u32 unkn = nv_rd32(dev, 0x6101f8 + (chid * 12));
284 NV_INFO(dev, "EvoCh: chid %d mthd 0x%04x data 0x%08x "
286 chid, (mthd & 0x0000ffc), data, mthd, unkn);
/* Acknowledge the exception and re-arm the channel. */
287 nv_wr32(dev, 0x61009c, (1 << chid));
288 nv_wr32(dev, 0x6101f0 + (chid * 12), 0x90000000);
294 if (intr & 0x01000000) {
295 u32 stat = nv_rd32(dev, 0x6100bc);
296 nv_wr32(dev, 0x6100bc, stat);
300 if (intr & 0x02000000) {
301 u32 stat = nv_rd32(dev, 0x6108bc);
302 nv_wr32(dev, 0x6108bc, stat);
307 NV_INFO(dev, "PDISP: unknown intr 0x%08x\n", intr);
310 /******************************************************************************
312 *****************************************************************************/
/* Shut the display engine down: first disable the cursor channels (14 then
 * 13) if active, then the master EVO channel, clearing the matching
 * interrupt-enable bits in 0x610090/0x6100a0 in each case.
 * NOTE(review): channel indices 13/14 as cursor channels inferred from the
 * matching init loop below — confirm against PDISP docs. */
314 nvd0_display_fini(struct drm_device *dev)
/* Tear down cursor channels that report active (bit 0 of ...490). */
319 for (i = 14; i >= 13; i--) {
320 if (!(nv_rd32(dev, 0x610490 + (i * 0x10)) & 0x00000001))
323 nv_mask(dev, 0x610490 + (i * 0x10), 0x00000001, 0x00000000);
324 nv_wait(dev, 0x610490 + (i * 0x10), 0x00010000, 0x00000000);
325 nv_mask(dev, 0x610090, 1 << i, 0x00000000);
326 nv_mask(dev, 0x6100a0, 1 << i, 0x00000000);
/* Then the master channel, if it was brought up. */
330 if (nv_rd32(dev, 0x610490) & 0x00000010) {
331 nv_mask(dev, 0x610490, 0x00000010, 0x00000000);
332 nv_mask(dev, 0x610490, 0x00000003, 0x00000000);
333 nv_wait(dev, 0x610490, 0x80000000, 0x00000000);
334 nv_mask(dev, 0x610090, 0x00000001, 0x00000000);
335 nv_mask(dev, 0x6100a0, 0x00000001, 0x00000000);
/* Bring the display engine up:
 *  1. if 0x6100ac flags a pending condition (bit 8), acknowledge it and
 *     handshake via 0x6194e8 (NOTE(review): exact meaning unconfirmed);
 *  2. point the hash-table/object area at disp->mem (0x610010);
 *  3. start the master EVO channel (index 0) with its push buffer handle,
 *     then the two cursor channels (13, 14), enabling their interrupt bits
 *     in 0x610090/0x6100a0;
 *  4. submit an initial push: bind the MEM_SYNC DMA object via method
 *     0x0088 and poke method 0x0084 (appears to be a state/update toggle —
 *     confirm against PDISP docs). */
340 nvd0_display_init(struct drm_device *dev)
342 struct nvd0_display *disp = nvd0_display(dev);
346 if (nv_rd32(dev, 0x6100ac) & 0x00000100) {
347 nv_wr32(dev, 0x6100ac, 0x00000100);
348 nv_mask(dev, 0x6194e8, 0x00000001, 0x00000000);
349 if (!nv_wait(dev, 0x6194e8, 0x00000002, 0x00000000)) {
350 NV_ERROR(dev, "PDISP: 0x6194e8 0x%08x\n",
351 nv_rd32(dev, 0x6194e8));
/* Instance address of the hash table / object memory, 256-byte units. */
356 nv_wr32(dev, 0x610010, (disp->mem->vinst >> 8) | 9);
/* Master EVO channel: push buffer handle, then enable and wait ready. */
359 nv_wr32(dev, 0x610494, (disp->evo[0].handle >> 8) | 3);
360 nv_wr32(dev, 0x610498, 0x00010000);
361 nv_wr32(dev, 0x61049c, 0x00000001);
362 nv_mask(dev, 0x610490, 0x00000010, 0x00000010);
363 nv_wr32(dev, 0x640000, 0x00000000);
364 nv_wr32(dev, 0x610490, 0x01000013);
365 if (!nv_wait(dev, 0x610490, 0x80000000, 0x00000000)) {
366 NV_ERROR(dev, "PDISP: master 0x%08x\n",
367 nv_rd32(dev, 0x610490));
370 nv_mask(dev, 0x610090, 0x00000001, 0x00000001);
371 nv_mask(dev, 0x6100a0, 0x00000001, 0x00000001);
/* Cursor channels 13 and 14: enable and wait for ready (bit 16). */
374 for (i = 13; i <= 14; i++) {
375 nv_wr32(dev, 0x610490 + (i * 0x10), 0x00000001);
376 if (!nv_wait(dev, 0x610490 + (i * 0x10), 0x00010000, 0x00010000)) {
377 NV_ERROR(dev, "PDISP: curs%d 0x%08x\n", i,
378 nv_rd32(dev, 0x610490 + (i * 0x10)));
382 nv_mask(dev, 0x610090, 1 << i, 1 << i);
383 nv_mask(dev, 0x6100a0, 1 << i, 1 << i);
/* Initial push on the master channel. */
386 push = evo_wait(dev, 0, 32);
389 evo_mthd(push, 0x0088, 1);
390 evo_data(push, MEM_SYNC);
391 evo_mthd(push, 0x0084, 1);
392 evo_data(push, 0x00000000);
393 evo_mthd(push, 0x0084, 1);
394 evo_data(push, 0x80000000);
395 evo_mthd(push, 0x008c, 1);
396 evo_data(push, 0x00000000);
397 evo_kick(push, dev, 0);
/* Tear down everything nvd0_display_create() set up: quiesce the hardware,
 * free the EVO push-buffer page, drop the hash-table gpuobj reference,
 * unhook the irq handler, and clear the private pointer. */
403 nvd0_display_destroy(struct drm_device *dev)
405 struct drm_nouveau_private *dev_priv = dev->dev_private;
406 struct nvd0_display *disp = nvd0_display(dev);
407 struct pci_dev *pdev = dev->pdev;
409 nvd0_display_fini(dev);
411 pci_free_consistent(pdev, PAGE_SIZE, disp->evo[0].ptr, disp->evo[0].handle);
412 nouveau_gpuobj_ref(NULL, &disp->mem);
413 nouveau_irq_unregister(dev, 26);
415 dev_priv->engine.display.priv = NULL;
420 nvd0_display_create(struct drm_device *dev)
422 struct drm_nouveau_private *dev_priv = dev->dev_private;
423 struct nouveau_instmem_engine *pinstmem = &dev_priv->engine.instmem;
424 struct dcb_table *dcb = &dev_priv->vbios.dcb;
425 struct drm_connector *connector, *tmp;
426 struct pci_dev *pdev = dev->pdev;
427 struct nvd0_display *disp;
428 struct dcb_entry *dcbe;
431 disp = kzalloc(sizeof(*disp), GFP_KERNEL);
434 dev_priv->engine.display.priv = disp;
436 /* create encoder/connector objects based on VBIOS DCB table */
437 for (i = 0, dcbe = &dcb->entry[0]; i < dcb->entries; i++, dcbe++) {
438 connector = nouveau_connector_create(dev, dcbe->connector);
439 if (IS_ERR(connector))
442 if (dcbe->location != DCB_LOC_ON_CHIP) {
443 NV_WARN(dev, "skipping off-chip encoder %d/%d\n",
444 dcbe->type, ffs(dcbe->or) - 1);
448 switch (dcbe->type) {
450 nvd0_sor_create(connector, dcbe);
453 NV_WARN(dev, "skipping unsupported encoder %d/%d\n",
454 dcbe->type, ffs(dcbe->or) - 1);
459 /* cull any connectors we created that don't have an encoder */
460 list_for_each_entry_safe(connector, tmp, &dev->mode_config.connector_list, head) {
461 if (connector->encoder_ids[0])
464 NV_WARN(dev, "%s has no encoders, removing\n",
465 drm_get_connector_name(connector));
466 connector->funcs->destroy(connector);
469 /* setup interrupt handling */
470 nouveau_irq_register(dev, 26, nvd0_display_intr);
472 /* hash table and dma objects for the memory areas we care about */
473 ret = nouveau_gpuobj_new(dev, NULL, 0x4000, 0x10000,
474 NVOBJ_FLAG_ZERO_ALLOC, &disp->mem);
478 nv_wo32(disp->mem, 0x1000, 0x00000049);
479 nv_wo32(disp->mem, 0x1004, (disp->mem->vinst + 0x2000) >> 8);
480 nv_wo32(disp->mem, 0x1008, (disp->mem->vinst + 0x2fff) >> 8);
481 nv_wo32(disp->mem, 0x100c, 0x00000000);
482 nv_wo32(disp->mem, 0x1010, 0x00000000);
483 nv_wo32(disp->mem, 0x1014, 0x00000000);
484 nv_wo32(disp->mem, 0x0000, MEM_SYNC);
485 nv_wo32(disp->mem, 0x0004, (0x1000 << 9) | 0x00000001);
487 nv_wo32(disp->mem, 0x1020, 0x00000009);
488 nv_wo32(disp->mem, 0x1024, 0x00000000);
489 nv_wo32(disp->mem, 0x1028, (dev_priv->vram_size - 1) >> 8);
490 nv_wo32(disp->mem, 0x102c, 0x00000000);
491 nv_wo32(disp->mem, 0x1030, 0x00000000);
492 nv_wo32(disp->mem, 0x1034, 0x00000000);
493 nv_wo32(disp->mem, 0x0008, MEM_VRAM);
494 nv_wo32(disp->mem, 0x000c, (0x1020 << 9) | 0x00000001);
496 pinstmem->flush(dev);
498 /* push buffers for evo channels */
500 pci_alloc_consistent(pdev, PAGE_SIZE, &disp->evo[0].handle);
501 if (!disp->evo[0].ptr) {
506 ret = nvd0_display_init(dev);
512 nvd0_display_destroy(dev);