2 * Copyright 2011 Red Hat Inc.
4 * Permission is hereby granted, free of charge, to any person obtaining a
5 * copy of this software and associated documentation files (the "Software"),
6 * to deal in the Software without restriction, including without limitation
7 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
8 * and/or sell copies of the Software, and to permit persons to whom the
9 * Software is furnished to do so, subject to the following conditions:
11 * The above copyright notice and this permission notice shall be included in
12 * all copies or substantial portions of the Software.
14 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
15 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
16 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
17 * THE COPYRIGHT HOLDER(S) OR AUTHOR(S) BE LIABLE FOR ANY CLAIM, DAMAGES OR
18 * OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE,
19 * ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
20 * OTHER DEALINGS IN THE SOFTWARE.
25 #include <linux/dma-mapping.h>
28 #include "drm_crtc_helper.h"
30 #include "nouveau_drv.h"
31 #include "nouveau_connector.h"
32 #include "nouveau_encoder.h"
33 #include "nouveau_crtc.h"
34 #include "nouveau_fb.h"
36 #define MEM_SYNC 0xe0000001
37 #define MEM_VRAM 0xe0010000
38 #include "nouveau_dma.h"
41 struct nouveau_gpuobj *mem;
/* --- EVO channel helpers ---------------------------------------------------
 * NOTE(review): this chunk is an incomplete extraction (braces and some
 * declarations are missing from view); comments describe only what the
 * visible lines show.
 */

/* Return the per-device NVD0 display private state. */
48 static struct nvd0_display *
49 nvd0_display(struct drm_device *dev)
51 struct drm_nouveau_private *dev_priv = dev->dev_private;
52 return dev_priv->engine.display.priv;

/* Submit a single immediate method+data to EVO channel 'id' via the
 * 0x610700/0x610704 debug/immediate-command registers, waiting for the
 * busy bit (0x80000000) to clear before releasing the channel. */
56 evo_icmd(struct drm_device *dev, int id, u32 mthd, u32 data)
59 nv_mask(dev, 0x610700 + (id * 0x10), 0x00000001, 0x00000001);
60 nv_wr32(dev, 0x610704 + (id * 0x10), data);
61 nv_mask(dev, 0x610704 + (id * 0x10), 0x80000ffc, 0x80000000 | mthd);
62 if (!nv_wait(dev, 0x610704 + (id * 0x10), 0x80000000, 0x00000000))
64 nv_mask(dev, 0x610700 + (id * 0x10), 0x00000001, 0x00000000);

/* Reserve space for 'nr' words in EVO channel 'id's push buffer and
 * return a pointer to the insertion point.  If the request would run
 * past the end of the page-sized buffer, emit a jump-to-start word
 * (0x20000000), reset PUT, and wait for GET to follow before wrapping. */
69 evo_wait(struct drm_device *dev, int id, int nr)
71 struct nvd0_display *disp = nvd0_display(dev);
/* PUT register is a byte offset; convert to a word index. */
72 u32 put = nv_rd32(dev, 0x640000 + (id * 0x1000)) / 4;
74 if (put + nr >= (PAGE_SIZE / 4)) {
75 disp->evo[id].ptr[put] = 0x20000000;
77 nv_wr32(dev, 0x640000 + (id * 0x1000), 0x00000000);
78 if (!nv_wait(dev, 0x640004 + (id * 0x1000), ~0, 0x00000000)) {
79 NV_ERROR(dev, "evo %d dma stalled\n", id);
86 return disp->evo[id].ptr + put;

/* Advance the channel's PUT pointer to 'push' (converted back to a byte
 * offset), making everything queued since evo_wait() visible to hw. */
90 evo_kick(u32 *push, struct drm_device *dev, int id)
92 struct nvd0_display *disp = nvd0_display(dev);
93 nv_wr32(dev, 0x640000 + (id * 0x1000), (push - disp->evo[id].ptr) << 2);

/* Emit a method header (size in bits 18+, method in low bits) / a raw
 * data word into the push buffer, post-incrementing the cursor. */
96 #define evo_mthd(p,m,s) *((p)++) = (((s) << 18) | (m))
97 #define evo_data(p,d) *((p)++) = (d)

/* drm_encoder_helper_funcs.get_crtc: the CRTC this encoder was last
 * programmed against (cached in nouveau_encoder->crtc). */
99 static struct drm_crtc *
100 nvd0_display_crtc_get(struct drm_encoder *encoder)
102 return nouveau_encoder(encoder)->crtc;
105 /******************************************************************************
107 *****************************************************************************/
/* Program the CRTC dithering mode (EVO method 0x0490 + head offset).
 * 'mode' selection code is not visible in this extraction; the table
 * below documents the known encodings.  When 'update' is set, an extra
 * 0x0080 method presumably flushes/commits the state — TODO confirm. */
109 nvd0_crtc_set_dither(struct nouveau_crtc *nv_crtc, bool on, bool update)
111 struct drm_device *dev = nv_crtc->base.dev;
116 /* 0x11: 6bpc dynamic 2x2
117 * 0x13: 8bpc dynamic 2x2
118 * 0x19: 6bpc static 2x2
119 * 0x1b: 8bpc static 2x2
120 * 0x21: 6bpc temporal
121 * 0x23: 8bpc temporal
126 push = evo_wait(dev, 0, 4);
128 evo_mthd(push, 0x0490 + (nv_crtc->index * 0x300), 1);
129 evo_data(push, mode);
/* Commit (UPDATE) method — only pushed on the 'update' path per the
 * original line numbering gap. */
131 evo_mthd(push, 0x0080, 1);
132 evo_data(push, 0x00000000);
134 evo_kick(push, dev, 0);

/* Program the CRTC scaler.  Currently forces a 1:1 mapping — all three
 * 0x04c0 words are set to the mode's native vdisplay/hdisplay (see XXX). */
141 nvd0_crtc_set_scale(struct nouveau_crtc *nv_crtc, int type, bool update)
143 struct drm_display_mode *mode = &nv_crtc->base.mode;
144 struct drm_device *dev = nv_crtc->base.dev;
147 /*XXX: actually handle scaling */
149 push = evo_wait(dev, 0, 16);
151 evo_mthd(push, 0x04c0 + (nv_crtc->index * 0x300), 3);
152 evo_data(push, (mode->vdisplay << 16) | mode->hdisplay);
153 evo_data(push, (mode->vdisplay << 16) | mode->hdisplay);
154 evo_data(push, (mode->vdisplay << 16) | mode->hdisplay);
155 evo_mthd(push, 0x0494 + (nv_crtc->index * 0x300), 1);
156 evo_data(push, 0x00000000);
157 evo_mthd(push, 0x04b0 + (nv_crtc->index * 0x300), 1);
158 evo_data(push, 0x00000000);
159 evo_mthd(push, 0x04b8 + (nv_crtc->index * 0x300), 1);
160 evo_data(push, (mode->vdisplay << 16) | mode->hdisplay);
/* Commit on 'update' (line-number gap suggests a conditional here). */
162 evo_mthd(push, 0x0080, 1);
163 evo_data(push, 0x00000000);
165 evo_kick(push, dev, 0);

/* Point the head's scanout at framebuffer 'fb': VRAM offset (>>8),
 * geometry, pitch, format and DMA object, then cache the DMA handle in
 * nv_crtc->fb.tile_flags for later re-commit. */
172 nvd0_crtc_set_image(struct nouveau_crtc *nv_crtc, struct drm_framebuffer *fb,
173 int x, int y, bool update)
175 struct nouveau_framebuffer *nvfb = nouveau_framebuffer(fb);
178 push = evo_wait(fb->dev, 0, 16);
180 evo_mthd(push, 0x0460 + (nv_crtc->index * 0x300), 1);
181 evo_data(push, nvfb->nvbo->bo.offset >> 8);
182 evo_mthd(push, 0x0468 + (nv_crtc->index * 0x300), 4);
183 evo_data(push, (fb->height << 16) | fb->width);
184 evo_data(push, nvfb->r_pitch);
185 evo_data(push, nvfb->r_format);
186 evo_data(push, nvfb->r_dma);
187 evo_kick(push, fb->dev, 0);
190 nv_crtc->fb.tile_flags = nvfb->r_dma;

/* Show/hide the hardware cursor.  Show path: enable (0x85000000), point
 * at the cursor BO and bind the VRAM DMA object; hide path: disable
 * (0x05000000) and unbind.  'update' pushes a commit (0x0080). */
195 nvd0_crtc_cursor_show(struct nouveau_crtc *nv_crtc, bool show, bool update)
197 struct drm_device *dev = nv_crtc->base.dev;
198 u32 *push = evo_wait(dev, 0, 16);
201 evo_mthd(push, 0x0480 + (nv_crtc->index * 0x300), 2);
202 evo_data(push, 0x85000000);
203 evo_data(push, nv_crtc->cursor.nvbo->bo.offset >> 8);
204 evo_mthd(push, 0x048c + (nv_crtc->index * 0x300), 1);
205 evo_data(push, MEM_VRAM);
/* else-branch (hide): */
207 evo_mthd(push, 0x0480 + (nv_crtc->index * 0x300), 1);
208 evo_data(push, 0x05000000);
209 evo_mthd(push, 0x048c + (nv_crtc->index * 0x300), 1);
210 evo_data(push, 0x00000000);
214 evo_mthd(push, 0x0080, 1);
215 evo_data(push, 0x00000000);
218 evo_kick(push, dev, 0);
/* CRTC DPMS — body not visible in this extraction (likely a no-op). */
223 nvd0_crtc_dpms(struct drm_crtc *crtc, int mode)

/* Detach the head prior to a mode switch: unbind the framebuffer DMA
 * object (0x0474 <- 0), blank via 0x0440 <- 0x03000000, clear the LUT
 * DMA (0x045c), and hide the cursor without committing. */
228 nvd0_crtc_prepare(struct drm_crtc *crtc)
230 struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
233 push = evo_wait(crtc->dev, 0, 2);
235 evo_mthd(push, 0x0474 + (nv_crtc->index * 0x300), 1);
236 evo_data(push, 0x00000000);
237 evo_mthd(push, 0x0440 + (nv_crtc->index * 0x300), 1);
238 evo_data(push, 0x03000000);
239 evo_mthd(push, 0x045c + (nv_crtc->index * 0x300), 1);
240 evo_data(push, 0x00000000);
241 evo_kick(push, crtc->dev, 0);
244 nvd0_crtc_cursor_show(nv_crtc, false, false);

/* Re-enable the head after a mode switch: rebind the fb DMA object,
 * unblank (0x83000000) with the LUT BO address, rebind the LUT DMA
 * (MEM_VRAM), then restore cursor visibility with a commit. */
248 nvd0_crtc_commit(struct drm_crtc *crtc)
250 struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
253 push = evo_wait(crtc->dev, 0, 32);
255 evo_mthd(push, 0x0474 + (nv_crtc->index * 0x300), 1);
256 evo_data(push, nv_crtc->fb.tile_flags);
257 evo_mthd(push, 0x0440 + (nv_crtc->index * 0x300), 4);
258 evo_data(push, 0x83000000);
259 evo_data(push, nv_crtc->lut.nvbo->bo.offset >> 8);
260 evo_data(push, 0x00000000);
261 evo_data(push, 0x00000000);
262 evo_mthd(push, 0x045c + (nv_crtc->index * 0x300), 1);
263 evo_data(push, MEM_VRAM);
264 evo_kick(push, crtc->dev, 0);
267 nvd0_crtc_cursor_show(nv_crtc, nv_crtc->cursor.visible, true);

/* Mode fixup — body not visible; presumably accepts the mode as-is. */
271 nvd0_crtc_mode_fixup(struct drm_crtc *crtc, struct drm_display_mode *mode,
272 struct drm_display_mode *adjusted_mode)

/* Pin the new framebuffer's BO in VRAM and unpin the old one.
 * Ownership: the new fb stays pinned until the next swap. */
278 nvd0_crtc_swap_fbs(struct drm_crtc *crtc, struct drm_framebuffer *old_fb)
280 struct nouveau_framebuffer *nvfb = nouveau_framebuffer(crtc->fb);
283 ret = nouveau_bo_pin(nvfb->nvbo, TTM_PL_FLAG_VRAM);
/* only unpin old_fb if non-NULL — the guard is outside this view. */
288 nvfb = nouveau_framebuffer(old_fb);
289 nouveau_bo_unpin(nvfb->nvbo);
/* Full modeset for one head.  Derives the raw timing words the EVO core
 * channel expects (sync widths, back-porch sums, sync-start-to-
 * blank-end distances) from the DRM mode, programs them via methods
 * 0x0410..0x0450, then applies dither/scale/image without committing
 * (the caller's commit() performs the final UPDATE). */
296 nvd0_crtc_mode_set(struct drm_crtc *crtc, struct drm_display_mode *umode,
297 struct drm_display_mode *mode, int x, int y,
298 struct drm_framebuffer *old_fb)
300 struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
301 struct nouveau_connector *nv_connector;
302 u32 htotal = mode->htotal;
303 u32 vtotal = mode->vtotal;
/* Hardware wants sync width minus one. */
304 u32 hsyncw = mode->hsync_end - mode->hsync_start - 1;
305 u32 vsyncw = mode->vsync_end - mode->vsync_start - 1;
306 u32 hfrntp = mode->hsync_start - mode->hdisplay;
307 u32 vfrntp = mode->vsync_start - mode->vdisplay;
308 u32 hbackp = mode->htotal - mode->hsync_end;
309 u32 vbackp = mode->vtotal - mode->vsync_end;
/* sync-start to blank-end / display-end distances. */
310 u32 hss2be = hsyncw + hbackp;
311 u32 vss2be = vsyncw + vbackp;
312 u32 hss2de = htotal - hfrntp;
313 u32 vss2de = vtotal - vfrntp;
319 ret = nvd0_crtc_swap_fbs(crtc, old_fb);
323 push = evo_wait(crtc->dev, 0, 64);
325 evo_mthd(push, 0x0410 + (nv_crtc->index * 0x300), 5);
/* hstart/vstart are computed outside this view — TODO confirm. */
326 evo_data(push, (vstart << 16) | hstart);
327 evo_data(push, (vtotal << 16) | htotal);
328 evo_data(push, (vsyncw << 16) | hsyncw);
329 evo_data(push, (vss2be << 16) | hss2be);
330 evo_data(push, (vss2de << 16) | hss2de);
331 evo_mthd(push, 0x042c + (nv_crtc->index * 0x300), 1);
332 evo_data(push, 0x00000000); /* ??? */
333 evo_mthd(push, 0x0450 + (nv_crtc->index * 0x300), 3);
/* Pixel clock in Hz (mode->clock is kHz). */
334 evo_data(push, mode->clock * 1000);
335 evo_data(push, 0x00200000); /* ??? */
336 evo_data(push, mode->clock * 1000);
337 evo_mthd(push, 0x0408 + (nv_crtc->index * 0x300), 1);
338 evo_data(push, 0x31ec6000); /* ??? */
339 evo_kick(push, crtc->dev, 0);
342 nv_connector = nouveau_crtc_connector_get(nv_crtc);
343 nvd0_crtc_set_dither(nv_crtc, nv_connector->use_dithering, false);
344 nvd0_crtc_set_scale(nv_crtc, nv_connector->scaling_mode, false);
345 nvd0_crtc_set_image(nv_crtc, crtc->fb, x, y, false);

/* Pageflip-style base change: swap pinned fbs then re-point scanout,
 * committing immediately (update = true). */
350 nvd0_crtc_mode_set_base(struct drm_crtc *crtc, int x, int y,
351 struct drm_framebuffer *old_fb)
353 struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
356 ret = nvd0_crtc_swap_fbs(crtc, old_fb);
360 nvd0_crtc_set_image(nv_crtc, crtc->fb, x, y, true);

/* kgdb/kdb atomic base change — no pinning, direct image set. */
365 nvd0_crtc_mode_set_base_atomic(struct drm_crtc *crtc,
366 struct drm_framebuffer *fb, int x, int y,
367 enum mode_set_atomic state)
369 struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
370 nvd0_crtc_set_image(nv_crtc, fb, x, y, true);

/* Upload the software gamma ramp into the LUT BO.  Layout: 8 bytes per
 * entry, 16-bit R/G/B at offsets 0/2/4 (values scaled from 16 to 14
 * bits via >> 2). */
375 nvd0_crtc_lut_load(struct drm_crtc *crtc)
377 struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
378 void __iomem *lut = nvbo_kmap_obj_iovirtual(nv_crtc->lut.nvbo);
381 for (i = 0; i < 256; i++) {
382 writew(nv_crtc->lut.r[i] >> 2, lut + 8*i + 0);
383 writew(nv_crtc->lut.g[i] >> 2, lut + 8*i + 2);
384 writew(nv_crtc->lut.b[i] >> 2, lut + 8*i + 4);
/* drm_crtc_funcs.cursor_set: install a 64x64 userspace cursor image.
 * Looks up the GEM object by handle, copies the 64*64 ARGB words into
 * the pre-allocated cursor BO, then toggles hardware cursor visibility
 * if it changed.  handle == 0 means "hide cursor". */
389 nvd0_crtc_cursor_set(struct drm_crtc *crtc, struct drm_file *file_priv,
390 uint32_t handle, uint32_t width, uint32_t height)
392 struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
393 struct drm_device *dev = crtc->dev;
394 struct drm_gem_object *gem;
395 struct nouveau_bo *nvbo;
396 bool visible = (handle != 0);
/* Hardware only supports 64x64 cursors. */
400 if (width != 64 || height != 64)
403 gem = drm_gem_object_lookup(dev, file_priv, handle);
406 nvbo = nouveau_gem_object(gem);
408 ret = nouveau_bo_map(nvbo);
410 for (i = 0; i < 64 * 64; i++) {
411 u32 v = nouveau_bo_rd32(nvbo, i);
412 nouveau_bo_wr32(nv_crtc->cursor.nvbo, i, v);
414 nouveau_bo_unmap(nvbo);
/* Drop the lookup reference taken above. */
417 drm_gem_object_unreference_unlocked(gem);
420 if (visible != nv_crtc->cursor.visible) {
421 nvd0_crtc_cursor_show(nv_crtc, visible, true);
422 nv_crtc->cursor.visible = visible;

/* drm_crtc_funcs.cursor_move: position the cursor via the per-head
 * 0x64d084 (y<<16|x) register, kicked by a write to 0x64d080. */
429 nvd0_crtc_cursor_move(struct drm_crtc *crtc, int x, int y)
431 struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
432 const u32 data = (y << 16) | x;
434 nv_wr32(crtc->dev, 0x64d084 + (nv_crtc->index * 0x1000), data);
435 nv_wr32(crtc->dev, 0x64d080 + (nv_crtc->index * 0x1000), 0x00000000);
440 nvd0_crtc_gamma_set(struct drm_crtc *crtc, u16 *r, u16 *g, u16 *b,
441 uint32_t start, uint32_t size)
443 struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
444 u32 end = max(start + size, (u32)256);
447 for (i = start; i < end; i++) {
448 nv_crtc->lut.r[i] = r[i];
449 nv_crtc->lut.g[i] = g[i];
450 nv_crtc->lut.b[i] = b[i];
453 nvd0_crtc_lut_load(crtc);
/* Tear down a CRTC: unmap and drop the cursor and LUT BOs created in
 * nvd0_crtc_create(), then release the DRM CRTC itself.
 * NOTE(review): presumably also frees nv_crtc — the kfree is outside
 * this view; confirm against the full source. */
457 nvd0_crtc_destroy(struct drm_crtc *crtc)
459 struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
460 nouveau_bo_unmap(nv_crtc->cursor.nvbo);
461 nouveau_bo_ref(NULL, &nv_crtc->cursor.nvbo);
462 nouveau_bo_unmap(nv_crtc->lut.nvbo);
463 nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);
464 drm_crtc_cleanup(crtc);

/* CRTC helper vtable — modeset sequencing callbacks. */
468 static const struct drm_crtc_helper_funcs nvd0_crtc_hfunc = {
469 .dpms = nvd0_crtc_dpms,
470 .prepare = nvd0_crtc_prepare,
471 .commit = nvd0_crtc_commit,
472 .mode_fixup = nvd0_crtc_mode_fixup,
473 .mode_set = nvd0_crtc_mode_set,
474 .mode_set_base = nvd0_crtc_mode_set_base,
475 .mode_set_base_atomic = nvd0_crtc_mode_set_base_atomic,
476 .load_lut = nvd0_crtc_lut_load,

/* CRTC core vtable — userspace-facing ioctl entry points. */
479 static const struct drm_crtc_funcs nvd0_crtc_func = {
480 .cursor_set = nvd0_crtc_cursor_set,
481 .cursor_move = nvd0_crtc_cursor_move,
482 .gamma_set = nvd0_crtc_gamma_set,
483 .set_config = drm_crtc_helper_set_config,
484 .destroy = nvd0_crtc_destroy,
488 nvd0_crtc_create(struct drm_device *dev, int index)
490 struct nouveau_crtc *nv_crtc;
491 struct drm_crtc *crtc;
494 nv_crtc = kzalloc(sizeof(*nv_crtc), GFP_KERNEL);
498 nv_crtc->index = index;
499 nv_crtc->set_dither = nvd0_crtc_set_dither;
500 nv_crtc->set_scale = nvd0_crtc_set_scale;
501 for (i = 0; i < 256; i++) {
502 nv_crtc->lut.r[i] = i << 8;
503 nv_crtc->lut.g[i] = i << 8;
504 nv_crtc->lut.b[i] = i << 8;
507 crtc = &nv_crtc->base;
508 drm_crtc_init(dev, crtc, &nvd0_crtc_func);
509 drm_crtc_helper_add(crtc, &nvd0_crtc_hfunc);
510 drm_mode_crtc_set_gamma_size(crtc, 256);
512 ret = nouveau_bo_new(dev, 64 * 64 * 4, 0x100, TTM_PL_FLAG_VRAM,
513 0, 0x0000, &nv_crtc->cursor.nvbo);
515 ret = nouveau_bo_pin(nv_crtc->cursor.nvbo, TTM_PL_FLAG_VRAM);
517 ret = nouveau_bo_map(nv_crtc->cursor.nvbo);
519 nouveau_bo_ref(NULL, &nv_crtc->cursor.nvbo);
525 ret = nouveau_bo_new(dev, 4096, 0x100, TTM_PL_FLAG_VRAM,
526 0, 0x0000, &nv_crtc->lut.nvbo);
528 ret = nouveau_bo_pin(nv_crtc->lut.nvbo, TTM_PL_FLAG_VRAM);
530 ret = nouveau_bo_map(nv_crtc->lut.nvbo);
532 nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);
538 nvd0_crtc_lut_load(crtc);
542 nvd0_crtc_destroy(crtc);
546 /******************************************************************************
548 *****************************************************************************/
/* DAC DPMS: build the 0x61a004 control word — bit 0 set for STANDBY/OFF
 * (hsync off?), bit 2 for SUSPEND/OFF — bracketed by waits for the
 * register's busy bit (0x80000000) to clear.
 * NOTE(review): exact meaning of bits 0/2 not derivable from here. */
550 nvd0_dac_dpms(struct drm_encoder *encoder, int mode)
552 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
553 struct drm_device *dev = encoder->dev;
554 int or = nv_encoder->or;
557 dpms_ctrl = 0x80000000;
558 if (mode == DRM_MODE_DPMS_STANDBY || mode == DRM_MODE_DPMS_OFF)
559 dpms_ctrl |= 0x00000001;
560 if (mode == DRM_MODE_DPMS_SUSPEND || mode == DRM_MODE_DPMS_OFF)
561 dpms_ctrl |= 0x00000004;
563 nv_wait(dev, 0x61a004 + (or * 0x0800), 0x80000000, 0x00000000);
564 nv_mask(dev, 0x61a004 + (or * 0x0800), 0xc000007f, dpms_ctrl);
565 nv_wait(dev, 0x61a004 + (or * 0x0800), 0x80000000, 0x00000000);

/* If the connector has a native mode and scaling is enabled, substitute
 * it for the requested mode (preserving the DRM mode base id). */
569 nvd0_dac_mode_fixup(struct drm_encoder *encoder, struct drm_display_mode *mode,
570 struct drm_display_mode *adjusted_mode)
572 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
573 struct nouveau_connector *nv_connector;
575 nv_connector = nouveau_encoder_connector_get(nv_encoder);
576 if (nv_connector && nv_connector->native_mode) {
577 if (nv_connector->scaling_mode != DRM_MODE_SCALE_NONE) {
578 int id = adjusted_mode->base.id;
579 *adjusted_mode = *nv_connector->native_mode;
580 adjusted_mode->base.id = id;

/* prepare/commit — bodies not visible; likely no-ops. */
588 nvd0_dac_prepare(struct drm_encoder *encoder)
593 nvd0_dac_commit(struct drm_encoder *encoder)

/* Attach this DAC to its CRTC: power up, then set DAC_MODE_CTRL
 * (method 0x0180 + or*0x20) to the head's bitmask; cache the crtc
 * pointer so get_crtc()/disconnect() can find it. */
598 nvd0_dac_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
599 struct drm_display_mode *adjusted_mode)
601 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
602 struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
605 nvd0_dac_dpms(encoder, DRM_MODE_DPMS_ON);
607 push = evo_wait(encoder->dev, 0, 2);
609 evo_mthd(push, 0x0180 + (nv_encoder->or * 0x20), 1);
610 evo_data(push, 1 << nv_crtc->index);
611 evo_kick(push, encoder->dev, 0);
614 nv_encoder->crtc = encoder->crtc;

/* Detach the DAC: blank its CRTC, clear DAC_MODE_CTRL and commit. */
618 nvd0_dac_disconnect(struct drm_encoder *encoder)
620 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
621 struct drm_device *dev = encoder->dev;
624 if (nv_encoder->crtc) {
625 nvd0_crtc_prepare(nv_encoder->crtc);
627 push = evo_wait(dev, 0, 4);
629 evo_mthd(push, 0x0180 + (nv_encoder->or * 0x20), 1);
630 evo_data(push, 0x00000000);
631 evo_mthd(push, 0x0080, 1);
632 evo_data(push, 0x00000000);
633 evo_kick(push, dev, 0);
636 nv_encoder->crtc = NULL;

/* Load detection is not implemented yet — always disconnected. */
640 static enum drm_connector_status
641 nvd0_dac_detect(struct drm_encoder *encoder, struct drm_connector *connector)
643 return connector_status_disconnected;

/* NOTE(review): presumably also frees the nouveau_encoder — the kfree
 * is outside this view. */
647 nvd0_dac_destroy(struct drm_encoder *encoder)
649 drm_encoder_cleanup(encoder);

653 static const struct drm_encoder_helper_funcs nvd0_dac_hfunc = {
654 .dpms = nvd0_dac_dpms,
655 .mode_fixup = nvd0_dac_mode_fixup,
656 .prepare = nvd0_dac_prepare,
657 .commit = nvd0_dac_commit,
658 .mode_set = nvd0_dac_mode_set,
659 .disable = nvd0_dac_disconnect,
660 .get_crtc = nvd0_display_crtc_get,
661 .detect = nvd0_dac_detect

664 static const struct drm_encoder_funcs nvd0_dac_func = {
665 .destroy = nvd0_dac_destroy,

/* Allocate and register a DAC encoder for DCB entry 'dcbe', attached to
 * 'connector'.  The OR index comes from the DCB 'or' bitmask. */
669 nvd0_dac_create(struct drm_connector *connector, struct dcb_entry *dcbe)
671 struct drm_device *dev = connector->dev;
672 struct nouveau_encoder *nv_encoder;
673 struct drm_encoder *encoder;
675 nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
678 nv_encoder->dcb = dcbe;
679 nv_encoder->or = ffs(dcbe->or) - 1;
681 encoder = to_drm_encoder(nv_encoder);
682 encoder->possible_crtcs = dcbe->heads;
683 encoder->possible_clones = 0;
684 drm_encoder_init(dev, encoder, &nvd0_dac_func, DRM_MODE_ENCODER_DAC);
685 drm_encoder_helper_add(encoder, &nvd0_dac_hfunc);
687 drm_mode_connector_attach_encoder(connector, encoder);
691 /******************************************************************************
693 *****************************************************************************/
/* SOR DPMS: record the new state, then scan all encoders for a partner
 * TMDS encoder sharing the same OR — if the partner is ON the hardware
 * must stay powered (the early-return on that condition is outside this
 * view — TODO confirm).  Finally write bit 0 of 0x61c004 (on/off) with
 * busy-bit waits, plus a wait on 0x61c030 bit 28. */
695 nvd0_sor_dpms(struct drm_encoder *encoder, int mode)
697 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
698 struct drm_device *dev = encoder->dev;
699 struct drm_encoder *partner;
700 int or = nv_encoder->or;
703 nv_encoder->last_dpms = mode;
705 list_for_each_entry(partner, &dev->mode_config.encoder_list, head) {
706 struct nouveau_encoder *nv_partner = nouveau_encoder(partner);
708 if (partner->encoder_type != DRM_MODE_ENCODER_TMDS)
711 if (nv_partner != nv_encoder &&
712 nv_partner->dcb->or == nv_encoder->or) {
713 if (nv_partner->last_dpms == DRM_MODE_DPMS_ON)
719 dpms_ctrl = (mode == DRM_MODE_DPMS_ON);
720 dpms_ctrl |= 0x80000000;
722 nv_wait(dev, 0x61c004 + (or * 0x0800), 0x80000000, 0x00000000);
723 nv_mask(dev, 0x61c004 + (or * 0x0800), 0x80000001, dpms_ctrl);
724 nv_wait(dev, 0x61c004 + (or * 0x0800), 0x80000000, 0x00000000);
725 nv_wait(dev, 0x61c030 + (or * 0x0800), 0x10000000, 0x00000000);

/* Same native-mode substitution as the DAC variant. */
729 nvd0_sor_mode_fixup(struct drm_encoder *encoder, struct drm_display_mode *mode,
730 struct drm_display_mode *adjusted_mode)
732 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
733 struct nouveau_connector *nv_connector;
735 nv_connector = nouveau_encoder_connector_get(nv_encoder);
736 if (nv_connector && nv_connector->native_mode) {
737 if (nv_connector->scaling_mode != DRM_MODE_SCALE_NONE) {
738 int id = adjusted_mode->base.id;
739 *adjusted_mode = *nv_connector->native_mode;
740 adjusted_mode->base.id = id;

/* prepare/commit — bodies not visible; likely no-ops. */
748 nvd0_sor_prepare(struct drm_encoder *encoder)
753 nvd0_sor_commit(struct drm_encoder *encoder)

/* Attach the SOR to its CRTC.  mode_ctrl encodes the head mask plus the
 * link configuration: link A single-link TMDS (0x100) below 165MHz,
 * dual-link (0x500) above, link B (0x200) otherwise. */
758 nvd0_sor_mode_set(struct drm_encoder *encoder, struct drm_display_mode *mode,
759 struct drm_display_mode *adjusted_mode)
761 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
762 struct nouveau_crtc *nv_crtc = nouveau_crtc(encoder->crtc);
763 u32 mode_ctrl = (1 << nv_crtc->index);
766 if (nv_encoder->dcb->sorconf.link & 1) {
/* 165 MHz is the single-link TMDS pixel-clock limit. */
767 if (adjusted_mode->clock < 165000)
768 mode_ctrl |= 0x00000100;
770 mode_ctrl |= 0x00000500;
772 mode_ctrl |= 0x00000200;
775 nvd0_sor_dpms(encoder, DRM_MODE_DPMS_ON);
777 push = evo_wait(encoder->dev, 0, 2);
779 evo_mthd(push, 0x0200 + (nv_encoder->or * 0x20), 1);
780 evo_data(push, mode_ctrl);
781 evo_kick(push, encoder->dev, 0);
784 nv_encoder->crtc = encoder->crtc;

/* Detach the SOR: blank its CRTC, clear SOR_MODE_CTRL, commit, and
 * reset the cached dpms state. */
788 nvd0_sor_disconnect(struct drm_encoder *encoder)
790 struct nouveau_encoder *nv_encoder = nouveau_encoder(encoder);
791 struct drm_device *dev = encoder->dev;
794 if (nv_encoder->crtc) {
795 nvd0_crtc_prepare(nv_encoder->crtc);
797 push = evo_wait(dev, 0, 4);
799 evo_mthd(push, 0x0200 + (nv_encoder->or * 0x20), 1);
800 evo_data(push, 0x00000000);
801 evo_mthd(push, 0x0080, 1);
802 evo_data(push, 0x00000000);
803 evo_kick(push, dev, 0);
806 nv_encoder->crtc = NULL;
807 nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;

/* NOTE(review): presumably also frees the nouveau_encoder — the kfree
 * is outside this view. */
812 nvd0_sor_destroy(struct drm_encoder *encoder)
814 drm_encoder_cleanup(encoder);

818 static const struct drm_encoder_helper_funcs nvd0_sor_hfunc = {
819 .dpms = nvd0_sor_dpms,
820 .mode_fixup = nvd0_sor_mode_fixup,
821 .prepare = nvd0_sor_prepare,
822 .commit = nvd0_sor_commit,
823 .mode_set = nvd0_sor_mode_set,
824 .disable = nvd0_sor_disconnect,
825 .get_crtc = nvd0_display_crtc_get,

828 static const struct drm_encoder_funcs nvd0_sor_func = {
829 .destroy = nvd0_sor_destroy,

/* Allocate and register a TMDS SOR encoder for DCB entry 'dcbe'. */
833 nvd0_sor_create(struct drm_connector *connector, struct dcb_entry *dcbe)
835 struct drm_device *dev = connector->dev;
836 struct nouveau_encoder *nv_encoder;
837 struct drm_encoder *encoder;
839 nv_encoder = kzalloc(sizeof(*nv_encoder), GFP_KERNEL);
842 nv_encoder->dcb = dcbe;
843 nv_encoder->or = ffs(dcbe->or) - 1;
844 nv_encoder->last_dpms = DRM_MODE_DPMS_OFF;
846 encoder = to_drm_encoder(nv_encoder);
847 encoder->possible_crtcs = dcbe->heads;
848 encoder->possible_clones = 0;
849 drm_encoder_init(dev, encoder, &nvd0_sor_func, DRM_MODE_ENCODER_TMDS);
850 drm_encoder_helper_add(encoder, &nvd0_sor_hfunc);
852 drm_mode_connector_attach_encoder(connector, encoder);
856 /******************************************************************************
858 *****************************************************************************/
/* The three "unk" handlers service undocumented PDISP state-change
 * stages 1/2/4: log the 0x6101d0/0x6101d4/0x6109d4 registers, clear
 * the request latches, and ack stage completion via 0x6101d0 bit 31. */
860 nvd0_display_unk1_handler(struct drm_device *dev)
862 NV_INFO(dev, "PDISP: 1 0x%08x 0x%08x 0x%08x\n", nv_rd32(dev, 0x6101d0),
863 nv_rd32(dev, 0x6101d4), nv_rd32(dev, 0x6109d4));
865 nv_wr32(dev, 0x6101d4, 0x00000000);
866 nv_wr32(dev, 0x6109d4, 0x00000000);
867 nv_wr32(dev, 0x6101d0, 0x80000000);

871 nvd0_display_unk2_handler(struct drm_device *dev)
873 NV_INFO(dev, "PDISP: 2 0x%08x 0x%08x 0x%08x\n", nv_rd32(dev, 0x6101d0),
874 nv_rd32(dev, 0x6101d4), nv_rd32(dev, 0x6109d4));
876 nv_wr32(dev, 0x6101d4, 0x00000000);
877 nv_wr32(dev, 0x6109d4, 0x00000000);
878 nv_wr32(dev, 0x6101d0, 0x80000000);

882 nvd0_display_unk4_handler(struct drm_device *dev)
884 NV_INFO(dev, "PDISP: 4 0x%08x 0x%08x 0x%08x\n", nv_rd32(dev, 0x6101d0),
885 nv_rd32(dev, 0x6101d4), nv_rd32(dev, 0x6109d4));
887 nv_wr32(dev, 0x6101d4, 0x00000000);
888 nv_wr32(dev, 0x6109d4, 0x00000000);
889 nv_wr32(dev, 0x6101d0, 0x80000000);

/* Top-level PDISP interrupt handler (registered on irq line 26).
 * 0x610088 is the master interrupt status:
 *  - bit 1:  EVO channel error — log the faulting method/data per
 *    channel and ack it (writing 0x90000000 to 0x6101f0 clears).
 *  - bit 20: "intr24" / state-change requests, dispatched to the
 *    unk1/2/4 handlers above; unknown bits are logged and acked.
 *  - bits 24/25: per-head (vblank?) status at 0x6100bc/0x6108bc —
 *    NOTE(review): exact semantics not derivable here; just acked.
 * Any still-unhandled bits are logged as unknown. */
893 nvd0_display_intr(struct drm_device *dev)
895 u32 intr = nv_rd32(dev, 0x610088);
897 if (intr & 0x00000002) {
898 u32 stat = nv_rd32(dev, 0x61009c);
899 int chid = ffs(stat) - 1;
/* Per-channel error info lives at 0x6101f0 + chid*12. */
901 u32 mthd = nv_rd32(dev, 0x6101f0 + (chid * 12));
902 u32 data = nv_rd32(dev, 0x6101f4 + (chid * 12));
903 u32 unkn = nv_rd32(dev, 0x6101f8 + (chid * 12));
905 NV_INFO(dev, "EvoCh: chid %d mthd 0x%04x data 0x%08x "
907 chid, (mthd & 0x0000ffc), data, mthd, unkn);
908 nv_wr32(dev, 0x61009c, (1 << chid));
909 nv_wr32(dev, 0x6101f0 + (chid * 12), 0x90000000);
915 if (intr & 0x00100000) {
916 u32 stat = nv_rd32(dev, 0x6100ac);
918 if (stat & 0x00000007) {
/* ack before dispatch so new requests aren't lost. */
919 nv_wr32(dev, 0x6100ac, (stat & 0x00000007));
921 if (stat & 0x00000001)
922 nvd0_display_unk1_handler(dev);
923 if (stat & 0x00000002)
924 nvd0_display_unk2_handler(dev);
925 if (stat & 0x00000004)
926 nvd0_display_unk4_handler(dev);
931 NV_INFO(dev, "PDISP: unknown intr24 0x%08x\n", stat);
932 nv_wr32(dev, 0x6100ac, stat);
938 if (intr & 0x01000000) {
939 u32 stat = nv_rd32(dev, 0x6100bc);
940 nv_wr32(dev, 0x6100bc, stat);
944 if (intr & 0x02000000) {
945 u32 stat = nv_rd32(dev, 0x6108bc);
946 nv_wr32(dev, 0x6108bc, stat);
951 NV_INFO(dev, "PDISP: unknown intr 0x%08x\n", intr);
954 /******************************************************************************
956 *****************************************************************************/
/* Shut the display engine down: halt the cursor channels (13 and 14 in
 * the 0x610490 channel-control array) if active, wait for idle, mask
 * their interrupts, then do the same for the core (master) channel at
 * index 0, waiting for its busy bit to clear. */
958 nvd0_display_fini(struct drm_device *dev)
/* curs channels: indices 14 then 13. */
963 for (i = 14; i >= 13; i--) {
964 if (!(nv_rd32(dev, 0x610490 + (i * 0x10)) & 0x00000001))
967 nv_mask(dev, 0x610490 + (i * 0x10), 0x00000001, 0x00000000);
968 nv_wait(dev, 0x610490 + (i * 0x10), 0x00010000, 0x00000000);
969 nv_mask(dev, 0x610090, 1 << i, 0x00000000);
970 nv_mask(dev, 0x6100a0, 1 << i, 0x00000000);
/* core/master channel. */
974 if (nv_rd32(dev, 0x610490) & 0x00000010) {
975 nv_mask(dev, 0x610490, 0x00000010, 0x00000000);
976 nv_mask(dev, 0x610490, 0x00000003, 0x00000000);
977 nv_wait(dev, 0x610490, 0x80000000, 0x00000000);
978 nv_mask(dev, 0x610090, 0x00000001, 0x00000000);
979 nv_mask(dev, 0x6100a0, 0x00000001, 0x00000000);
/* Bring the display engine up:
 *  1. clear any pending VBIOS/firmware handoff state (0x6100ac bit 8);
 *  2. mirror the DAC/SOR/head config registers into the 0x6101xx
 *     shadow range (required for SOR_MODE_CTRL to work — see comment);
 *  3. point the hw at our hash table/objects and enable interrupts;
 *  4. start the core EVO channel (push buffer handle, interrupts) and
 *     the two cursor channels (13/14), waiting for each to come ready;
 *  5. queue initial core-channel state (bind MEM_SYNC, sync dance on
 *     method 0x0084). */
984 nvd0_display_init(struct drm_device *dev)
986 struct nvd0_display *disp = nvd0_display(dev);
990 if (nv_rd32(dev, 0x6100ac) & 0x00000100) {
991 nv_wr32(dev, 0x6100ac, 0x00000100);
992 nv_mask(dev, 0x6194e8, 0x00000001, 0x00000000);
993 if (!nv_wait(dev, 0x6194e8, 0x00000002, 0x00000000)) {
994 NV_ERROR(dev, "PDISP: 0x6194e8 0x%08x\n",
995 nv_rd32(dev, 0x6194e8));
1000 /* nfi what these are exactly, i do know that SOR_MODE_CTRL won't
1001 * work at all unless you do the SOR part below.
1003 for (i = 0; i < 3; i++) {
1004 u32 dac = nv_rd32(dev, 0x61a000 + (i * 0x800));
1005 nv_wr32(dev, 0x6101c0 + (i * 0x800), dac);
1008 for (i = 0; i < 4; i++) {
1009 u32 sor = nv_rd32(dev, 0x61c000 + (i * 0x800));
1010 nv_wr32(dev, 0x6301c4 + (i * 0x800), sor);
1013 for (i = 0; i < 2; i++) {
1014 u32 crtc0 = nv_rd32(dev, 0x616104 + (i * 0x800));
1015 u32 crtc1 = nv_rd32(dev, 0x616108 + (i * 0x800));
1016 u32 crtc2 = nv_rd32(dev, 0x61610c + (i * 0x800));
1017 nv_wr32(dev, 0x6101b4 + (i * 0x800), crtc0);
1018 nv_wr32(dev, 0x6101b8 + (i * 0x800), crtc1);
1019 nv_wr32(dev, 0x6101bc + (i * 0x800), crtc2);
1022 /* point at our hash table / objects, enable interrupts */
1023 nv_wr32(dev, 0x610010, (disp->mem->vinst >> 8) | 9);
1024 nv_mask(dev, 0x6100b0, 0x00000307, 0x00000307);
/* core channel: push buffer DMA address + start. */
1027 nv_wr32(dev, 0x610494, (disp->evo[0].handle >> 8) | 3);
1028 nv_wr32(dev, 0x610498, 0x00010000);
1029 nv_wr32(dev, 0x61049c, 0x00000001);
1030 nv_mask(dev, 0x610490, 0x00000010, 0x00000010);
1031 nv_wr32(dev, 0x640000, 0x00000000);
1032 nv_wr32(dev, 0x610490, 0x01000013);
1033 if (!nv_wait(dev, 0x610490, 0x80000000, 0x00000000)) {
1034 NV_ERROR(dev, "PDISP: master 0x%08x\n",
1035 nv_rd32(dev, 0x610490));
1038 nv_mask(dev, 0x610090, 0x00000001, 0x00000001);
1039 nv_mask(dev, 0x6100a0, 0x00000001, 0x00000001);
/* cursor channels. */
1042 for (i = 13; i <= 14; i++) {
1043 nv_wr32(dev, 0x610490 + (i * 0x10), 0x00000001);
1044 if (!nv_wait(dev, 0x610490 + (i * 0x10), 0x00010000, 0x00010000)) {
1045 NV_ERROR(dev, "PDISP: curs%d 0x%08x\n", i,
1046 nv_rd32(dev, 0x610490 + (i * 0x10)));
1050 nv_mask(dev, 0x610090, 1 << i, 1 << i);
1051 nv_mask(dev, 0x6100a0, 1 << i, 1 << i);
/* initial core-channel state. */
1054 push = evo_wait(dev, 0, 32);
1057 evo_mthd(push, 0x0088, 1);
1058 evo_data(push, MEM_SYNC);
1059 evo_mthd(push, 0x0084, 1);
1060 evo_data(push, 0x00000000);
1061 evo_mthd(push, 0x0084, 1);
1062 evo_data(push, 0x80000000);
1063 evo_mthd(push, 0x008c, 1);
1064 evo_data(push, 0x00000000);
1065 evo_kick(push, dev, 0);
/* Tear down everything nvd0_display_create() built: stop the hardware,
 * free the coherent push buffer, drop the hash-table gpuobj, unregister
 * the PDISP irq handler and clear the engine's priv pointer.
 * NOTE(review): kfree(disp) is presumably outside this view. */
1073 struct drm_nouveau_private *dev_priv = dev->dev_private;
1074 struct nvd0_display *disp = nvd0_display(dev);
1075 struct pci_dev *pdev = dev->pdev;
1077 nvd0_display_fini(dev);
1079 pci_free_consistent(pdev, PAGE_SIZE, disp->evo[0].ptr, disp->evo[0].handle);
1080 nouveau_gpuobj_ref(NULL, &disp->mem);
1081 nouveau_irq_unregister(dev, 26);
1083 dev_priv->engine.display.priv = NULL;
/* Driver-load entry point for NVD0 display:
 *  - allocate the nvd0_display state and stash it in the engine priv;
 *  - create one CRTC per hardware head (2), then encoders/connectors
 *    from the VBIOS DCB table (on-chip SOR/DAC only), culling any
 *    connector that ended up with no encoder;
 *  - register the PDISP irq handler (line 26);
 *  - build the display hash table + DMA objects in a 16KiB gpuobj:
 *    each ctxdma is a 6-word descriptor at 0x1000+, referenced from a
 *    (handle, (instance << 9) | valid) hash entry at 0x0000+:
 *      MEM_SYNC      -> 4KiB sync area inside this gpuobj,
 *      MEM_VRAM      -> all of VRAM,
 *      NvEvoVRAM_LP  -> all of VRAM (class 0x09 — large pages?),
 *      NvEvoFB32     -> all of VRAM with 0x0fe00009 flags
 *    NOTE(review): descriptor word semantics inferred from layout only;
 *    confirm against envytools PDISP docs;
 *  - allocate a page-sized coherent push buffer for EVO channel 0;
 *  - start the hardware via nvd0_display_init(); on any failure the
 *    error path funnels into nvd0_display_destroy(). */
1088 nvd0_display_create(struct drm_device *dev)
1090 struct drm_nouveau_private *dev_priv = dev->dev_private;
1091 struct nouveau_instmem_engine *pinstmem = &dev_priv->engine.instmem;
1092 struct dcb_table *dcb = &dev_priv->vbios.dcb;
1093 struct drm_connector *connector, *tmp;
1094 struct pci_dev *pdev = dev->pdev;
1095 struct nvd0_display *disp;
1096 struct dcb_entry *dcbe;
1099 disp = kzalloc(sizeof(*disp), GFP_KERNEL);
1102 dev_priv->engine.display.priv = disp;
1104 /* create crtc objects to represent the hw heads */
1105 for (i = 0; i < 2; i++) {
1106 ret = nvd0_crtc_create(dev, i);
1111 /* create encoder/connector objects based on VBIOS DCB table */
1112 for (i = 0, dcbe = &dcb->entry[0]; i < dcb->entries; i++, dcbe++) {
1113 connector = nouveau_connector_create(dev, dcbe->connector);
1114 if (IS_ERR(connector))
1117 if (dcbe->location != DCB_LOC_ON_CHIP) {
1118 NV_WARN(dev, "skipping off-chip encoder %d/%d\n",
1119 dcbe->type, ffs(dcbe->or) - 1);
1123 switch (dcbe->type) {
1125 nvd0_sor_create(connector, dcbe);
1128 nvd0_dac_create(connector, dcbe);
1131 NV_WARN(dev, "skipping unsupported encoder %d/%d\n",
1132 dcbe->type, ffs(dcbe->or) - 1);
1137 /* cull any connectors we created that don't have an encoder */
1138 list_for_each_entry_safe(connector, tmp, &dev->mode_config.connector_list, head) {
1139 if (connector->encoder_ids[0])
1142 NV_WARN(dev, "%s has no encoders, removing\n",
1143 drm_get_connector_name(connector));
1144 connector->funcs->destroy(connector);
1147 /* setup interrupt handling */
1148 nouveau_irq_register(dev, 26, nvd0_display_intr);
1150 /* hash table and dma objects for the memory areas we care about */
1151 ret = nouveau_gpuobj_new(dev, NULL, 0x4000, 0x10000,
1152 NVOBJ_FLAG_ZERO_ALLOC, &disp->mem);
/* ctxdma: sync area inside disp->mem (0x2000..0x2fff). */
1156 nv_wo32(disp->mem, 0x1000, 0x00000049);
1157 nv_wo32(disp->mem, 0x1004, (disp->mem->vinst + 0x2000) >> 8);
1158 nv_wo32(disp->mem, 0x1008, (disp->mem->vinst + 0x2fff) >> 8);
1159 nv_wo32(disp->mem, 0x100c, 0x00000000);
1160 nv_wo32(disp->mem, 0x1010, 0x00000000);
1161 nv_wo32(disp->mem, 0x1014, 0x00000000);
1162 nv_wo32(disp->mem, 0x0000, MEM_SYNC);
1163 nv_wo32(disp->mem, 0x0004, (0x1000 << 9) | 0x00000001);
/* ctxdma: all of VRAM. */
1165 nv_wo32(disp->mem, 0x1020, 0x00000049);
1166 nv_wo32(disp->mem, 0x1024, 0x00000000);
1167 nv_wo32(disp->mem, 0x1028, (dev_priv->vram_size - 1) >> 8);
1168 nv_wo32(disp->mem, 0x102c, 0x00000000);
1169 nv_wo32(disp->mem, 0x1030, 0x00000000);
1170 nv_wo32(disp->mem, 0x1034, 0x00000000);
1171 nv_wo32(disp->mem, 0x0008, MEM_VRAM);
1172 nv_wo32(disp->mem, 0x000c, (0x1020 << 9) | 0x00000001);
/* ctxdma: VRAM, class 0x09. */
1174 nv_wo32(disp->mem, 0x1040, 0x00000009);
1175 nv_wo32(disp->mem, 0x1044, 0x00000000);
1176 nv_wo32(disp->mem, 0x1048, (dev_priv->vram_size - 1) >> 8);
1177 nv_wo32(disp->mem, 0x104c, 0x00000000);
1178 nv_wo32(disp->mem, 0x1050, 0x00000000);
1179 nv_wo32(disp->mem, 0x1054, 0x00000000);
1180 nv_wo32(disp->mem, 0x0010, NvEvoVRAM_LP);
1181 nv_wo32(disp->mem, 0x0014, (0x1040 << 9) | 0x00000001);
/* ctxdma: VRAM, 32bpp scanout flags. */
1183 nv_wo32(disp->mem, 0x1060, 0x0fe00009);
1184 nv_wo32(disp->mem, 0x1064, 0x00000000);
1185 nv_wo32(disp->mem, 0x1068, (dev_priv->vram_size - 1) >> 8);
1186 nv_wo32(disp->mem, 0x106c, 0x00000000);
1187 nv_wo32(disp->mem, 0x1070, 0x00000000);
1188 nv_wo32(disp->mem, 0x1074, 0x00000000);
1189 nv_wo32(disp->mem, 0x0018, NvEvoFB32);
1190 nv_wo32(disp->mem, 0x001c, (0x1060 << 9) | 0x00000001);
/* make the writes visible to the display engine. */
1192 pinstmem->flush(dev);
1194 /* push buffers for evo channels */
1196 pci_alloc_consistent(pdev, PAGE_SIZE, &disp->evo[0].handle);
1197 if (!disp->evo[0].ptr) {
1202 ret = nvd0_display_init(dev);
1208 nvd0_display_destroy(dev);