nv50_crtc.c
/*
 * Copyright (C) 2008 Maarten Maathuis.
 * All Rights Reserved.
 *
 * Permission is hereby granted, free of charge, to any person obtaining
 * a copy of this software and associated documentation files (the
 * "Software"), to deal in the Software without restriction, including
 * without limitation the rights to use, copy, modify, merge, publish,
 * distribute, sublicense, and/or sell copies of the Software, and to
 * permit persons to whom the Software is furnished to do so, subject to
 * the following conditions:
 *
 * The above copyright notice and this permission notice (including the
 * next paragraph) shall be included in all copies or substantial
 * portions of the Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
 * EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
 * MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
 * IN NO EVENT SHALL THE COPYRIGHT OWNER(S) AND/OR ITS SUPPLIERS BE
 * LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
 * OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
 * WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
 *
 */

#include "drmP.h"
#include "drm_mode.h"
#include "drm_crtc_helper.h"

#define NOUVEAU_DMA_DEBUG (nouveau_reg_debug & NOUVEAU_REG_DEBUG_EVO)
#include "nouveau_reg.h"
#include "nouveau_drv.h"
#include "nouveau_hw.h"
#include "nouveau_encoder.h"
#include "nouveau_crtc.h"
#include "nouveau_fb.h"
#include "nouveau_connector.h"
#include "nv50_display.h"

static void
nv50_crtc_lut_load(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	void __iomem *lut = nvbo_kmap_obj_iovirtual(nv_crtc->lut.nvbo);
	int i;

	NV_DEBUG_KMS(crtc->dev, "\n");

	for (i = 0; i < 256; i++) {
		writew(nv_crtc->lut.r[i] >> 2, lut + 8*i + 0);
		writew(nv_crtc->lut.g[i] >> 2, lut + 8*i + 2);
		writew(nv_crtc->lut.b[i] >> 2, lut + 8*i + 4);
	}

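	/* For 30-bit depth the hardware appears to consume one extra LUT
	 * entry; duplicate the last value into slot 256 so the top of the
	 * range stays well defined.
	 */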
	if (nv_crtc->lut.depth == 30) {
		writew(nv_crtc->lut.r[i - 1] >> 2, lut + 8*i + 0);
		writew(nv_crtc->lut.g[i - 1] >> 2, lut + 8*i + 2);
		writew(nv_crtc->lut.b[i - 1] >> 2, lut + 8*i + 4);
	}
}

int
nv50_crtc_blank(struct nouveau_crtc *nv_crtc, bool blanked)
{
	struct drm_device *dev = nv_crtc->base.dev;
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_channel *evo = nv50_display(dev)->master;
	int index = nv_crtc->index, ret;

	NV_DEBUG_KMS(dev, "index %d\n", nv_crtc->index);
	NV_DEBUG_KMS(dev, "%s\n", blanked ? "blanked" : "unblanked");

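	/* Blanking detaches the CLUT and framebuffer DMA objects from this
	 * head; unblanking points them back at the buffers in VRAM and
	 * restores the cursor state.
	 */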
	if (blanked) {
		nv_crtc->cursor.hide(nv_crtc, false);

		ret = RING_SPACE(evo, dev_priv->chipset != 0x50 ? 7 : 5);
		if (ret) {
			NV_ERROR(dev, "no space while blanking crtc\n");
			return ret;
		}
		BEGIN_RING(evo, 0, NV50_EVO_CRTC(index, CLUT_MODE), 2);
		OUT_RING(evo, NV50_EVO_CRTC_CLUT_MODE_BLANK);
		OUT_RING(evo, 0);
		if (dev_priv->chipset != 0x50) {
			BEGIN_RING(evo, 0, NV84_EVO_CRTC(index, CLUT_DMA), 1);
			OUT_RING(evo, NV84_EVO_CRTC_CLUT_DMA_HANDLE_NONE);
		}

		BEGIN_RING(evo, 0, NV50_EVO_CRTC(index, FB_DMA), 1);
		OUT_RING(evo, NV50_EVO_CRTC_FB_DMA_HANDLE_NONE);
	} else {
		if (nv_crtc->cursor.visible)
			nv_crtc->cursor.show(nv_crtc, false);
		else
			nv_crtc->cursor.hide(nv_crtc, false);

		ret = RING_SPACE(evo, dev_priv->chipset != 0x50 ? 10 : 8);
		if (ret) {
			NV_ERROR(dev, "no space while unblanking crtc\n");
			return ret;
		}
		BEGIN_RING(evo, 0, NV50_EVO_CRTC(index, CLUT_MODE), 2);
		OUT_RING(evo, nv_crtc->lut.depth == 8 ?
				NV50_EVO_CRTC_CLUT_MODE_OFF :
				NV50_EVO_CRTC_CLUT_MODE_ON);
		OUT_RING(evo, nv_crtc->lut.nvbo->bo.offset >> 8);
		if (dev_priv->chipset != 0x50) {
			BEGIN_RING(evo, 0, NV84_EVO_CRTC(index, CLUT_DMA), 1);
			OUT_RING(evo, NvEvoVRAM);
		}

		BEGIN_RING(evo, 0, NV50_EVO_CRTC(index, FB_OFFSET), 2);
		OUT_RING(evo, nv_crtc->fb.offset >> 8);
		OUT_RING(evo, 0);
		BEGIN_RING(evo, 0, NV50_EVO_CRTC(index, FB_DMA), 1);
		if (dev_priv->chipset != 0x50)
			if (nv_crtc->fb.tile_flags == 0x7a00 ||
			    nv_crtc->fb.tile_flags == 0xfe00)
				OUT_RING(evo, NvEvoFB32);
			else
			if (nv_crtc->fb.tile_flags == 0x7000)
				OUT_RING(evo, NvEvoFB16);
			else
				OUT_RING(evo, NvEvoVRAM_LP);
		else
			OUT_RING(evo, NvEvoVRAM_LP);
	}

	nv_crtc->fb.blanked = blanked;
	return 0;
}

static int
nv50_crtc_set_dither(struct nouveau_crtc *nv_crtc, bool update)
{
	struct nouveau_channel *evo = nv50_display(nv_crtc->base.dev)->master;
	struct nouveau_connector *nv_connector;
	struct drm_connector *connector;
	int head = nv_crtc->index, ret;
	u32 mode = 0x00;

	nv_connector = nouveau_crtc_connector_get(nv_crtc);
	connector = &nv_connector->base;
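	/* In AUTO mode only dither when the framebuffer carries more depth
	 * than the sink can display; otherwise honour the property as-is.
	 */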
	if (nv_connector->dithering_mode == DITHERING_MODE_AUTO) {
		if (nv_crtc->base.fb->depth > connector->display_info.bpc * 3)
			mode = DITHERING_MODE_DYNAMIC2X2;
	} else {
		mode = nv_connector->dithering_mode;
	}

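	/* Likewise for dither depth: AUTO selects 8bpc when the sink reports
	 * at least 8 bits per component, otherwise the default is kept.
	 */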
	if (nv_connector->dithering_depth == DITHERING_DEPTH_AUTO) {
		if (connector->display_info.bpc >= 8)
			mode |= DITHERING_DEPTH_8BPC;
	} else {
		mode |= nv_connector->dithering_depth;
	}

	ret = RING_SPACE(evo, 2 + (update ? 2 : 0));
	if (ret == 0) {
		BEGIN_RING(evo, 0, NV50_EVO_CRTC(head, DITHER_CTRL), 1);
		OUT_RING  (evo, mode);
		if (update) {
			BEGIN_RING(evo, 0, NV50_EVO_UPDATE, 1);
			OUT_RING  (evo, 0);
			FIRE_RING (evo);
		}
	}

	return ret;
}

struct nouveau_connector *
nouveau_crtc_connector_get(struct nouveau_crtc *nv_crtc)
{
	struct drm_device *dev = nv_crtc->base.dev;
	struct drm_connector *connector;
	struct drm_crtc *crtc = to_drm_crtc(nv_crtc);

	/* The safest approach is to find an encoder with the right crtc, that
	 * is also linked to a connector. */
	list_for_each_entry(connector, &dev->mode_config.connector_list, head) {
		if (connector->encoder)
			if (connector->encoder->crtc == crtc)
				return nouveau_connector(connector);
	}

	return NULL;
}

static int
nv50_crtc_set_scale(struct nouveau_crtc *nv_crtc, bool update)
{
	struct nouveau_connector *nv_connector;
	struct drm_crtc *crtc = &nv_crtc->base;
	struct drm_device *dev = crtc->dev;
	struct nouveau_channel *evo = nv50_display(dev)->master;
	struct drm_display_mode *umode = &crtc->mode;
	struct drm_display_mode *omode;
	int scaling_mode, ret;
	u32 ctrl = 0, oX, oY;

	NV_DEBUG_KMS(dev, "\n");

	nv_connector = nouveau_crtc_connector_get(nv_crtc);
	if (!nv_connector || !nv_connector->native_mode) {
		NV_ERROR(dev, "no native mode, forcing panel scaling\n");
		scaling_mode = DRM_MODE_SCALE_NONE;
	} else {
		scaling_mode = nv_connector->scaling_mode;
	}

	/* start off at the resolution we programmed the crtc for, this
	 * effectively handles NONE/FULL scaling
	 */
	if (scaling_mode != DRM_MODE_SCALE_NONE)
		omode = nv_connector->native_mode;
	else
		omode = umode;

	oX = omode->hdisplay;
	oY = omode->vdisplay;
	if (omode->flags & DRM_MODE_FLAG_DBLSCAN)
		oY *= 2;

	/* add overscan compensation if necessary, will keep the aspect
	 * ratio the same as the backend mode unless overridden by the
	 * user setting both hborder and vborder properties.
	 */
	if (nv_connector && (nv_connector->underscan == UNDERSCAN_ON ||
			     (nv_connector->underscan == UNDERSCAN_AUTO &&
			      nv_connector->edid &&
			      drm_detect_hdmi_monitor(nv_connector->edid)))) {
		u32 bX = nv_connector->underscan_hborder;
		u32 bY = nv_connector->underscan_vborder;
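		/* output aspect ratio as a fraction with 19 fractional bits */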
		u32 aspect = (oY << 19) / oX;

		if (bX) {
			oX -= (bX * 2);
			if (bY) oY -= (bY * 2);
			else    oY  = ((oX * aspect) + (aspect / 2)) >> 19;
		} else {
			oX -= (oX >> 4) + 32;
			if (bY) oY -= (bY * 2);
			else    oY  = ((oX * aspect) + (aspect / 2)) >> 19;
		}
	}

	/* handle CENTER/ASPECT scaling, taking into account the areas
	 * removed already for overscan compensation
	 */
	switch (scaling_mode) {
	case DRM_MODE_SCALE_CENTER:
		oX = min((u32)umode->hdisplay, oX);
		oY = min((u32)umode->vdisplay, oY);
		/* fall-through */
	case DRM_MODE_SCALE_ASPECT:
		if (oY < oX) {
			u32 aspect = (umode->hdisplay << 19) / umode->vdisplay;
			oX = ((oY * aspect) + (aspect / 2)) >> 19;
		} else {
			u32 aspect = (umode->vdisplay << 19) / umode->hdisplay;
			oY = ((oX * aspect) + (aspect / 2)) >> 19;
		}
		break;
	default:
		break;
	}

	if (umode->hdisplay != oX || umode->vdisplay != oY ||
	    umode->flags & DRM_MODE_FLAG_INTERLACE ||
	    umode->flags & DRM_MODE_FLAG_DBLSCAN)
		ctrl |= NV50_EVO_CRTC_SCALE_CTRL_ACTIVE;

	ret = RING_SPACE(evo, 5);
	if (ret)
		return ret;

	BEGIN_RING(evo, 0, NV50_EVO_CRTC(nv_crtc->index, SCALE_CTRL), 1);
	OUT_RING  (evo, ctrl);
	BEGIN_RING(evo, 0, NV50_EVO_CRTC(nv_crtc->index, SCALE_RES1), 2);
	OUT_RING  (evo, oY << 16 | oX);
	OUT_RING  (evo, oY << 16 | oX);

	if (update) {
		nv50_display_flip_stop(crtc);
		nv50_display_sync(dev);
		nv50_display_flip_next(crtc, crtc->fb, NULL);
	}

	return 0;
}

int
nv50_crtc_set_clock(struct drm_device *dev, int head, int pclk)
{
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct pll_lims pll;
	uint32_t reg1, reg2;
	int ret, N1, M1, N2, M2, P;

	ret = get_pll_limits(dev, PLL_VPLL0 + head, &pll);
	if (ret)
		return ret;

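	/* PLLs with a second VCO stage get both N/M pairs plus the post
	 * divider programmed; otherwise fall through to the single-stage
	 * paths below.
	 */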
	if (pll.vco2.maxfreq) {
		ret = nv50_calc_pll(dev, &pll, pclk, &N1, &M1, &N2, &M2, &P);
		if (ret <= 0)
			return 0;

		NV_DEBUG(dev, "pclk %d out %d NM1 %d %d NM2 %d %d P %d\n",
			 pclk, ret, N1, M1, N2, M2, P);

		reg1 = nv_rd32(dev, pll.reg + 4) & 0xff00ff00;
		reg2 = nv_rd32(dev, pll.reg + 8) & 0x8000ff00;
		nv_wr32(dev, pll.reg + 0, 0x10000611);
		nv_wr32(dev, pll.reg + 4, reg1 | (M1 << 16) | N1);
		nv_wr32(dev, pll.reg + 8, reg2 | (P << 28) | (M2 << 16) | N2);
	} else
	if (dev_priv->chipset < NV_C0) {
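		/* single-stage PLL with a fractional feedback divider (fN) */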
		ret = nva3_calc_pll(dev, &pll, pclk, &N1, &N2, &M1, &P);
		if (ret <= 0)
			return 0;

		NV_DEBUG(dev, "pclk %d out %d N %d fN 0x%04x M %d P %d\n",
			 pclk, ret, N1, N2, M1, P);

		reg1 = nv_rd32(dev, pll.reg + 4) & 0xffc00000;
		nv_wr32(dev, pll.reg + 0, 0x50000610);
		nv_wr32(dev, pll.reg + 4, reg1 | (P << 16) | (M1 << 8) | N1);
		nv_wr32(dev, pll.reg + 8, N2);
	} else {
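		/* NVC0 and newer use a different VPLL register layout */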
		ret = nva3_calc_pll(dev, &pll, pclk, &N1, &N2, &M1, &P);
		if (ret <= 0)
			return 0;

		NV_DEBUG(dev, "pclk %d out %d N %d fN 0x%04x M %d P %d\n",
			 pclk, ret, N1, N2, M1, P);

		nv_mask(dev, pll.reg + 0x0c, 0x00000000, 0x00000100);
		nv_wr32(dev, pll.reg + 0x04, (P << 16) | (N1 << 8) | M1);
		nv_wr32(dev, pll.reg + 0x10, N2 << 16);
	}

	return 0;
}

static void
nv50_crtc_destroy(struct drm_crtc *crtc)
{
	struct drm_device *dev;
	struct nouveau_crtc *nv_crtc;

	if (!crtc)
		return;

	dev = crtc->dev;
	nv_crtc = nouveau_crtc(crtc);

	NV_DEBUG_KMS(dev, "\n");

	drm_crtc_cleanup(&nv_crtc->base);

	nouveau_bo_unmap(nv_crtc->lut.nvbo);
	nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);
	nouveau_bo_unmap(nv_crtc->cursor.nvbo);
	nouveau_bo_ref(NULL, &nv_crtc->cursor.nvbo);
	kfree(nv_crtc);
}

int
nv50_crtc_cursor_set(struct drm_crtc *crtc, struct drm_file *file_priv,
		     uint32_t buffer_handle, uint32_t width, uint32_t height)
{
	struct drm_device *dev = crtc->dev;
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct nouveau_bo *cursor = NULL;
	struct drm_gem_object *gem;
	int ret = 0, i;

	if (!buffer_handle) {
		nv_crtc->cursor.hide(nv_crtc, true);
		return 0;
	}

	if (width != 64 || height != 64)
		return -EINVAL;

	gem = drm_gem_object_lookup(dev, file_priv, buffer_handle);
	if (!gem)
		return -ENOENT;
	cursor = nouveau_gem_object(gem);

	ret = nouveau_bo_map(cursor);
	if (ret)
		goto out;

	/* The simple will do for now. */
	for (i = 0; i < 64 * 64; i++)
		nouveau_bo_wr32(nv_crtc->cursor.nvbo, i, nouveau_bo_rd32(cursor, i));

	nouveau_bo_unmap(cursor);

	nv_crtc->cursor.set_offset(nv_crtc, nv_crtc->cursor.nvbo->bo.offset);
	nv_crtc->cursor.show(nv_crtc, true);

out:
	drm_gem_object_unreference_unlocked(gem);
	return ret;
}

int
nv50_crtc_cursor_move(struct drm_crtc *crtc, int x, int y)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);

	nv_crtc->cursor.set_pos(nv_crtc, x, y);
	return 0;
}

static void
nv50_crtc_gamma_set(struct drm_crtc *crtc, u16 *r, u16 *g, u16 *b,
		    uint32_t start, uint32_t size)
{
	int end = (start + size > 256) ? 256 : start + size, i;
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);

	for (i = start; i < end; i++) {
		nv_crtc->lut.r[i] = r[i];
		nv_crtc->lut.g[i] = g[i];
		nv_crtc->lut.b[i] = b[i];
	}

	/* We need to know the depth before we upload, but it's possible to
	 * get called before a framebuffer is bound.  If this is the case,
	 * mark the lut values as dirty by setting depth==0, and it'll be
	 * uploaded on the first mode_set_base()
	 */
	if (!nv_crtc->base.fb) {
		nv_crtc->lut.depth = 0;
		return;
	}

	nv50_crtc_lut_load(crtc);
}

static void
nv50_crtc_save(struct drm_crtc *crtc)
{
	NV_ERROR(crtc->dev, "!!\n");
}

static void
nv50_crtc_restore(struct drm_crtc *crtc)
{
	NV_ERROR(crtc->dev, "!!\n");
}

static const struct drm_crtc_funcs nv50_crtc_funcs = {
	.save = nv50_crtc_save,
	.restore = nv50_crtc_restore,
	.cursor_set = nv50_crtc_cursor_set,
	.cursor_move = nv50_crtc_cursor_move,
	.gamma_set = nv50_crtc_gamma_set,
	.set_config = drm_crtc_helper_set_config,
	.page_flip = nouveau_crtc_page_flip,
	.destroy = nv50_crtc_destroy,
};

static void
nv50_crtc_dpms(struct drm_crtc *crtc, int mode)
{
}

static void
nv50_crtc_prepare(struct drm_crtc *crtc)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct drm_device *dev = crtc->dev;

	NV_DEBUG_KMS(dev, "index %d\n", nv_crtc->index);

	nv50_display_flip_stop(crtc);
	drm_vblank_pre_modeset(dev, nv_crtc->index);
	nv50_crtc_blank(nv_crtc, true);
}

static void
nv50_crtc_commit(struct drm_crtc *crtc)
{
	struct drm_device *dev = crtc->dev;
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);

	NV_DEBUG_KMS(dev, "index %d\n", nv_crtc->index);

	nv50_crtc_blank(nv_crtc, false);
	drm_vblank_post_modeset(dev, nv_crtc->index);
	nv50_display_sync(dev);
	nv50_display_flip_next(crtc, crtc->fb, NULL);
}

static bool
nv50_crtc_mode_fixup(struct drm_crtc *crtc, struct drm_display_mode *mode,
		     struct drm_display_mode *adjusted_mode)
{
	return true;
}

static int
nv50_crtc_do_mode_set_base(struct drm_crtc *crtc,
			   struct drm_framebuffer *passed_fb,
			   int x, int y, bool atomic)
{
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	struct drm_device *dev = nv_crtc->base.dev;
	struct drm_nouveau_private *dev_priv = dev->dev_private;
	struct nouveau_channel *evo = nv50_display(dev)->master;
	struct drm_framebuffer *drm_fb;
	struct nouveau_framebuffer *fb;
	int ret;

	NV_DEBUG_KMS(dev, "index %d\n", nv_crtc->index);

	/* no fb bound */
	if (!atomic && !crtc->fb) {
		NV_DEBUG_KMS(dev, "No FB bound\n");
		return 0;
	}

	/* If atomic, we want to switch to the fb we were passed, so
	 * now we update pointers to do that.  (We don't pin; just
	 * assume we're already pinned and update the base address.)
	 */
	if (atomic) {
		drm_fb = passed_fb;
		fb = nouveau_framebuffer(passed_fb);
	} else {
		drm_fb = crtc->fb;
		fb = nouveau_framebuffer(crtc->fb);
		/* If not atomic, we can go ahead and pin, and unpin the
		 * old fb we were passed.
		 */
		ret = nouveau_bo_pin(fb->nvbo, TTM_PL_FLAG_VRAM);
		if (ret)
			return ret;

		if (passed_fb) {
			struct nouveau_framebuffer *ofb = nouveau_framebuffer(passed_fb);
			nouveau_bo_unpin(ofb->nvbo);
		}
	}

	nv_crtc->fb.offset = fb->nvbo->bo.offset;
	nv_crtc->fb.tile_flags = nouveau_bo_tile_layout(fb->nvbo);
	nv_crtc->fb.cpp = drm_fb->bits_per_pixel / 8;
	if (!nv_crtc->fb.blanked && dev_priv->chipset != 0x50) {
		ret = RING_SPACE(evo, 2);
		if (ret)
			return ret;

		BEGIN_RING(evo, 0, NV50_EVO_CRTC(nv_crtc->index, FB_DMA), 1);
		OUT_RING  (evo, fb->r_dma);
	}

	ret = RING_SPACE(evo, 12);
	if (ret)
		return ret;

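	/* point the head at the new scanout buffer: offset, size, pitch
	 * and format, followed by CLUT mode and framebuffer position.
	 */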
	BEGIN_RING(evo, 0, NV50_EVO_CRTC(nv_crtc->index, FB_OFFSET), 5);
	OUT_RING  (evo, nv_crtc->fb.offset >> 8);
	OUT_RING  (evo, 0);
	OUT_RING  (evo, (drm_fb->height << 16) | drm_fb->width);
	OUT_RING  (evo, fb->r_pitch);
	OUT_RING  (evo, fb->r_format);

	BEGIN_RING(evo, 0, NV50_EVO_CRTC(nv_crtc->index, CLUT_MODE), 1);
	OUT_RING  (evo, fb->base.depth == 8 ?
		   NV50_EVO_CRTC_CLUT_MODE_OFF : NV50_EVO_CRTC_CLUT_MODE_ON);

	BEGIN_RING(evo, 0, NV50_EVO_CRTC(nv_crtc->index, COLOR_CTRL), 1);
	OUT_RING  (evo, NV50_EVO_CRTC_COLOR_CTRL_COLOR);
	BEGIN_RING(evo, 0, NV50_EVO_CRTC(nv_crtc->index, FB_POS), 1);
	OUT_RING  (evo, (y << 16) | x);

	if (nv_crtc->lut.depth != fb->base.depth) {
		nv_crtc->lut.depth = fb->base.depth;
		nv50_crtc_lut_load(crtc);
	}

	return 0;
}

static int
nv50_crtc_mode_set(struct drm_crtc *crtc, struct drm_display_mode *umode,
		   struct drm_display_mode *mode, int x, int y,
		   struct drm_framebuffer *old_fb)
{
	struct drm_device *dev = crtc->dev;
	struct nouveau_channel *evo = nv50_display(dev)->master;
	struct nouveau_crtc *nv_crtc = nouveau_crtc(crtc);
	u32 head = nv_crtc->index * 0x400;
	u32 ilace = (mode->flags & DRM_MODE_FLAG_INTERLACE) ? 2 : 1;
	u32 vscan = (mode->flags & DRM_MODE_FLAG_DBLSCAN) ? 2 : 1;
	u32 hactive, hsynce, hbackp, hfrontp, hblanke, hblanks;
	u32 vactive, vsynce, vbackp, vfrontp, vblanke, vblanks;
	u32 vblan2e = 0, vblan2s = 1;
	int ret;

	/* hw timing description looks like this:
	 *
	 * <sync> <back porch> <---------display---------> <front porch>
	 * ______
	 *       |____________|---------------------------|____________|
	 *
	 *       ^ synce      ^ blanke                    ^ blanks     ^ active
	 *
	 * interlaced modes also have 2 additional values pointing at the end
	 * and start of the next field's blanking period.
	 */

	hactive = mode->htotal;
	hsynce  = mode->hsync_end - mode->hsync_start - 1;
	hbackp  = mode->htotal - mode->hsync_end;
	hblanke = hsynce + hbackp;
	hfrontp = mode->hsync_start - mode->hdisplay;
	hblanks = mode->htotal - hfrontp - 1;

	vactive = mode->vtotal * vscan / ilace;
	vsynce  = ((mode->vsync_end - mode->vsync_start) * vscan / ilace) - 1;
	vbackp  = (mode->vtotal - mode->vsync_end) * vscan / ilace;
	vblanke = vsynce + vbackp;
	vfrontp = (mode->vsync_start - mode->vdisplay) * vscan / ilace;
	vblanks = vactive - vfrontp - 1;
	if (mode->flags & DRM_MODE_FLAG_INTERLACE) {
		vblan2e = vactive + vsynce + vbackp;
		vblan2s = vblan2e + (mode->vdisplay * vscan / ilace);
		vactive = (vactive * 2) + 1;
	}

	ret = RING_SPACE(evo, 18);
	if (ret == 0) {
		BEGIN_RING(evo, 0, 0x0804 + head, 2);
		OUT_RING  (evo, 0x00800000 | mode->clock);
		OUT_RING  (evo, (ilace == 2) ? 2 : 0);
		BEGIN_RING(evo, 0, 0x0810 + head, 6);
		OUT_RING  (evo, 0x00000000); /* border colour */
		OUT_RING  (evo, (vactive << 16) | hactive);
		OUT_RING  (evo, ( vsynce << 16) | hsynce);
		OUT_RING  (evo, (vblanke << 16) | hblanke);
		OUT_RING  (evo, (vblanks << 16) | hblanks);
		OUT_RING  (evo, (vblan2e << 16) | vblan2s);
		BEGIN_RING(evo, 0, 0x082c + head, 1);
		OUT_RING  (evo, 0x00000000);
		BEGIN_RING(evo, 0, 0x0900 + head, 1);
		OUT_RING  (evo, 0x00000311); /* makes sync channel work */
		BEGIN_RING(evo, 0, 0x08c8 + head, 1);
		OUT_RING  (evo, (umode->vdisplay << 16) | umode->hdisplay);
		BEGIN_RING(evo, 0, 0x08d4 + head, 1);
		OUT_RING  (evo, 0x00000000); /* screen position */
	}

	nv_crtc->set_dither(nv_crtc, false);
	nv_crtc->set_scale(nv_crtc, false);

	return nv50_crtc_do_mode_set_base(crtc, old_fb, x, y, false);
}

static int
nv50_crtc_mode_set_base(struct drm_crtc *crtc, int x, int y,
			struct drm_framebuffer *old_fb)
{
	int ret;

	nv50_display_flip_stop(crtc);
	ret = nv50_crtc_do_mode_set_base(crtc, old_fb, x, y, false);
	if (ret)
		return ret;

	ret = nv50_display_sync(crtc->dev);
	if (ret)
		return ret;

	return nv50_display_flip_next(crtc, crtc->fb, NULL);
}

static int
nv50_crtc_mode_set_base_atomic(struct drm_crtc *crtc,
			       struct drm_framebuffer *fb,
			       int x, int y, enum mode_set_atomic state)
{
	int ret;

	nv50_display_flip_stop(crtc);
	ret = nv50_crtc_do_mode_set_base(crtc, fb, x, y, true);
	if (ret)
		return ret;

	return nv50_display_sync(crtc->dev);
}

static const struct drm_crtc_helper_funcs nv50_crtc_helper_funcs = {
	.dpms = nv50_crtc_dpms,
	.prepare = nv50_crtc_prepare,
	.commit = nv50_crtc_commit,
	.mode_fixup = nv50_crtc_mode_fixup,
	.mode_set = nv50_crtc_mode_set,
	.mode_set_base = nv50_crtc_mode_set_base,
	.mode_set_base_atomic = nv50_crtc_mode_set_base_atomic,
	.load_lut = nv50_crtc_lut_load,
};

int
nv50_crtc_create(struct drm_device *dev, int index)
{
	struct nouveau_crtc *nv_crtc = NULL;
	int ret, i;

	NV_DEBUG_KMS(dev, "\n");

	nv_crtc = kzalloc(sizeof(*nv_crtc), GFP_KERNEL);
	if (!nv_crtc)
		return -ENOMEM;

	/* Default CLUT parameters, will be activated on the hw upon
	 * first mode set.
	 */
	for (i = 0; i < 256; i++) {
		nv_crtc->lut.r[i] = i << 8;
		nv_crtc->lut.g[i] = i << 8;
		nv_crtc->lut.b[i] = i << 8;
	}
	nv_crtc->lut.depth = 0;

	ret = nouveau_bo_new(dev, 4096, 0x100, TTM_PL_FLAG_VRAM,
			     0, 0x0000, &nv_crtc->lut.nvbo);
	if (!ret) {
		ret = nouveau_bo_pin(nv_crtc->lut.nvbo, TTM_PL_FLAG_VRAM);
		if (!ret)
			ret = nouveau_bo_map(nv_crtc->lut.nvbo);
		if (ret)
			nouveau_bo_ref(NULL, &nv_crtc->lut.nvbo);
	}

	if (ret) {
		kfree(nv_crtc);
		return ret;
	}

	nv_crtc->index = index;

	/* set function pointers */
	nv_crtc->set_dither = nv50_crtc_set_dither;
	nv_crtc->set_scale = nv50_crtc_set_scale;

	drm_crtc_init(dev, &nv_crtc->base, &nv50_crtc_funcs);
	drm_crtc_helper_add(&nv_crtc->base, &nv50_crtc_helper_funcs);
	drm_mode_crtc_set_gamma_size(&nv_crtc->base, 256);

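	/* 64x64, 4 bytes per pixel cursor image */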
	ret = nouveau_bo_new(dev, 64*64*4, 0x100, TTM_PL_FLAG_VRAM,
			     0, 0x0000, &nv_crtc->cursor.nvbo);
	if (!ret) {
		ret = nouveau_bo_pin(nv_crtc->cursor.nvbo, TTM_PL_FLAG_VRAM);
		if (!ret)
			ret = nouveau_bo_map(nv_crtc->cursor.nvbo);
		if (ret)
			nouveau_bo_ref(NULL, &nv_crtc->cursor.nvbo);
	}

	nv50_cursor_init(nv_crtc);
	return 0;
}