kirin_drm_ade.c 27.9 KB
Newer Older
1
// SPDX-License-Identifier: GPL-2.0-only
2 3 4 5 6 7 8 9 10 11 12 13 14 15 16
/*
 * Hisilicon Hi6220 SoC ADE(Advanced Display Engine)'s crtc&plane driver
 *
 * Copyright (c) 2016 Linaro Limited.
 * Copyright (c) 2014-2016 Hisilicon Limited.
 *
 * Author:
 *	Xinliang Liu <z.liuxinliang@hisilicon.com>
 *	Xinliang Liu <xinliang.liu@linaro.org>
 *	Xinwei Kong <kong.kongxinwei@hisilicon.com>
 */

#include <linux/bitops.h>
#include <linux/clk.h>
#include <linux/mfd/syscon.h>
S
Sam Ravnborg 已提交
17
#include <linux/platform_device.h>
18 19 20
#include <linux/regmap.h>
#include <linux/reset.h>

S
Sam Ravnborg 已提交
21 22
#include <video/display_timing.h>

23 24
#include <drm/drm_atomic.h>
#include <drm/drm_atomic_helper.h>
25
#include <drm/drm_crtc.h>
S
Sam Ravnborg 已提交
26
#include <drm/drm_drv.h>
27
#include <drm/drm_fb_cma_helper.h>
S
Sam Ravnborg 已提交
28
#include <drm/drm_fourcc.h>
29 30 31
#include <drm/drm_gem_cma_helper.h>
#include <drm/drm_plane_helper.h>
#include <drm/drm_probe_helper.h>
S
Sam Ravnborg 已提交
32
#include <drm/drm_vblank.h>
33 34 35 36

#include "kirin_drm_drv.h"
#include "kirin_ade_reg.h"

37 38 39 40
#define PRIMARY_CH	ADE_CH1 /* primary plane */
#define OUT_OVLY	ADE_OVLY2 /* output overlay compositor */
#define ADE_DEBUG	1

41 42
#define to_kirin_crtc(crtc) \
	container_of(crtc, struct kirin_crtc, base)
43

44 45 46
#define to_kirin_plane(plane) \
	container_of(plane, struct kirin_plane, base)

47

48 49 50 51 52 53 54 55 56 57 58
struct ade_hw_ctx {
	void __iomem  *base;
	struct regmap *noc_regmap;
	struct clk *ade_core_clk;
	struct clk *media_noc_clk;
	struct clk *ade_pix_clk;
	struct reset_control *reset;
	bool power_on;
	int irq;
};

59
struct kirin_crtc {
60
	struct drm_crtc base;
61
	void *hw_ctx;
62
	struct work_struct display_reset_wq;
63 64 65
	bool enable;
};

66
struct kirin_plane {
67
	struct drm_plane base;
68 69
	void *hw_ctx;
	u32 ch;
70 71
};

72
struct ade_data {
73
	struct kirin_crtc crtc;
74
	struct kirin_plane planes[ADE_CH_NUM];
75
	struct ade_hw_ctx *hw_ctx;
76 77
};

78 79 80 81 82 83 84 85 86 87 88 89 90 91 92 93 94 95 96 97 98 99 100 101 102 103 104 105 106 107 108 109 110 111 112 113 114 115 116 117 118 119 120 121 122 123 124 125 126 127 128 129 130 131 132 133 134 135
/* ade-format info: */
struct ade_format {
	u32 pixel_format;
	enum ade_fb_format ade_format;
};

static const struct ade_format ade_formats[] = {
	/* 16bpp RGB: */
	{ DRM_FORMAT_RGB565, ADE_RGB_565 },
	{ DRM_FORMAT_BGR565, ADE_BGR_565 },
	/* 24bpp RGB: */
	{ DRM_FORMAT_RGB888, ADE_RGB_888 },
	{ DRM_FORMAT_BGR888, ADE_BGR_888 },
	/* 32bpp [A]RGB: */
	{ DRM_FORMAT_XRGB8888, ADE_XRGB_8888 },
	{ DRM_FORMAT_XBGR8888, ADE_XBGR_8888 },
	{ DRM_FORMAT_RGBA8888, ADE_RGBA_8888 },
	{ DRM_FORMAT_BGRA8888, ADE_BGRA_8888 },
	{ DRM_FORMAT_ARGB8888, ADE_ARGB_8888 },
	{ DRM_FORMAT_ABGR8888, ADE_ABGR_8888 },
};

static const u32 channel_formats1[] = {
	/* channel 1,2,3,4 */
	DRM_FORMAT_RGB565, DRM_FORMAT_BGR565, DRM_FORMAT_RGB888,
	DRM_FORMAT_BGR888, DRM_FORMAT_XRGB8888, DRM_FORMAT_XBGR8888,
	DRM_FORMAT_RGBA8888, DRM_FORMAT_BGRA8888, DRM_FORMAT_ARGB8888,
	DRM_FORMAT_ABGR8888
};

u32 ade_get_channel_formats(u8 ch, const u32 **formats)
{
	switch (ch) {
	case ADE_CH1:
		*formats = channel_formats1;
		return ARRAY_SIZE(channel_formats1);
	default:
		DRM_ERROR("no this channel %d\n", ch);
		*formats = NULL;
		return 0;
	}
}

/* convert from fourcc format to ade format */
static u32 ade_get_format(u32 pixel_format)
{
	int i;

	for (i = 0; i < ARRAY_SIZE(ade_formats); i++)
		if (ade_formats[i].pixel_format == pixel_format)
			return ade_formats[i].ade_format;

	/* not found */
	DRM_ERROR("Not found pixel format!!fourcc_format= %d\n",
		  pixel_format);
	return ADE_FORMAT_UNSUPPORT;
}

136 137 138 139 140 141 142 143 144 145 146 147 148 149 150 151 152 153 154 155 156 157 158 159 160 161 162 163 164 165 166 167
static void ade_update_reload_bit(void __iomem *base, u32 bit_num, u32 val)
{
	u32 bit_ofst, reg_num;

	bit_ofst = bit_num % 32;
	reg_num = bit_num / 32;

	ade_update_bits(base + ADE_RELOAD_DIS(reg_num), bit_ofst,
			MASK(1), !!val);
}

static u32 ade_read_reload_bit(void __iomem *base, u32 bit_num)
{
	u32 tmp, bit_ofst, reg_num;

	bit_ofst = bit_num % 32;
	reg_num = bit_num / 32;

	tmp = readl(base + ADE_RELOAD_DIS(reg_num));
	return !!(BIT(bit_ofst) & tmp);
}

static void ade_init(struct ade_hw_ctx *ctx)
{
	void __iomem *base = ctx->base;

	/* enable clk gate */
	ade_update_bits(base + ADE_CTRL1, AUTO_CLK_GATE_EN_OFST,
			AUTO_CLK_GATE_EN, ADE_ENABLE);
	/* clear overlay */
	writel(0, base + ADE_OVLY1_TRANS_CFG);
	writel(0, base + ADE_OVLY_CTL);
168
	writel(0, base + ADE_OVLYX_CTL(OUT_OVLY));
169 170 171 172 173 174 175 176 177 178 179
	/* clear reset and reload regs */
	writel(MASK(32), base + ADE_SOFT_RST_SEL(0));
	writel(MASK(32), base + ADE_SOFT_RST_SEL(1));
	writel(MASK(32), base + ADE_RELOAD_DIS(0));
	writel(MASK(32), base + ADE_RELOAD_DIS(1));
	/*
	 * for video mode, all the ade registers should
	 * become effective at frame end.
	 */
	ade_update_bits(base + ADE_CTRL, FRM_END_START_OFST,
			FRM_END_START_MASK, REG_EFFECTIVE_IN_ADEEN_FRMEND);
180
	ade_update_bits(base + LDI_INT_EN, UNDERFLOW_INT_EN_OFST, MASK(1), 1);
181 182
}

183 184 185 186
static bool ade_crtc_mode_fixup(struct drm_crtc *crtc,
				const struct drm_display_mode *mode,
				struct drm_display_mode *adjusted_mode)
{
187 188
	struct kirin_crtc *kcrtc = to_kirin_crtc(crtc);
	struct ade_hw_ctx *ctx = kcrtc->hw_ctx;
189 190 191 192 193 194 195

	adjusted_mode->clock =
		clk_round_rate(ctx->ade_pix_clk, mode->clock * 1000) / 1000;
	return true;
}


196 197 198 199 200 201 202 203 204 205 206 207 208 209 210 211 212
static void ade_set_pix_clk(struct ade_hw_ctx *ctx,
			    struct drm_display_mode *mode,
			    struct drm_display_mode *adj_mode)
{
	u32 clk_Hz = mode->clock * 1000;
	int ret;

	/*
	 * Success should be guaranteed in mode_valid call back,
	 * so failure shouldn't happen here
	 */
	ret = clk_set_rate(ctx->ade_pix_clk, clk_Hz);
	if (ret)
		DRM_ERROR("failed to set pixel clk %dHz (%d)\n", clk_Hz, ret);
	adj_mode->clock = clk_get_rate(ctx->ade_pix_clk) / 1000;
}

213
static void ade_ldi_set_mode(struct ade_hw_ctx *ctx,
214 215 216 217 218 219 220 221 222 223 224 225 226 227 228 229 230 231 232 233 234 235 236 237 238 239 240 241 242 243 244 245 246
			     struct drm_display_mode *mode,
			     struct drm_display_mode *adj_mode)
{
	void __iomem *base = ctx->base;
	u32 width = mode->hdisplay;
	u32 height = mode->vdisplay;
	u32 hfp, hbp, hsw, vfp, vbp, vsw;
	u32 plr_flags;

	plr_flags = (mode->flags & DRM_MODE_FLAG_NVSYNC) ? FLAG_NVSYNC : 0;
	plr_flags |= (mode->flags & DRM_MODE_FLAG_NHSYNC) ? FLAG_NHSYNC : 0;
	hfp = mode->hsync_start - mode->hdisplay;
	hbp = mode->htotal - mode->hsync_end;
	hsw = mode->hsync_end - mode->hsync_start;
	vfp = mode->vsync_start - mode->vdisplay;
	vbp = mode->vtotal - mode->vsync_end;
	vsw = mode->vsync_end - mode->vsync_start;
	if (vsw > 15) {
		DRM_DEBUG_DRIVER("vsw exceeded 15\n");
		vsw = 15;
	}

	writel((hbp << HBP_OFST) | hfp, base + LDI_HRZ_CTRL0);
	 /* the configured value is actual value - 1 */
	writel(hsw - 1, base + LDI_HRZ_CTRL1);
	writel((vbp << VBP_OFST) | vfp, base + LDI_VRT_CTRL0);
	 /* the configured value is actual value - 1 */
	writel(vsw - 1, base + LDI_VRT_CTRL1);
	 /* the configured value is actual value - 1 */
	writel(((height - 1) << VSIZE_OFST) | (width - 1),
	       base + LDI_DSP_SIZE);
	writel(plr_flags, base + LDI_PLR_CTRL);

247 248 249 250
	/* set overlay compositor output size */
	writel(((width - 1) << OUTPUT_XSIZE_OFST) | (height - 1),
	       base + ADE_OVLY_OUTPUT_SIZE(OUT_OVLY));

251 252 253 254 255 256 257 258 259 260 261 262 263 264 265 266 267 268 269 270 271 272 273 274 275 276 277 278 279 280 281 282 283 284 285 286 287 288 289 290 291 292 293 294 295 296 297 298 299 300 301 302
	/* ctran6 setting */
	writel(CTRAN_BYPASS_ON, base + ADE_CTRAN_DIS(ADE_CTRAN6));
	 /* the configured value is actual value - 1 */
	writel(width * height - 1, base + ADE_CTRAN_IMAGE_SIZE(ADE_CTRAN6));
	ade_update_reload_bit(base, CTRAN_OFST + ADE_CTRAN6, 0);

	ade_set_pix_clk(ctx, mode, adj_mode);

	DRM_DEBUG_DRIVER("set mode: %dx%d\n", width, height);
}

static int ade_power_up(struct ade_hw_ctx *ctx)
{
	int ret;

	ret = clk_prepare_enable(ctx->media_noc_clk);
	if (ret) {
		DRM_ERROR("failed to enable media_noc_clk (%d)\n", ret);
		return ret;
	}

	ret = reset_control_deassert(ctx->reset);
	if (ret) {
		DRM_ERROR("failed to deassert reset\n");
		return ret;
	}

	ret = clk_prepare_enable(ctx->ade_core_clk);
	if (ret) {
		DRM_ERROR("failed to enable ade_core_clk (%d)\n", ret);
		return ret;
	}

	ade_init(ctx);
	ctx->power_on = true;
	return 0;
}

static void ade_power_down(struct ade_hw_ctx *ctx)
{
	void __iomem *base = ctx->base;

	writel(ADE_DISABLE, base + LDI_CTRL);
	/* dsi pixel off */
	writel(DSI_PCLK_OFF, base + LDI_HDMI_DSI_GT);

	clk_disable_unprepare(ctx->ade_core_clk);
	reset_control_assert(ctx->reset);
	clk_disable_unprepare(ctx->media_noc_clk);
	ctx->power_on = false;
}

303
static void ade_set_medianoc_qos(struct ade_hw_ctx *ctx)
304 305 306 307 308 309 310 311 312 313 314 315 316 317
{
	struct regmap *map = ctx->noc_regmap;

	regmap_update_bits(map, ADE0_QOSGENERATOR_MODE,
			   QOSGENERATOR_MODE_MASK, BYPASS_MODE);
	regmap_update_bits(map, ADE0_QOSGENERATOR_EXTCONTROL,
			   SOCKET_QOS_EN, SOCKET_QOS_EN);

	regmap_update_bits(map, ADE1_QOSGENERATOR_MODE,
			   QOSGENERATOR_MODE_MASK, BYPASS_MODE);
	regmap_update_bits(map, ADE1_QOSGENERATOR_EXTCONTROL,
			   SOCKET_QOS_EN, SOCKET_QOS_EN);
}

318
static int ade_crtc_enable_vblank(struct drm_crtc *crtc)
319
{
320 321
	struct kirin_crtc *kcrtc = to_kirin_crtc(crtc);
	struct ade_hw_ctx *ctx = kcrtc->hw_ctx;
322 323 324 325 326 327 328 329 330 331 332
	void __iomem *base = ctx->base;

	if (!ctx->power_on)
		(void)ade_power_up(ctx);

	ade_update_bits(base + LDI_INT_EN, FRAME_END_INT_EN_OFST,
			MASK(1), 1);

	return 0;
}

333
static void ade_crtc_disable_vblank(struct drm_crtc *crtc)
334
{
335 336
	struct kirin_crtc *kcrtc = to_kirin_crtc(crtc);
	struct ade_hw_ctx *ctx = kcrtc->hw_ctx;
337 338 339 340 341 342 343 344 345 346 347
	void __iomem *base = ctx->base;

	if (!ctx->power_on) {
		DRM_ERROR("power is down! vblank disable fail\n");
		return;
	}

	ade_update_bits(base + LDI_INT_EN, FRAME_END_INT_EN_OFST,
			MASK(1), 0);
}

348 349
static void drm_underflow_wq(struct work_struct *work)
{
350
	struct kirin_crtc *acrtc = container_of(work, struct kirin_crtc,
351 352 353 354 355 356 357 358
					      display_reset_wq);
	struct drm_device *drm_dev = (&acrtc->base)->dev;
	struct drm_atomic_state *state;

	state = drm_atomic_helper_suspend(drm_dev);
	drm_atomic_helper_resume(drm_dev, state);
}

359 360
static irqreturn_t ade_irq_handler(int irq, void *data)
{
361 362 363
	struct kirin_crtc *kcrtc = data;
	struct ade_hw_ctx *ctx = kcrtc->hw_ctx;
	struct drm_crtc *crtc = &kcrtc->base;
364 365 366 367 368 369 370 371 372 373 374 375
	void __iomem *base = ctx->base;
	u32 status;

	status = readl(base + LDI_MSK_INT);
	DRM_DEBUG_VBL("LDI IRQ: status=0x%X\n", status);

	/* vblank irq */
	if (status & BIT(FRAME_END_INT_EN_OFST)) {
		ade_update_bits(base + LDI_INT_CLR, FRAME_END_INT_EN_OFST,
				MASK(1), 1);
		drm_crtc_handle_vblank(crtc);
	}
376 377 378 379
	if (status & BIT(UNDERFLOW_INT_EN_OFST)) {
		ade_update_bits(base + LDI_INT_CLR, UNDERFLOW_INT_EN_OFST,
				MASK(1), 1);
		DRM_ERROR("LDI underflow!");
380
		schedule_work(&kcrtc->display_reset_wq);
381
	}
382 383 384 385

	return IRQ_HANDLED;
}

386
static void ade_display_enable(struct ade_hw_ctx *ctx)
387 388
{
	void __iomem *base = ctx->base;
389
	u32 out_fmt = LDI_OUT_RGB_888;
390

391 392 393 394
	/* enable output overlay compositor */
	writel(ADE_ENABLE, base + ADE_OVLYX_CTL(OUT_OVLY));
	ade_update_reload_bit(base, OVLY_OFST + OUT_OVLY, 0);

395 396 397 398 399 400 401 402 403 404 405 406 407
	/* display source setting */
	writel(DISP_SRC_OVLY2, base + ADE_DISP_SRC_CFG);

	/* enable ade */
	writel(ADE_ENABLE, base + ADE_EN);
	/* enable ldi */
	writel(NORMAL_MODE, base + LDI_WORK_MODE);
	writel((out_fmt << BPP_OFST) | DATA_GATE_EN | LDI_EN,
	       base + LDI_CTRL);
	/* dsi pixel on */
	writel(DSI_PCLK_ON, base + LDI_HDMI_DSI_GT);
}

408 409 410 411 412 413 414 415 416 417 418 419 420 421 422 423 424 425 426 427 428 429 430 431 432 433 434 435 436 437 438 439 440 441 442 443 444 445 446 447 448 449 450 451 452 453 454 455 456 457 458 459 460 461 462 463 464 465 466 467 468 469 470 471 472 473 474 475 476 477 478 479 480 481 482 483 484 485 486 487 488 489 490 491 492 493 494 495 496 497 498
#if ADE_DEBUG
static void ade_rdma_dump_regs(void __iomem *base, u32 ch)
{
	u32 reg_ctrl, reg_addr, reg_size, reg_stride, reg_space, reg_en;
	u32 val;

	reg_ctrl = RD_CH_CTRL(ch);
	reg_addr = RD_CH_ADDR(ch);
	reg_size = RD_CH_SIZE(ch);
	reg_stride = RD_CH_STRIDE(ch);
	reg_space = RD_CH_SPACE(ch);
	reg_en = RD_CH_EN(ch);

	val = ade_read_reload_bit(base, RDMA_OFST + ch);
	DRM_DEBUG_DRIVER("[rdma%d]: reload(%d)\n", ch + 1, val);
	val = readl(base + reg_ctrl);
	DRM_DEBUG_DRIVER("[rdma%d]: reg_ctrl(0x%08x)\n", ch + 1, val);
	val = readl(base + reg_addr);
	DRM_DEBUG_DRIVER("[rdma%d]: reg_addr(0x%08x)\n", ch + 1, val);
	val = readl(base + reg_size);
	DRM_DEBUG_DRIVER("[rdma%d]: reg_size(0x%08x)\n", ch + 1, val);
	val = readl(base + reg_stride);
	DRM_DEBUG_DRIVER("[rdma%d]: reg_stride(0x%08x)\n", ch + 1, val);
	val = readl(base + reg_space);
	DRM_DEBUG_DRIVER("[rdma%d]: reg_space(0x%08x)\n", ch + 1, val);
	val = readl(base + reg_en);
	DRM_DEBUG_DRIVER("[rdma%d]: reg_en(0x%08x)\n", ch + 1, val);
}

static void ade_clip_dump_regs(void __iomem *base, u32 ch)
{
	u32 val;

	val = ade_read_reload_bit(base, CLIP_OFST + ch);
	DRM_DEBUG_DRIVER("[clip%d]: reload(%d)\n", ch + 1, val);
	val = readl(base + ADE_CLIP_DISABLE(ch));
	DRM_DEBUG_DRIVER("[clip%d]: reg_clip_disable(0x%08x)\n", ch + 1, val);
	val = readl(base + ADE_CLIP_SIZE0(ch));
	DRM_DEBUG_DRIVER("[clip%d]: reg_clip_size0(0x%08x)\n", ch + 1, val);
	val = readl(base + ADE_CLIP_SIZE1(ch));
	DRM_DEBUG_DRIVER("[clip%d]: reg_clip_size1(0x%08x)\n", ch + 1, val);
}

static void ade_compositor_routing_dump_regs(void __iomem *base, u32 ch)
{
	u8 ovly_ch = 0; /* TODO: Only primary plane now */
	u32 val;

	val = readl(base + ADE_OVLY_CH_XY0(ovly_ch));
	DRM_DEBUG_DRIVER("[overlay ch%d]: reg_ch_xy0(0x%08x)\n", ovly_ch, val);
	val = readl(base + ADE_OVLY_CH_XY1(ovly_ch));
	DRM_DEBUG_DRIVER("[overlay ch%d]: reg_ch_xy1(0x%08x)\n", ovly_ch, val);
	val = readl(base + ADE_OVLY_CH_CTL(ovly_ch));
	DRM_DEBUG_DRIVER("[overlay ch%d]: reg_ch_ctl(0x%08x)\n", ovly_ch, val);
}

static void ade_dump_overlay_compositor_regs(void __iomem *base, u32 comp)
{
	u32 val;

	val = ade_read_reload_bit(base, OVLY_OFST + comp);
	DRM_DEBUG_DRIVER("[overlay%d]: reload(%d)\n", comp + 1, val);
	writel(ADE_ENABLE, base + ADE_OVLYX_CTL(comp));
	DRM_DEBUG_DRIVER("[overlay%d]: reg_ctl(0x%08x)\n", comp + 1, val);
	val = readl(base + ADE_OVLY_CTL);
	DRM_DEBUG_DRIVER("ovly_ctl(0x%08x)\n", val);
}

static void ade_dump_regs(void __iomem *base)
{
	u32 i;

	/* dump channel regs */
	for (i = 0; i < ADE_CH_NUM; i++) {
		/* dump rdma regs */
		ade_rdma_dump_regs(base, i);

		/* dump clip regs */
		ade_clip_dump_regs(base, i);

		/* dump compositor routing regs */
		ade_compositor_routing_dump_regs(base, i);
	}

	/* dump overlay compositor regs */
	ade_dump_overlay_compositor_regs(base, OUT_OVLY);
}
#else
static void ade_dump_regs(void __iomem *base) { }
#endif

499 500
static void ade_crtc_atomic_enable(struct drm_crtc *crtc,
				   struct drm_crtc_state *old_state)
501
{
502 503
	struct kirin_crtc *kcrtc = to_kirin_crtc(crtc);
	struct ade_hw_ctx *ctx = kcrtc->hw_ctx;
504 505
	int ret;

506
	if (kcrtc->enable)
507 508 509 510 511 512 513 514
		return;

	if (!ctx->power_on) {
		ret = ade_power_up(ctx);
		if (ret)
			return;
	}

515
	ade_set_medianoc_qos(ctx);
516
	ade_display_enable(ctx);
517
	ade_dump_regs(ctx->base);
518
	drm_crtc_vblank_on(crtc);
519
	kcrtc->enable = true;
520 521
}

522 523
static void ade_crtc_atomic_disable(struct drm_crtc *crtc,
				    struct drm_crtc_state *old_state)
524
{
525 526
	struct kirin_crtc *kcrtc = to_kirin_crtc(crtc);
	struct ade_hw_ctx *ctx = kcrtc->hw_ctx;
527

528
	if (!kcrtc->enable)
529 530
		return;

531
	drm_crtc_vblank_off(crtc);
532
	ade_power_down(ctx);
533
	kcrtc->enable = false;
534 535 536 537
}

static void ade_crtc_mode_set_nofb(struct drm_crtc *crtc)
{
538 539
	struct kirin_crtc *kcrtc = to_kirin_crtc(crtc);
	struct ade_hw_ctx *ctx = kcrtc->hw_ctx;
540 541 542 543 544
	struct drm_display_mode *mode = &crtc->state->mode;
	struct drm_display_mode *adj_mode = &crtc->state->adjusted_mode;

	if (!ctx->power_on)
		(void)ade_power_up(ctx);
545
	ade_ldi_set_mode(ctx, mode, adj_mode);
546 547 548 549 550
}

static void ade_crtc_atomic_begin(struct drm_crtc *crtc,
				  struct drm_crtc_state *old_state)
{
551 552
	struct kirin_crtc *kcrtc = to_kirin_crtc(crtc);
	struct ade_hw_ctx *ctx = kcrtc->hw_ctx;
553 554
	struct drm_display_mode *mode = &crtc->state->mode;
	struct drm_display_mode *adj_mode = &crtc->state->adjusted_mode;
555 556 557

	if (!ctx->power_on)
		(void)ade_power_up(ctx);
558
	ade_ldi_set_mode(ctx, mode, adj_mode);
559 560 561 562 563 564
}

static void ade_crtc_atomic_flush(struct drm_crtc *crtc,
				  struct drm_crtc_state *old_state)

{
565 566
	struct kirin_crtc *kcrtc = to_kirin_crtc(crtc);
	struct ade_hw_ctx *ctx = kcrtc->hw_ctx;
567
	struct drm_pending_vblank_event *event = crtc->state->event;
568 569 570
	void __iomem *base = ctx->base;

	/* only crtc is enabled regs take effect */
571
	if (kcrtc->enable) {
572
		ade_dump_regs(base);
573 574 575
		/* flush ade registers */
		writel(ADE_ENABLE, base + ADE_EN);
	}
576 577 578 579 580 581 582 583 584 585 586

	if (event) {
		crtc->state->event = NULL;

		spin_lock_irq(&crtc->dev->event_lock);
		if (drm_crtc_vblank_get(crtc) == 0)
			drm_crtc_arm_vblank_event(crtc, event);
		else
			drm_crtc_send_vblank_event(crtc, event);
		spin_unlock_irq(&crtc->dev->event_lock);
	}
587 588 589
}

static const struct drm_crtc_helper_funcs ade_crtc_helper_funcs = {
590
	.mode_fixup	= ade_crtc_mode_fixup,
591 592 593
	.mode_set_nofb	= ade_crtc_mode_set_nofb,
	.atomic_begin	= ade_crtc_atomic_begin,
	.atomic_flush	= ade_crtc_atomic_flush,
594
	.atomic_enable	= ade_crtc_atomic_enable,
595
	.atomic_disable	= ade_crtc_atomic_disable,
596 597 598 599 600 601 602 603 604
};

static const struct drm_crtc_funcs ade_crtc_funcs = {
	.destroy	= drm_crtc_cleanup,
	.set_config	= drm_atomic_helper_set_config,
	.page_flip	= drm_atomic_helper_page_flip,
	.reset		= drm_atomic_helper_crtc_reset,
	.atomic_duplicate_state	= drm_atomic_helper_crtc_duplicate_state,
	.atomic_destroy_state	= drm_atomic_helper_crtc_destroy_state,
605 606
	.enable_vblank	= ade_crtc_enable_vblank,
	.disable_vblank	= ade_crtc_disable_vblank,
607 608 609 610 611 612 613 614 615 616 617 618 619
};

static int ade_crtc_init(struct drm_device *dev, struct drm_crtc *crtc,
			 struct drm_plane *plane)
{
	struct device_node *port;
	int ret;

	/* set crtc port so that
	 * drm_of_find_possible_crtcs call works
	 */
	port = of_get_child_by_name(dev->dev->of_node, "port");
	if (!port) {
620
		DRM_ERROR("no port node found in %pOF\n", dev->dev->of_node);
621 622 623 624 625 626 627 628 629 630 631 632 633 634 635 636 637
		return -EINVAL;
	}
	of_node_put(port);
	crtc->port = port;

	ret = drm_crtc_init_with_planes(dev, crtc, plane, NULL,
					&ade_crtc_funcs, NULL);
	if (ret) {
		DRM_ERROR("failed to init crtc.\n");
		return ret;
	}

	drm_crtc_helper_add(crtc, &ade_crtc_helper_funcs);

	return 0;
}

638 639 640 641
static void ade_rdma_set(void __iomem *base, struct drm_framebuffer *fb,
			 u32 ch, u32 y, u32 in_h, u32 fmt)
{
	struct drm_gem_cma_object *obj = drm_fb_cma_get_gem_obj(fb, 0);
642
	struct drm_format_name_buf format_name;
643 644 645 646 647 648 649
	u32 reg_ctrl, reg_addr, reg_size, reg_stride, reg_space, reg_en;
	u32 stride = fb->pitches[0];
	u32 addr = (u32)obj->paddr + y * stride;

	DRM_DEBUG_DRIVER("rdma%d: (y=%d, height=%d), stride=%d, paddr=0x%x\n",
			 ch + 1, y, in_h, stride, (u32)obj->paddr);
	DRM_DEBUG_DRIVER("addr=0x%x, fb:%dx%d, pixel_format=%d(%s)\n",
650
			 addr, fb->width, fb->height, fmt,
V
Ville Syrjälä 已提交
651
			 drm_get_format_name(fb->format->format, &format_name));
652 653 654 655 656 657 658 659 660 661 662 663 664 665 666 667 668 669 670 671 672 673 674 675 676 677 678 679 680 681 682 683 684 685 686 687 688 689 690 691 692 693 694 695 696 697 698 699 700 701 702 703 704 705 706 707 708 709 710 711 712 713 714 715 716 717 718 719 720 721 722 723 724 725 726 727 728 729 730 731 732 733 734 735 736 737 738 739 740 741 742 743 744 745 746 747 748 749 750 751 752 753 754 755 756 757 758 759 760 761 762 763 764 765 766 767 768 769 770 771 772 773 774 775 776 777 778 779 780 781 782 783 784 785 786 787 788 789 790 791 792 793 794 795 796 797 798

	/* get reg offset */
	reg_ctrl = RD_CH_CTRL(ch);
	reg_addr = RD_CH_ADDR(ch);
	reg_size = RD_CH_SIZE(ch);
	reg_stride = RD_CH_STRIDE(ch);
	reg_space = RD_CH_SPACE(ch);
	reg_en = RD_CH_EN(ch);

	/*
	 * TODO: set rotation
	 */
	writel((fmt << 16) & 0x1f0000, base + reg_ctrl);
	writel(addr, base + reg_addr);
	writel((in_h << 16) | stride, base + reg_size);
	writel(stride, base + reg_stride);
	writel(in_h * stride, base + reg_space);
	writel(ADE_ENABLE, base + reg_en);
	ade_update_reload_bit(base, RDMA_OFST + ch, 0);
}

static void ade_rdma_disable(void __iomem *base, u32 ch)
{
	u32 reg_en;

	/* get reg offset */
	reg_en = RD_CH_EN(ch);
	writel(0, base + reg_en);
	ade_update_reload_bit(base, RDMA_OFST + ch, 1);
}

static void ade_clip_set(void __iomem *base, u32 ch, u32 fb_w, u32 x,
			 u32 in_w, u32 in_h)
{
	u32 disable_val;
	u32 clip_left;
	u32 clip_right;

	/*
	 * clip width, no need to clip height
	 */
	if (fb_w == in_w) { /* bypass */
		disable_val = 1;
		clip_left = 0;
		clip_right = 0;
	} else {
		disable_val = 0;
		clip_left = x;
		clip_right = fb_w - (x + in_w) - 1;
	}

	DRM_DEBUG_DRIVER("clip%d: clip_left=%d, clip_right=%d\n",
			 ch + 1, clip_left, clip_right);

	writel(disable_val, base + ADE_CLIP_DISABLE(ch));
	writel((fb_w - 1) << 16 | (in_h - 1), base + ADE_CLIP_SIZE0(ch));
	writel(clip_left << 16 | clip_right, base + ADE_CLIP_SIZE1(ch));
	ade_update_reload_bit(base, CLIP_OFST + ch, 0);
}

static void ade_clip_disable(void __iomem *base, u32 ch)
{
	writel(1, base + ADE_CLIP_DISABLE(ch));
	ade_update_reload_bit(base, CLIP_OFST + ch, 1);
}

static bool has_Alpha_channel(int format)
{
	switch (format) {
	case ADE_ARGB_8888:
	case ADE_ABGR_8888:
	case ADE_RGBA_8888:
	case ADE_BGRA_8888:
		return true;
	default:
		return false;
	}
}

static void ade_get_blending_params(u32 fmt, u8 glb_alpha, u8 *alp_mode,
				    u8 *alp_sel, u8 *under_alp_sel)
{
	bool has_alpha = has_Alpha_channel(fmt);

	/*
	 * get alp_mode
	 */
	if (has_alpha && glb_alpha < 255)
		*alp_mode = ADE_ALP_PIXEL_AND_GLB;
	else if (has_alpha)
		*alp_mode = ADE_ALP_PIXEL;
	else
		*alp_mode = ADE_ALP_GLOBAL;

	/*
	 * get alp sel
	 */
	*alp_sel = ADE_ALP_MUL_COEFF_3; /* 1 */
	*under_alp_sel = ADE_ALP_MUL_COEFF_2; /* 0 */
}

static void ade_compositor_routing_set(void __iomem *base, u8 ch,
				       u32 x0, u32 y0,
				       u32 in_w, u32 in_h, u32 fmt)
{
	u8 ovly_ch = 0; /* TODO: This is the zpos, only one plane now */
	u8 glb_alpha = 255;
	u32 x1 = x0 + in_w - 1;
	u32 y1 = y0 + in_h - 1;
	u32 val;
	u8 alp_sel;
	u8 under_alp_sel;
	u8 alp_mode;

	ade_get_blending_params(fmt, glb_alpha, &alp_mode, &alp_sel,
				&under_alp_sel);

	/* overlay routing setting
	 */
	writel(x0 << 16 | y0, base + ADE_OVLY_CH_XY0(ovly_ch));
	writel(x1 << 16 | y1, base + ADE_OVLY_CH_XY1(ovly_ch));
	val = (ch + 1) << CH_SEL_OFST | BIT(CH_EN_OFST) |
		alp_sel << CH_ALP_SEL_OFST |
		under_alp_sel << CH_UNDER_ALP_SEL_OFST |
		glb_alpha << CH_ALP_GBL_OFST |
		alp_mode << CH_ALP_MODE_OFST;
	writel(val, base + ADE_OVLY_CH_CTL(ovly_ch));
	/* connect this plane/channel to overlay2 compositor */
	ade_update_bits(base + ADE_OVLY_CTL, CH_OVLY_SEL_OFST(ovly_ch),
			CH_OVLY_SEL_MASK, CH_OVLY_SEL_VAL(OUT_OVLY));
}

static void ade_compositor_routing_disable(void __iomem *base, u32 ch)
{
	u8 ovly_ch = 0; /* TODO: Only primary plane now */

	/* disable this plane/channel */
	ade_update_bits(base + ADE_OVLY_CH_CTL(ovly_ch), CH_EN_OFST,
			MASK(1), 0);
	/* dis-connect this plane/channel of overlay2 compositor */
	ade_update_bits(base + ADE_OVLY_CTL, CH_OVLY_SEL_OFST(ovly_ch),
			CH_OVLY_SEL_MASK, 0);
}

/*
 * Typicaly, a channel looks like: DMA-->clip-->scale-->ctrans-->compositor
 */
799
static void ade_update_channel(struct kirin_plane *kplane,
800 801 802 803 804
			       struct drm_framebuffer *fb, int crtc_x,
			       int crtc_y, unsigned int crtc_w,
			       unsigned int crtc_h, u32 src_x,
			       u32 src_y, u32 src_w, u32 src_h)
{
805
	struct ade_hw_ctx *ctx = kplane->hw_ctx;
806
	void __iomem *base = ctx->base;
V
Ville Syrjälä 已提交
807
	u32 fmt = ade_get_format(fb->format->format);
808
	u32 ch = kplane->ch;
809 810 811 812 813 814 815 816 817 818 819 820 821 822 823 824 825 826 827 828 829 830 831
	u32 in_w;
	u32 in_h;

	DRM_DEBUG_DRIVER("channel%d: src:(%d, %d)-%dx%d, crtc:(%d, %d)-%dx%d",
			 ch + 1, src_x, src_y, src_w, src_h,
			 crtc_x, crtc_y, crtc_w, crtc_h);

	/* 1) DMA setting */
	in_w = src_w;
	in_h = src_h;
	ade_rdma_set(base, fb, ch, src_y, in_h, fmt);

	/* 2) clip setting */
	ade_clip_set(base, ch, fb->width, src_x, in_w, in_h);

	/* 3) TODO: scale setting for overlay planes */

	/* 4) TODO: ctran/csc setting for overlay planes */

	/* 5) compositor routing setting */
	ade_compositor_routing_set(base, ch, crtc_x, crtc_y, in_w, in_h, fmt);
}

832
static void ade_disable_channel(struct kirin_plane *kplane)
833
{
834
	struct ade_hw_ctx *ctx = kplane->hw_ctx;
835
	void __iomem *base = ctx->base;
836
	u32 ch = kplane->ch;
837 838 839 840 841 842 843 844 845 846 847 848 849 850 851 852 853 854 855 856 857 858 859 860 861 862 863 864 865 866 867 868

	DRM_DEBUG_DRIVER("disable channel%d\n", ch + 1);

	/* disable read DMA */
	ade_rdma_disable(base, ch);

	/* disable clip */
	ade_clip_disable(base, ch);

	/* disable compositor routing */
	ade_compositor_routing_disable(base, ch);
}

static int ade_plane_atomic_check(struct drm_plane *plane,
				  struct drm_plane_state *state)
{
	struct drm_framebuffer *fb = state->fb;
	struct drm_crtc *crtc = state->crtc;
	struct drm_crtc_state *crtc_state;
	u32 src_x = state->src_x >> 16;
	u32 src_y = state->src_y >> 16;
	u32 src_w = state->src_w >> 16;
	u32 src_h = state->src_h >> 16;
	int crtc_x = state->crtc_x;
	int crtc_y = state->crtc_y;
	u32 crtc_w = state->crtc_w;
	u32 crtc_h = state->crtc_h;
	u32 fmt;

	if (!crtc || !fb)
		return 0;

V
Ville Syrjälä 已提交
869
	fmt = ade_get_format(fb->format->format);
870 871 872 873 874 875 876 877 878 879 880 881 882 883 884 885 886 887 888 889 890 891 892 893 894 895 896 897
	if (fmt == ADE_FORMAT_UNSUPPORT)
		return -EINVAL;

	crtc_state = drm_atomic_get_crtc_state(state->state, crtc);
	if (IS_ERR(crtc_state))
		return PTR_ERR(crtc_state);

	if (src_w != crtc_w || src_h != crtc_h) {
		return -EINVAL;
	}

	if (src_x + src_w > fb->width ||
	    src_y + src_h > fb->height)
		return -EINVAL;

	if (crtc_x < 0 || crtc_y < 0)
		return -EINVAL;

	if (crtc_x + crtc_w > crtc_state->adjusted_mode.hdisplay ||
	    crtc_y + crtc_h > crtc_state->adjusted_mode.vdisplay)
		return -EINVAL;

	return 0;
}

static void ade_plane_atomic_update(struct drm_plane *plane,
				    struct drm_plane_state *old_state)
{
898 899
	struct drm_plane_state *state = plane->state;
	struct kirin_plane *kplane = to_kirin_plane(plane);
900

901
	ade_update_channel(kplane, state->fb, state->crtc_x, state->crtc_y,
902 903 904 905 906 907 908 909
			   state->crtc_w, state->crtc_h,
			   state->src_x >> 16, state->src_y >> 16,
			   state->src_w >> 16, state->src_h >> 16);
}

static void ade_plane_atomic_disable(struct drm_plane *plane,
				     struct drm_plane_state *old_state)
{
910
	struct kirin_plane *kplane = to_kirin_plane(plane);
911

912
	ade_disable_channel(kplane);
913 914 915 916 917 918 919 920 921 922 923 924 925 926 927 928 929
}

static const struct drm_plane_helper_funcs ade_plane_helper_funcs = {
	.atomic_check = ade_plane_atomic_check,
	.atomic_update = ade_plane_atomic_update,
	.atomic_disable = ade_plane_atomic_disable,
};

static struct drm_plane_funcs ade_plane_funcs = {
	.update_plane	= drm_atomic_helper_update_plane,
	.disable_plane	= drm_atomic_helper_disable_plane,
	.destroy = drm_plane_cleanup,
	.reset = drm_atomic_helper_plane_reset,
	.atomic_duplicate_state = drm_atomic_helper_plane_duplicate_state,
	.atomic_destroy_state = drm_atomic_helper_plane_destroy_state,
};

930
static int ade_plane_init(struct drm_device *dev, struct kirin_plane *kplane,
931 932 933 934 935 936 937
			  enum drm_plane_type type)
{
	const u32 *fmts;
	u32 fmts_cnt;
	int ret = 0;

	/* get  properties */
938
	fmts_cnt = ade_get_channel_formats(kplane->ch, &fmts);
939 940 941
	if (ret)
		return ret;

942
	ret = drm_universal_plane_init(dev, &kplane->base, 1, &ade_plane_funcs,
943
				       fmts, fmts_cnt, NULL, type, NULL);
944
	if (ret) {
945
		DRM_ERROR("fail to init plane, ch=%d\n", kplane->ch);
946 947 948
		return ret;
	}

949
	drm_plane_helper_add(&kplane->base, &ade_plane_helper_funcs);
950 951 952 953

	return 0;
}

954
static void *ade_hw_ctx_alloc(struct platform_device *pdev)
955 956 957 958
{
	struct resource *res;
	struct device *dev = &pdev->dev;
	struct device_node *np = pdev->dev.of_node;
959 960 961 962 963 964 965
	struct ade_hw_ctx *ctx = NULL;

	ctx = devm_kzalloc(dev, sizeof(*ctx), GFP_KERNEL);
	if (!ctx) {
		DRM_ERROR("failed to alloc ade_hw_ctx\n");
		return ERR_PTR(-ENOMEM);
	}
966 967 968 969 970

	res = platform_get_resource(pdev, IORESOURCE_MEM, 0);
	ctx->base = devm_ioremap_resource(dev, res);
	if (IS_ERR(ctx->base)) {
		DRM_ERROR("failed to remap ade io base\n");
971
		return ERR_PTR(-EIO);
972 973 974 975
	}

	ctx->reset = devm_reset_control_get(dev, NULL);
	if (IS_ERR(ctx->reset))
976
		return ERR_PTR(-ENODEV);
977 978 979 980 981

	ctx->noc_regmap =
		syscon_regmap_lookup_by_phandle(np, "hisilicon,noc-syscon");
	if (IS_ERR(ctx->noc_regmap)) {
		DRM_ERROR("failed to get noc regmap\n");
982
		return ERR_PTR(-ENODEV);
983 984 985 986 987
	}

	ctx->irq = platform_get_irq(pdev, 0);
	if (ctx->irq < 0) {
		DRM_ERROR("failed to get irq\n");
988
		return ERR_PTR(-ENODEV);
989 990 991
	}

	ctx->ade_core_clk = devm_clk_get(dev, "clk_ade_core");
992
	if (IS_ERR(ctx->ade_core_clk)) {
993
		DRM_ERROR("failed to parse clk ADE_CORE\n");
994
		return ERR_PTR(-ENODEV);
995 996 997
	}

	ctx->media_noc_clk = devm_clk_get(dev, "clk_codec_jpeg");
998
	if (IS_ERR(ctx->media_noc_clk)) {
999
		DRM_ERROR("failed to parse clk CODEC_JPEG\n");
1000
		return ERR_PTR(-ENODEV);
1001 1002 1003
	}

	ctx->ade_pix_clk = devm_clk_get(dev, "clk_ade_pix");
1004
	if (IS_ERR(ctx->ade_pix_clk)) {
1005
		DRM_ERROR("failed to parse clk ADE_PIX\n");
1006
		return ERR_PTR(-ENODEV);
1007 1008
	}

1009
	return ctx;
1010 1011
}

1012
static int ade_drm_init(struct platform_device *pdev)
1013
{
1014
	struct drm_device *dev = platform_get_drvdata(pdev);
1015 1016
	struct ade_data *ade;
	struct ade_hw_ctx *ctx;
1017
	struct kirin_crtc *kcrtc;
1018
	struct kirin_plane *kplane;
1019
	enum drm_plane_type type;
1020
	int ret;
1021
	int i;
1022 1023 1024 1025 1026 1027 1028 1029

	ade = devm_kzalloc(dev->dev, sizeof(*ade), GFP_KERNEL);
	if (!ade) {
		DRM_ERROR("failed to alloc ade_data\n");
		return -ENOMEM;
	}
	platform_set_drvdata(pdev, ade);

1030 1031 1032 1033 1034 1035 1036
	ctx = ade_hw_ctx_alloc(pdev);
	if (IS_ERR(ctx)) {
		DRM_ERROR("failed to initialize kirin_priv hw ctx\n");
		return -EINVAL;
	}
	ade->hw_ctx = ctx;

1037 1038
	kcrtc = &ade->crtc;
	kcrtc->hw_ctx = ctx;
1039

1040 1041 1042 1043 1044 1045
	/*
	 * plane init
	 * TODO: Now only support primary plane, overlay planes
	 * need to do.
	 */
	for (i = 0; i < ADE_CH_NUM; i++) {
1046 1047 1048
		kplane = &ade->planes[i];
		kplane->ch = i;
		kplane->hw_ctx = ctx;
1049 1050 1051
		type = i == PRIMARY_CH ? DRM_PLANE_TYPE_PRIMARY :
			DRM_PLANE_TYPE_OVERLAY;

1052
		ret = ade_plane_init(dev, kplane, type);
1053 1054 1055 1056 1057
		if (ret)
			return ret;
	}

	/* crtc init */
1058
	ret = ade_crtc_init(dev, &kcrtc->base, &ade->planes[PRIMARY_CH].base);
1059 1060 1061
	if (ret)
		return ret;

1062 1063
	/* vblank irq init */
	ret = devm_request_irq(dev->dev, ctx->irq, ade_irq_handler,
1064
			       IRQF_SHARED, dev->driver->name, kcrtc);
1065

1066
	INIT_WORK(&kcrtc->display_reset_wq, drm_underflow_wq);
1067

1068 1069 1070
	if (ret)
		return ret;

1071 1072 1073
	return 0;
}

1074
static void ade_drm_cleanup(struct platform_device *pdev)
1075 1076 1077 1078 1079 1080
{
}

const struct kirin_dc_ops ade_dc_ops = {
	.init = ade_drm_init,
	.cleanup = ade_drm_cleanup
1081
};