/*
 * Copyright © 2012 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 *
 * Authors:
 *    Eugeni Dodonov <eugeni.dodonov@intel.com>
 *
 */

#include <drm/drm_scdc_helper.h>

#include "i915_drv.h"
#include "i915_trace.h"
#include "intel_audio.h"
#include "intel_combo_phy.h"
#include "intel_connector.h"
#include "intel_ddi.h"
#include "intel_display_types.h"
#include "intel_dp.h"
#include "intel_dp_mst.h"
#include "intel_dp_link_training.h"
#include "intel_dpio_phy.h"
#include "intel_dsi.h"
#include "intel_fifo_underrun.h"
#include "intel_gmbus.h"
#include "intel_hdcp.h"
#include "intel_hdmi.h"
#include "intel_hotplug.h"
#include "intel_lspcon.h"
#include "intel_panel.h"
#include "intel_psr.h"
#include "intel_sprite.h"
#include "intel_tc.h"
#include "intel_vdsc.h"

struct ddi_buf_trans {
	u32 trans1;	/* balance leg enable, de-emph level */
	u32 trans2;	/* vref sel, vswing */
	u8 i_boost;	/* SKL: I_boost; valid: 0x0, 0x1, 0x3, 0x7 */
};

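/*
 * DP link training signal levels (voltage swing + pre-emphasis), indexed by
 * buffer translation table entry.
 */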
static const u8 index_to_dp_signal_levels[] = {
	[0] = DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_0,
	[1] = DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_1,
	[2] = DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_2,
	[3] = DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_3,
	[4] = DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_0,
	[5] = DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_1,
	[6] = DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_2,
	[7] = DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_0,
	[8] = DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_1,
	[9] = DP_TRAIN_VOLTAGE_SWING_LEVEL_3 | DP_TRAIN_PRE_EMPH_LEVEL_0,
};

/* HDMI/DVI modes ignore everything but the last 2 items. So we share
 * them for both DP and FDI transports, allowing those ports to
 * automatically adapt to HDMI connections as well
 */
static const struct ddi_buf_trans hsw_ddi_translations_dp[] = {
	{ 0x00FFFFFF, 0x0006000E, 0x0 },
	{ 0x00D75FFF, 0x0005000A, 0x0 },
	{ 0x00C30FFF, 0x00040006, 0x0 },
	{ 0x80AAAFFF, 0x000B0000, 0x0 },
	{ 0x00FFFFFF, 0x0005000A, 0x0 },
	{ 0x00D75FFF, 0x000C0004, 0x0 },
	{ 0x80C30FFF, 0x000B0000, 0x0 },
	{ 0x00FFFFFF, 0x00040006, 0x0 },
	{ 0x80D75FFF, 0x000B0000, 0x0 },
};

static const struct ddi_buf_trans hsw_ddi_translations_fdi[] = {
	{ 0x00FFFFFF, 0x0007000E, 0x0 },
	{ 0x00D75FFF, 0x000F000A, 0x0 },
	{ 0x00C30FFF, 0x00060006, 0x0 },
	{ 0x00AAAFFF, 0x001E0000, 0x0 },
	{ 0x00FFFFFF, 0x000F000A, 0x0 },
	{ 0x00D75FFF, 0x00160004, 0x0 },
	{ 0x00C30FFF, 0x001E0000, 0x0 },
	{ 0x00FFFFFF, 0x00060006, 0x0 },
	{ 0x00D75FFF, 0x001E0000, 0x0 },
};

static const struct ddi_buf_trans hsw_ddi_translations_hdmi[] = {
					/* Idx	NT mV d	T mV d	db	*/
	{ 0x00FFFFFF, 0x0006000E, 0x0 },/* 0:	400	400	0	*/
	{ 0x00E79FFF, 0x000E000C, 0x0 },/* 1:	400	500	2	*/
	{ 0x00D75FFF, 0x0005000A, 0x0 },/* 2:	400	600	3.5	*/
	{ 0x00FFFFFF, 0x0005000A, 0x0 },/* 3:	600	600	0	*/
	{ 0x00E79FFF, 0x001D0007, 0x0 },/* 4:	600	750	2	*/
	{ 0x00D75FFF, 0x000C0004, 0x0 },/* 5:	600	900	3.5	*/
	{ 0x00FFFFFF, 0x00040006, 0x0 },/* 6:	800	800	0	*/
	{ 0x80E79FFF, 0x00030002, 0x0 },/* 7:	800	1000	2	*/
	{ 0x00FFFFFF, 0x00140005, 0x0 },/* 8:	850	850	0	*/
	{ 0x00FFFFFF, 0x000C0004, 0x0 },/* 9:	900	900	0	*/
	{ 0x00FFFFFF, 0x001C0003, 0x0 },/* 10:	950	950	0	*/
	{ 0x80FFFFFF, 0x00030002, 0x0 },/* 11:	1000	1000	0	*/
};

static const struct ddi_buf_trans bdw_ddi_translations_edp[] = {
	{ 0x00FFFFFF, 0x00000012, 0x0 },
	{ 0x00EBAFFF, 0x00020011, 0x0 },
	{ 0x00C71FFF, 0x0006000F, 0x0 },
	{ 0x00AAAFFF, 0x000E000A, 0x0 },
	{ 0x00FFFFFF, 0x00020011, 0x0 },
	{ 0x00DB6FFF, 0x0005000F, 0x0 },
	{ 0x00BEEFFF, 0x000A000C, 0x0 },
	{ 0x00FFFFFF, 0x0005000F, 0x0 },
	{ 0x00DB6FFF, 0x000A000C, 0x0 },
};

static const struct ddi_buf_trans bdw_ddi_translations_dp[] = {
	{ 0x00FFFFFF, 0x0007000E, 0x0 },
	{ 0x00D75FFF, 0x000E000A, 0x0 },
	{ 0x00BEFFFF, 0x00140006, 0x0 },
	{ 0x80B2CFFF, 0x001B0002, 0x0 },
	{ 0x00FFFFFF, 0x000E000A, 0x0 },
	{ 0x00DB6FFF, 0x00160005, 0x0 },
	{ 0x80C71FFF, 0x001A0002, 0x0 },
	{ 0x00F7DFFF, 0x00180004, 0x0 },
	{ 0x80D75FFF, 0x001B0002, 0x0 },
};

static const struct ddi_buf_trans bdw_ddi_translations_fdi[] = {
	{ 0x00FFFFFF, 0x0001000E, 0x0 },
	{ 0x00D75FFF, 0x0004000A, 0x0 },
	{ 0x00C30FFF, 0x00070006, 0x0 },
	{ 0x00AAAFFF, 0x000C0000, 0x0 },
	{ 0x00FFFFFF, 0x0004000A, 0x0 },
	{ 0x00D75FFF, 0x00090004, 0x0 },
	{ 0x00C30FFF, 0x000C0000, 0x0 },
	{ 0x00FFFFFF, 0x00070006, 0x0 },
	{ 0x00D75FFF, 0x000C0000, 0x0 },
};

static const struct ddi_buf_trans bdw_ddi_translations_hdmi[] = {
					/* Idx	NT mV d	T mV df	db	*/
	{ 0x00FFFFFF, 0x0007000E, 0x0 },/* 0:	400	400	0	*/
	{ 0x00D75FFF, 0x000E000A, 0x0 },/* 1:	400	600	3.5	*/
	{ 0x00BEFFFF, 0x00140006, 0x0 },/* 2:	400	800	6	*/
	{ 0x00FFFFFF, 0x0009000D, 0x0 },/* 3:	450	450	0	*/
	{ 0x00FFFFFF, 0x000E000A, 0x0 },/* 4:	600	600	0	*/
	{ 0x00D7FFFF, 0x00140006, 0x0 },/* 5:	600	800	2.5	*/
	{ 0x80CB2FFF, 0x001B0002, 0x0 },/* 6:	600	1000	4.5	*/
	{ 0x00FFFFFF, 0x00140006, 0x0 },/* 7:	800	800	0	*/
	{ 0x80E79FFF, 0x001B0002, 0x0 },/* 8:	800	1000	2	*/
	{ 0x80FFFFFF, 0x001B0002, 0x0 },/* 9:	1000	1000	0	*/
};

/* Skylake H and S */
static const struct ddi_buf_trans skl_ddi_translations_dp[] = {
	{ 0x00002016, 0x000000A0, 0x0 },
	{ 0x00005012, 0x0000009B, 0x0 },
	{ 0x00007011, 0x00000088, 0x0 },
	{ 0x80009010, 0x000000C0, 0x1 },
	{ 0x00002016, 0x0000009B, 0x0 },
	{ 0x00005012, 0x00000088, 0x0 },
	{ 0x80007011, 0x000000C0, 0x1 },
	{ 0x00002016, 0x000000DF, 0x0 },
	{ 0x80005012, 0x000000C0, 0x1 },
};

/* Skylake U */
static const struct ddi_buf_trans skl_u_ddi_translations_dp[] = {
	{ 0x0000201B, 0x000000A2, 0x0 },
	{ 0x00005012, 0x00000088, 0x0 },
	{ 0x80007011, 0x000000CD, 0x1 },
	{ 0x80009010, 0x000000C0, 0x1 },
	{ 0x0000201B, 0x0000009D, 0x0 },
	{ 0x80005012, 0x000000C0, 0x1 },
	{ 0x80007011, 0x000000C0, 0x1 },
	{ 0x00002016, 0x00000088, 0x0 },
	{ 0x80005012, 0x000000C0, 0x1 },
};

/* Skylake Y */
static const struct ddi_buf_trans skl_y_ddi_translations_dp[] = {
	{ 0x00000018, 0x000000A2, 0x0 },
	{ 0x00005012, 0x00000088, 0x0 },
	{ 0x80007011, 0x000000CD, 0x3 },
	{ 0x80009010, 0x000000C0, 0x3 },
	{ 0x00000018, 0x0000009D, 0x0 },
	{ 0x80005012, 0x000000C0, 0x3 },
	{ 0x80007011, 0x000000C0, 0x3 },
	{ 0x00000018, 0x00000088, 0x0 },
	{ 0x80005012, 0x000000C0, 0x3 },
};

/* Kabylake H and S */
static const struct ddi_buf_trans kbl_ddi_translations_dp[] = {
	{ 0x00002016, 0x000000A0, 0x0 },
	{ 0x00005012, 0x0000009B, 0x0 },
	{ 0x00007011, 0x00000088, 0x0 },
	{ 0x80009010, 0x000000C0, 0x1 },
	{ 0x00002016, 0x0000009B, 0x0 },
	{ 0x00005012, 0x00000088, 0x0 },
	{ 0x80007011, 0x000000C0, 0x1 },
	{ 0x00002016, 0x00000097, 0x0 },
	{ 0x80005012, 0x000000C0, 0x1 },
};

/* Kabylake U */
static const struct ddi_buf_trans kbl_u_ddi_translations_dp[] = {
	{ 0x0000201B, 0x000000A1, 0x0 },
	{ 0x00005012, 0x00000088, 0x0 },
	{ 0x80007011, 0x000000CD, 0x3 },
	{ 0x80009010, 0x000000C0, 0x3 },
	{ 0x0000201B, 0x0000009D, 0x0 },
	{ 0x80005012, 0x000000C0, 0x3 },
	{ 0x80007011, 0x000000C0, 0x3 },
	{ 0x00002016, 0x0000004F, 0x0 },
	{ 0x80005012, 0x000000C0, 0x3 },
};

/* Kabylake Y */
static const struct ddi_buf_trans kbl_y_ddi_translations_dp[] = {
	{ 0x00001017, 0x000000A1, 0x0 },
	{ 0x00005012, 0x00000088, 0x0 },
	{ 0x80007011, 0x000000CD, 0x3 },
	{ 0x8000800F, 0x000000C0, 0x3 },
	{ 0x00001017, 0x0000009D, 0x0 },
	{ 0x80005012, 0x000000C0, 0x3 },
	{ 0x80007011, 0x000000C0, 0x3 },
	{ 0x00001017, 0x0000004C, 0x0 },
	{ 0x80005012, 0x000000C0, 0x3 },
};

/*
 * Skylake/Kabylake H and S
 * eDP 1.4 low vswing translation parameters
 */
static const struct ddi_buf_trans skl_ddi_translations_edp[] = {
	{ 0x00000018, 0x000000A8, 0x0 },
	{ 0x00004013, 0x000000A9, 0x0 },
	{ 0x00007011, 0x000000A2, 0x0 },
	{ 0x00009010, 0x0000009C, 0x0 },
	{ 0x00000018, 0x000000A9, 0x0 },
	{ 0x00006013, 0x000000A2, 0x0 },
	{ 0x00007011, 0x000000A6, 0x0 },
	{ 0x00000018, 0x000000AB, 0x0 },
	{ 0x00007013, 0x0000009F, 0x0 },
	{ 0x00000018, 0x000000DF, 0x0 },
};

/*
 * Skylake/Kabylake U
 * eDP 1.4 low vswing translation parameters
 */
static const struct ddi_buf_trans skl_u_ddi_translations_edp[] = {
	{ 0x00000018, 0x000000A8, 0x0 },
	{ 0x00004013, 0x000000A9, 0x0 },
	{ 0x00007011, 0x000000A2, 0x0 },
	{ 0x00009010, 0x0000009C, 0x0 },
	{ 0x00000018, 0x000000A9, 0x0 },
	{ 0x00006013, 0x000000A2, 0x0 },
	{ 0x00007011, 0x000000A6, 0x0 },
	{ 0x00002016, 0x000000AB, 0x0 },
	{ 0x00005013, 0x0000009F, 0x0 },
	{ 0x00000018, 0x000000DF, 0x0 },
};

/*
 * Skylake/Kabylake Y
 * eDP 1.4 low vswing translation parameters
 */
static const struct ddi_buf_trans skl_y_ddi_translations_edp[] = {
	{ 0x00000018, 0x000000A8, 0x0 },
	{ 0x00004013, 0x000000AB, 0x0 },
	{ 0x00007011, 0x000000A4, 0x0 },
	{ 0x00009010, 0x000000DF, 0x0 },
	{ 0x00000018, 0x000000AA, 0x0 },
	{ 0x00006013, 0x000000A4, 0x0 },
	{ 0x00007011, 0x0000009D, 0x0 },
	{ 0x00000018, 0x000000A0, 0x0 },
	{ 0x00006012, 0x000000DF, 0x0 },
	{ 0x00000018, 0x0000008A, 0x0 },
};

/* Skylake/Kabylake U, H and S */
static const struct ddi_buf_trans skl_ddi_translations_hdmi[] = {
	{ 0x00000018, 0x000000AC, 0x0 },
	{ 0x00005012, 0x0000009D, 0x0 },
	{ 0x00007011, 0x00000088, 0x0 },
	{ 0x00000018, 0x000000A1, 0x0 },
	{ 0x00000018, 0x00000098, 0x0 },
	{ 0x00004013, 0x00000088, 0x0 },
	{ 0x80006012, 0x000000CD, 0x1 },
	{ 0x00000018, 0x000000DF, 0x0 },
	{ 0x80003015, 0x000000CD, 0x1 },	/* Default */
	{ 0x80003015, 0x000000C0, 0x1 },
	{ 0x80000018, 0x000000C0, 0x1 },
};

/* Skylake/Kabylake Y */
static const struct ddi_buf_trans skl_y_ddi_translations_hdmi[] = {
	{ 0x00000018, 0x000000A1, 0x0 },
	{ 0x00005012, 0x000000DF, 0x0 },
	{ 0x80007011, 0x000000CB, 0x3 },
	{ 0x00000018, 0x000000A4, 0x0 },
	{ 0x00000018, 0x0000009D, 0x0 },
	{ 0x00004013, 0x00000080, 0x0 },
	{ 0x80006013, 0x000000C0, 0x3 },
	{ 0x00000018, 0x0000008A, 0x0 },
	{ 0x80003015, 0x000000C0, 0x3 },	/* Default */
	{ 0x80003015, 0x000000C0, 0x3 },
	{ 0x80000018, 0x000000C0, 0x3 },
};

struct bxt_ddi_buf_trans {
	u8 margin;	/* swing value */
	u8 scale;	/* scale value */
	u8 enable;	/* scale enable */
	u8 deemphasis;
};

static const struct bxt_ddi_buf_trans bxt_ddi_translations_dp[] = {
					/* Idx	NT mV diff	db  */
	{ 52,  0x9A, 0, 128, },	/* 0:	400		0   */
	{ 78,  0x9A, 0, 85,  },	/* 1:	400		3.5 */
	{ 104, 0x9A, 0, 64,  },	/* 2:	400		6   */
	{ 154, 0x9A, 0, 43,  },	/* 3:	400		9.5 */
	{ 77,  0x9A, 0, 128, },	/* 4:	600		0   */
	{ 116, 0x9A, 0, 85,  },	/* 5:	600		3.5 */
	{ 154, 0x9A, 0, 64,  },	/* 6:	600		6   */
	{ 102, 0x9A, 0, 128, },	/* 7:	800		0   */
	{ 154, 0x9A, 0, 85,  },	/* 8:	800		3.5 */
	{ 154, 0x9A, 1, 128, },	/* 9:	1200		0   */
};

static const struct bxt_ddi_buf_trans bxt_ddi_translations_edp[] = {
					/* Idx	NT mV diff	db  */
	{ 26, 0, 0, 128, },	/* 0:	200		0   */
	{ 38, 0, 0, 112, },	/* 1:	200		1.5 */
	{ 48, 0, 0, 96,  },	/* 2:	200		4   */
	{ 54, 0, 0, 69,  },	/* 3:	200		6   */
	{ 32, 0, 0, 128, },	/* 4:	250		0   */
	{ 48, 0, 0, 104, },	/* 5:	250		1.5 */
	{ 54, 0, 0, 85,  },	/* 6:	250		4   */
	{ 43, 0, 0, 128, },	/* 7:	300		0   */
	{ 54, 0, 0, 101, },	/* 8:	300		1.5 */
	{ 48, 0, 0, 128, },	/* 9:	300		0   */
};

/* BSpec has 2 recommended values - entries 0 and 8.
 * Using the entry with higher vswing.
 */
static const struct bxt_ddi_buf_trans bxt_ddi_translations_hdmi[] = {
					/* Idx	NT mV diff	db  */
	{ 52,  0x9A, 0, 128, },	/* 0:	400		0   */
	{ 52,  0x9A, 0, 85,  },	/* 1:	400		3.5 */
	{ 52,  0x9A, 0, 64,  },	/* 2:	400		6   */
	{ 42,  0x9A, 0, 43,  },	/* 3:	400		9.5 */
	{ 77,  0x9A, 0, 128, },	/* 4:	600		0   */
	{ 77,  0x9A, 0, 85,  },	/* 5:	600		3.5 */
	{ 77,  0x9A, 0, 64,  },	/* 6:	600		6   */
	{ 102, 0x9A, 0, 128, },	/* 7:	800		0   */
	{ 102, 0x9A, 0, 85,  },	/* 8:	800		3.5 */
	{ 154, 0x9A, 1, 128, },	/* 9:	1200		0   */
};

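/*
 * Combo PHY buffer translation values: the dw2/dw4/dw7 fields map to the
 * PORT_TX_DW2/DW4/DW7 swing select, cursor and N-scalar settings. On CNL the
 * table is selected by the VccIO voltage read from CNL_PORT_COMP_DW3 (see
 * cnl_get_buf_trans_*() below).
 */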
struct cnl_ddi_buf_trans {
	u8 dw2_swing_sel;
	u8 dw7_n_scalar;
	u8 dw4_cursor_coeff;
	u8 dw4_post_cursor_2;
	u8 dw4_post_cursor_1;
};

/* Voltage Swing Programming for VccIO 0.85V for DP */
static const struct cnl_ddi_buf_trans cnl_ddi_translations_dp_0_85V[] = {
						/* NT mV Trans mV db    */
	{ 0xA, 0x5D, 0x3F, 0x00, 0x00 },	/* 350   350      0.0   */
	{ 0xA, 0x6A, 0x38, 0x00, 0x07 },	/* 350   500      3.1   */
	{ 0xB, 0x7A, 0x32, 0x00, 0x0D },	/* 350   700      6.0   */
	{ 0x6, 0x7C, 0x2D, 0x00, 0x12 },	/* 350   900      8.2   */
	{ 0xA, 0x69, 0x3F, 0x00, 0x00 },	/* 500   500      0.0   */
	{ 0xB, 0x7A, 0x36, 0x00, 0x09 },	/* 500   700      2.9   */
	{ 0x6, 0x7C, 0x30, 0x00, 0x0F },	/* 500   900      5.1   */
	{ 0xB, 0x7D, 0x3C, 0x00, 0x03 },	/* 650   725      0.9   */
	{ 0x6, 0x7C, 0x34, 0x00, 0x0B },	/* 600   900      3.5   */
	{ 0x6, 0x7B, 0x3F, 0x00, 0x00 },	/* 900   900      0.0   */
};

/* Voltage Swing Programming for VccIO 0.85V for HDMI */
static const struct cnl_ddi_buf_trans cnl_ddi_translations_hdmi_0_85V[] = {
						/* NT mV Trans mV db    */
	{ 0xA, 0x60, 0x3F, 0x00, 0x00 },	/* 450   450      0.0   */
	{ 0xB, 0x73, 0x36, 0x00, 0x09 },	/* 450   650      3.2   */
	{ 0x6, 0x7F, 0x31, 0x00, 0x0E },	/* 450   850      5.5   */
	{ 0xB, 0x73, 0x3F, 0x00, 0x00 },	/* 650   650      0.0   */
	{ 0x6, 0x7F, 0x37, 0x00, 0x08 },	/* 650   850      2.3   */
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 850   850      0.0   */
	{ 0x6, 0x7F, 0x35, 0x00, 0x0A },	/* 600   850      3.0   */
};

/* Voltage Swing Programming for VccIO 0.85V for eDP */
static const struct cnl_ddi_buf_trans cnl_ddi_translations_edp_0_85V[] = {
						/* NT mV Trans mV db    */
	{ 0xA, 0x66, 0x3A, 0x00, 0x05 },	/* 384   500      2.3   */
	{ 0x0, 0x7F, 0x38, 0x00, 0x07 },	/* 153   200      2.3   */
	{ 0x8, 0x7F, 0x38, 0x00, 0x07 },	/* 192   250      2.3   */
	{ 0x1, 0x7F, 0x38, 0x00, 0x07 },	/* 230   300      2.3   */
	{ 0x9, 0x7F, 0x38, 0x00, 0x07 },	/* 269   350      2.3   */
	{ 0xA, 0x66, 0x3C, 0x00, 0x03 },	/* 446   500      1.0   */
	{ 0xB, 0x70, 0x3C, 0x00, 0x03 },	/* 460   600      2.3   */
	{ 0xC, 0x75, 0x3C, 0x00, 0x03 },	/* 537   700      2.3   */
	{ 0x2, 0x7F, 0x3F, 0x00, 0x00 },	/* 400   400      0.0   */
};

/* Voltage Swing Programming for VccIO 0.95V for DP */
static const struct cnl_ddi_buf_trans cnl_ddi_translations_dp_0_95V[] = {
						/* NT mV Trans mV db    */
	{ 0xA, 0x5D, 0x3F, 0x00, 0x00 },	/* 350   350      0.0   */
	{ 0xA, 0x6A, 0x38, 0x00, 0x07 },	/* 350   500      3.1   */
	{ 0xB, 0x7A, 0x32, 0x00, 0x0D },	/* 350   700      6.0   */
	{ 0x6, 0x7C, 0x2D, 0x00, 0x12 },	/* 350   900      8.2   */
	{ 0xA, 0x69, 0x3F, 0x00, 0x00 },	/* 500   500      0.0   */
	{ 0xB, 0x7A, 0x36, 0x00, 0x09 },	/* 500   700      2.9   */
	{ 0x6, 0x7C, 0x30, 0x00, 0x0F },	/* 500   900      5.1   */
	{ 0xB, 0x7D, 0x3C, 0x00, 0x03 },	/* 650   725      0.9   */
	{ 0x6, 0x7C, 0x34, 0x00, 0x0B },	/* 600   900      3.5   */
	{ 0x6, 0x7B, 0x3F, 0x00, 0x00 },	/* 900   900      0.0   */
};

/* Voltage Swing Programming for VccIO 0.95V for HDMI */
static const struct cnl_ddi_buf_trans cnl_ddi_translations_hdmi_0_95V[] = {
						/* NT mV Trans mV db    */
	{ 0xA, 0x5C, 0x3F, 0x00, 0x00 },	/* 400   400      0.0   */
	{ 0xB, 0x69, 0x37, 0x00, 0x08 },	/* 400   600      3.5   */
	{ 0x5, 0x76, 0x31, 0x00, 0x0E },	/* 400   800      6.0   */
	{ 0xA, 0x5E, 0x3F, 0x00, 0x00 },	/* 450   450      0.0   */
	{ 0xB, 0x69, 0x3F, 0x00, 0x00 },	/* 600   600      0.0   */
	{ 0xB, 0x79, 0x35, 0x00, 0x0A },	/* 600   850      3.0   */
	{ 0x6, 0x7D, 0x32, 0x00, 0x0D },	/* 600   1000     4.4   */
	{ 0x5, 0x76, 0x3F, 0x00, 0x00 },	/* 800   800      0.0   */
	{ 0x6, 0x7D, 0x39, 0x00, 0x06 },	/* 800   1000     1.9   */
	{ 0x6, 0x7F, 0x39, 0x00, 0x06 },	/* 850   1050     1.8   */
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 1050  1050     0.0   */
};

/* Voltage Swing Programming for VccIO 0.95V for eDP */
static const struct cnl_ddi_buf_trans cnl_ddi_translations_edp_0_95V[] = {
						/* NT mV Trans mV db    */
	{ 0xA, 0x61, 0x3A, 0x00, 0x05 },	/* 384   500      2.3   */
	{ 0x0, 0x7F, 0x38, 0x00, 0x07 },	/* 153   200      2.3   */
	{ 0x8, 0x7F, 0x38, 0x00, 0x07 },	/* 192   250      2.3   */
	{ 0x1, 0x7F, 0x38, 0x00, 0x07 },	/* 230   300      2.3   */
	{ 0x9, 0x7F, 0x38, 0x00, 0x07 },	/* 269   350      2.3   */
	{ 0xA, 0x61, 0x3C, 0x00, 0x03 },	/* 446   500      1.0   */
	{ 0xB, 0x68, 0x39, 0x00, 0x06 },	/* 460   600      2.3   */
	{ 0xC, 0x6E, 0x39, 0x00, 0x06 },	/* 537   700      2.3   */
	{ 0x4, 0x7F, 0x3A, 0x00, 0x05 },	/* 460   600      2.3   */
	{ 0x2, 0x7F, 0x3F, 0x00, 0x00 },	/* 400   400      0.0   */
};

/* Voltage Swing Programming for VccIO 1.05V for DP */
static const struct cnl_ddi_buf_trans cnl_ddi_translations_dp_1_05V[] = {
						/* NT mV Trans mV db    */
	{ 0xA, 0x58, 0x3F, 0x00, 0x00 },	/* 400   400      0.0   */
	{ 0xB, 0x64, 0x37, 0x00, 0x08 },	/* 400   600      3.5   */
	{ 0x5, 0x70, 0x31, 0x00, 0x0E },	/* 400   800      6.0   */
	{ 0x6, 0x7F, 0x2C, 0x00, 0x13 },	/* 400   1050     8.4   */
	{ 0xB, 0x64, 0x3F, 0x00, 0x00 },	/* 600   600      0.0   */
	{ 0x5, 0x73, 0x35, 0x00, 0x0A },	/* 600   850      3.0   */
	{ 0x6, 0x7F, 0x30, 0x00, 0x0F },	/* 550   1050     5.6   */
	{ 0x5, 0x76, 0x3E, 0x00, 0x01 },	/* 850   900      0.5   */
	{ 0x6, 0x7F, 0x36, 0x00, 0x09 },	/* 750   1050     2.9   */
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 1050  1050     0.0   */
};

/* Voltage Swing Programming for VccIO 1.05V for HDMI */
static const struct cnl_ddi_buf_trans cnl_ddi_translations_hdmi_1_05V[] = {
						/* NT mV Trans mV db    */
	{ 0xA, 0x58, 0x3F, 0x00, 0x00 },	/* 400   400      0.0   */
	{ 0xB, 0x64, 0x37, 0x00, 0x08 },	/* 400   600      3.5   */
	{ 0x5, 0x70, 0x31, 0x00, 0x0E },	/* 400   800      6.0   */
	{ 0xA, 0x5B, 0x3F, 0x00, 0x00 },	/* 450   450      0.0   */
	{ 0xB, 0x64, 0x3F, 0x00, 0x00 },	/* 600   600      0.0   */
	{ 0x5, 0x73, 0x35, 0x00, 0x0A },	/* 600   850      3.0   */
	{ 0x6, 0x7C, 0x32, 0x00, 0x0D },	/* 600   1000     4.4   */
	{ 0x5, 0x70, 0x3F, 0x00, 0x00 },	/* 800   800      0.0   */
	{ 0x6, 0x7C, 0x39, 0x00, 0x06 },	/* 800   1000     1.9   */
	{ 0x6, 0x7F, 0x39, 0x00, 0x06 },	/* 850   1050     1.8   */
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 1050  1050     0.0   */
};

/* Voltage Swing Programming for VccIO 1.05V for eDP */
static const struct cnl_ddi_buf_trans cnl_ddi_translations_edp_1_05V[] = {
						/* NT mV Trans mV db    */
	{ 0xA, 0x5E, 0x3A, 0x00, 0x05 },	/* 384   500      2.3   */
	{ 0x0, 0x7F, 0x38, 0x00, 0x07 },	/* 153   200      2.3   */
	{ 0x8, 0x7F, 0x38, 0x00, 0x07 },	/* 192   250      2.3   */
	{ 0x1, 0x7F, 0x38, 0x00, 0x07 },	/* 230   300      2.3   */
	{ 0x9, 0x7F, 0x38, 0x00, 0x07 },	/* 269   350      2.3   */
	{ 0xA, 0x5E, 0x3C, 0x00, 0x03 },	/* 446   500      1.0   */
	{ 0xB, 0x64, 0x39, 0x00, 0x06 },	/* 460   600      2.3   */
	{ 0xE, 0x6A, 0x39, 0x00, 0x06 },	/* 537   700      2.3   */
	{ 0x2, 0x7F, 0x3F, 0x00, 0x00 },	/* 400   400      0.0   */
};

/* icl_combo_phy_ddi_translations */
static const struct cnl_ddi_buf_trans icl_combo_phy_ddi_translations_dp_hbr2[] = {
						/* NT mV Trans mV db    */
	{ 0xA, 0x35, 0x3F, 0x00, 0x00 },	/* 350   350      0.0   */
	{ 0xA, 0x4F, 0x37, 0x00, 0x08 },	/* 350   500      3.1   */
	{ 0xC, 0x71, 0x2F, 0x00, 0x10 },	/* 350   700      6.0   */
	{ 0x6, 0x7F, 0x2B, 0x00, 0x14 },	/* 350   900      8.2   */
	{ 0xA, 0x4C, 0x3F, 0x00, 0x00 },	/* 500   500      0.0   */
	{ 0xC, 0x73, 0x34, 0x00, 0x0B },	/* 500   700      2.9   */
	{ 0x6, 0x7F, 0x2F, 0x00, 0x10 },	/* 500   900      5.1   */
	{ 0xC, 0x6C, 0x3C, 0x00, 0x03 },	/* 650   700      0.6   */
	{ 0x6, 0x7F, 0x35, 0x00, 0x0A },	/* 600   900      3.5   */
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 900   900      0.0   */
};

static const struct cnl_ddi_buf_trans icl_combo_phy_ddi_translations_edp_hbr2[] = {
						/* NT mV Trans mV db    */
	{ 0x0, 0x7F, 0x3F, 0x00, 0x00 },	/* 200   200      0.0   */
	{ 0x8, 0x7F, 0x38, 0x00, 0x07 },	/* 200   250      1.9   */
	{ 0x1, 0x7F, 0x33, 0x00, 0x0C },	/* 200   300      3.5   */
	{ 0x9, 0x7F, 0x31, 0x00, 0x0E },	/* 200   350      4.9   */
	{ 0x8, 0x7F, 0x3F, 0x00, 0x00 },	/* 250   250      0.0   */
	{ 0x1, 0x7F, 0x38, 0x00, 0x07 },	/* 250   300      1.6   */
	{ 0x9, 0x7F, 0x35, 0x00, 0x0A },	/* 250   350      2.9   */
	{ 0x1, 0x7F, 0x3F, 0x00, 0x00 },	/* 300   300      0.0   */
	{ 0x9, 0x7F, 0x38, 0x00, 0x07 },	/* 300   350      1.3   */
	{ 0x9, 0x7F, 0x3F, 0x00, 0x00 },	/* 350   350      0.0   */
};

static const struct cnl_ddi_buf_trans icl_combo_phy_ddi_translations_edp_hbr3[] = {
						/* NT mV Trans mV db    */
	{ 0xA, 0x35, 0x3F, 0x00, 0x00 },	/* 350   350      0.0   */
	{ 0xA, 0x4F, 0x37, 0x00, 0x08 },	/* 350   500      3.1   */
	{ 0xC, 0x71, 0x2F, 0x00, 0x10 },	/* 350   700      6.0   */
	{ 0x6, 0x7F, 0x2B, 0x00, 0x14 },	/* 350   900      8.2   */
	{ 0xA, 0x4C, 0x3F, 0x00, 0x00 },	/* 500   500      0.0   */
	{ 0xC, 0x73, 0x34, 0x00, 0x0B },	/* 500   700      2.9   */
	{ 0x6, 0x7F, 0x2F, 0x00, 0x10 },	/* 500   900      5.1   */
	{ 0xC, 0x6C, 0x3C, 0x00, 0x03 },	/* 650   700      0.6   */
	{ 0x6, 0x7F, 0x35, 0x00, 0x0A },	/* 600   900      3.5   */
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 900   900      0.0   */
};

static const struct cnl_ddi_buf_trans icl_combo_phy_ddi_translations_hdmi[] = {
						/* NT mV Trans mV db    */
	{ 0xA, 0x60, 0x3F, 0x00, 0x00 },	/* 450   450      0.0   */
	{ 0xB, 0x73, 0x36, 0x00, 0x09 },	/* 450   650      3.2   */
	{ 0x6, 0x7F, 0x31, 0x00, 0x0E },	/* 450   850      5.5   */
	{ 0xB, 0x73, 0x3F, 0x00, 0x00 },	/* 650   650      0.0   ALS */
	{ 0x6, 0x7F, 0x37, 0x00, 0x08 },	/* 650   850      2.3   */
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 850   850      0.0   */
	{ 0x6, 0x7F, 0x35, 0x00, 0x0A },	/* 600   850      3.0   */
};

static const struct cnl_ddi_buf_trans ehl_combo_phy_ddi_translations_dp[] = {
						/* NT mV Trans mV db    */
	{ 0xA, 0x33, 0x3F, 0x00, 0x00 },	/* 350   350      0.0   */
	{ 0xA, 0x47, 0x36, 0x00, 0x09 },	/* 350   500      3.1   */
	{ 0xC, 0x64, 0x34, 0x00, 0x0B },	/* 350   700      6.0   */
	{ 0x6, 0x7F, 0x30, 0x00, 0x0F },	/* 350   900      8.2   */
	{ 0xA, 0x46, 0x3F, 0x00, 0x00 },	/* 500   500      0.0   */
	{ 0xC, 0x64, 0x38, 0x00, 0x07 },	/* 500   700      2.9   */
	{ 0x6, 0x7F, 0x32, 0x00, 0x0D },	/* 500   900      5.1   */
	{ 0xC, 0x61, 0x3F, 0x00, 0x00 },	/* 650   700      0.6   */
	{ 0x6, 0x7F, 0x38, 0x00, 0x07 },	/* 600   900      3.5   */
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 900   900      0.0   */
};

static const struct cnl_ddi_buf_trans jsl_combo_phy_ddi_translations_edp_hbr[] = {
						/* NT mV Trans mV db    */
	{ 0x8, 0x7F, 0x3F, 0x00, 0x00 },        /* 200   200      0.0   */
	{ 0x8, 0x7F, 0x38, 0x00, 0x07 },        /* 200   250      1.9   */
	{ 0x1, 0x7F, 0x33, 0x00, 0x0C },        /* 200   300      3.5   */
	{ 0xA, 0x35, 0x36, 0x00, 0x09 },        /* 200   350      4.9   */
	{ 0x8, 0x7F, 0x3F, 0x00, 0x00 },        /* 250   250      0.0   */
	{ 0x1, 0x7F, 0x38, 0x00, 0x07 },        /* 250   300      1.6   */
	{ 0xA, 0x35, 0x35, 0x00, 0x0A },        /* 250   350      2.9   */
	{ 0x1, 0x7F, 0x3F, 0x00, 0x00 },        /* 300   300      0.0   */
	{ 0xA, 0x35, 0x38, 0x00, 0x07 },        /* 300   350      1.3   */
	{ 0xA, 0x35, 0x3F, 0x00, 0x00 },        /* 350   350      0.0   */
};

static const struct cnl_ddi_buf_trans jsl_combo_phy_ddi_translations_edp_hbr2[] = {
						/* NT mV Trans mV db    */
	{ 0x8, 0x7F, 0x3F, 0x00, 0x00 },        /* 200   200      0.0   */
	{ 0x8, 0x7F, 0x3F, 0x00, 0x00 },        /* 200   250      1.9   */
	{ 0x1, 0x7F, 0x3D, 0x00, 0x02 },        /* 200   300      3.5   */
	{ 0xA, 0x35, 0x38, 0x00, 0x07 },        /* 200   350      4.9   */
	{ 0x8, 0x7F, 0x3F, 0x00, 0x00 },        /* 250   250      0.0   */
	{ 0x1, 0x7F, 0x3F, 0x00, 0x00 },        /* 250   300      1.6   */
	{ 0xA, 0x35, 0x3A, 0x00, 0x05 },        /* 250   350      2.9   */
	{ 0x1, 0x7F, 0x3F, 0x00, 0x00 },        /* 300   300      0.0   */
	{ 0xA, 0x35, 0x38, 0x00, 0x07 },        /* 300   350      1.3   */
	{ 0xA, 0x35, 0x3F, 0x00, 0x00 },        /* 350   350      0.0   */
};

static const struct cnl_ddi_buf_trans dg1_combo_phy_ddi_translations_dp_rbr_hbr[] = {
						/* NT mV Trans mV db    */
	{ 0xA, 0x32, 0x3F, 0x00, 0x00 },	/* 350   350      0.0   */
	{ 0xA, 0x48, 0x35, 0x00, 0x0A },	/* 350   500      3.1   */
	{ 0xC, 0x63, 0x2F, 0x00, 0x10 },	/* 350   700      6.0   */
	{ 0x6, 0x7F, 0x2C, 0x00, 0x13 },	/* 350   900      8.2   */
	{ 0xA, 0x43, 0x3F, 0x00, 0x00 },	/* 500   500      0.0   */
	{ 0xC, 0x60, 0x36, 0x00, 0x09 },	/* 500   700      2.9   */
	{ 0x6, 0x7F, 0x30, 0x00, 0x0F },	/* 500   900      5.1   */
	{ 0xC, 0x60, 0x3F, 0x00, 0x00 },	/* 650   700      0.6   */
	{ 0x6, 0x7F, 0x37, 0x00, 0x08 },	/* 600   900      3.5   */
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 900   900      0.0   */
};

static const struct cnl_ddi_buf_trans dg1_combo_phy_ddi_translations_dp_hbr2_hbr3[] = {
						/* NT mV Trans mV db    */
	{ 0xA, 0x32, 0x3F, 0x00, 0x00 },	/* 350   350      0.0   */
	{ 0xA, 0x48, 0x35, 0x00, 0x0A },	/* 350   500      3.1   */
	{ 0xC, 0x63, 0x2F, 0x00, 0x10 },	/* 350   700      6.0   */
	{ 0x6, 0x7F, 0x2C, 0x00, 0x13 },	/* 350   900      8.2   */
	{ 0xA, 0x43, 0x3F, 0x00, 0x00 },	/* 500   500      0.0   */
	{ 0xC, 0x60, 0x36, 0x00, 0x09 },	/* 500   700      2.9   */
	{ 0x6, 0x7F, 0x30, 0x00, 0x0F },	/* 500   900      5.1   */
	{ 0xC, 0x58, 0x3F, 0x00, 0x00 },	/* 650   700      0.6   */
	{ 0x6, 0x7F, 0x35, 0x00, 0x0A },	/* 600   900      3.5   */
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 900   900      0.0   */
};

struct icl_mg_phy_ddi_buf_trans {
	u32 cri_txdeemph_override_11_6;
	u32 cri_txdeemph_override_5_0;
	u32 cri_txdeemph_override_17_12;
};

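/*
 * MG PHY (ICL Type-C) buffer translations: separate DP tables are used for
 * RBR/HBR and HBR2/HBR3 link rates, selected in icl_get_mg_buf_trans_dp().
 */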
static const struct icl_mg_phy_ddi_buf_trans icl_mg_phy_ddi_translations_rbr_hbr[] = {
				/* Voltage swing  pre-emphasis */
	{ 0x18, 0x00, 0x00 },	/* 0              0   */
	{ 0x1D, 0x00, 0x05 },	/* 0              1   */
	{ 0x24, 0x00, 0x0C },	/* 0              2   */
	{ 0x2B, 0x00, 0x14 },	/* 0              3   */
	{ 0x21, 0x00, 0x00 },	/* 1              0   */
	{ 0x2B, 0x00, 0x08 },	/* 1              1   */
	{ 0x30, 0x00, 0x0F },	/* 1              2   */
	{ 0x31, 0x00, 0x03 },	/* 2              0   */
	{ 0x34, 0x00, 0x0B },	/* 2              1   */
	{ 0x3F, 0x00, 0x00 },	/* 3              0   */
};

static const struct icl_mg_phy_ddi_buf_trans icl_mg_phy_ddi_translations_hbr2_hbr3[] = {
				/* Voltage swing  pre-emphasis */
	{ 0x18, 0x00, 0x00 },	/* 0              0   */
	{ 0x1D, 0x00, 0x05 },	/* 0              1   */
	{ 0x24, 0x00, 0x0C },	/* 0              2   */
	{ 0x2B, 0x00, 0x14 },	/* 0              3   */
	{ 0x26, 0x00, 0x00 },	/* 1              0   */
	{ 0x2C, 0x00, 0x07 },	/* 1              1   */
	{ 0x33, 0x00, 0x0C },	/* 1              2   */
	{ 0x2E, 0x00, 0x00 },	/* 2              0   */
	{ 0x36, 0x00, 0x09 },	/* 2              1   */
	{ 0x3F, 0x00, 0x00 },	/* 3              0   */
};

static const struct icl_mg_phy_ddi_buf_trans icl_mg_phy_ddi_translations_hdmi[] = {
				/* HDMI Preset	VS	Pre-emph */
	{ 0x1A, 0x0, 0x0 },	/* 1		400mV	0dB */
	{ 0x20, 0x0, 0x0 },	/* 2		500mV	0dB */
	{ 0x29, 0x0, 0x0 },	/* 3		650mV	0dB */
	{ 0x32, 0x0, 0x0 },	/* 4		800mV	0dB */
	{ 0x3F, 0x0, 0x0 },	/* 5		1000mV	0dB */
	{ 0x3A, 0x0, 0x5 },	/* 6		Full	-1.5 dB */
	{ 0x39, 0x0, 0x6 },	/* 7		Full	-1.8 dB */
	{ 0x38, 0x0, 0x7 },	/* 8		Full	-2 dB */
	{ 0x37, 0x0, 0x8 },	/* 9		Full	-2.5 dB */
	{ 0x36, 0x0, 0x9 },	/* 10		Full	-3 dB */
};

struct tgl_dkl_phy_ddi_buf_trans {
	u32 dkl_vswing_control;
	u32 dkl_preshoot_control;
	u32 dkl_de_emphasis_control;
};

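/*
 * Dekel (DKL) PHY buffer translations for TGL Type-C ports: the DP tables
 * are split by link rate (HBR2/HBR3 vs. lower), see tgl_get_dkl_buf_trans_dp().
 */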
static const struct tgl_dkl_phy_ddi_buf_trans tgl_dkl_phy_dp_ddi_trans[] = {
				/* VS	pre-emp	Non-trans mV	Pre-emph dB */
	{ 0x7, 0x0, 0x00 },	/* 0	0	400mV		0 dB */
	{ 0x5, 0x0, 0x05 },	/* 0	1	400mV		3.5 dB */
	{ 0x2, 0x0, 0x0B },	/* 0	2	400mV		6 dB */
	{ 0x0, 0x0, 0x18 },	/* 0	3	400mV		9.5 dB */
	{ 0x5, 0x0, 0x00 },	/* 1	0	600mV		0 dB */
	{ 0x2, 0x0, 0x08 },	/* 1	1	600mV		3.5 dB */
	{ 0x0, 0x0, 0x14 },	/* 1	2	600mV		6 dB */
	{ 0x2, 0x0, 0x00 },	/* 2	0	800mV		0 dB */
	{ 0x0, 0x0, 0x0B },	/* 2	1	800mV		3.5 dB */
	{ 0x0, 0x0, 0x00 },	/* 3	0	1200mV		0 dB HDMI default */
};

static const struct tgl_dkl_phy_ddi_buf_trans tgl_dkl_phy_dp_ddi_trans_hbr2[] = {
				/* VS	pre-emp	Non-trans mV	Pre-emph dB */
	{ 0x7, 0x0, 0x00 },	/* 0	0	400mV		0 dB */
	{ 0x5, 0x0, 0x05 },	/* 0	1	400mV		3.5 dB */
	{ 0x2, 0x0, 0x0B },	/* 0	2	400mV		6 dB */
	{ 0x0, 0x0, 0x19 },	/* 0	3	400mV		9.5 dB */
	{ 0x5, 0x0, 0x00 },	/* 1	0	600mV		0 dB */
	{ 0x2, 0x0, 0x08 },	/* 1	1	600mV		3.5 dB */
	{ 0x0, 0x0, 0x14 },	/* 1	2	600mV		6 dB */
	{ 0x2, 0x0, 0x00 },	/* 2	0	800mV		0 dB */
	{ 0x0, 0x0, 0x0B },	/* 2	1	800mV		3.5 dB */
	{ 0x0, 0x0, 0x00 },	/* 3	0	1200mV		0 dB HDMI default */
};

static const struct tgl_dkl_phy_ddi_buf_trans tgl_dkl_phy_hdmi_ddi_trans[] = {
				/* HDMI Preset	VS	Pre-emph */
	{ 0x7, 0x0, 0x0 },	/* 1		400mV	0dB */
	{ 0x6, 0x0, 0x0 },	/* 2		500mV	0dB */
	{ 0x4, 0x0, 0x0 },	/* 3		650mV	0dB */
	{ 0x2, 0x0, 0x0 },	/* 4		800mV	0dB */
	{ 0x0, 0x0, 0x0 },	/* 5		1000mV	0dB */
	{ 0x0, 0x0, 0x5 },	/* 6		Full	-1.5 dB */
	{ 0x0, 0x0, 0x6 },	/* 7		Full	-1.8 dB */
	{ 0x0, 0x0, 0x7 },	/* 8		Full	-2 dB */
	{ 0x0, 0x0, 0x8 },	/* 9		Full	-2.5 dB */
	{ 0x0, 0x0, 0xA },	/* 10		Full	-3 dB */
};

static const struct cnl_ddi_buf_trans tgl_combo_phy_ddi_translations_dp_hbr[] = {
						/* NT mV Trans mV db    */
	{ 0xA, 0x32, 0x3F, 0x00, 0x00 },	/* 350   350      0.0   */
	{ 0xA, 0x4F, 0x37, 0x00, 0x08 },	/* 350   500      3.1   */
	{ 0xC, 0x71, 0x2F, 0x00, 0x10 },	/* 350   700      6.0   */
	{ 0x6, 0x7D, 0x2B, 0x00, 0x14 },	/* 350   900      8.2   */
	{ 0xA, 0x4C, 0x3F, 0x00, 0x00 },	/* 500   500      0.0   */
	{ 0xC, 0x73, 0x34, 0x00, 0x0B },	/* 500   700      2.9   */
	{ 0x6, 0x7F, 0x2F, 0x00, 0x10 },	/* 500   900      5.1   */
	{ 0xC, 0x6C, 0x3C, 0x00, 0x03 },	/* 650   700      0.6   */
	{ 0x6, 0x7F, 0x35, 0x00, 0x0A },	/* 600   900      3.5   */
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 900   900      0.0   */
};

static const struct cnl_ddi_buf_trans tgl_combo_phy_ddi_translations_dp_hbr2[] = {
						/* NT mV Trans mV db    */
	{ 0xA, 0x35, 0x3F, 0x00, 0x00 },	/* 350   350      0.0   */
	{ 0xA, 0x4F, 0x37, 0x00, 0x08 },	/* 350   500      3.1   */
	{ 0xC, 0x63, 0x2F, 0x00, 0x10 },	/* 350   700      6.0   */
	{ 0x6, 0x7F, 0x2B, 0x00, 0x14 },	/* 350   900      8.2   */
	{ 0xA, 0x47, 0x3F, 0x00, 0x00 },	/* 500   500      0.0   */
	{ 0xC, 0x63, 0x34, 0x00, 0x0B },	/* 500   700      2.9   */
	{ 0x6, 0x7F, 0x2F, 0x00, 0x10 },	/* 500   900      5.1   */
	{ 0xC, 0x61, 0x3C, 0x00, 0x03 },	/* 650   700      0.6   */
	{ 0x6, 0x7B, 0x35, 0x00, 0x0A },	/* 600   900      3.5   */
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 900   900      0.0   */
};

static const struct cnl_ddi_buf_trans tgl_uy_combo_phy_ddi_translations_dp_hbr2[] = {
						/* NT mV Trans mV db    */
	{ 0xA, 0x35, 0x3F, 0x00, 0x00 },	/* 350   350      0.0   */
	{ 0xA, 0x4F, 0x36, 0x00, 0x09 },	/* 350   500      3.1   */
	{ 0xC, 0x60, 0x32, 0x00, 0x0D },	/* 350   700      6.0   */
	{ 0xC, 0x7F, 0x2D, 0x00, 0x12 },	/* 350   900      8.2   */
	{ 0xC, 0x47, 0x3F, 0x00, 0x00 },	/* 500   500      0.0   */
	{ 0xC, 0x6F, 0x36, 0x00, 0x09 },	/* 500   700      2.9   */
	{ 0x6, 0x7D, 0x32, 0x00, 0x0D },	/* 500   900      5.1   */
	{ 0x6, 0x60, 0x3C, 0x00, 0x03 },	/* 650   700      0.6   */
	{ 0x6, 0x7F, 0x34, 0x00, 0x0B },	/* 600   900      3.5   */
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 900   900      0.0   */
};

/*
 * Cloned the HOBL entry to comply with the voltage and pre-emphasis entries
 * that DisplayPort specification requires
 */
static const struct cnl_ddi_buf_trans tgl_combo_phy_ddi_translations_edp_hbr2_hobl[] = {
						/* VS	pre-emp	*/
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 0	0	*/
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 0	1	*/
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 0	2	*/
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 0	3	*/
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 1	0	*/
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 1	1	*/
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 1	2	*/
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 2	0	*/
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 2	1	*/
};

static const struct cnl_ddi_buf_trans rkl_combo_phy_ddi_translations_dp_hbr[] = {
						/* NT mV Trans mV db    */
	{ 0xA, 0x2F, 0x3F, 0x00, 0x00 },	/* 350   350      0.0   */
	{ 0xA, 0x4F, 0x37, 0x00, 0x08 },	/* 350   500      3.1   */
	{ 0xC, 0x63, 0x2F, 0x00, 0x10 },	/* 350   700      6.0   */
	{ 0x6, 0x7D, 0x2A, 0x00, 0x15 },	/* 350   900      8.2   */
	{ 0xA, 0x4C, 0x3F, 0x00, 0x00 },	/* 500   500      0.0   */
	{ 0xC, 0x73, 0x34, 0x00, 0x0B },	/* 500   700      2.9   */
	{ 0x6, 0x7F, 0x2F, 0x00, 0x10 },	/* 500   900      5.1   */
	{ 0xC, 0x6E, 0x3E, 0x00, 0x01 },	/* 650   700      0.6   */
	{ 0x6, 0x7F, 0x35, 0x00, 0x0A },	/* 600   900      3.5   */
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 900   900      0.0   */
};

static const struct cnl_ddi_buf_trans rkl_combo_phy_ddi_translations_dp_hbr2_hbr3[] = {
						/* NT mV Trans mV db    */
	{ 0xA, 0x35, 0x3F, 0x00, 0x00 },	/* 350   350      0.0   */
	{ 0xA, 0x50, 0x38, 0x00, 0x07 },	/* 350   500      3.1   */
	{ 0xC, 0x61, 0x33, 0x00, 0x0C },	/* 350   700      6.0   */
	{ 0x6, 0x7F, 0x2E, 0x00, 0x11 },	/* 350   900      8.2   */
	{ 0xA, 0x47, 0x3F, 0x00, 0x00 },	/* 500   500      0.0   */
	{ 0xC, 0x5F, 0x38, 0x00, 0x07 },	/* 500   700      2.9   */
	{ 0x6, 0x7F, 0x2F, 0x00, 0x10 },	/* 500   900      5.1   */
	{ 0xC, 0x5F, 0x3F, 0x00, 0x00 },	/* 650   700      0.6   */
	{ 0x6, 0x7E, 0x36, 0x00, 0x09 },	/* 600   900      3.5   */
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 900   900      0.0   */
};

static bool is_hobl_buf_trans(const struct cnl_ddi_buf_trans *table)
{
	return table == tgl_combo_phy_ddi_translations_edp_hbr2_hobl;
}

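/*
 * The helpers below pick the platform and output type specific translation
 * table and report the number of usable entries via *n_entries.
 */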
static const struct ddi_buf_trans *
bdw_get_buf_trans_edp(struct intel_encoder *encoder, int *n_entries)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);

	if (dev_priv->vbt.edp.low_vswing) {
		*n_entries = ARRAY_SIZE(bdw_ddi_translations_edp);
		return bdw_ddi_translations_edp;
	} else {
		*n_entries = ARRAY_SIZE(bdw_ddi_translations_dp);
		return bdw_ddi_translations_dp;
	}
}

static const struct ddi_buf_trans *
skl_get_buf_trans_dp(struct intel_encoder *encoder, int *n_entries)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);

	if (IS_SKL_ULX(dev_priv)) {
		*n_entries = ARRAY_SIZE(skl_y_ddi_translations_dp);
		return skl_y_ddi_translations_dp;
	} else if (IS_SKL_ULT(dev_priv)) {
		*n_entries = ARRAY_SIZE(skl_u_ddi_translations_dp);
		return skl_u_ddi_translations_dp;
	} else {
		*n_entries = ARRAY_SIZE(skl_ddi_translations_dp);
		return skl_ddi_translations_dp;
	}
}

static const struct ddi_buf_trans *
kbl_get_buf_trans_dp(struct intel_encoder *encoder, int *n_entries)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);

	if (IS_KBL_ULX(dev_priv) ||
	    IS_CFL_ULX(dev_priv) ||
	    IS_CML_ULX(dev_priv)) {
		*n_entries = ARRAY_SIZE(kbl_y_ddi_translations_dp);
		return kbl_y_ddi_translations_dp;
	} else if (IS_KBL_ULT(dev_priv) ||
		   IS_CFL_ULT(dev_priv) ||
		   IS_CML_ULT(dev_priv)) {
		*n_entries = ARRAY_SIZE(kbl_u_ddi_translations_dp);
		return kbl_u_ddi_translations_dp;
	} else {
		*n_entries = ARRAY_SIZE(kbl_ddi_translations_dp);
		return kbl_ddi_translations_dp;
	}
}

static const struct ddi_buf_trans *
skl_get_buf_trans_edp(struct intel_encoder *encoder, int *n_entries)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);

	if (dev_priv->vbt.edp.low_vswing) {
		if (IS_SKL_ULX(dev_priv) ||
		    IS_KBL_ULX(dev_priv) ||
		    IS_CFL_ULX(dev_priv) ||
		    IS_CML_ULX(dev_priv)) {
			*n_entries = ARRAY_SIZE(skl_y_ddi_translations_edp);
			return skl_y_ddi_translations_edp;
		} else if (IS_SKL_ULT(dev_priv) ||
			   IS_KBL_ULT(dev_priv) ||
			   IS_CFL_ULT(dev_priv) ||
			   IS_CML_ULT(dev_priv)) {
			*n_entries = ARRAY_SIZE(skl_u_ddi_translations_edp);
			return skl_u_ddi_translations_edp;
		} else {
			*n_entries = ARRAY_SIZE(skl_ddi_translations_edp);
			return skl_ddi_translations_edp;
		}
	}

	if (IS_KABYLAKE(dev_priv) ||
	    IS_COFFEELAKE(dev_priv) ||
	    IS_COMETLAKE(dev_priv))
		return kbl_get_buf_trans_dp(encoder, n_entries);
	else
		return skl_get_buf_trans_dp(encoder, n_entries);
}

static const struct ddi_buf_trans *
skl_get_buf_trans_hdmi(struct drm_i915_private *dev_priv, int *n_entries)
{
	if (IS_SKL_ULX(dev_priv) ||
	    IS_KBL_ULX(dev_priv) ||
	    IS_CFL_ULX(dev_priv) ||
	    IS_CML_ULX(dev_priv)) {
		*n_entries = ARRAY_SIZE(skl_y_ddi_translations_hdmi);
		return skl_y_ddi_translations_hdmi;
	} else {
		*n_entries = ARRAY_SIZE(skl_ddi_translations_hdmi);
		return skl_ddi_translations_hdmi;
	}
}

static int skl_buf_trans_num_entries(enum port port, int n_entries)
{
	/* Only DDIA and DDIE can select the 10th register with DP */
	if (port == PORT_A || port == PORT_E)
		return min(n_entries, 10);
	else
		return min(n_entries, 9);
}

static const struct ddi_buf_trans *
intel_ddi_get_buf_trans_dp(struct intel_encoder *encoder, int *n_entries)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);

	if (IS_KABYLAKE(dev_priv) ||
	    IS_COFFEELAKE(dev_priv) ||
	    IS_COMETLAKE(dev_priv)) {
		const struct ddi_buf_trans *ddi_translations =
			kbl_get_buf_trans_dp(encoder, n_entries);
		*n_entries = skl_buf_trans_num_entries(encoder->port, *n_entries);
		return ddi_translations;
	} else if (IS_SKYLAKE(dev_priv)) {
		const struct ddi_buf_trans *ddi_translations =
			skl_get_buf_trans_dp(encoder, n_entries);
		*n_entries = skl_buf_trans_num_entries(encoder->port, *n_entries);
		return ddi_translations;
	} else if (IS_BROADWELL(dev_priv)) {
		*n_entries = ARRAY_SIZE(bdw_ddi_translations_dp);
		return  bdw_ddi_translations_dp;
	} else if (IS_HASWELL(dev_priv)) {
		*n_entries = ARRAY_SIZE(hsw_ddi_translations_dp);
		return hsw_ddi_translations_dp;
	}

	*n_entries = 0;
	return NULL;
}

static const struct ddi_buf_trans *
intel_ddi_get_buf_trans_edp(struct intel_encoder *encoder, int *n_entries)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);

	if (IS_GEN9_BC(dev_priv)) {
		const struct ddi_buf_trans *ddi_translations =
			skl_get_buf_trans_edp(encoder, n_entries);
		*n_entries = skl_buf_trans_num_entries(encoder->port, *n_entries);
		return ddi_translations;
	} else if (IS_BROADWELL(dev_priv)) {
		return bdw_get_buf_trans_edp(encoder, n_entries);
	} else if (IS_HASWELL(dev_priv)) {
		*n_entries = ARRAY_SIZE(hsw_ddi_translations_dp);
		return hsw_ddi_translations_dp;
	}

	*n_entries = 0;
	return NULL;
}

static const struct ddi_buf_trans *
intel_ddi_get_buf_trans_fdi(struct drm_i915_private *dev_priv,
			    int *n_entries)
{
	if (IS_BROADWELL(dev_priv)) {
		*n_entries = ARRAY_SIZE(bdw_ddi_translations_fdi);
		return bdw_ddi_translations_fdi;
	} else if (IS_HASWELL(dev_priv)) {
		*n_entries = ARRAY_SIZE(hsw_ddi_translations_fdi);
		return hsw_ddi_translations_fdi;
	}

	*n_entries = 0;
	return NULL;
}

static const struct ddi_buf_trans *
intel_ddi_get_buf_trans_hdmi(struct intel_encoder *encoder,
			     int *n_entries)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);

	if (IS_GEN9_BC(dev_priv)) {
		return skl_get_buf_trans_hdmi(dev_priv, n_entries);
	} else if (IS_BROADWELL(dev_priv)) {
		*n_entries = ARRAY_SIZE(bdw_ddi_translations_hdmi);
		return bdw_ddi_translations_hdmi;
	} else if (IS_HASWELL(dev_priv)) {
		*n_entries = ARRAY_SIZE(hsw_ddi_translations_hdmi);
		return hsw_ddi_translations_hdmi;
	}

	*n_entries = 0;
	return NULL;
}

static const struct bxt_ddi_buf_trans *
bxt_get_buf_trans_dp(struct intel_encoder *encoder, int *n_entries)
{
	*n_entries = ARRAY_SIZE(bxt_ddi_translations_dp);
	return bxt_ddi_translations_dp;
}

static const struct bxt_ddi_buf_trans *
bxt_get_buf_trans_edp(struct intel_encoder *encoder, int *n_entries)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);

	if (dev_priv->vbt.edp.low_vswing) {
		*n_entries = ARRAY_SIZE(bxt_ddi_translations_edp);
		return bxt_ddi_translations_edp;
	}

	return bxt_get_buf_trans_dp(encoder, n_entries);
}

static const struct bxt_ddi_buf_trans *
bxt_get_buf_trans_hdmi(struct intel_encoder *encoder, int *n_entries)
{
	*n_entries = ARRAY_SIZE(bxt_ddi_translations_hdmi);
	return bxt_ddi_translations_hdmi;
}

static const struct cnl_ddi_buf_trans *
cnl_get_buf_trans_hdmi(struct intel_encoder *encoder, int *n_entries)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u32 voltage = intel_de_read(dev_priv, CNL_PORT_COMP_DW3) & VOLTAGE_INFO_MASK;

	if (voltage == VOLTAGE_INFO_0_85V) {
		*n_entries = ARRAY_SIZE(cnl_ddi_translations_hdmi_0_85V);
		return cnl_ddi_translations_hdmi_0_85V;
	} else if (voltage == VOLTAGE_INFO_0_95V) {
		*n_entries = ARRAY_SIZE(cnl_ddi_translations_hdmi_0_95V);
		return cnl_ddi_translations_hdmi_0_95V;
	} else if (voltage == VOLTAGE_INFO_1_05V) {
		*n_entries = ARRAY_SIZE(cnl_ddi_translations_hdmi_1_05V);
		return cnl_ddi_translations_hdmi_1_05V;
	} else {
		*n_entries = 1; /* shut up gcc */
		MISSING_CASE(voltage);
	}
	return NULL;
}

static const struct cnl_ddi_buf_trans *
cnl_get_buf_trans_dp(struct intel_encoder *encoder, int *n_entries)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u32 voltage = intel_de_read(dev_priv, CNL_PORT_COMP_DW3) & VOLTAGE_INFO_MASK;

	if (voltage == VOLTAGE_INFO_0_85V) {
		*n_entries = ARRAY_SIZE(cnl_ddi_translations_dp_0_85V);
		return cnl_ddi_translations_dp_0_85V;
	} else if (voltage == VOLTAGE_INFO_0_95V) {
		*n_entries = ARRAY_SIZE(cnl_ddi_translations_dp_0_95V);
		return cnl_ddi_translations_dp_0_95V;
	} else if (voltage == VOLTAGE_INFO_1_05V) {
		*n_entries = ARRAY_SIZE(cnl_ddi_translations_dp_1_05V);
		return cnl_ddi_translations_dp_1_05V;
	} else {
		*n_entries = 1; /* shut up gcc */
		MISSING_CASE(voltage);
	}
	return NULL;
}

static const struct cnl_ddi_buf_trans *
cnl_get_buf_trans_edp(struct intel_encoder *encoder, int *n_entries)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u32 voltage = intel_de_read(dev_priv, CNL_PORT_COMP_DW3) & VOLTAGE_INFO_MASK;

	if (dev_priv->vbt.edp.low_vswing) {
		if (voltage == VOLTAGE_INFO_0_85V) {
			*n_entries = ARRAY_SIZE(cnl_ddi_translations_edp_0_85V);
			return cnl_ddi_translations_edp_0_85V;
		} else if (voltage == VOLTAGE_INFO_0_95V) {
			*n_entries = ARRAY_SIZE(cnl_ddi_translations_edp_0_95V);
			return cnl_ddi_translations_edp_0_95V;
		} else if (voltage == VOLTAGE_INFO_1_05V) {
			*n_entries = ARRAY_SIZE(cnl_ddi_translations_edp_1_05V);
			return cnl_ddi_translations_edp_1_05V;
		} else {
			*n_entries = 1; /* shut up gcc */
			MISSING_CASE(voltage);
		}
		return NULL;
	} else {
		return cnl_get_buf_trans_dp(encoder, n_entries);
	}
}

static const struct cnl_ddi_buf_trans *
icl_get_combo_buf_trans_hdmi(struct intel_encoder *encoder,
			     const struct intel_crtc_state *crtc_state,
			     int *n_entries)
{
	*n_entries = ARRAY_SIZE(icl_combo_phy_ddi_translations_hdmi);
	return icl_combo_phy_ddi_translations_hdmi;
}

static const struct cnl_ddi_buf_trans *
icl_get_combo_buf_trans_dp(struct intel_encoder *encoder,
			   const struct intel_crtc_state *crtc_state,
			   int *n_entries)
{
	*n_entries = ARRAY_SIZE(icl_combo_phy_ddi_translations_dp_hbr2);
	return icl_combo_phy_ddi_translations_dp_hbr2;
}

static const struct cnl_ddi_buf_trans *
icl_get_combo_buf_trans_edp(struct intel_encoder *encoder,
			    const struct intel_crtc_state *crtc_state,
			    int *n_entries)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);

	if (crtc_state->port_clock > 540000) {
		*n_entries = ARRAY_SIZE(icl_combo_phy_ddi_translations_edp_hbr3);
		return icl_combo_phy_ddi_translations_edp_hbr3;
	} else if (dev_priv->vbt.edp.low_vswing) {
		*n_entries = ARRAY_SIZE(icl_combo_phy_ddi_translations_edp_hbr2);
		return icl_combo_phy_ddi_translations_edp_hbr2;
	} else if (IS_DG1(dev_priv) && crtc_state->port_clock > 270000) {
		*n_entries = ARRAY_SIZE(dg1_combo_phy_ddi_translations_dp_hbr2_hbr3);
		return dg1_combo_phy_ddi_translations_dp_hbr2_hbr3;
	} else if (IS_DG1(dev_priv)) {
		*n_entries = ARRAY_SIZE(dg1_combo_phy_ddi_translations_dp_rbr_hbr);
		return dg1_combo_phy_ddi_translations_dp_rbr_hbr;
	}

	return icl_get_combo_buf_trans_dp(encoder, crtc_state, n_entries);
}

static const struct cnl_ddi_buf_trans *
icl_get_combo_buf_trans(struct intel_encoder *encoder,
			const struct intel_crtc_state *crtc_state,
			int *n_entries)
{
	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI))
		return icl_get_combo_buf_trans_hdmi(encoder, crtc_state, n_entries);
	else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP))
		return icl_get_combo_buf_trans_edp(encoder, crtc_state, n_entries);
	else
		return icl_get_combo_buf_trans_dp(encoder, crtc_state, n_entries);
}

static const struct icl_mg_phy_ddi_buf_trans *
icl_get_mg_buf_trans_hdmi(struct intel_encoder *encoder,
			  const struct intel_crtc_state *crtc_state,
			  int *n_entries)
{
	*n_entries = ARRAY_SIZE(icl_mg_phy_ddi_translations_hdmi);
	return icl_mg_phy_ddi_translations_hdmi;
}

static const struct icl_mg_phy_ddi_buf_trans *
icl_get_mg_buf_trans_dp(struct intel_encoder *encoder,
			const struct intel_crtc_state *crtc_state,
			int *n_entries)
{
	if (crtc_state->port_clock > 270000) {
		*n_entries = ARRAY_SIZE(icl_mg_phy_ddi_translations_hbr2_hbr3);
		return icl_mg_phy_ddi_translations_hbr2_hbr3;
	} else {
		*n_entries = ARRAY_SIZE(icl_mg_phy_ddi_translations_rbr_hbr);
		return icl_mg_phy_ddi_translations_rbr_hbr;
	}
}

static const struct icl_mg_phy_ddi_buf_trans *
icl_get_mg_buf_trans(struct intel_encoder *encoder,
		     const struct intel_crtc_state *crtc_state,
		     int *n_entries)
{
	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI))
		return icl_get_mg_buf_trans_hdmi(encoder, crtc_state, n_entries);
	else
		return icl_get_mg_buf_trans_dp(encoder, crtc_state, n_entries);
}

static const struct cnl_ddi_buf_trans *
ehl_get_combo_buf_trans_hdmi(struct intel_encoder *encoder,
			     const struct intel_crtc_state *crtc_state,
			     int *n_entries)
{
	*n_entries = ARRAY_SIZE(icl_combo_phy_ddi_translations_hdmi);
	return icl_combo_phy_ddi_translations_hdmi;
}

static const struct cnl_ddi_buf_trans *
ehl_get_combo_buf_trans_dp(struct intel_encoder *encoder,
			   const struct intel_crtc_state *crtc_state,
			   int *n_entries)
{
	*n_entries = ARRAY_SIZE(ehl_combo_phy_ddi_translations_dp);
	return ehl_combo_phy_ddi_translations_dp;
}

static const struct cnl_ddi_buf_trans *
ehl_get_combo_buf_trans_edp(struct intel_encoder *encoder,
			    const struct intel_crtc_state *crtc_state,
			    int *n_entries)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);

	if (dev_priv->vbt.edp.low_vswing) {
		*n_entries = ARRAY_SIZE(icl_combo_phy_ddi_translations_edp_hbr2);
		return icl_combo_phy_ddi_translations_edp_hbr2;
	}

	return ehl_get_combo_buf_trans_dp(encoder, crtc_state, n_entries);
}

static const struct cnl_ddi_buf_trans *
ehl_get_combo_buf_trans(struct intel_encoder *encoder,
			const struct intel_crtc_state *crtc_state,
			int *n_entries)
{
	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI))
		return ehl_get_combo_buf_trans_hdmi(encoder, crtc_state, n_entries);
	else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP))
		return ehl_get_combo_buf_trans_edp(encoder, crtc_state, n_entries);
	else
		return ehl_get_combo_buf_trans_dp(encoder, crtc_state, n_entries);
}

static const struct cnl_ddi_buf_trans *
jsl_get_combo_buf_trans_hdmi(struct intel_encoder *encoder,
			     const struct intel_crtc_state *crtc_state,
			     int *n_entries)
{
	*n_entries = ARRAY_SIZE(icl_combo_phy_ddi_translations_hdmi);
	return icl_combo_phy_ddi_translations_hdmi;
}

static const struct cnl_ddi_buf_trans *
jsl_get_combo_buf_trans_dp(struct intel_encoder *encoder,
			   const struct intel_crtc_state *crtc_state,
			   int *n_entries)
{
	*n_entries = ARRAY_SIZE(icl_combo_phy_ddi_translations_dp_hbr2);
	return icl_combo_phy_ddi_translations_dp_hbr2;
}

static const struct cnl_ddi_buf_trans *
jsl_get_combo_buf_trans_edp(struct intel_encoder *encoder,
			    const struct intel_crtc_state *crtc_state,
			    int *n_entries)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);

	if (dev_priv->vbt.edp.low_vswing) {
		if (crtc_state->port_clock > 270000) {
			*n_entries = ARRAY_SIZE(jsl_combo_phy_ddi_translations_edp_hbr2);
			return jsl_combo_phy_ddi_translations_edp_hbr2;
		} else {
			*n_entries = ARRAY_SIZE(jsl_combo_phy_ddi_translations_edp_hbr);
			return jsl_combo_phy_ddi_translations_edp_hbr;
		}
	}

	return jsl_get_combo_buf_trans_dp(encoder, crtc_state, n_entries);
}

static const struct cnl_ddi_buf_trans *
jsl_get_combo_buf_trans(struct intel_encoder *encoder,
			const struct intel_crtc_state *crtc_state,
			int *n_entries)
{
	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI))
		return jsl_get_combo_buf_trans_hdmi(encoder, crtc_state, n_entries);
	else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP))
		return jsl_get_combo_buf_trans_edp(encoder, crtc_state, n_entries);
	else
		return jsl_get_combo_buf_trans_dp(encoder, crtc_state, n_entries);
}

static const struct cnl_ddi_buf_trans *
tgl_get_combo_buf_trans_hdmi(struct intel_encoder *encoder,
			     const struct intel_crtc_state *crtc_state,
			     int *n_entries)
{
	*n_entries = ARRAY_SIZE(icl_combo_phy_ddi_translations_hdmi);
	return icl_combo_phy_ddi_translations_hdmi;
}

static const struct cnl_ddi_buf_trans *
tgl_get_combo_buf_trans_dp(struct intel_encoder *encoder,
			   const struct intel_crtc_state *crtc_state,
			   int *n_entries)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);

	if (crtc_state->port_clock > 270000) {
		if (IS_ROCKETLAKE(dev_priv)) {
			*n_entries = ARRAY_SIZE(rkl_combo_phy_ddi_translations_dp_hbr2_hbr3);
			return rkl_combo_phy_ddi_translations_dp_hbr2_hbr3;
		} else if (IS_TGL_U(dev_priv) || IS_TGL_Y(dev_priv)) {
			*n_entries = ARRAY_SIZE(tgl_uy_combo_phy_ddi_translations_dp_hbr2);
			return tgl_uy_combo_phy_ddi_translations_dp_hbr2;
		} else {
			*n_entries = ARRAY_SIZE(tgl_combo_phy_ddi_translations_dp_hbr2);
			return tgl_combo_phy_ddi_translations_dp_hbr2;
		}
	} else {
		if (IS_ROCKETLAKE(dev_priv)) {
			*n_entries = ARRAY_SIZE(rkl_combo_phy_ddi_translations_dp_hbr);
			return rkl_combo_phy_ddi_translations_dp_hbr;
		} else {
			*n_entries = ARRAY_SIZE(tgl_combo_phy_ddi_translations_dp_hbr);
			return tgl_combo_phy_ddi_translations_dp_hbr;
		}
	}
}

static const struct cnl_ddi_buf_trans *
tgl_get_combo_buf_trans_edp(struct intel_encoder *encoder,
			    const struct intel_crtc_state *crtc_state,
1348 1349 1350 1351 1352
			    int *n_entries)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);

1353
	if (crtc_state->port_clock > 540000) {
1354 1355 1356 1357 1358 1359 1360 1361 1362 1363
		*n_entries = ARRAY_SIZE(icl_combo_phy_ddi_translations_edp_hbr3);
		return icl_combo_phy_ddi_translations_edp_hbr3;
	} else if (dev_priv->vbt.edp.hobl && !intel_dp->hobl_failed) {
		*n_entries = ARRAY_SIZE(tgl_combo_phy_ddi_translations_edp_hbr2_hobl);
		return tgl_combo_phy_ddi_translations_edp_hbr2_hobl;
	} else if (dev_priv->vbt.edp.low_vswing) {
		*n_entries = ARRAY_SIZE(icl_combo_phy_ddi_translations_edp_hbr2);
		return icl_combo_phy_ddi_translations_edp_hbr2;
	}

1364
	return tgl_get_combo_buf_trans_dp(encoder, crtc_state, n_entries);
1365 1366 1367
}

static const struct cnl_ddi_buf_trans *
1368 1369
tgl_get_combo_buf_trans(struct intel_encoder *encoder,
			const struct intel_crtc_state *crtc_state,
1370 1371
			int *n_entries)
{
1372 1373 1374 1375
	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI))
		return tgl_get_combo_buf_trans_hdmi(encoder, crtc_state, n_entries);
	else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP))
		return tgl_get_combo_buf_trans_edp(encoder, crtc_state, n_entries);
1376
	else
1377
		return tgl_get_combo_buf_trans_dp(encoder, crtc_state, n_entries);
1378 1379
}

1380
static const struct tgl_dkl_phy_ddi_buf_trans *
1381 1382
tgl_get_dkl_buf_trans_hdmi(struct intel_encoder *encoder,
			   const struct intel_crtc_state *crtc_state,
1383 1384 1385 1386 1387 1388 1389
			   int *n_entries)
{
	*n_entries = ARRAY_SIZE(tgl_dkl_phy_hdmi_ddi_trans);
	return tgl_dkl_phy_hdmi_ddi_trans;
}

static const struct tgl_dkl_phy_ddi_buf_trans *
1390 1391
tgl_get_dkl_buf_trans_dp(struct intel_encoder *encoder,
			 const struct intel_crtc_state *crtc_state,
1392
			 int *n_entries)
1393
{
1394
	if (crtc_state->port_clock > 270000) {
1395 1396
		*n_entries = ARRAY_SIZE(tgl_dkl_phy_dp_ddi_trans_hbr2);
		return tgl_dkl_phy_dp_ddi_trans_hbr2;
1397 1398 1399
	} else {
		*n_entries = ARRAY_SIZE(tgl_dkl_phy_dp_ddi_trans);
		return tgl_dkl_phy_dp_ddi_trans;
1400
	}
1401
}
1402

1403
static const struct tgl_dkl_phy_ddi_buf_trans *
1404 1405
tgl_get_dkl_buf_trans(struct intel_encoder *encoder,
		      const struct intel_crtc_state *crtc_state,
1406 1407
		      int *n_entries)
{
1408 1409
	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI))
		return tgl_get_dkl_buf_trans_hdmi(encoder, crtc_state, n_entries);
1410
	else
1411
		return tgl_get_dkl_buf_trans_dp(encoder, crtc_state, n_entries);
1412 1413
}

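/*
 * Pick the DDI buffer translation level to use for HDMI: the VBT-provided
 * level shift if one is set, otherwise a per-platform default entry.
 */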
static int intel_ddi_hdmi_level(struct intel_encoder *encoder,
				const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	int n_entries, level, default_entry;
	enum phy phy = intel_port_to_phy(dev_priv, encoder->port);

	if (INTEL_GEN(dev_priv) >= 12) {
		if (intel_phy_is_combo(dev_priv, phy))
			tgl_get_combo_buf_trans_hdmi(encoder, crtc_state, &n_entries);
		else
			tgl_get_dkl_buf_trans_hdmi(encoder, crtc_state, &n_entries);
		default_entry = n_entries - 1;
	} else if (INTEL_GEN(dev_priv) == 11) {
		if (intel_phy_is_combo(dev_priv, phy))
			icl_get_combo_buf_trans_hdmi(encoder, crtc_state, &n_entries);
		else
			icl_get_mg_buf_trans_hdmi(encoder, crtc_state, &n_entries);
		default_entry = n_entries - 1;
	} else if (IS_CANNONLAKE(dev_priv)) {
		cnl_get_buf_trans_hdmi(encoder, &n_entries);
		default_entry = n_entries - 1;
	} else if (IS_GEN9_LP(dev_priv)) {
		bxt_get_buf_trans_hdmi(encoder, &n_entries);
		default_entry = n_entries - 1;
	} else if (IS_GEN9_BC(dev_priv)) {
		intel_ddi_get_buf_trans_hdmi(encoder, &n_entries);
		default_entry = 8;
	} else if (IS_BROADWELL(dev_priv)) {
		intel_ddi_get_buf_trans_hdmi(encoder, &n_entries);
		default_entry = 7;
	} else if (IS_HASWELL(dev_priv)) {
		intel_ddi_get_buf_trans_hdmi(encoder, &n_entries);
		default_entry = 6;
	} else {
		drm_WARN(&dev_priv->drm, 1, "ddi translation table missing\n");
		return 0;
	}

	if (drm_WARN_ON_ONCE(&dev_priv->drm, n_entries == 0))
		return 0;

	level = intel_bios_hdmi_level_shift(encoder);
	if (level < 0)
		level = default_entry;

	if (drm_WARN_ON_ONCE(&dev_priv->drm, level >= n_entries))
		level = n_entries - 1;

	return level;
}

/*
 * Starting with Haswell, DDI port buffers must be programmed with correct
 * values in advance. This function programs the correct values for
 * DP/eDP/FDI use cases.
 */
static void intel_prepare_dp_ddi_buffers(struct intel_encoder *encoder,
					 const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u32 iboost_bit = 0;
	int i, n_entries;
	enum port port = encoder->port;
	const struct ddi_buf_trans *ddi_translations;

	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_ANALOG))
		ddi_translations = intel_ddi_get_buf_trans_fdi(dev_priv,
							       &n_entries);
	else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP))
		ddi_translations = intel_ddi_get_buf_trans_edp(encoder,
							       &n_entries);
	else
		ddi_translations = intel_ddi_get_buf_trans_dp(encoder,
							      &n_entries);

	/* If we're boosting the current, set bit 31 of trans1 */
	if (IS_GEN9_BC(dev_priv) && intel_bios_dp_boost_level(encoder))
		iboost_bit = DDI_BUF_BALANCE_LEG_ENABLE;

	for (i = 0; i < n_entries; i++) {
		intel_de_write(dev_priv, DDI_BUF_TRANS_LO(port, i),
			       ddi_translations[i].trans1 | iboost_bit);
		intel_de_write(dev_priv, DDI_BUF_TRANS_HI(port, i),
			       ddi_translations[i].trans2);
	}
}

/*
 * Starting with Haswell, DDI port buffers must be programmed with correct
 * values in advance. This function programs the correct values for
 * HDMI/DVI use cases.
 */
static void intel_prepare_hdmi_ddi_buffers(struct intel_encoder *encoder,
					   int level)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u32 iboost_bit = 0;
	int n_entries;
	enum port port = encoder->port;
	const struct ddi_buf_trans *ddi_translations;

	ddi_translations = intel_ddi_get_buf_trans_hdmi(encoder, &n_entries);

	if (drm_WARN_ON_ONCE(&dev_priv->drm, !ddi_translations))
		return;
	if (drm_WARN_ON_ONCE(&dev_priv->drm, level >= n_entries))
		level = n_entries - 1;

	/* If we're boosting the current, set bit 31 of trans1 */
	if (IS_GEN9_BC(dev_priv) && intel_bios_hdmi_boost_level(encoder))
		iboost_bit = DDI_BUF_BALANCE_LEG_ENABLE;

	/* Entry 9 is for HDMI: */
	intel_de_write(dev_priv, DDI_BUF_TRANS_LO(port, 9),
		       ddi_translations[level].trans1 | iboost_bit);
	intel_de_write(dev_priv, DDI_BUF_TRANS_HI(port, 9),
		       ddi_translations[level].trans2);
}

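/*
 * Wait for the DDI buffer to report idle. Broxton has no idle status bit,
 * so a fixed 16 us delay is used there instead of polling.
 */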
static void intel_wait_ddi_buf_idle(struct drm_i915_private *dev_priv,
				    enum port port)
{
	if (IS_BROXTON(dev_priv)) {
		udelay(16);
		return;
	}

	if (wait_for_us((intel_de_read(dev_priv, DDI_BUF_CTL(port)) &
			 DDI_BUF_IS_IDLE), 8))
		drm_err(&dev_priv->drm, "Timeout waiting for DDI BUF %c to get idle\n",
			port_name(port));
}

static void intel_wait_ddi_buf_active(struct drm_i915_private *dev_priv,
				      enum port port)
{
	/* Wait > 518 usecs for DDI_BUF_CTL to be non idle */
	if (INTEL_GEN(dev_priv) < 10 && !IS_GEMINILAKE(dev_priv)) {
		usleep_range(518, 1000);
		return;
	}

	if (wait_for_us(!(intel_de_read(dev_priv, DDI_BUF_CTL(port)) &
			  DDI_BUF_IS_IDLE), 500))
		drm_err(&dev_priv->drm, "Timeout waiting for DDI BUF %c to get active\n",
			port_name(port));
}

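/* Map a HSW/BDW shared DPLL to the corresponding PORT_CLK_SEL value. */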
static u32 hsw_pll_to_ddi_pll_sel(const struct intel_shared_dpll *pll)
{
	switch (pll->info->id) {
	case DPLL_ID_WRPLL1:
		return PORT_CLK_SEL_WRPLL1;
	case DPLL_ID_WRPLL2:
		return PORT_CLK_SEL_WRPLL2;
	case DPLL_ID_SPLL:
		return PORT_CLK_SEL_SPLL;
	case DPLL_ID_LCPLL_810:
		return PORT_CLK_SEL_LCPLL_810;
	case DPLL_ID_LCPLL_1350:
		return PORT_CLK_SEL_LCPLL_1350;
	case DPLL_ID_LCPLL_2700:
		return PORT_CLK_SEL_LCPLL_2700;
	default:
		MISSING_CASE(pll->info->id);
		return PORT_CLK_SEL_NONE;
	}
}

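/*
 * Map an ICL+ PLL to its DDI_CLK_SEL value: the TBT PLL selection depends on
 * the port clock, while all MG/DKL PLLs map to DDI_CLK_SEL_MG.
 */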
static u32 icl_pll_to_ddi_clk_sel(struct intel_encoder *encoder,
				  const struct intel_crtc_state *crtc_state)
{
	const struct intel_shared_dpll *pll = crtc_state->shared_dpll;
	int clock = crtc_state->port_clock;
	const enum intel_dpll_id id = pll->info->id;

	switch (id) {
	default:
		/*
		 * DPLL_ID_ICL_DPLL0 and DPLL_ID_ICL_DPLL1 should not be used
		 * here, so do warn if this gets passed in
		 */
		MISSING_CASE(id);
		return DDI_CLK_SEL_NONE;
	case DPLL_ID_ICL_TBTPLL:
		switch (clock) {
		case 162000:
			return DDI_CLK_SEL_TBT_162;
		case 270000:
			return DDI_CLK_SEL_TBT_270;
		case 540000:
			return DDI_CLK_SEL_TBT_540;
		case 810000:
			return DDI_CLK_SEL_TBT_810;
		default:
			MISSING_CASE(clock);
			return DDI_CLK_SEL_NONE;
		}
	case DPLL_ID_ICL_MGPLL1:
	case DPLL_ID_ICL_MGPLL2:
	case DPLL_ID_ICL_MGPLL3:
	case DPLL_ID_ICL_MGPLL4:
	case DPLL_ID_TGL_MGPLL5:
	case DPLL_ID_TGL_MGPLL6:
		return DDI_CLK_SEL_MG;
	}
}

/* Starting with Haswell, different DDI ports can work in FDI mode for
 * connection to the PCH-located connectors. For this, it is necessary to train
 * both the DDI port and PCH receiver for the desired DDI buffer settings.
 *
 * The recommended port to work in FDI mode is DDI E, which we use here. Also,
 * please note that when FDI mode is active on DDI E, it shares 2 lines with
 * DDI A (which is used for eDP)
 */

void hsw_fdi_link_train(struct intel_encoder *encoder,
			const struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	u32 temp, i, rx_ctl_val, ddi_pll_sel;

	intel_prepare_dp_ddi_buffers(encoder, crtc_state);

	/* Set the FDI_RX_MISC pwrdn lanes and the 2 workarounds listed at the
	 * mode set "sequence for CRT port" document:
	 * - TP1 to TP2 time with the default value
	 * - FDI delay to 90h
	 *
	 * WaFDIAutoLinkSetTimingOverrride:hsw
	 */
	intel_de_write(dev_priv, FDI_RX_MISC(PIPE_A),
		       FDI_RX_PWRDN_LANE1_VAL(2) | FDI_RX_PWRDN_LANE0_VAL(2) | FDI_RX_TP1_TO_TP2_48 | FDI_RX_FDI_DELAY_90);

	/* Enable the PCH Receiver FDI PLL */
	rx_ctl_val = dev_priv->fdi_rx_config | FDI_RX_ENHANCE_FRAME_ENABLE |
		     FDI_RX_PLL_ENABLE |
		     FDI_DP_PORT_WIDTH(crtc_state->fdi_lanes);
	intel_de_write(dev_priv, FDI_RX_CTL(PIPE_A), rx_ctl_val);
	intel_de_posting_read(dev_priv, FDI_RX_CTL(PIPE_A));
	udelay(220);

	/* Switch from Rawclk to PCDclk */
	rx_ctl_val |= FDI_PCDCLK;
	intel_de_write(dev_priv, FDI_RX_CTL(PIPE_A), rx_ctl_val);

	/* Configure Port Clock Select */
	ddi_pll_sel = hsw_pll_to_ddi_pll_sel(crtc_state->shared_dpll);
	intel_de_write(dev_priv, PORT_CLK_SEL(PORT_E), ddi_pll_sel);
	drm_WARN_ON(&dev_priv->drm, ddi_pll_sel != PORT_CLK_SEL_SPLL);

	/* Start the training iterating through available voltages and emphasis,
	 * testing each value twice. */
	for (i = 0; i < ARRAY_SIZE(hsw_ddi_translations_fdi) * 2; i++) {
		/* Configure DP_TP_CTL with auto-training */
		intel_de_write(dev_priv, DP_TP_CTL(PORT_E),
			       DP_TP_CTL_FDI_AUTOTRAIN |
			       DP_TP_CTL_ENHANCED_FRAME_ENABLE |
			       DP_TP_CTL_LINK_TRAIN_PAT1 |
			       DP_TP_CTL_ENABLE);

		/* Configure and enable DDI_BUF_CTL for DDI E with next voltage.
		 * DDI E does not support port reversal, the functionality is
		 * achieved on the PCH side in FDI_RX_CTL, so no need to set the
		 * port reversal bit */
		intel_de_write(dev_priv, DDI_BUF_CTL(PORT_E),
			       DDI_BUF_CTL_ENABLE | ((crtc_state->fdi_lanes - 1) << 1) | DDI_BUF_TRANS_SELECT(i / 2));
		intel_de_posting_read(dev_priv, DDI_BUF_CTL(PORT_E));

		udelay(600);

		/* Program PCH FDI Receiver TU */
		intel_de_write(dev_priv, FDI_RX_TUSIZE1(PIPE_A), TU_SIZE(64));

		/* Enable PCH FDI Receiver with auto-training */
		rx_ctl_val |= FDI_RX_ENABLE | FDI_LINK_TRAIN_AUTO;
		intel_de_write(dev_priv, FDI_RX_CTL(PIPE_A), rx_ctl_val);
		intel_de_posting_read(dev_priv, FDI_RX_CTL(PIPE_A));

		/* Wait for FDI receiver lane calibration */
		udelay(30);

		/* Unset FDI_RX_MISC pwrdn lanes */
		temp = intel_de_read(dev_priv, FDI_RX_MISC(PIPE_A));
		temp &= ~(FDI_RX_PWRDN_LANE1_MASK | FDI_RX_PWRDN_LANE0_MASK);
		intel_de_write(dev_priv, FDI_RX_MISC(PIPE_A), temp);
		intel_de_posting_read(dev_priv, FDI_RX_MISC(PIPE_A));

		/* Wait for FDI auto training time */
		udelay(5);

		temp = intel_de_read(dev_priv, DP_TP_STATUS(PORT_E));
		if (temp & DP_TP_STATUS_AUTOTRAIN_DONE) {
			drm_dbg_kms(&dev_priv->drm,
				    "FDI link training done on step %d\n", i);
			break;
		}

		/*
		 * Leave things enabled even if we failed to train FDI.
		 * Results in less fireworks from the state checker.
		 */
		if (i == ARRAY_SIZE(hsw_ddi_translations_fdi) * 2 - 1) {
			drm_err(&dev_priv->drm, "FDI link training failed!\n");
			break;
		}

		rx_ctl_val &= ~FDI_RX_ENABLE;
		intel_de_write(dev_priv, FDI_RX_CTL(PIPE_A), rx_ctl_val);
		intel_de_posting_read(dev_priv, FDI_RX_CTL(PIPE_A));

		temp = intel_de_read(dev_priv, DDI_BUF_CTL(PORT_E));
		temp &= ~DDI_BUF_CTL_ENABLE;
		intel_de_write(dev_priv, DDI_BUF_CTL(PORT_E), temp);
		intel_de_posting_read(dev_priv, DDI_BUF_CTL(PORT_E));

		/* Disable DP_TP_CTL and FDI_RX_CTL and retry */
		temp = intel_de_read(dev_priv, DP_TP_CTL(PORT_E));
		temp &= ~(DP_TP_CTL_ENABLE | DP_TP_CTL_LINK_TRAIN_MASK);
		temp |= DP_TP_CTL_LINK_TRAIN_PAT1;
		intel_de_write(dev_priv, DP_TP_CTL(PORT_E), temp);
		intel_de_posting_read(dev_priv, DP_TP_CTL(PORT_E));

		intel_wait_ddi_buf_idle(dev_priv, PORT_E);

		/* Reset FDI_RX_MISC pwrdn lanes */
		temp = intel_de_read(dev_priv, FDI_RX_MISC(PIPE_A));
		temp &= ~(FDI_RX_PWRDN_LANE1_MASK | FDI_RX_PWRDN_LANE0_MASK);
		temp |= FDI_RX_PWRDN_LANE1_VAL(2) | FDI_RX_PWRDN_LANE0_VAL(2);
		intel_de_write(dev_priv, FDI_RX_MISC(PIPE_A), temp);
		intel_de_posting_read(dev_priv, FDI_RX_MISC(PIPE_A));
	}

	/* Enable normal pixel sending for FDI */
	intel_de_write(dev_priv, DP_TP_CTL(PORT_E),
		       DP_TP_CTL_FDI_AUTOTRAIN |
		       DP_TP_CTL_LINK_TRAIN_NORMAL |
		       DP_TP_CTL_ENHANCED_FRAME_ENABLE |
		       DP_TP_CTL_ENABLE);
}

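/*
 * Seed intel_dp->DP with the DDI_BUF_CTL value used when enabling the port:
 * the saved port bits plus enable, translation entry 0 and the lane count.
 */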
static void intel_ddi_init_dp_buf_reg(struct intel_encoder *encoder,
				      const struct intel_crtc_state *crtc_state)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	struct intel_digital_port *dig_port = enc_to_dig_port(encoder);

	intel_dp->DP = dig_port->saved_port_bits |
		DDI_BUF_CTL_ENABLE | DDI_BUF_TRANS_SELECT(0);
	intel_dp->DP |= DDI_PORT_WIDTH(crtc_state->lane_count);
}

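/* Read back the TBT PLL port clock (in kHz) from the DDI_CLK_SEL selection. */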
static int icl_calc_tbt_pll_link(struct drm_i915_private *dev_priv,
				 enum port port)
{
	u32 val = intel_de_read(dev_priv, DDI_CLK_SEL(port)) & DDI_CLK_SEL_MASK;

	switch (val) {
	case DDI_CLK_SEL_NONE:
		return 0;
	case DDI_CLK_SEL_TBT_162:
		return 162000;
	case DDI_CLK_SEL_TBT_270:
		return 270000;
	case DDI_CLK_SEL_TBT_540:
		return 540000;
	case DDI_CLK_SEL_TBT_810:
		return 810000;
	default:
		MISSING_CASE(val);
		return 0;
	}
}

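/*
 * Derive the pixel (dot) clock from the port clock, accounting for FDI/DP
 * M/N ratios, deep color HDMI, YCbCr 4:2:0 output and the pixel multiplier.
 */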
static void ddi_dotclock_get(struct intel_crtc_state *pipe_config)
{
	int dotclock;

	if (pipe_config->has_pch_encoder)
		dotclock = intel_dotclock_calculate(pipe_config->port_clock,
						    &pipe_config->fdi_m_n);
	else if (intel_crtc_has_dp_encoder(pipe_config))
		dotclock = intel_dotclock_calculate(pipe_config->port_clock,
						    &pipe_config->dp_m_n);
	else if (pipe_config->has_hdmi_sink && pipe_config->pipe_bpp > 24)
		dotclock = pipe_config->port_clock * 24 / pipe_config->pipe_bpp;
	else
		dotclock = pipe_config->port_clock;

	if (pipe_config->output_format == INTEL_OUTPUT_FORMAT_YCBCR420 &&
	    !intel_crtc_has_dp_encoder(pipe_config))
		dotclock *= 2;

	if (pipe_config->pixel_multiplier)
		dotclock /= pipe_config->pixel_multiplier;

	pipe_config->hw.adjusted_mode.crtc_clock = dotclock;
}

static void intel_ddi_clock_get(struct intel_encoder *encoder,
				struct intel_crtc_state *pipe_config)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	enum phy phy = intel_port_to_phy(dev_priv, encoder->port);

	if (intel_phy_is_tc(dev_priv, phy) &&
	    intel_get_shared_dpll_id(dev_priv, pipe_config->shared_dpll) ==
	    DPLL_ID_ICL_TBTPLL)
		pipe_config->port_clock = icl_calc_tbt_pll_link(dev_priv,
								encoder->port);
	else
		pipe_config->port_clock =
			intel_dpll_get_freq(dev_priv, pipe_config->shared_dpll,
					    &pipe_config->dpll_hw_state);

	ddi_dotclock_get(pipe_config);
}

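/*
 * Program the DP Main Stream Attribute MISC fields (bpc, colorimetry,
 * VSC SDP indication) for the transcoder.
 */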
void intel_ddi_set_dp_msa(const struct intel_crtc_state *crtc_state,
			  const struct drm_connector_state *conn_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
	u32 temp;

	if (!intel_crtc_has_dp_encoder(crtc_state))
		return;

	drm_WARN_ON(&dev_priv->drm, transcoder_is_dsi(cpu_transcoder));

	temp = DP_MSA_MISC_SYNC_CLOCK;

	switch (crtc_state->pipe_bpp) {
	case 18:
		temp |= DP_MSA_MISC_6_BPC;
		break;
	case 24:
		temp |= DP_MSA_MISC_8_BPC;
		break;
	case 30:
		temp |= DP_MSA_MISC_10_BPC;
		break;
	case 36:
		temp |= DP_MSA_MISC_12_BPC;
		break;
	default:
		MISSING_CASE(crtc_state->pipe_bpp);
		break;
	}

	/* nonsense combination */
	drm_WARN_ON(&dev_priv->drm, crtc_state->limited_color_range &&
		    crtc_state->output_format != INTEL_OUTPUT_FORMAT_RGB);

	if (crtc_state->limited_color_range)
		temp |= DP_MSA_MISC_COLOR_CEA_RGB;

	/*
	 * As per DP 1.2 spec section 2.3.4.3 while sending
	 * YCBCR 444 signals we should program MSA MISC1/0 fields with
	 * colorspace information.
	 */
	if (crtc_state->output_format == INTEL_OUTPUT_FORMAT_YCBCR444)
		temp |= DP_MSA_MISC_COLOR_YCBCR_444_BT709;

	/*
	 * As per DP 1.4a spec section 2.2.4.3 [MSA Field for Indication
	 * of Color Encoding Format and Content Color Gamut] while sending
	 * YCBCR 420, HDR BT.2020 signals we should program MSA MISC1 fields
	 * which indicate VSC SDP for the Pixel Encoding/Colorimetry Format.
	 */
	if (intel_dp_needs_vsc_sdp(crtc_state, conn_state))
		temp |= DP_MSA_MISC_COLOR_VSC_SDP;

	intel_de_write(dev_priv, TRANS_MSA_MISC(cpu_transcoder), temp);
}

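/*
 * Encode the port sync master transcoder for the MASTER_SELECT field:
 * 0 for the eDP transcoder, transcoder index + 1 otherwise.
 */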
static u32 bdw_trans_port_sync_master_select(enum transcoder master_transcoder)
{
	if (master_transcoder == TRANSCODER_EDP)
		return 0;
	else
		return master_transcoder + 1;
}

/*
 * Returns the TRANS_DDI_FUNC_CTL value based on CRTC state.
 *
 * Only intended to be used by intel_ddi_enable_transcoder_func() and
 * intel_ddi_config_transcoder_func().
 */
static u32
intel_ddi_transcoder_func_reg_val_get(struct intel_encoder *encoder,
				      const struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum pipe pipe = crtc->pipe;
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
	enum port port = encoder->port;
	u32 temp;

	/* Enable TRANS_DDI_FUNC_CTL for the pipe to work in HDMI mode */
	temp = TRANS_DDI_FUNC_ENABLE;
	if (INTEL_GEN(dev_priv) >= 12)
		temp |= TGL_TRANS_DDI_SELECT_PORT(port);
	else
		temp |= TRANS_DDI_SELECT_PORT(port);

	switch (crtc_state->pipe_bpp) {
	case 18:
		temp |= TRANS_DDI_BPC_6;
		break;
	case 24:
		temp |= TRANS_DDI_BPC_8;
		break;
	case 30:
		temp |= TRANS_DDI_BPC_10;
		break;
	case 36:
		temp |= TRANS_DDI_BPC_12;
		break;
	default:
		BUG();
	}

	if (crtc_state->hw.adjusted_mode.flags & DRM_MODE_FLAG_PVSYNC)
		temp |= TRANS_DDI_PVSYNC;
	if (crtc_state->hw.adjusted_mode.flags & DRM_MODE_FLAG_PHSYNC)
		temp |= TRANS_DDI_PHSYNC;

	if (cpu_transcoder == TRANSCODER_EDP) {
		switch (pipe) {
		case PIPE_A:
			/* On Haswell, can only use the always-on power well for
			 * eDP when not using the panel fitter, and when not
			 * using motion blur mitigation (which we don't
			 * support). */
			if (crtc_state->pch_pfit.force_thru)
				temp |= TRANS_DDI_EDP_INPUT_A_ONOFF;
			else
				temp |= TRANS_DDI_EDP_INPUT_A_ON;
			break;
		case PIPE_B:
			temp |= TRANS_DDI_EDP_INPUT_B_ONOFF;
			break;
		case PIPE_C:
			temp |= TRANS_DDI_EDP_INPUT_C_ONOFF;
			break;
		default:
			BUG();
			break;
		}
	}

	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) {
		if (crtc_state->has_hdmi_sink)
			temp |= TRANS_DDI_MODE_SELECT_HDMI;
		else
			temp |= TRANS_DDI_MODE_SELECT_DVI;

		if (crtc_state->hdmi_scrambling)
			temp |= TRANS_DDI_HDMI_SCRAMBLING;
		if (crtc_state->hdmi_high_tmds_clock_ratio)
			temp |= TRANS_DDI_HIGH_TMDS_CHAR_RATE;
	} else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_ANALOG)) {
		temp |= TRANS_DDI_MODE_SELECT_FDI;
		temp |= (crtc_state->fdi_lanes - 1) << 1;
	} else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP_MST)) {
		temp |= TRANS_DDI_MODE_SELECT_DP_MST;
		temp |= DDI_PORT_WIDTH(crtc_state->lane_count);

		if (INTEL_GEN(dev_priv) >= 12) {
			enum transcoder master;

			master = crtc_state->mst_master_transcoder;
			drm_WARN_ON(&dev_priv->drm,
				    master == INVALID_TRANSCODER);
			temp |= TRANS_DDI_MST_TRANSPORT_SELECT(master);
		}
	} else {
		temp |= TRANS_DDI_MODE_SELECT_DP_SST;
		temp |= DDI_PORT_WIDTH(crtc_state->lane_count);
	}

	if (IS_GEN_RANGE(dev_priv, 8, 10) &&
	    crtc_state->master_transcoder != INVALID_TRANSCODER) {
		u8 master_select =
			bdw_trans_port_sync_master_select(crtc_state->master_transcoder);

		temp |= TRANS_DDI_PORT_SYNC_ENABLE |
			TRANS_DDI_PORT_SYNC_MASTER_SELECT(master_select);
	}

	return temp;
}

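/*
 * Program TRANS_DDI_FUNC_CTL (and the TRANS_DDI_FUNC_CTL2 port sync fields
 * on gen11+) and enable the transcoder DDI function.
 */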
void intel_ddi_enable_transcoder_func(struct intel_encoder *encoder,
				      const struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;

	if (INTEL_GEN(dev_priv) >= 11) {
		enum transcoder master_transcoder = crtc_state->master_transcoder;
		u32 ctl2 = 0;

		if (master_transcoder != INVALID_TRANSCODER) {
			u8 master_select =
				bdw_trans_port_sync_master_select(master_transcoder);

			ctl2 |= PORT_SYNC_MODE_ENABLE |
				PORT_SYNC_MODE_MASTER_SELECT(master_select);
		}

		intel_de_write(dev_priv,
			       TRANS_DDI_FUNC_CTL2(cpu_transcoder), ctl2);
	}

	intel_de_write(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder),
		       intel_ddi_transcoder_func_reg_val_get(encoder,
							     crtc_state));
}

/*
 * Same as intel_ddi_enable_transcoder_func(), but it does not set the enable
 * bit.
 */
static void
intel_ddi_config_transcoder_func(struct intel_encoder *encoder,
				 const struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
	u32 ctl;

	ctl = intel_ddi_transcoder_func_reg_val_get(encoder, crtc_state);
	ctl &= ~TRANS_DDI_FUNC_ENABLE;
	intel_de_write(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder), ctl);
}

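/*
 * Disable the transcoder DDI function and clear the port/mode selection,
 * applying the increased-disable-time quirk for HDMI where needed.
 */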
void intel_ddi_disable_transcoder_func(const struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
	u32 ctl;

	if (INTEL_GEN(dev_priv) >= 11)
		intel_de_write(dev_priv,
			       TRANS_DDI_FUNC_CTL2(cpu_transcoder), 0);

	ctl = intel_de_read(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder));

	drm_WARN_ON(crtc->base.dev, ctl & TRANS_DDI_HDCP_SIGNALLING);

	ctl &= ~TRANS_DDI_FUNC_ENABLE;

	if (IS_GEN_RANGE(dev_priv, 8, 10))
		ctl &= ~(TRANS_DDI_PORT_SYNC_ENABLE |
			 TRANS_DDI_PORT_SYNC_MASTER_SELECT_MASK);

	if (INTEL_GEN(dev_priv) >= 12) {
		if (!intel_dp_mst_is_master_trans(crtc_state)) {
			ctl &= ~(TGL_TRANS_DDI_PORT_MASK |
				 TRANS_DDI_MODE_SELECT_MASK);
		}
	} else {
		ctl &= ~(TRANS_DDI_PORT_MASK | TRANS_DDI_MODE_SELECT_MASK);
	}

	intel_de_write(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder), ctl);

	if (dev_priv->quirks & QUIRK_INCREASE_DDI_DISABLED_TIME &&
	    intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) {
		drm_dbg_kms(&dev_priv->drm,
			    "Quirk Increase DDI disabled time\n");
		/* Quirk time at 100ms for reliable operation */
		msleep(100);
	}
}

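/*
 * Toggle the TRANS_DDI_HDCP_SIGNALLING bit for the given transcoder, holding
 * a display power reference while touching the register.
 */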
int intel_ddi_toggle_hdcp_signalling(struct intel_encoder *intel_encoder,
				     enum transcoder cpu_transcoder,
				     bool enable)
{
	struct drm_device *dev = intel_encoder->base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	intel_wakeref_t wakeref;
	int ret = 0;
	u32 tmp;

	wakeref = intel_display_power_get_if_enabled(dev_priv,
						     intel_encoder->power_domain);
	if (drm_WARN_ON(dev, !wakeref))
		return -ENXIO;

	tmp = intel_de_read(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder));
	if (enable)
		tmp |= TRANS_DDI_HDCP_SIGNALLING;
	else
		tmp &= ~TRANS_DDI_HDCP_SIGNALLING;
	intel_de_write(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder), tmp);
	intel_display_power_put(dev_priv, intel_encoder->power_domain, wakeref);
	return ret;
}

bool intel_ddi_connector_get_hw_state(struct intel_connector *intel_connector)
{
	struct drm_device *dev = intel_connector->base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	struct intel_encoder *encoder = intel_attached_encoder(intel_connector);
	int type = intel_connector->base.connector_type;
	enum port port = encoder->port;
	enum transcoder cpu_transcoder;
	intel_wakeref_t wakeref;
	enum pipe pipe = 0;
	u32 tmp;
	bool ret;

	wakeref = intel_display_power_get_if_enabled(dev_priv,
						     encoder->power_domain);
	if (!wakeref)
		return false;

	if (!encoder->get_hw_state(encoder, &pipe)) {
		ret = false;
		goto out;
	}

	if (HAS_TRANSCODER(dev_priv, TRANSCODER_EDP) && port == PORT_A)
		cpu_transcoder = TRANSCODER_EDP;
	else
		cpu_transcoder = (enum transcoder) pipe;

	tmp = intel_de_read(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder));

	switch (tmp & TRANS_DDI_MODE_SELECT_MASK) {
	case TRANS_DDI_MODE_SELECT_HDMI:
	case TRANS_DDI_MODE_SELECT_DVI:
		ret = type == DRM_MODE_CONNECTOR_HDMIA;
		break;

	case TRANS_DDI_MODE_SELECT_DP_SST:
		ret = type == DRM_MODE_CONNECTOR_eDP ||
		      type == DRM_MODE_CONNECTOR_DisplayPort;
		break;

	case TRANS_DDI_MODE_SELECT_DP_MST:
		/* if the transcoder is in MST state then
		 * connector isn't connected */
		ret = false;
		break;

	case TRANS_DDI_MODE_SELECT_FDI:
		ret = type == DRM_MODE_CONNECTOR_VGA;
		break;

	default:
		ret = false;
		break;
	}

out:
	intel_display_power_put(dev_priv, encoder->power_domain, wakeref);

	return ret;
}

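/*
 * Read out which pipes (if any) are currently driven by this encoder and
 * whether the DDI is in DP MST mode, based on DDI_BUF_CTL and the
 * per-transcoder TRANS_DDI_FUNC_CTL port selection.
 */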
static void intel_ddi_get_encoder_pipes(struct intel_encoder *encoder,
					u8 *pipe_mask, bool *is_dp_mst)
{
	struct drm_device *dev = encoder->base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	enum port port = encoder->port;
	intel_wakeref_t wakeref;
	enum pipe p;
	u32 tmp;
	u8 mst_pipe_mask;

	*pipe_mask = 0;
	*is_dp_mst = false;

	wakeref = intel_display_power_get_if_enabled(dev_priv,
						     encoder->power_domain);
	if (!wakeref)
		return;

	tmp = intel_de_read(dev_priv, DDI_BUF_CTL(port));
	if (!(tmp & DDI_BUF_CTL_ENABLE))
		goto out;

	if (HAS_TRANSCODER(dev_priv, TRANSCODER_EDP) && port == PORT_A) {
		tmp = intel_de_read(dev_priv,
				    TRANS_DDI_FUNC_CTL(TRANSCODER_EDP));

		switch (tmp & TRANS_DDI_EDP_INPUT_MASK) {
		default:
			MISSING_CASE(tmp & TRANS_DDI_EDP_INPUT_MASK);
			fallthrough;
		case TRANS_DDI_EDP_INPUT_A_ON:
		case TRANS_DDI_EDP_INPUT_A_ONOFF:
			*pipe_mask = BIT(PIPE_A);
			break;
		case TRANS_DDI_EDP_INPUT_B_ONOFF:
			*pipe_mask = BIT(PIPE_B);
			break;
		case TRANS_DDI_EDP_INPUT_C_ONOFF:
			*pipe_mask = BIT(PIPE_C);
			break;
		}

		goto out;
	}

	mst_pipe_mask = 0;
	for_each_pipe(dev_priv, p) {
		enum transcoder cpu_transcoder = (enum transcoder)p;
		unsigned int port_mask, ddi_select;
		intel_wakeref_t trans_wakeref;

		trans_wakeref = intel_display_power_get_if_enabled(dev_priv,
								   POWER_DOMAIN_TRANSCODER(cpu_transcoder));
		if (!trans_wakeref)
			continue;

		if (INTEL_GEN(dev_priv) >= 12) {
			port_mask = TGL_TRANS_DDI_PORT_MASK;
			ddi_select = TGL_TRANS_DDI_SELECT_PORT(port);
		} else {
			port_mask = TRANS_DDI_PORT_MASK;
			ddi_select = TRANS_DDI_SELECT_PORT(port);
		}

		tmp = intel_de_read(dev_priv,
				    TRANS_DDI_FUNC_CTL(cpu_transcoder));
		intel_display_power_put(dev_priv, POWER_DOMAIN_TRANSCODER(cpu_transcoder),
					trans_wakeref);

		if ((tmp & port_mask) != ddi_select)
			continue;

		if ((tmp & TRANS_DDI_MODE_SELECT_MASK) ==
		    TRANS_DDI_MODE_SELECT_DP_MST)
			mst_pipe_mask |= BIT(p);

		*pipe_mask |= BIT(p);
	}

	if (!*pipe_mask)
		drm_dbg_kms(&dev_priv->drm,
			    "No pipe for [ENCODER:%d:%s] found\n",
			    encoder->base.base.id, encoder->base.name);

	if (!mst_pipe_mask && hweight8(*pipe_mask) > 1) {
		drm_dbg_kms(&dev_priv->drm,
			    "Multiple pipes for [ENCODER:%d:%s] (pipe_mask %02x)\n",
			    encoder->base.base.id, encoder->base.name,
			    *pipe_mask);
		*pipe_mask = BIT(ffs(*pipe_mask) - 1);
	}

	if (mst_pipe_mask && mst_pipe_mask != *pipe_mask)
		drm_dbg_kms(&dev_priv->drm,
			    "Conflicting MST and non-MST state for [ENCODER:%d:%s] (pipe_mask %02x mst_pipe_mask %02x)\n",
			    encoder->base.base.id, encoder->base.name,
			    *pipe_mask, mst_pipe_mask);
	else
		*is_dp_mst = mst_pipe_mask;

out:
	if (*pipe_mask && IS_GEN9_LP(dev_priv)) {
		tmp = intel_de_read(dev_priv, BXT_PHY_CTL(port));
		if ((tmp & (BXT_PHY_CMNLANE_POWERDOWN_ACK |
			    BXT_PHY_LANE_POWERDOWN_ACK |
			    BXT_PHY_LANE_ENABLED)) != BXT_PHY_LANE_ENABLED)
			drm_err(&dev_priv->drm,
				"[ENCODER:%d:%s] enabled but PHY powered down? (PHY_CTL %08x)\n",
				encoder->base.base.id, encoder->base.name, tmp);
	}

	intel_display_power_put(dev_priv, encoder->power_domain, wakeref);
}

bool intel_ddi_get_hw_state(struct intel_encoder *encoder,
			    enum pipe *pipe)
{
	u8 pipe_mask;
	bool is_mst;

	intel_ddi_get_encoder_pipes(encoder, &pipe_mask, &is_mst);

	if (is_mst || !pipe_mask)
		return false;

	*pipe = ffs(pipe_mask) - 1;

	return true;
}

static enum intel_display_power_domain
intel_ddi_main_link_aux_domain(struct intel_digital_port *dig_port)
{
	/* CNL+ HW requires corresponding AUX IOs to be powered up for PSR with
	 * DC states enabled at the same time, while for driver initiated AUX
	 * transfers we need the same AUX IOs to be powered but with DC states
	 * disabled. Accordingly use the AUX power domain here which leaves DC
	 * states enabled.
	 * However, for non-A AUX ports the corresponding non-EDP transcoders
	 * would have already enabled power well 2 and DC_OFF. This means we can
	 * acquire a wider POWER_DOMAIN_AUX_{B,C,D,F} reference instead of a
	 * specific AUX_IO reference without powering up any extra wells.
	 * Note that PSR is enabled only on Port A even though this function
	 * returns the correct domain for other ports too.
	 */
	return dig_port->aux_ch == AUX_CH_A ? POWER_DOMAIN_AUX_IO_A :
					      intel_aux_power_domain(dig_port);
}

static void intel_ddi_get_power_domains(struct intel_encoder *encoder,
					struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_digital_port *dig_port;
	enum phy phy = intel_port_to_phy(dev_priv, encoder->port);

	/*
	 * TODO: Add support for MST encoders. Atm, the following should never
	 * happen since fake-MST encoders don't set their get_power_domains()
	 * hook.
	 */
	if (drm_WARN_ON(&dev_priv->drm,
			intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP_MST)))
		return;

	dig_port = enc_to_dig_port(encoder);

	if (!intel_phy_is_tc(dev_priv, phy) ||
	    dig_port->tc_mode != TC_PORT_TBT_ALT) {
		drm_WARN_ON(&dev_priv->drm, dig_port->ddi_io_wakeref);
		dig_port->ddi_io_wakeref = intel_display_power_get(dev_priv,
								   dig_port->ddi_io_power_domain);
	}

	/*
	 * AUX power is only needed for (e)DP mode, and for HDMI mode on TC
	 * ports.
	 */
	if (intel_crtc_has_dp_encoder(crtc_state) ||
	    intel_phy_is_tc(dev_priv, phy)) {
		drm_WARN_ON(&dev_priv->drm, dig_port->aux_wakeref);
		dig_port->aux_wakeref =
			intel_display_power_get(dev_priv,
						intel_ddi_main_link_aux_domain(dig_port));
	}
}

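/* Select this port as the clock source for the transcoder (eDP needs no clock select). */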
void intel_ddi_enable_pipe_clock(struct intel_encoder *encoder,
				 const struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum port port = encoder->port;
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;

	if (cpu_transcoder != TRANSCODER_EDP) {
		if (INTEL_GEN(dev_priv) >= 12)
			intel_de_write(dev_priv,
				       TRANS_CLK_SEL(cpu_transcoder),
				       TGL_TRANS_CLK_SEL_PORT(port));
		else
			intel_de_write(dev_priv,
				       TRANS_CLK_SEL(cpu_transcoder),
				       TRANS_CLK_SEL_PORT(port));
	}
}

void intel_ddi_disable_pipe_clock(const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;

	if (cpu_transcoder != TRANSCODER_EDP) {
		if (INTEL_GEN(dev_priv) >= 12)
			intel_de_write(dev_priv,
				       TRANS_CLK_SEL(cpu_transcoder),
				       TGL_TRANS_CLK_SEL_DISABLED);
		else
			intel_de_write(dev_priv,
				       TRANS_CLK_SEL(cpu_transcoder),
				       TRANS_CLK_SEL_DISABLED);
	}
}

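/*
 * Program the I_boost balance leg value for one port in DISPIO_CR_TX_BMU_CR0,
 * or disable the balance leg when iboost is zero.
 */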
static void _skl_ddi_set_iboost(struct drm_i915_private *dev_priv,
				enum port port, u8 iboost)
{
	u32 tmp;

	tmp = intel_de_read(dev_priv, DISPIO_CR_TX_BMU_CR0);
	tmp &= ~(BALANCE_LEG_MASK(port) | BALANCE_LEG_DISABLE(port));
	if (iboost)
		tmp |= iboost << BALANCE_LEG_SHIFT(port);
	else
		tmp |= BALANCE_LEG_DISABLE(port);
	intel_de_write(dev_priv, DISPIO_CR_TX_BMU_CR0, tmp);
}

static void skl_ddi_set_iboost(struct intel_encoder *encoder,
			       const struct intel_crtc_state *crtc_state,
			       int level)
{
	struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u8 iboost;

	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI))
		iboost = intel_bios_hdmi_boost_level(encoder);
	else
		iboost = intel_bios_dp_boost_level(encoder);

	if (iboost == 0) {
		const struct ddi_buf_trans *ddi_translations;
		int n_entries;

		if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI))
			ddi_translations = intel_ddi_get_buf_trans_hdmi(encoder, &n_entries);
		else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP))
			ddi_translations = intel_ddi_get_buf_trans_edp(encoder, &n_entries);
		else
			ddi_translations = intel_ddi_get_buf_trans_dp(encoder, &n_entries);

		if (drm_WARN_ON_ONCE(&dev_priv->drm, !ddi_translations))
			return;
		if (drm_WARN_ON_ONCE(&dev_priv->drm, level >= n_entries))
			level = n_entries - 1;

		iboost = ddi_translations[level].i_boost;
	}

	/* Make sure that the requested I_boost is valid */
	if (iboost && iboost != 0x1 && iboost != 0x3 && iboost != 0x7) {
		drm_err(&dev_priv->drm, "Invalid I_boost value %u\n", iboost);
		return;
	}

	_skl_ddi_set_iboost(dev_priv, encoder->port, iboost);

	if (encoder->port == PORT_A && dig_port->max_lanes == 4)
		_skl_ddi_set_iboost(dev_priv, PORT_E, iboost);
}

static void bxt_ddi_vswing_sequence(struct intel_encoder *encoder,
				    const struct intel_crtc_state *crtc_state,
				    int level)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	const struct bxt_ddi_buf_trans *ddi_translations;
	enum port port = encoder->port;
	int n_entries;

	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI))
		ddi_translations = bxt_get_buf_trans_hdmi(encoder, &n_entries);
	else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP))
		ddi_translations = bxt_get_buf_trans_edp(encoder, &n_entries);
	else
		ddi_translations = bxt_get_buf_trans_dp(encoder, &n_entries);

	if (drm_WARN_ON_ONCE(&dev_priv->drm, !ddi_translations))
		return;
	if (drm_WARN_ON_ONCE(&dev_priv->drm, level >= n_entries))
		level = n_entries - 1;

	bxt_ddi_phy_set_signal_level(dev_priv, port,
				     ddi_translations[level].margin,
				     ddi_translations[level].scale,
				     ddi_translations[level].enable,
				     ddi_translations[level].deemphasis);
}

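/*
 * Report the maximum supported voltage swing by looking up how many entries
 * the platform's DP buffer translation table provides.
 */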
static u8 intel_ddi_dp_voltage_max(struct intel_dp *intel_dp,
				   const struct intel_crtc_state *crtc_state)
{
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	enum port port = encoder->port;
	enum phy phy = intel_port_to_phy(dev_priv, port);
	int n_entries;

	if (INTEL_GEN(dev_priv) >= 12) {
		if (intel_phy_is_combo(dev_priv, phy))
			tgl_get_combo_buf_trans(encoder, crtc_state, &n_entries);
		else
			tgl_get_dkl_buf_trans(encoder, crtc_state, &n_entries);
	} else if (INTEL_GEN(dev_priv) == 11) {
		if (IS_PLATFORM(dev_priv, INTEL_JASPERLAKE))
			jsl_get_combo_buf_trans(encoder, crtc_state, &n_entries);
		else if (IS_PLATFORM(dev_priv, INTEL_ELKHARTLAKE))
			ehl_get_combo_buf_trans(encoder, crtc_state, &n_entries);
		else if (intel_phy_is_combo(dev_priv, phy))
			icl_get_combo_buf_trans(encoder, crtc_state, &n_entries);
		else
			icl_get_mg_buf_trans(encoder, crtc_state, &n_entries);
	} else if (IS_CANNONLAKE(dev_priv)) {
		if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP))
			cnl_get_buf_trans_edp(encoder, &n_entries);
		else
			cnl_get_buf_trans_dp(encoder, &n_entries);
	} else if (IS_GEN9_LP(dev_priv)) {
		if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP))
			bxt_get_buf_trans_edp(encoder, &n_entries);
		else
			bxt_get_buf_trans_dp(encoder, &n_entries);
	} else {
		if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP))
			intel_ddi_get_buf_trans_edp(encoder, &n_entries);
		else
			intel_ddi_get_buf_trans_dp(encoder, &n_entries);
	}

	if (drm_WARN_ON(&dev_priv->drm, n_entries < 1))
		n_entries = 1;
	if (drm_WARN_ON(&dev_priv->drm,
			n_entries > ARRAY_SIZE(index_to_dp_signal_levels)))
		n_entries = ARRAY_SIZE(index_to_dp_signal_levels);

	return index_to_dp_signal_levels[n_entries - 1] &
		DP_TRAIN_VOLTAGE_SWING_MASK;
}

/*
 * We assume that the full set of pre-emphasis values can be
 * used on all DDI platforms. Should that change we need to
 * rethink this code.
 */
static u8 intel_ddi_dp_preemph_max(struct intel_dp *intel_dp)
{
	return DP_TRAIN_PRE_EMPH_LEVEL_3;
}

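/*
 * Program the CNL combo PHY swing/de-emphasis registers (PORT_TX_DW2/4/5/7)
 * from the selected buffer translation entry.
 */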
static void cnl_ddi_vswing_program(struct intel_encoder *encoder,
				   const struct intel_crtc_state *crtc_state,
				   int level)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	const struct cnl_ddi_buf_trans *ddi_translations;
	enum port port = encoder->port;
	int n_entries, ln;
	u32 val;

	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI))
		ddi_translations = cnl_get_buf_trans_hdmi(encoder, &n_entries);
	else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP))
		ddi_translations = cnl_get_buf_trans_edp(encoder, &n_entries);
	else
		ddi_translations = cnl_get_buf_trans_dp(encoder, &n_entries);

	if (drm_WARN_ON_ONCE(&dev_priv->drm, !ddi_translations))
		return;
	if (drm_WARN_ON_ONCE(&dev_priv->drm, level >= n_entries))
		level = n_entries - 1;

	/* Set PORT_TX_DW5 Scaling Mode Sel to 010b. */
	val = intel_de_read(dev_priv, CNL_PORT_TX_DW5_LN0(port));
	val &= ~SCALING_MODE_SEL_MASK;
	val |= SCALING_MODE_SEL(2);
	intel_de_write(dev_priv, CNL_PORT_TX_DW5_GRP(port), val);

	/* Program PORT_TX_DW2 */
	val = intel_de_read(dev_priv, CNL_PORT_TX_DW2_LN0(port));
	val &= ~(SWING_SEL_LOWER_MASK | SWING_SEL_UPPER_MASK |
		 RCOMP_SCALAR_MASK);
	val |= SWING_SEL_UPPER(ddi_translations[level].dw2_swing_sel);
	val |= SWING_SEL_LOWER(ddi_translations[level].dw2_swing_sel);
	/* Rcomp scalar is fixed as 0x98 for every table entry */
	val |= RCOMP_SCALAR(0x98);
	intel_de_write(dev_priv, CNL_PORT_TX_DW2_GRP(port), val);

	/* Program PORT_TX_DW4 */
	/* We cannot write to GRP. It would overwrite individual loadgen */
	for (ln = 0; ln < 4; ln++) {
		val = intel_de_read(dev_priv, CNL_PORT_TX_DW4_LN(ln, port));
		val &= ~(POST_CURSOR_1_MASK | POST_CURSOR_2_MASK |
			 CURSOR_COEFF_MASK);
		val |= POST_CURSOR_1(ddi_translations[level].dw4_post_cursor_1);
		val |= POST_CURSOR_2(ddi_translations[level].dw4_post_cursor_2);
		val |= CURSOR_COEFF(ddi_translations[level].dw4_cursor_coeff);
		intel_de_write(dev_priv, CNL_PORT_TX_DW4_LN(ln, port), val);
	}

	/* Program PORT_TX_DW5 */
	/* All DW5 values are fixed for every table entry */
	val = intel_de_read(dev_priv, CNL_PORT_TX_DW5_LN0(port));
	val &= ~RTERM_SELECT_MASK;
	val |= RTERM_SELECT(6);
	val |= TAP3_DISABLE;
	intel_de_write(dev_priv, CNL_PORT_TX_DW5_GRP(port), val);

	/* Program PORT_TX_DW7 */
	val = intel_de_read(dev_priv, CNL_PORT_TX_DW7_LN0(port));
	val &= ~N_SCALAR_MASK;
	val |= N_SCALAR(ddi_translations[level].dw7_n_scalar);
	intel_de_write(dev_priv, CNL_PORT_TX_DW7_GRP(port), val);
}

static void cnl_ddi_vswing_sequence(struct intel_encoder *encoder,
				    const struct intel_crtc_state *crtc_state,
				    int level)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	enum port port = encoder->port;
	int width, rate, ln;
	u32 val;

	width = crtc_state->lane_count;
	rate = crtc_state->port_clock;

	/*
	 * 1. If port type is eDP or DP,
	 * set PORT_PCS_DW1 cmnkeeper_enable to 1b,
	 * else clear to 0b.
	 */
	val = intel_de_read(dev_priv, CNL_PORT_PCS_DW1_LN0(port));
	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI))
		val &= ~COMMON_KEEPER_EN;
	else
		val |= COMMON_KEEPER_EN;
	intel_de_write(dev_priv, CNL_PORT_PCS_DW1_GRP(port), val);

	/* 2. Program loadgen select */
	/*
	 * Program PORT_TX_DW4_LN depending on Bit rate and used lanes
	 * <= 6 GHz and 4 lanes (LN0=0, LN1=1, LN2=1, LN3=1)
	 * <= 6 GHz and 1,2 lanes (LN0=0, LN1=1, LN2=1, LN3=0)
	 * > 6 GHz (LN0=0, LN1=0, LN2=0, LN3=0)
	 */
	for (ln = 0; ln <= 3; ln++) {
		val = intel_de_read(dev_priv, CNL_PORT_TX_DW4_LN(ln, port));
		val &= ~LOADGEN_SELECT;

		if ((rate <= 600000 && width == 4 && ln >= 1)  ||
		    (rate <= 600000 && width < 4 && (ln == 1 || ln == 2))) {
			val |= LOADGEN_SELECT;
		}
		intel_de_write(dev_priv, CNL_PORT_TX_DW4_LN(ln, port), val);
	}

	/* 3. Set PORT_CL_DW5 SUS Clock Config to 11b */
	val = intel_de_read(dev_priv, CNL_PORT_CL1CM_DW5);
	val |= SUS_CLOCK_CONFIG;
	intel_de_write(dev_priv, CNL_PORT_CL1CM_DW5, val);

	/* 4. Clear training enable to change swing values */
	val = intel_de_read(dev_priv, CNL_PORT_TX_DW5_LN0(port));
	val &= ~TX_TRAINING_EN;
	intel_de_write(dev_priv, CNL_PORT_TX_DW5_GRP(port), val);

	/* 5. Program swing and de-emphasis */
	cnl_ddi_vswing_program(encoder, crtc_state, level);

	/* 6. Set training enable to trigger update */
	val = intel_de_read(dev_priv, CNL_PORT_TX_DW5_LN0(port));
	val |= TX_TRAINING_EN;
	intel_de_write(dev_priv, CNL_PORT_TX_DW5_GRP(port), val);

2686
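/*
 * Program the ICL+ combo PHY swing/de-emphasis registers, including the eDP
 * HOBL override handling when the HOBL translation table is in use.
 */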
static void icl_ddi_combo_vswing_program(struct intel_encoder *encoder,
2687 2688
					 const struct intel_crtc_state *crtc_state,
					 int level)
2689
{
2690
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
2691
	const struct cnl_ddi_buf_trans *ddi_translations;
2692
	enum phy phy = intel_port_to_phy(dev_priv, encoder->port);
2693 2694
	int n_entries, ln;
	u32 val;
2695

2696
	if (INTEL_GEN(dev_priv) >= 12)
2697
		ddi_translations = tgl_get_combo_buf_trans(encoder, crtc_state, &n_entries);
2698 2699 2700
	else if (IS_PLATFORM(dev_priv, INTEL_JASPERLAKE))
		ddi_translations = jsl_get_combo_buf_trans(encoder, crtc_state, &n_entries);
	else if (IS_PLATFORM(dev_priv, INTEL_ELKHARTLAKE))
2701
		ddi_translations = ehl_get_combo_buf_trans(encoder, crtc_state, &n_entries);
2702
	else
2703
		ddi_translations = icl_get_combo_buf_trans(encoder, crtc_state, &n_entries);
2704 2705 2706 2707
	if (!ddi_translations)
		return;

	if (level >= n_entries) {
2708 2709 2710
		drm_dbg_kms(&dev_priv->drm,
			    "DDI translation not found for level %d. Using %d instead.",
			    level, n_entries - 1);
2711 2712 2713
		level = n_entries - 1;
	}

2714
	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP)) {
2715 2716 2717 2718 2719 2720 2721 2722
		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);

		val = EDP4K2K_MODE_OVRD_EN | EDP4K2K_MODE_OVRD_OPTIMIZED;
		intel_dp->hobl_active = is_hobl_buf_trans(ddi_translations);
		intel_de_rmw(dev_priv, ICL_PORT_CL_DW10(phy), val,
			     intel_dp->hobl_active ? val : 0);
	}

2723
	/* Set PORT_TX_DW5 */
2724
	val = intel_de_read(dev_priv, ICL_PORT_TX_DW5_LN0(phy));
2725 2726 2727
	val &= ~(SCALING_MODE_SEL_MASK | RTERM_SELECT_MASK |
		  TAP2_DISABLE | TAP3_DISABLE);
	val |= SCALING_MODE_SEL(0x2);
2728
	val |= RTERM_SELECT(0x6);
2729
	val |= TAP3_DISABLE;
2730
	intel_de_write(dev_priv, ICL_PORT_TX_DW5_GRP(phy), val);
2731 2732

	/* Program PORT_TX_DW2 */
2733
	val = intel_de_read(dev_priv, ICL_PORT_TX_DW2_LN0(phy));
2734 2735
	val &= ~(SWING_SEL_LOWER_MASK | SWING_SEL_UPPER_MASK |
		 RCOMP_SCALAR_MASK);
2736 2737
	val |= SWING_SEL_UPPER(ddi_translations[level].dw2_swing_sel);
	val |= SWING_SEL_LOWER(ddi_translations[level].dw2_swing_sel);
2738
	/* Program Rcomp scalar for every table entry */
2739
	val |= RCOMP_SCALAR(0x98);
2740
	intel_de_write(dev_priv, ICL_PORT_TX_DW2_GRP(phy), val);
2741 2742 2743 2744

	/* Program PORT_TX_DW4 */
	/* We cannot write to GRP. It would overwrite individual loadgen. */
	for (ln = 0; ln <= 3; ln++) {
2745
		val = intel_de_read(dev_priv, ICL_PORT_TX_DW4_LN(ln, phy));
2746 2747
		val &= ~(POST_CURSOR_1_MASK | POST_CURSOR_2_MASK |
			 CURSOR_COEFF_MASK);
2748 2749 2750
		val |= POST_CURSOR_1(ddi_translations[level].dw4_post_cursor_1);
		val |= POST_CURSOR_2(ddi_translations[level].dw4_post_cursor_2);
		val |= CURSOR_COEFF(ddi_translations[level].dw4_cursor_coeff);
2751
		intel_de_write(dev_priv, ICL_PORT_TX_DW4_LN(ln, phy), val);
2752
	}
2753 2754

	/* Program PORT_TX_DW7 */
2755
	val = intel_de_read(dev_priv, ICL_PORT_TX_DW7_LN0(phy));
2756 2757
	val &= ~N_SCALAR_MASK;
	val |= N_SCALAR(ddi_translations[level].dw7_n_scalar);
2758
	intel_de_write(dev_priv, ICL_PORT_TX_DW7_GRP(phy), val);
2759 2760 2761
}

static void icl_combo_phy_ddi_vswing_sequence(struct intel_encoder *encoder,
					      const struct intel_crtc_state *crtc_state,
					      int level)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	enum phy phy = intel_port_to_phy(dev_priv, encoder->port);
	int width, rate, ln;
	u32 val;

	width = crtc_state->lane_count;
	rate = crtc_state->port_clock;

	/*
	 * 1. If port type is eDP or DP,
	 * set PORT_PCS_DW1 cmnkeeper_enable to 1b,
	 * else clear to 0b.
	 */
	val = intel_de_read(dev_priv, ICL_PORT_PCS_DW1_LN0(phy));
	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI))
		val &= ~COMMON_KEEPER_EN;
	else
		val |= COMMON_KEEPER_EN;
	intel_de_write(dev_priv, ICL_PORT_PCS_DW1_GRP(phy), val);

	/* 2. Program loadgen select */
	/*
	 * Program PORT_TX_DW4_LN depending on Bit rate and used lanes
	 * <= 6 GHz and 4 lanes (LN0=0, LN1=1, LN2=1, LN3=1)
	 * <= 6 GHz and 1,2 lanes (LN0=0, LN1=1, LN2=1, LN3=0)
	 * > 6 GHz (LN0=0, LN1=0, LN2=0, LN3=0)
	 */
	for (ln = 0; ln <= 3; ln++) {
		val = intel_de_read(dev_priv, ICL_PORT_TX_DW4_LN(ln, phy));
		val &= ~LOADGEN_SELECT;

		if ((rate <= 600000 && width == 4 && ln >= 1) ||
		    (rate <= 600000 && width < 4 && (ln == 1 || ln == 2))) {
			val |= LOADGEN_SELECT;
		}
		intel_de_write(dev_priv, ICL_PORT_TX_DW4_LN(ln, phy), val);
	}

	/* 3. Set PORT_CL_DW5 SUS Clock Config to 11b */
	val = intel_de_read(dev_priv, ICL_PORT_CL_DW5(phy));
	val |= SUS_CLOCK_CONFIG;
	intel_de_write(dev_priv, ICL_PORT_CL_DW5(phy), val);

	/* 4. Clear training enable to change swing values */
	val = intel_de_read(dev_priv, ICL_PORT_TX_DW5_LN0(phy));
	val &= ~TX_TRAINING_EN;
	intel_de_write(dev_priv, ICL_PORT_TX_DW5_GRP(phy), val);

	/* 5. Program swing and de-emphasis */
	icl_ddi_combo_vswing_program(encoder, crtc_state, level);

	/* 6. Set training enable to trigger update */
	val = intel_de_read(dev_priv, ICL_PORT_TX_DW5_LN0(phy));
	val |= TX_TRAINING_EN;
	intel_de_write(dev_priv, ICL_PORT_TX_DW5_GRP(phy), val);
}

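/*
 * Program the ICL MG (Type-C) PHY vswing/de-emphasis registers from the MG
 * buffer translation table, plus the clock-rate dependent MG_CLKHUB and
 * MG_TX_DCC settings.
 */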
static void icl_mg_phy_ddi_vswing_sequence(struct intel_encoder *encoder,
					   const struct intel_crtc_state *crtc_state,
					   int level)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	enum tc_port tc_port = intel_port_to_tc(dev_priv, encoder->port);
	const struct icl_mg_phy_ddi_buf_trans *ddi_translations;
	int n_entries, ln;
	u32 val;

	ddi_translations = icl_get_mg_buf_trans(encoder, crtc_state, &n_entries);
	/* The table does not have values for level 3 and level 9. */
	if (level >= n_entries || level == 3 || level == 9) {
		drm_dbg_kms(&dev_priv->drm,
			    "DDI translation not found for level %d. Using %d instead.",
			    level, n_entries - 2);
		level = n_entries - 2;
	}

	/* Set MG_TX_LINK_PARAMS cri_use_fs32 to 0. */
	for (ln = 0; ln < 2; ln++) {
		val = intel_de_read(dev_priv, MG_TX1_LINK_PARAMS(ln, tc_port));
		val &= ~CRI_USE_FS32;
		intel_de_write(dev_priv, MG_TX1_LINK_PARAMS(ln, tc_port), val);

		val = intel_de_read(dev_priv, MG_TX2_LINK_PARAMS(ln, tc_port));
		val &= ~CRI_USE_FS32;
		intel_de_write(dev_priv, MG_TX2_LINK_PARAMS(ln, tc_port), val);
	}

	/* Program MG_TX_SWINGCTRL with values from vswing table */
	for (ln = 0; ln < 2; ln++) {
		val = intel_de_read(dev_priv, MG_TX1_SWINGCTRL(ln, tc_port));
		val &= ~CRI_TXDEEMPH_OVERRIDE_17_12_MASK;
		val |= CRI_TXDEEMPH_OVERRIDE_17_12(
			ddi_translations[level].cri_txdeemph_override_17_12);
		intel_de_write(dev_priv, MG_TX1_SWINGCTRL(ln, tc_port), val);

		val = intel_de_read(dev_priv, MG_TX2_SWINGCTRL(ln, tc_port));
		val &= ~CRI_TXDEEMPH_OVERRIDE_17_12_MASK;
		val |= CRI_TXDEEMPH_OVERRIDE_17_12(
			ddi_translations[level].cri_txdeemph_override_17_12);
		intel_de_write(dev_priv, MG_TX2_SWINGCTRL(ln, tc_port), val);
	}

	/* Program MG_TX_DRVCTRL with values from vswing table */
	for (ln = 0; ln < 2; ln++) {
		val = intel_de_read(dev_priv, MG_TX1_DRVCTRL(ln, tc_port));
		val &= ~(CRI_TXDEEMPH_OVERRIDE_11_6_MASK |
			 CRI_TXDEEMPH_OVERRIDE_5_0_MASK);
		val |= CRI_TXDEEMPH_OVERRIDE_5_0(
			ddi_translations[level].cri_txdeemph_override_5_0) |
			CRI_TXDEEMPH_OVERRIDE_11_6(
				ddi_translations[level].cri_txdeemph_override_11_6) |
			CRI_TXDEEMPH_OVERRIDE_EN;
		intel_de_write(dev_priv, MG_TX1_DRVCTRL(ln, tc_port), val);

		val = intel_de_read(dev_priv, MG_TX2_DRVCTRL(ln, tc_port));
		val &= ~(CRI_TXDEEMPH_OVERRIDE_11_6_MASK |
			 CRI_TXDEEMPH_OVERRIDE_5_0_MASK);
		val |= CRI_TXDEEMPH_OVERRIDE_5_0(
			ddi_translations[level].cri_txdeemph_override_5_0) |
			CRI_TXDEEMPH_OVERRIDE_11_6(
				ddi_translations[level].cri_txdeemph_override_11_6) |
			CRI_TXDEEMPH_OVERRIDE_EN;
		intel_de_write(dev_priv, MG_TX2_DRVCTRL(ln, tc_port), val);

		/* FIXME: Program CRI_LOADGEN_SEL after the spec is updated */
	}

	/*
	 * Program MG_CLKHUB<LN, port being used> with the value from the
	 * frequency table. In case of Legacy mode on the MG PHY, both TX1
	 * and TX2 are enabled, so use the table values for which TX1 and
	 * TX2 are enabled.
	 */
	for (ln = 0; ln < 2; ln++) {
		val = intel_de_read(dev_priv, MG_CLKHUB(ln, tc_port));
		if (crtc_state->port_clock < 300000)
			val |= CFG_LOW_RATE_LKREN_EN;
		else
			val &= ~CFG_LOW_RATE_LKREN_EN;
		intel_de_write(dev_priv, MG_CLKHUB(ln, tc_port), val);
	}

	/* Program the MG_TX_DCC<LN, port being used> based on the link frequency */
	for (ln = 0; ln < 2; ln++) {
		val = intel_de_read(dev_priv, MG_TX1_DCC(ln, tc_port));
		val &= ~CFG_AMI_CK_DIV_OVERRIDE_VAL_MASK;
		if (crtc_state->port_clock <= 500000) {
			val &= ~CFG_AMI_CK_DIV_OVERRIDE_EN;
		} else {
			val |= CFG_AMI_CK_DIV_OVERRIDE_EN |
				CFG_AMI_CK_DIV_OVERRIDE_VAL(1);
		}
		intel_de_write(dev_priv, MG_TX1_DCC(ln, tc_port), val);

		val = intel_de_read(dev_priv, MG_TX2_DCC(ln, tc_port));
		val &= ~CFG_AMI_CK_DIV_OVERRIDE_VAL_MASK;
		if (crtc_state->port_clock <= 500000) {
			val &= ~CFG_AMI_CK_DIV_OVERRIDE_EN;
		} else {
			val |= CFG_AMI_CK_DIV_OVERRIDE_EN |
				CFG_AMI_CK_DIV_OVERRIDE_VAL(1);
		}
		intel_de_write(dev_priv, MG_TX2_DCC(ln, tc_port), val);
	}

	/* Program MG_TX_PISO_READLOAD with values from vswing table */
	for (ln = 0; ln < 2; ln++) {
		val = intel_de_read(dev_priv,
				    MG_TX1_PISO_READLOAD(ln, tc_port));
		val |= CRI_CALCINIT;
		intel_de_write(dev_priv, MG_TX1_PISO_READLOAD(ln, tc_port),
			       val);

		val = intel_de_read(dev_priv,
				    MG_TX2_PISO_READLOAD(ln, tc_port));
		val |= CRI_CALCINIT;
		intel_de_write(dev_priv, MG_TX2_PISO_READLOAD(ln, tc_port),
			       val);
	}
}

static void icl_ddi_vswing_sequence(struct intel_encoder *encoder,
				    const struct intel_crtc_state *crtc_state,
				    int level)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	enum phy phy = intel_port_to_phy(dev_priv, encoder->port);

	if (intel_phy_is_combo(dev_priv, phy))
		icl_combo_phy_ddi_vswing_sequence(encoder, crtc_state, level);
	else
		icl_mg_phy_ddi_vswing_sequence(encoder, crtc_state, level);
}

static void
tgl_dkl_phy_ddi_vswing_sequence(struct intel_encoder *encoder,
				const struct intel_crtc_state *crtc_state,
				int level)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	enum tc_port tc_port = intel_port_to_tc(dev_priv, encoder->port);
	const struct tgl_dkl_phy_ddi_buf_trans *ddi_translations;
	u32 val, dpcnt_mask, dpcnt_val;
	int n_entries, ln;

	ddi_translations = tgl_get_dkl_buf_trans(encoder, crtc_state, &n_entries);

	if (level >= n_entries)
		level = n_entries - 1;

	dpcnt_mask = (DKL_TX_PRESHOOT_COEFF_MASK |
		      DKL_TX_DE_EMPAHSIS_COEFF_MASK |
		      DKL_TX_VSWING_CONTROL_MASK);
	dpcnt_val = DKL_TX_VSWING_CONTROL(ddi_translations[level].dkl_vswing_control);
	dpcnt_val |= DKL_TX_DE_EMPHASIS_COEFF(ddi_translations[level].dkl_de_emphasis_control);
	dpcnt_val |= DKL_TX_PRESHOOT_COEFF(ddi_translations[level].dkl_preshoot_control);

	for (ln = 0; ln < 2; ln++) {
		intel_de_write(dev_priv, HIP_INDEX_REG(tc_port),
			       HIP_INDEX_VAL(tc_port, ln));

		intel_de_write(dev_priv, DKL_TX_PMD_LANE_SUS(tc_port), 0);

		/* All the registers are RMW */
		val = intel_de_read(dev_priv, DKL_TX_DPCNTL0(tc_port));
		val &= ~dpcnt_mask;
		val |= dpcnt_val;
		intel_de_write(dev_priv, DKL_TX_DPCNTL0(tc_port), val);

		val = intel_de_read(dev_priv, DKL_TX_DPCNTL1(tc_port));
		val &= ~dpcnt_mask;
		val |= dpcnt_val;
		intel_de_write(dev_priv, DKL_TX_DPCNTL1(tc_port), val);

		val = intel_de_read(dev_priv, DKL_TX_DPCNTL2(tc_port));
		val &= ~DKL_TX_DP20BITMODE;
		intel_de_write(dev_priv, DKL_TX_DPCNTL2(tc_port), val);
	}
}

static void tgl_ddi_vswing_sequence(struct intel_encoder *encoder,
				    const struct intel_crtc_state *crtc_state,
				    int level)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	enum phy phy = intel_port_to_phy(dev_priv, encoder->port);

	if (intel_phy_is_combo(dev_priv, phy))
		icl_combo_phy_ddi_vswing_sequence(encoder, crtc_state, level);
	else
		tgl_dkl_phy_ddi_vswing_sequence(encoder, crtc_state, level);
}

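/*
 * Map a DPCD voltage swing/pre-emphasis combination back to its index in
 * index_to_dp_signal_levels[], which is also the index used for the buf
 * trans tables. Unknown combinations warn and fall back to index 0.
 */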
static int translate_signal_level(struct intel_dp *intel_dp,
				  u8 signal_levels)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
	int i;

	for (i = 0; i < ARRAY_SIZE(index_to_dp_signal_levels); i++) {
		if (index_to_dp_signal_levels[i] == signal_levels)
			return i;
	}

	drm_WARN(&i915->drm, 1,
		 "Unsupported voltage swing/pre-emphasis level: 0x%x\n",
		 signal_levels);

	return 0;
}

static int intel_ddi_dp_level(struct intel_dp *intel_dp)
{
	u8 train_set = intel_dp->train_set[0];
	u8 signal_levels = train_set & (DP_TRAIN_VOLTAGE_SWING_MASK |
					DP_TRAIN_PRE_EMPHASIS_MASK);

	return translate_signal_level(intel_dp, signal_levels);
}

static void
tgl_set_signal_levels(struct intel_dp *intel_dp,
		      const struct intel_crtc_state *crtc_state)
{
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	int level = intel_ddi_dp_level(intel_dp);

	tgl_ddi_vswing_sequence(encoder, crtc_state, level);
}

static void
icl_set_signal_levels(struct intel_dp *intel_dp,
		      const struct intel_crtc_state *crtc_state)
{
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	int level = intel_ddi_dp_level(intel_dp);

	icl_ddi_vswing_sequence(encoder, crtc_state, level);
}

static void
cnl_set_signal_levels(struct intel_dp *intel_dp,
		      const struct intel_crtc_state *crtc_state)
{
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	int level = intel_ddi_dp_level(intel_dp);

	cnl_ddi_vswing_sequence(encoder, crtc_state, level);
}

static void
bxt_set_signal_levels(struct intel_dp *intel_dp,
		      const struct intel_crtc_state *crtc_state)
{
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	int level = intel_ddi_dp_level(intel_dp);

	bxt_ddi_vswing_sequence(encoder, crtc_state, level);
}

static void
hsw_set_signal_levels(struct intel_dp *intel_dp,
		      const struct intel_crtc_state *crtc_state)
{
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	int level = intel_ddi_dp_level(intel_dp);
	enum port port = encoder->port;
	u32 signal_levels;

	signal_levels = DDI_BUF_TRANS_SELECT(level);

	drm_dbg_kms(&dev_priv->drm, "Using signal levels %08x\n",
		    signal_levels);

	intel_dp->DP &= ~DDI_BUF_EMP_MASK;
	intel_dp->DP |= signal_levels;

	if (IS_GEN9_BC(dev_priv))
		skl_ddi_set_iboost(encoder, crtc_state, level);

	intel_de_write(dev_priv, DDI_BUF_CTL(port), intel_dp->DP);
	intel_de_posting_read(dev_priv, DDI_BUF_CTL(port));
}

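/*
 * Return the DPCLKA_CFGCR0 clock-off bit for the given PHY: RKL has its
 * own per-PHY bits, ICL combo PHYs use the per-DDI bits and Type-C PHYs
 * use the per-TC-port bits. 0 is returned for anything else.
 */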
static u32 icl_dpclka_cfgcr0_clk_off(struct drm_i915_private *dev_priv,
				     enum phy phy)
{
	if (IS_ROCKETLAKE(dev_priv)) {
		return RKL_DPCLKA_CFGCR0_DDI_CLK_OFF(phy);
	} else if (intel_phy_is_combo(dev_priv, phy)) {
		return ICL_DPCLKA_CFGCR0_DDI_CLK_OFF(phy);
	} else if (intel_phy_is_tc(dev_priv, phy)) {
		enum tc_port tc_port = intel_port_to_tc(dev_priv,
							(enum port)phy);

		return ICL_DPCLKA_CFGCR0_TC_CLK_OFF(tc_port);
	}

	return 0;
}

static void dg1_map_plls_to_ports(struct intel_encoder *encoder,
				  const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_shared_dpll *pll = crtc_state->shared_dpll;
	enum phy phy = intel_port_to_phy(dev_priv, encoder->port);
	u32 val;

	/*
	 * If we fail this, something went very wrong: first 2 PLLs should be
	 * used by first 2 phys and last 2 PLLs by last phys
	 */
	if (drm_WARN_ON(&dev_priv->drm,
			(pll->info->id < DPLL_ID_DG1_DPLL2 && phy >= PHY_C) ||
			(pll->info->id >= DPLL_ID_DG1_DPLL2 && phy < PHY_C)))
		return;

	mutex_lock(&dev_priv->dpll.lock);

	val = intel_de_read(dev_priv, DG1_DPCLKA_CFGCR0(phy));
	drm_WARN_ON(&dev_priv->drm,
		    (val & DG1_DPCLKA_CFGCR0_DDI_CLK_OFF(phy)) == 0);

	val &= ~DG1_DPCLKA_CFGCR0_DDI_CLK_SEL_MASK(phy);
	val |= DG1_DPCLKA_CFGCR0_DDI_CLK_SEL(pll->info->id, phy);
	intel_de_write(dev_priv, DG1_DPCLKA_CFGCR0(phy), val);
	intel_de_posting_read(dev_priv, DG1_DPCLKA_CFGCR0(phy));

	val &= ~DG1_DPCLKA_CFGCR0_DDI_CLK_OFF(phy);
	intel_de_write(dev_priv, DG1_DPCLKA_CFGCR0(phy), val);

	mutex_unlock(&dev_priv->dpll.lock);
}

static void icl_map_plls_to_ports(struct intel_encoder *encoder,
				  const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_shared_dpll *pll = crtc_state->shared_dpll;
	enum phy phy = intel_port_to_phy(dev_priv, encoder->port);
	u32 val;

	mutex_lock(&dev_priv->dpll.lock);

	val = intel_de_read(dev_priv, ICL_DPCLKA_CFGCR0);
	drm_WARN_ON(&dev_priv->drm,
		    (val & icl_dpclka_cfgcr0_clk_off(dev_priv, phy)) == 0);

	if (intel_phy_is_combo(dev_priv, phy)) {
		u32 mask, sel;

		if (IS_ROCKETLAKE(dev_priv)) {
			mask = RKL_DPCLKA_CFGCR0_DDI_CLK_SEL_MASK(phy);
			sel = RKL_DPCLKA_CFGCR0_DDI_CLK_SEL(pll->info->id, phy);
		} else {
			mask = ICL_DPCLKA_CFGCR0_DDI_CLK_SEL_MASK(phy);
			sel = ICL_DPCLKA_CFGCR0_DDI_CLK_SEL(pll->info->id, phy);
		}

		/*
		 * Even though this register references DDIs, note that we
		 * want to pass the PHY rather than the port (DDI).  For
		 * ICL, port=phy in all cases so it doesn't matter, but for
		 * EHL the bspec notes the following:
		 *
		 *   "DDID clock tied to DDIA clock, so DPCLKA_CFGCR0 DDIA
		 *   Clock Select chooses the PLL for both DDIA and DDID and
		 *   drives port A in all cases."
		 */
		val &= ~mask;
		val |= sel;
		intel_de_write(dev_priv, ICL_DPCLKA_CFGCR0, val);
		intel_de_posting_read(dev_priv, ICL_DPCLKA_CFGCR0);
	}

	val &= ~icl_dpclka_cfgcr0_clk_off(dev_priv, phy);
	intel_de_write(dev_priv, ICL_DPCLKA_CFGCR0, val);

	mutex_unlock(&dev_priv->dpll.lock);
}

static void dg1_unmap_plls_to_ports(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	enum phy phy = intel_port_to_phy(dev_priv, encoder->port);

	mutex_lock(&dev_priv->dpll.lock);

	intel_de_rmw(dev_priv, DG1_DPCLKA_CFGCR0(phy), 0,
		     DG1_DPCLKA_CFGCR0_DDI_CLK_OFF(phy));

	mutex_unlock(&dev_priv->dpll.lock);
}

static void icl_unmap_plls_to_ports(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	enum phy phy = intel_port_to_phy(dev_priv, encoder->port);
	u32 val;

	mutex_lock(&dev_priv->dpll.lock);

	val = intel_de_read(dev_priv, ICL_DPCLKA_CFGCR0);
	val |= icl_dpclka_cfgcr0_clk_off(dev_priv, phy);
	intel_de_write(dev_priv, ICL_DPCLKA_CFGCR0, val);

	mutex_unlock(&dev_priv->dpll.lock);
}

static void dg1_sanitize_port_clk_off(struct drm_i915_private *dev_priv,
				      u32 port_mask, bool ddi_clk_needed)
{
	enum port port;
	u32 val;

	for_each_port_masked(port, port_mask) {
		enum phy phy = intel_port_to_phy(dev_priv, port);
		bool ddi_clk_off;

		val = intel_de_read(dev_priv, DG1_DPCLKA_CFGCR0(phy));
		ddi_clk_off = val & DG1_DPCLKA_CFGCR0_DDI_CLK_OFF(phy);

		if (ddi_clk_needed == !ddi_clk_off)
			continue;

		/*
		 * Punt on the case now where clock is gated, but it would
		 * be needed by the port. Something else is really broken then.
		 */
		if (drm_WARN_ON(&dev_priv->drm, ddi_clk_needed))
			continue;

		drm_notice(&dev_priv->drm,
			   "PHY %c is disabled with an ungated DDI clock, gate it\n",
			   phy_name(phy));
		val |= DG1_DPCLKA_CFGCR0_DDI_CLK_OFF(phy);
		intel_de_write(dev_priv, DG1_DPCLKA_CFGCR0(phy), val);
	}
}

static void icl_sanitize_port_clk_off(struct drm_i915_private *dev_priv,
				      u32 port_mask, bool ddi_clk_needed)
{
	enum port port;
	u32 val;

	val = intel_de_read(dev_priv, ICL_DPCLKA_CFGCR0);
	for_each_port_masked(port, port_mask) {
		enum phy phy = intel_port_to_phy(dev_priv, port);
		bool ddi_clk_off = val & icl_dpclka_cfgcr0_clk_off(dev_priv,
								   phy);

		if (ddi_clk_needed == !ddi_clk_off)
			continue;

		/*
		 * Punt on the case now where clock is gated, but it would
		 * be needed by the port. Something else is really broken then.
		 */
		if (drm_WARN_ON(&dev_priv->drm, ddi_clk_needed))
			continue;

		drm_notice(&dev_priv->drm,
			   "PHY %c is disabled/in DSI mode with an ungated DDI clock, gate it\n",
			   phy_name(phy));
		val |= icl_dpclka_cfgcr0_clk_off(dev_priv, phy);
		intel_de_write(dev_priv, ICL_DPCLKA_CFGCR0, val);
	}
}

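/*
 * Sanitize the BIOS-programmed clock gating for this encoder's port(s):
 * if the encoder is not in use (or is a DSI encoder outside of its
 * enable/disable sequence), the DDI clock should be gated, so gate any
 * clock that was left ungated. A gated-but-needed clock only triggers a
 * warning.
 */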
void icl_sanitize_encoder_pll_mapping(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u32 port_mask;
	bool ddi_clk_needed;

	/*
	 * In case of DP MST, we sanitize the primary encoder only, not the
	 * virtual ones.
	 */
	if (encoder->type == INTEL_OUTPUT_DP_MST)
		return;

	if (!encoder->base.crtc && intel_encoder_is_dp(encoder)) {
		u8 pipe_mask;
		bool is_mst;

		intel_ddi_get_encoder_pipes(encoder, &pipe_mask, &is_mst);
		/*
		 * In the unlikely case that BIOS enables DP in MST mode, just
		 * warn since our MST HW readout is incomplete.
		 */
		if (drm_WARN_ON(&dev_priv->drm, is_mst))
			return;
	}

	port_mask = BIT(encoder->port);
	ddi_clk_needed = encoder->base.crtc;

	if (encoder->type == INTEL_OUTPUT_DSI) {
		struct intel_encoder *other_encoder;

		port_mask = intel_dsi_encoder_ports(encoder);
		/*
		 * Sanity check that we haven't incorrectly registered another
		 * encoder using any of the ports of this DSI encoder.
		 */
		for_each_intel_encoder(&dev_priv->drm, other_encoder) {
			if (other_encoder == encoder)
				continue;

			if (drm_WARN_ON(&dev_priv->drm,
					port_mask & BIT(other_encoder->port)))
				return;
		}
		/*
		 * For DSI we keep the ddi clocks gated
		 * except during enable/disable sequence.
		 */
		ddi_clk_needed = false;
	}

	if (IS_DG1(dev_priv))
		dg1_sanitize_port_clk_off(dev_priv, port_mask, ddi_clk_needed);
	else
		icl_sanitize_port_clk_off(dev_priv, port_mask, ddi_clk_needed);
}

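/*
 * Route the selected shared DPLL to this DDI's clock mux. The register
 * layout differs per platform: DDI_CLK_SEL for gen11+ non-combo (Type-C)
 * PHYs (combo PHY mapping is handled via DPCLKA_CFGCR0 in
 * icl_map_plls_to_ports()), DPCLKA_CFGCR0 on CNL, DPLL_CTRL2 on
 * SKL/KBL/CFL and PORT_CLK_SEL on HSW/BDW.
 */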
static void intel_ddi_clk_select(struct intel_encoder *encoder,
				 const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	enum port port = encoder->port;
	enum phy phy = intel_port_to_phy(dev_priv, port);
	u32 val;
	const struct intel_shared_dpll *pll = crtc_state->shared_dpll;

	if (drm_WARN_ON(&dev_priv->drm, !pll))
		return;

	mutex_lock(&dev_priv->dpll.lock);

	if (INTEL_GEN(dev_priv) >= 11) {
		if (!intel_phy_is_combo(dev_priv, phy))
			intel_de_write(dev_priv, DDI_CLK_SEL(port),
				       icl_pll_to_ddi_clk_sel(encoder, crtc_state));
		else if (IS_JSL_EHL(dev_priv) && port >= PORT_C)
			/*
			 * MG does not exist but the programming is required
			 * to ungate DDIC and DDID
			 */
			intel_de_write(dev_priv, DDI_CLK_SEL(port),
				       DDI_CLK_SEL_MG);
	} else if (IS_CANNONLAKE(dev_priv)) {
		/* Configure DPCLKA_CFGCR0 to map the DPLL to the DDI. */
		val = intel_de_read(dev_priv, DPCLKA_CFGCR0);
		val &= ~DPCLKA_CFGCR0_DDI_CLK_SEL_MASK(port);
		val |= DPCLKA_CFGCR0_DDI_CLK_SEL(pll->info->id, port);
		intel_de_write(dev_priv, DPCLKA_CFGCR0, val);

		/*
		 * Configure DPCLKA_CFGCR0 to turn on the clock for the DDI.
		 * This step and the step before must be done with separate
		 * register writes.
		 */
		val = intel_de_read(dev_priv, DPCLKA_CFGCR0);
		val &= ~DPCLKA_CFGCR0_DDI_CLK_OFF(port);
		intel_de_write(dev_priv, DPCLKA_CFGCR0, val);
	} else if (IS_GEN9_BC(dev_priv)) {
		/* DDI -> PLL mapping  */
		val = intel_de_read(dev_priv, DPLL_CTRL2);

		val &= ~(DPLL_CTRL2_DDI_CLK_OFF(port) |
			 DPLL_CTRL2_DDI_CLK_SEL_MASK(port));
		val |= (DPLL_CTRL2_DDI_CLK_SEL(pll->info->id, port) |
			DPLL_CTRL2_DDI_SEL_OVERRIDE(port));

		intel_de_write(dev_priv, DPLL_CTRL2, val);

	} else if (INTEL_GEN(dev_priv) < 9) {
		intel_de_write(dev_priv, PORT_CLK_SEL(port),
			       hsw_pll_to_ddi_pll_sel(pll));
	}

	mutex_unlock(&dev_priv->dpll.lock);
}

static void intel_ddi_clk_disable(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	enum port port = encoder->port;
	enum phy phy = intel_port_to_phy(dev_priv, port);

	if (INTEL_GEN(dev_priv) >= 11) {
		if (!intel_phy_is_combo(dev_priv, phy) ||
		    (IS_JSL_EHL(dev_priv) && port >= PORT_C))
			intel_de_write(dev_priv, DDI_CLK_SEL(port),
				       DDI_CLK_SEL_NONE);
	} else if (IS_CANNONLAKE(dev_priv)) {
		intel_de_write(dev_priv, DPCLKA_CFGCR0,
			       intel_de_read(dev_priv, DPCLKA_CFGCR0) | DPCLKA_CFGCR0_DDI_CLK_OFF(port));
	} else if (IS_GEN9_BC(dev_priv)) {
		intel_de_write(dev_priv, DPLL_CTRL2,
			       intel_de_read(dev_priv, DPLL_CTRL2) | DPLL_CTRL2_DDI_CLK_OFF(port));
	} else if (INTEL_GEN(dev_priv) < 9) {
		intel_de_write(dev_priv, PORT_CLK_SEL(port),
			       PORT_CLK_SEL_NONE);
	}
}

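/*
 * Program the MG/DKL DP_MODE lane configuration (x1 vs x2) for both PHY
 * lanes based on the Type-C pin assignment and the configured lane
 * count. This is a no-op in TBT-alt mode.
 */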
static void
icl_program_mg_dp_mode(struct intel_digital_port *dig_port,
		       const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(dig_port->base.base.dev);
	enum tc_port tc_port = intel_port_to_tc(dev_priv, dig_port->base.port);
	u32 ln0, ln1, pin_assignment;
	u8 width;

	if (dig_port->tc_mode == TC_PORT_TBT_ALT)
		return;

	if (INTEL_GEN(dev_priv) >= 12) {
		intel_de_write(dev_priv, HIP_INDEX_REG(tc_port),
			       HIP_INDEX_VAL(tc_port, 0x0));
		ln0 = intel_de_read(dev_priv, DKL_DP_MODE(tc_port));
		intel_de_write(dev_priv, HIP_INDEX_REG(tc_port),
			       HIP_INDEX_VAL(tc_port, 0x1));
		ln1 = intel_de_read(dev_priv, DKL_DP_MODE(tc_port));
	} else {
		ln0 = intel_de_read(dev_priv, MG_DP_MODE(0, tc_port));
		ln1 = intel_de_read(dev_priv, MG_DP_MODE(1, tc_port));
	}

	ln0 &= ~(MG_DP_MODE_CFG_DP_X1_MODE | MG_DP_MODE_CFG_DP_X2_MODE);
	ln1 &= ~(MG_DP_MODE_CFG_DP_X1_MODE | MG_DP_MODE_CFG_DP_X2_MODE);

	/* DPPATC */
	pin_assignment = intel_tc_port_get_pin_assignment_mask(dig_port);
	width = crtc_state->lane_count;

	switch (pin_assignment) {
	case 0x0:
		drm_WARN_ON(&dev_priv->drm,
			    dig_port->tc_mode != TC_PORT_LEGACY);
		if (width == 1) {
			ln1 |= MG_DP_MODE_CFG_DP_X1_MODE;
		} else {
			ln0 |= MG_DP_MODE_CFG_DP_X2_MODE;
			ln1 |= MG_DP_MODE_CFG_DP_X2_MODE;
		}
		break;
	case 0x1:
		if (width == 4) {
			ln0 |= MG_DP_MODE_CFG_DP_X2_MODE;
			ln1 |= MG_DP_MODE_CFG_DP_X2_MODE;
		}
		break;
	case 0x2:
		if (width == 2) {
			ln0 |= MG_DP_MODE_CFG_DP_X2_MODE;
			ln1 |= MG_DP_MODE_CFG_DP_X2_MODE;
		}
		break;
	case 0x3:
	case 0x5:
		if (width == 1) {
			ln0 |= MG_DP_MODE_CFG_DP_X1_MODE;
			ln1 |= MG_DP_MODE_CFG_DP_X1_MODE;
		} else {
			ln0 |= MG_DP_MODE_CFG_DP_X2_MODE;
			ln1 |= MG_DP_MODE_CFG_DP_X2_MODE;
		}
		break;
	case 0x4:
	case 0x6:
		if (width == 1) {
			ln0 |= MG_DP_MODE_CFG_DP_X1_MODE;
			ln1 |= MG_DP_MODE_CFG_DP_X1_MODE;
		} else {
			ln0 |= MG_DP_MODE_CFG_DP_X2_MODE;
			ln1 |= MG_DP_MODE_CFG_DP_X2_MODE;
		}
		break;
	default:
		MISSING_CASE(pin_assignment);
	}

	if (INTEL_GEN(dev_priv) >= 12) {
		intel_de_write(dev_priv, HIP_INDEX_REG(tc_port),
			       HIP_INDEX_VAL(tc_port, 0x0));
		intel_de_write(dev_priv, DKL_DP_MODE(tc_port), ln0);
		intel_de_write(dev_priv, HIP_INDEX_REG(tc_port),
			       HIP_INDEX_VAL(tc_port, 0x1));
		intel_de_write(dev_priv, DKL_DP_MODE(tc_port), ln1);
	} else {
		intel_de_write(dev_priv, MG_DP_MODE(0, tc_port), ln0);
		intel_de_write(dev_priv, MG_DP_MODE(1, tc_port), ln1);
	}
}

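/*
 * On gen12+ the DP_TP_CTL/DP_TP_STATUS registers moved from the DDI to
 * the transcoder, so the helpers below pick the register based on the
 * transcoder (the MST master transcoder for MST streams) instead of the
 * port on those platforms.
 */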
static enum transcoder
tgl_dp_tp_transcoder(const struct intel_crtc_state *crtc_state)
{
	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP_MST))
		return crtc_state->mst_master_transcoder;
	else
		return crtc_state->cpu_transcoder;
}

i915_reg_t dp_tp_ctl_reg(struct intel_encoder *encoder,
			 const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);

	if (INTEL_GEN(dev_priv) >= 12)
		return TGL_DP_TP_CTL(tgl_dp_tp_transcoder(crtc_state));
	else
		return DP_TP_CTL(encoder->port);
}

i915_reg_t dp_tp_status_reg(struct intel_encoder *encoder,
			    const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);

	if (INTEL_GEN(dev_priv) >= 12)
		return TGL_DP_TP_STATUS(tgl_dp_tp_transcoder(crtc_state));
	else
		return DP_TP_STATUS(encoder->port);
}

static void intel_dp_sink_set_fec_ready(struct intel_dp *intel_dp,
					const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);

	if (!crtc_state->fec_enable)
		return;

	if (drm_dp_dpcd_writeb(&intel_dp->aux, DP_FEC_CONFIGURATION, DP_FEC_READY) <= 0)
		drm_dbg_kms(&i915->drm,
			    "Failed to set FEC_READY in the sink\n");
}

static void intel_ddi_enable_fec(struct intel_encoder *encoder,
				 const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp;
	u32 val;

	if (!crtc_state->fec_enable)
		return;

	intel_dp = enc_to_intel_dp(encoder);
	val = intel_de_read(dev_priv, dp_tp_ctl_reg(encoder, crtc_state));
	val |= DP_TP_CTL_FEC_ENABLE;
	intel_de_write(dev_priv, dp_tp_ctl_reg(encoder, crtc_state), val);
}

static void intel_ddi_disable_fec_state(struct intel_encoder *encoder,
					const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp;
	u32 val;

	if (!crtc_state->fec_enable)
		return;

	intel_dp = enc_to_intel_dp(encoder);
	val = intel_de_read(dev_priv, dp_tp_ctl_reg(encoder, crtc_state));
	val &= ~DP_TP_CTL_FEC_ENABLE;
	intel_de_write(dev_priv, dp_tp_ctl_reg(encoder, crtc_state), val);
	intel_de_posting_read(dev_priv, dp_tp_ctl_reg(encoder, crtc_state));
}

static void tgl_ddi_pre_enable_dp(struct intel_atomic_state *state,
				  struct intel_encoder *encoder,
				  const struct intel_crtc_state *crtc_state,
				  const struct drm_connector_state *conn_state)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	enum phy phy = intel_port_to_phy(dev_priv, encoder->port);
	struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
	bool is_mst = intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP_MST);
	int level = intel_ddi_dp_level(intel_dp);

	intel_dp_set_link_params(intel_dp,
				 crtc_state->port_clock,
				 crtc_state->lane_count);

	/*
	 * 1. Enable Power Wells
	 *
	 * This was handled at the beginning of intel_atomic_commit_tail(),
	 * before we called down into this function.
	 */

	/* 2. Enable Panel Power if PPS is required */
	intel_edp_panel_on(intel_dp);

	/*
	 * 3. For non-TBT Type-C ports, set FIA lane count
	 * (DFLEXDPSP.DPX4TXLATC)
	 *
	 * This was done before tgl_ddi_pre_enable_dp by
	 * hsw_crtc_enable()->intel_encoders_pre_pll_enable().
	 */

	/*
	 * 4. Enable the port PLL.
	 *
	 * The PLL enabling itself was already done before this function by
	 * hsw_crtc_enable()->intel_enable_shared_dpll().  We need only
	 * configure the PLL to port mapping here.
	 */
	intel_ddi_clk_select(encoder, crtc_state);

	/* 5. If IO power is controlled through PWR_WELL_CTL, Enable IO Power */
	if (!intel_phy_is_tc(dev_priv, phy) ||
	    dig_port->tc_mode != TC_PORT_TBT_ALT) {
		drm_WARN_ON(&dev_priv->drm, dig_port->ddi_io_wakeref);
		dig_port->ddi_io_wakeref = intel_display_power_get(dev_priv,
								   dig_port->ddi_io_power_domain);
	}

	/* 6. Program DP_MODE */
	icl_program_mg_dp_mode(dig_port, crtc_state);

	/*
	 * 7. The rest of the below are substeps under the bspec's "Enable and
	 * Train Display Port" step.  Note that steps that are specific to
	 * MST will be handled by intel_mst_pre_enable_dp() before/after it
	 * calls into this function.  Also intel_mst_pre_enable_dp() only calls
	 * us when active_mst_links==0, so any steps designated for "single
	 * stream or multi-stream master transcoder" can just be performed
	 * unconditionally here.
	 */

	/*
	 * 7.a Configure Transcoder Clock Select to direct the Port clock to the
	 * Transcoder.
	 */
	intel_ddi_enable_pipe_clock(encoder, crtc_state);

	/*
	 * 7.b Configure TRANS_DDI_FUNC_CTL DDI Select, DDI Mode Select & MST
	 * Transport Select
	 */
	intel_ddi_config_transcoder_func(encoder, crtc_state);

	/*
	 * 7.c Configure & enable DP_TP_CTL with link training pattern 1
	 * selected
	 *
	 * This will be handled by the intel_dp_start_link_train() farther
	 * down this function.
	 */

	/* 7.e Configure voltage swing and related IO settings */
	tgl_ddi_vswing_sequence(encoder, crtc_state, level);

	/*
	 * 7.f Combo PHY: Configure PORT_CL_DW10 Static Power Down to power up
	 * the used lanes of the DDI.
	 */
	if (intel_phy_is_combo(dev_priv, phy)) {
		bool lane_reversal =
			dig_port->saved_port_bits & DDI_BUF_PORT_REVERSAL;

		intel_combo_phy_power_up_lanes(dev_priv, phy, false,
					       crtc_state->lane_count,
					       lane_reversal);
	}

	/*
	 * 7.g Configure and enable DDI_BUF_CTL
	 * 7.h Wait for DDI_BUF_CTL DDI Idle Status = 0b (Not Idle), timeout
	 *     after 500 us.
	 *
	 * We only configure what the register value will be here.  Actual
	 * enabling happens during link training farther down.
	 */
	intel_ddi_init_dp_buf_reg(encoder, crtc_state);

	if (!is_mst)
		intel_dp_set_power(intel_dp, DP_SET_POWER_D0);

	intel_dp_configure_protocol_converter(intel_dp, crtc_state);
	intel_dp_sink_set_decompression_state(intel_dp, crtc_state, true);
	/*
	 * DDI FEC: "anticipates enabling FEC encoding sets the FEC_READY bit
	 * in the FEC_CONFIGURATION register to 1 before initiating link
	 * training
	 */
	intel_dp_sink_set_fec_ready(intel_dp, crtc_state);

	intel_dp_check_frl_training(intel_dp);
	intel_dp_pcon_dsc_configure(intel_dp, crtc_state);

	/*
	 * 7.i Follow DisplayPort specification training sequence (see notes for
	 *     failure handling)
	 * 7.j If DisplayPort multi-stream - Set DP_TP_CTL link training to Idle
	 *     Pattern, wait for 5 idle patterns (DP_TP_STATUS Min_Idles_Sent)
	 *     (timeout after 800 us)
	 */
	intel_dp_start_link_train(intel_dp, crtc_state);

	/* 7.k Set DP_TP_CTL link training to Normal */
	if (!is_trans_port_sync_mode(crtc_state))
		intel_dp_stop_link_train(intel_dp, crtc_state);

	/* 7.l Configure and enable FEC if needed */
	intel_ddi_enable_fec(encoder, crtc_state);
	if (!crtc_state->bigjoiner)
		intel_dsc_enable(encoder, crtc_state);
}

static void hsw_ddi_pre_enable_dp(struct intel_atomic_state *state,
				  struct intel_encoder *encoder,
				  const struct intel_crtc_state *crtc_state,
				  const struct drm_connector_state *conn_state)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	enum port port = encoder->port;
	enum phy phy = intel_port_to_phy(dev_priv, port);
	struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
	bool is_mst = intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP_MST);
	int level = intel_ddi_dp_level(intel_dp);

	if (INTEL_GEN(dev_priv) < 11)
		drm_WARN_ON(&dev_priv->drm,
			    is_mst && (port == PORT_A || port == PORT_E));
	else
		drm_WARN_ON(&dev_priv->drm, is_mst && port == PORT_A);

	intel_dp_set_link_params(intel_dp,
				 crtc_state->port_clock,
				 crtc_state->lane_count);

	intel_edp_panel_on(intel_dp);

	intel_ddi_clk_select(encoder, crtc_state);

	if (!intel_phy_is_tc(dev_priv, phy) ||
	    dig_port->tc_mode != TC_PORT_TBT_ALT) {
		drm_WARN_ON(&dev_priv->drm, dig_port->ddi_io_wakeref);
		dig_port->ddi_io_wakeref = intel_display_power_get(dev_priv,
								   dig_port->ddi_io_power_domain);
	}

	icl_program_mg_dp_mode(dig_port, crtc_state);

	if (INTEL_GEN(dev_priv) >= 11)
		icl_ddi_vswing_sequence(encoder, crtc_state, level);
	else if (IS_CANNONLAKE(dev_priv))
		cnl_ddi_vswing_sequence(encoder, crtc_state, level);
	else if (IS_GEN9_LP(dev_priv))
		bxt_ddi_vswing_sequence(encoder, crtc_state, level);
	else
		intel_prepare_dp_ddi_buffers(encoder, crtc_state);

	if (intel_phy_is_combo(dev_priv, phy)) {
		bool lane_reversal =
			dig_port->saved_port_bits & DDI_BUF_PORT_REVERSAL;

		intel_combo_phy_power_up_lanes(dev_priv, phy, false,
					       crtc_state->lane_count,
					       lane_reversal);
	}

	intel_ddi_init_dp_buf_reg(encoder, crtc_state);
	if (!is_mst)
		intel_dp_set_power(intel_dp, DP_SET_POWER_D0);
	intel_dp_configure_protocol_converter(intel_dp, crtc_state);
	intel_dp_sink_set_decompression_state(intel_dp, crtc_state,
					      true);
	intel_dp_sink_set_fec_ready(intel_dp, crtc_state);
	intel_dp_start_link_train(intel_dp, crtc_state);
	if ((port != PORT_A || INTEL_GEN(dev_priv) >= 9) &&
	    !is_trans_port_sync_mode(crtc_state))
		intel_dp_stop_link_train(intel_dp, crtc_state);

	intel_ddi_enable_fec(encoder, crtc_state);

	if (!is_mst)
		intel_ddi_enable_pipe_clock(encoder, crtc_state);

	if (!crtc_state->bigjoiner)
		intel_dsc_enable(encoder, crtc_state);
}

static void intel_ddi_pre_enable_dp(struct intel_atomic_state *state,
				    struct intel_encoder *encoder,
				    const struct intel_crtc_state *crtc_state,
				    const struct drm_connector_state *conn_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);

	if (INTEL_GEN(dev_priv) >= 12)
		tgl_ddi_pre_enable_dp(state, encoder, crtc_state, conn_state);
	else
		hsw_ddi_pre_enable_dp(state, encoder, crtc_state, conn_state);

	/*
	 * For MST, the MSA is set only after the Virtual Channel has been
	 * allocated, from the MST encoder's pre_enable callback.
	 */
	if (!intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP_MST)) {
		intel_ddi_set_dp_msa(crtc_state, conn_state);

		intel_dp_set_m_n(crtc_state, M1_N1);
	}
}

static void intel_ddi_pre_enable_hdmi(struct intel_atomic_state *state,
				      struct intel_encoder *encoder,
				      const struct intel_crtc_state *crtc_state,
				      const struct drm_connector_state *conn_state)
{
	struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
	struct intel_hdmi *intel_hdmi = &dig_port->hdmi;
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	int level = intel_ddi_hdmi_level(encoder, crtc_state);

	intel_dp_dual_mode_set_tmds_output(intel_hdmi, true);
	intel_ddi_clk_select(encoder, crtc_state);

	drm_WARN_ON(&dev_priv->drm, dig_port->ddi_io_wakeref);
	dig_port->ddi_io_wakeref = intel_display_power_get(dev_priv,
							   dig_port->ddi_io_power_domain);

	icl_program_mg_dp_mode(dig_port, crtc_state);

	if (INTEL_GEN(dev_priv) >= 12)
		tgl_ddi_vswing_sequence(encoder, crtc_state, level);
	else if (INTEL_GEN(dev_priv) == 11)
		icl_ddi_vswing_sequence(encoder, crtc_state, level);
	else if (IS_CANNONLAKE(dev_priv))
		cnl_ddi_vswing_sequence(encoder, crtc_state, level);
	else if (IS_GEN9_LP(dev_priv))
		bxt_ddi_vswing_sequence(encoder, crtc_state, level);
	else
		intel_prepare_hdmi_ddi_buffers(encoder, level);

	if (IS_GEN9_BC(dev_priv))
		skl_ddi_set_iboost(encoder, crtc_state, level);

	intel_ddi_enable_pipe_clock(encoder, crtc_state);

	dig_port->set_infoframes(encoder,
				 crtc_state->has_infoframe,
				 crtc_state, conn_state);
}

static void intel_ddi_pre_enable(struct intel_atomic_state *state,
				 struct intel_encoder *encoder,
				 const struct intel_crtc_state *crtc_state,
				 const struct drm_connector_state *conn_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum pipe pipe = crtc->pipe;

	/*
	 * When called from DP MST code:
	 * - conn_state will be NULL
	 * - encoder will be the main encoder (ie. mst->primary)
	 * - the main connector associated with this port
	 *   won't be active or linked to a crtc
	 * - crtc_state will be the state of the first stream to
	 *   be activated on this port, and it may not be the same
	 *   stream that will be deactivated last, but each stream
	 *   should have a state that is identical when it comes to
	 *   the DP link parameters
	 */

	drm_WARN_ON(&dev_priv->drm, crtc_state->has_pch_encoder);

	if (IS_DG1(dev_priv))
		dg1_map_plls_to_ports(encoder, crtc_state);
	else if (INTEL_GEN(dev_priv) >= 11)
		icl_map_plls_to_ports(encoder, crtc_state);

	intel_set_cpu_fifo_underrun_reporting(dev_priv, pipe, true);

	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) {
		intel_ddi_pre_enable_hdmi(state, encoder, crtc_state,
					  conn_state);
	} else {
		struct intel_digital_port *dig_port = enc_to_dig_port(encoder);

		intel_ddi_pre_enable_dp(state, encoder, crtc_state,
					conn_state);

		/* FIXME precompute everything properly */
		/* FIXME how do we turn infoframes off again? */
		if (dig_port->lspcon.active && dig_port->dp.has_hdmi_sink)
			dig_port->set_infoframes(encoder,
						 crtc_state->has_infoframe,
						 crtc_state, conn_state);
	}
}

static void intel_disable_ddi_buf(struct intel_encoder *encoder,
				  const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	enum port port = encoder->port;
	bool wait = false;
	u32 val;

	val = intel_de_read(dev_priv, DDI_BUF_CTL(port));
	if (val & DDI_BUF_CTL_ENABLE) {
		val &= ~DDI_BUF_CTL_ENABLE;
		intel_de_write(dev_priv, DDI_BUF_CTL(port), val);
		wait = true;
	}

	if (intel_crtc_has_dp_encoder(crtc_state)) {
		val = intel_de_read(dev_priv, dp_tp_ctl_reg(encoder, crtc_state));
		val &= ~(DP_TP_CTL_ENABLE | DP_TP_CTL_LINK_TRAIN_MASK);
		val |= DP_TP_CTL_LINK_TRAIN_PAT1;
		intel_de_write(dev_priv, dp_tp_ctl_reg(encoder, crtc_state), val);
	}

	/* Disable FEC in DP Sink */
	intel_ddi_disable_fec_state(encoder, crtc_state);

	if (wait)
		intel_wait_ddi_buf_idle(dev_priv, port);
}

static void intel_ddi_post_disable_dp(struct intel_atomic_state *state,
				      struct intel_encoder *encoder,
				      const struct intel_crtc_state *old_crtc_state,
				      const struct drm_connector_state *old_conn_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
	struct intel_dp *intel_dp = &dig_port->dp;
	bool is_mst = intel_crtc_has_type(old_crtc_state,
					  INTEL_OUTPUT_DP_MST);
	enum phy phy = intel_port_to_phy(dev_priv, encoder->port);

	if (!is_mst)
		intel_dp_set_infoframes(encoder, false,
					old_crtc_state, old_conn_state);

	/*
	 * Power down sink before disabling the port, otherwise we end
	 * up getting interrupts from the sink on detecting link loss.
	 */
	intel_dp_set_power(intel_dp, DP_SET_POWER_D3);

	if (INTEL_GEN(dev_priv) >= 12) {
		if (is_mst) {
			enum transcoder cpu_transcoder = old_crtc_state->cpu_transcoder;
			u32 val;

			val = intel_de_read(dev_priv,
					    TRANS_DDI_FUNC_CTL(cpu_transcoder));
			val &= ~(TGL_TRANS_DDI_PORT_MASK |
				 TRANS_DDI_MODE_SELECT_MASK);
			intel_de_write(dev_priv,
				       TRANS_DDI_FUNC_CTL(cpu_transcoder),
				       val);
		}
	} else {
		if (!is_mst)
			intel_ddi_disable_pipe_clock(old_crtc_state);
	}

	intel_disable_ddi_buf(encoder, old_crtc_state);

	/*
	 * From TGL spec: "If single stream or multi-stream master transcoder:
	 * Configure Transcoder Clock select to direct no clock to the
	 * transcoder"
	 */
	if (INTEL_GEN(dev_priv) >= 12)
		intel_ddi_disable_pipe_clock(old_crtc_state);

	intel_edp_panel_vdd_on(intel_dp);
	intel_edp_panel_off(intel_dp);

	if (!intel_phy_is_tc(dev_priv, phy) ||
	    dig_port->tc_mode != TC_PORT_TBT_ALT)
		intel_display_power_put(dev_priv,
					dig_port->ddi_io_power_domain,
					fetch_and_zero(&dig_port->ddi_io_wakeref));

	intel_ddi_clk_disable(encoder);
}

static void intel_ddi_post_disable_hdmi(struct intel_atomic_state *state,
					struct intel_encoder *encoder,
					const struct intel_crtc_state *old_crtc_state,
					const struct drm_connector_state *old_conn_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
	struct intel_hdmi *intel_hdmi = &dig_port->hdmi;

	dig_port->set_infoframes(encoder, false,
				 old_crtc_state, old_conn_state);

	intel_ddi_disable_pipe_clock(old_crtc_state);

	intel_disable_ddi_buf(encoder, old_crtc_state);

	intel_display_power_put(dev_priv,
				dig_port->ddi_io_power_domain,
				fetch_and_zero(&dig_port->ddi_io_wakeref));

	intel_ddi_clk_disable(encoder);

	intel_dp_dual_mode_set_tmds_output(intel_hdmi, false);
}

static void intel_ddi_post_disable(struct intel_atomic_state *state,
				   struct intel_encoder *encoder,
				   const struct intel_crtc_state *old_crtc_state,
				   const struct drm_connector_state *old_conn_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
	enum phy phy = intel_port_to_phy(dev_priv, encoder->port);
	bool is_tc_port = intel_phy_is_tc(dev_priv, phy);

	if (!intel_crtc_has_type(old_crtc_state, INTEL_OUTPUT_DP_MST)) {
		intel_crtc_vblank_off(old_crtc_state);

		intel_disable_pipe(old_crtc_state);

		intel_ddi_disable_transcoder_func(old_crtc_state);

		intel_dsc_disable(old_crtc_state);

		if (INTEL_GEN(dev_priv) >= 9)
			skl_scaler_disable(old_crtc_state);
		else
			ilk_pfit_disable(old_crtc_state);
	}

	if (old_crtc_state->bigjoiner_linked_crtc) {
		struct intel_atomic_state *state =
			to_intel_atomic_state(old_crtc_state->uapi.state);
		struct intel_crtc *slave =
			old_crtc_state->bigjoiner_linked_crtc;
		const struct intel_crtc_state *old_slave_crtc_state =
			intel_atomic_get_old_crtc_state(state, slave);

		intel_crtc_vblank_off(old_slave_crtc_state);
		trace_intel_pipe_disable(slave);

		intel_dsc_disable(old_slave_crtc_state);
		skl_scaler_disable(old_slave_crtc_state);
	}

	/*
	 * When called from DP MST code:
	 * - old_conn_state will be NULL
	 * - encoder will be the main encoder (ie. mst->primary)
	 * - the main connector associated with this port
	 *   won't be active or linked to a crtc
	 * - old_crtc_state will be the state of the last stream to
	 *   be deactivated on this port, and it may not be the same
	 *   stream that was activated last, but each stream
	 *   should have a state that is identical when it comes to
	 *   the DP link parameters
	 */

	if (intel_crtc_has_type(old_crtc_state, INTEL_OUTPUT_HDMI))
		intel_ddi_post_disable_hdmi(state, encoder, old_crtc_state,
					    old_conn_state);
	else
		intel_ddi_post_disable_dp(state, encoder, old_crtc_state,
					  old_conn_state);

	if (IS_DG1(dev_priv))
		dg1_unmap_plls_to_ports(encoder);
	else if (INTEL_GEN(dev_priv) >= 11)
		icl_unmap_plls_to_ports(encoder);

	if (intel_crtc_has_dp_encoder(old_crtc_state) || is_tc_port)
		intel_display_power_put(dev_priv,
					intel_ddi_main_link_aux_domain(dig_port),
					fetch_and_zero(&dig_port->aux_wakeref));

	if (is_tc_port)
		intel_tc_port_put_link(dig_port);
}

void intel_ddi_fdi_post_disable(struct intel_atomic_state *state,
				struct intel_encoder *encoder,
				const struct intel_crtc_state *old_crtc_state,
				const struct drm_connector_state *old_conn_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u32 val;

	/*
	 * Bspec lists this as both step 13 (before DDI_BUF_CTL disable)
	 * and step 18 (after clearing PORT_CLK_SEL). Based on a BUN,
	 * step 13 is the correct place for it. Step 18 is where it was
	 * originally before the BUN.
	 */
	val = intel_de_read(dev_priv, FDI_RX_CTL(PIPE_A));
	val &= ~FDI_RX_ENABLE;
	intel_de_write(dev_priv, FDI_RX_CTL(PIPE_A), val);

	intel_disable_ddi_buf(encoder, old_crtc_state);
	intel_ddi_clk_disable(encoder);

	val = intel_de_read(dev_priv, FDI_RX_MISC(PIPE_A));
	val &= ~(FDI_RX_PWRDN_LANE1_MASK | FDI_RX_PWRDN_LANE0_MASK);
	val |= FDI_RX_PWRDN_LANE1_VAL(2) | FDI_RX_PWRDN_LANE0_VAL(2);
	intel_de_write(dev_priv, FDI_RX_MISC(PIPE_A), val);

	val = intel_de_read(dev_priv, FDI_RX_CTL(PIPE_A));
	val &= ~FDI_PCDCLK;
	intel_de_write(dev_priv, FDI_RX_CTL(PIPE_A), val);

	val = intel_de_read(dev_priv, FDI_RX_CTL(PIPE_A));
	val &= ~FDI_RX_PLL_ENABLE;
	intel_de_write(dev_priv, FDI_RX_CTL(PIPE_A), val);
}

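/*
 * For transcoder port sync, switch the slaves out of link training before
 * the master: stop link training on every slave whose master transcoder
 * is this CRTC's transcoder, wait briefly, then stop it on the master.
 */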
static void trans_port_sync_stop_link_train(struct intel_atomic_state *state,
					    struct intel_encoder *encoder,
					    const struct intel_crtc_state *crtc_state)
{
	const struct drm_connector_state *conn_state;
	struct drm_connector *conn;
	int i;

	if (!crtc_state->sync_mode_slaves_mask)
		return;

	for_each_new_connector_in_state(&state->base, conn, conn_state, i) {
		struct intel_encoder *slave_encoder =
			to_intel_encoder(conn_state->best_encoder);
		struct intel_crtc *slave_crtc = to_intel_crtc(conn_state->crtc);
		const struct intel_crtc_state *slave_crtc_state;

		if (!slave_crtc)
			continue;

		slave_crtc_state =
			intel_atomic_get_new_crtc_state(state, slave_crtc);

		if (slave_crtc_state->master_transcoder !=
		    crtc_state->cpu_transcoder)
			continue;

		intel_dp_stop_link_train(enc_to_intel_dp(slave_encoder),
					 slave_crtc_state);
	}

	usleep_range(200, 400);

	intel_dp_stop_link_train(enc_to_intel_dp(encoder),
				 crtc_state);
}

static void intel_enable_ddi_dp(struct intel_atomic_state *state,
				struct intel_encoder *encoder,
				const struct intel_crtc_state *crtc_state,
				const struct drm_connector_state *conn_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
	enum port port = encoder->port;

	if (port == PORT_A && INTEL_GEN(dev_priv) < 9)
		intel_dp_stop_link_train(intel_dp, crtc_state);

	intel_edp_backlight_on(crtc_state, conn_state);
	intel_psr_enable(intel_dp, crtc_state, conn_state);

	if (!dig_port->lspcon.active || dig_port->dp.has_hdmi_sink)
		intel_dp_set_infoframes(encoder, true, crtc_state, conn_state);

	intel_edp_drrs_enable(intel_dp, crtc_state);

	if (crtc_state->has_audio)
		intel_audio_codec_enable(encoder, crtc_state, conn_state);

	trans_port_sync_stop_link_train(state, encoder, crtc_state);
}

static i915_reg_t
gen9_chicken_trans_reg_by_port(struct drm_i915_private *dev_priv,
			       enum port port)
{
	static const enum transcoder trans[] = {
		[PORT_A] = TRANSCODER_EDP,
		[PORT_B] = TRANSCODER_A,
		[PORT_C] = TRANSCODER_B,
		[PORT_D] = TRANSCODER_C,
		[PORT_E] = TRANSCODER_A,
	};

	drm_WARN_ON(&dev_priv->drm, INTEL_GEN(dev_priv) < 9);

	if (drm_WARN_ON(&dev_priv->drm, port < PORT_A || port > PORT_E))
		port = PORT_A;

	return CHICKEN_TRANS(trans[port]);
}

static void intel_enable_ddi_hdmi(struct intel_atomic_state *state,
				  struct intel_encoder *encoder,
				  const struct intel_crtc_state *crtc_state,
				  const struct drm_connector_state *conn_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
	struct drm_connector *connector = conn_state->connector;
	enum port port = encoder->port;

	if (!intel_hdmi_handle_sink_scrambling(encoder, connector,
					       crtc_state->hdmi_high_tmds_clock_ratio,
					       crtc_state->hdmi_scrambling))
		drm_dbg_kms(&dev_priv->drm,
			    "[CONNECTOR:%d:%s] Failed to configure sink scrambling/TMDS bit clock ratio\n",
			    connector->base.id, connector->name);

	/* Display WA #1143: skl,kbl,cfl */
	if (IS_GEN9_BC(dev_priv)) {
		/*
		 * For some reason these chicken bits have been
		 * stuffed into a transcoder register, even though
		 * the bits affect a specific DDI port rather than
		 * a specific transcoder.
		 */
		i915_reg_t reg = gen9_chicken_trans_reg_by_port(dev_priv, port);
		u32 val;

		val = intel_de_read(dev_priv, reg);

		if (port == PORT_E)
			val |= DDIE_TRAINING_OVERRIDE_ENABLE |
				DDIE_TRAINING_OVERRIDE_VALUE;
		else
			val |= DDI_TRAINING_OVERRIDE_ENABLE |
				DDI_TRAINING_OVERRIDE_VALUE;

		intel_de_write(dev_priv, reg, val);
		intel_de_posting_read(dev_priv, reg);

		udelay(1);

		if (port == PORT_E)
			val &= ~(DDIE_TRAINING_OVERRIDE_ENABLE |
				 DDIE_TRAINING_OVERRIDE_VALUE);
		else
			val &= ~(DDI_TRAINING_OVERRIDE_ENABLE |
				 DDI_TRAINING_OVERRIDE_VALUE);

		intel_de_write(dev_priv, reg, val);
	}

	/* In HDMI/DVI mode, the port width and swing/emphasis values
	 * are ignored so nothing special needs to be done besides
	 * enabling the port.
	 */
	intel_de_write(dev_priv, DDI_BUF_CTL(port),
		       dig_port->saved_port_bits | DDI_BUF_CTL_ENABLE);

	if (crtc_state->has_audio)
		intel_audio_codec_enable(encoder, crtc_state, conn_state);
}

static void intel_enable_ddi(struct intel_atomic_state *state,
			     struct intel_encoder *encoder,
			     const struct intel_crtc_state *crtc_state,
			     const struct drm_connector_state *conn_state)
{
	drm_WARN_ON(state->base.dev, crtc_state->has_pch_encoder);

	if (!crtc_state->bigjoiner_slave)
		intel_ddi_enable_transcoder_func(encoder, crtc_state);

	intel_enable_pipe(crtc_state);

	intel_crtc_vblank_on(crtc_state);

	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI))
		intel_enable_ddi_hdmi(state, encoder, crtc_state, conn_state);
	else
		intel_enable_ddi_dp(state, encoder, crtc_state, conn_state);

	/* Enable hdcp if it's desired */
	if (conn_state->content_protection ==
	    DRM_MODE_CONTENT_PROTECTION_DESIRED)
		intel_hdcp_enable(to_intel_connector(conn_state->connector),
				  crtc_state,
				  (u8)conn_state->hdcp_content_type);
}

static void intel_disable_ddi_dp(struct intel_atomic_state *state,
				 struct intel_encoder *encoder,
				 const struct intel_crtc_state *old_crtc_state,
				 const struct drm_connector_state *old_conn_state)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);

	intel_dp->link_trained = false;

	if (old_crtc_state->has_audio)
		intel_audio_codec_disable(encoder,
					  old_crtc_state, old_conn_state);

	intel_edp_drrs_disable(intel_dp, old_crtc_state);
	intel_psr_disable(intel_dp, old_crtc_state);
	intel_edp_backlight_off(old_conn_state);
	/* Disable the decompression in DP Sink */
	intel_dp_sink_set_decompression_state(intel_dp, old_crtc_state,
					      false);
}

static void intel_disable_ddi_hdmi(struct intel_atomic_state *state,
				   struct intel_encoder *encoder,
				   const struct intel_crtc_state *old_crtc_state,
				   const struct drm_connector_state *old_conn_state)
{
	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
	struct drm_connector *connector = old_conn_state->connector;

	if (old_crtc_state->has_audio)
		intel_audio_codec_disable(encoder,
					  old_crtc_state, old_conn_state);

	if (!intel_hdmi_handle_sink_scrambling(encoder, connector,
					       false, false))
		drm_dbg_kms(&i915->drm,
			    "[CONNECTOR:%d:%s] Failed to reset sink scrambling/TMDS bit clock ratio\n",
			    connector->base.id, connector->name);
}

static void intel_disable_ddi(struct intel_atomic_state *state,
			      struct intel_encoder *encoder,
			      const struct intel_crtc_state *old_crtc_state,
			      const struct drm_connector_state *old_conn_state)
{
	intel_hdcp_disable(to_intel_connector(old_conn_state->connector));

	if (intel_crtc_has_type(old_crtc_state, INTEL_OUTPUT_HDMI))
		intel_disable_ddi_hdmi(state, encoder, old_crtc_state,
				       old_conn_state);
	else
		intel_disable_ddi_dp(state, encoder, old_crtc_state,
				     old_conn_state);
}

static void intel_ddi_update_pipe_dp(struct intel_atomic_state *state,
				     struct intel_encoder *encoder,
				     const struct intel_crtc_state *crtc_state,
				     const struct drm_connector_state *conn_state)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);

	intel_ddi_set_dp_msa(crtc_state, conn_state);

	intel_psr_update(intel_dp, crtc_state, conn_state);
	intel_dp_set_infoframes(encoder, true, crtc_state, conn_state);
	intel_edp_drrs_update(intel_dp, crtc_state);

	intel_panel_update_backlight(state, encoder, crtc_state, conn_state);
}

void intel_ddi_update_pipe(struct intel_atomic_state *state,
			   struct intel_encoder *encoder,
			   const struct intel_crtc_state *crtc_state,
			   const struct drm_connector_state *conn_state)
{
	if (!intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI) &&
	    !intel_encoder_is_mst(encoder))
		intel_ddi_update_pipe_dp(state, encoder, crtc_state,
					 conn_state);

	intel_hdcp_update_pipe(state, encoder, crtc_state, conn_state);
}

4418 4419 4420 4421 4422 4423 4424 4425 4426
static void
intel_ddi_update_prepare(struct intel_atomic_state *state,
			 struct intel_encoder *encoder,
			 struct intel_crtc *crtc)
{
	struct intel_crtc_state *crtc_state =
		crtc ? intel_atomic_get_new_crtc_state(state, crtc) : NULL;
	int required_lanes = crtc_state ? crtc_state->lane_count : 1;

4427
	drm_WARN_ON(state->base.dev, crtc && crtc->active);
4428

4429 4430
	intel_tc_port_get_link(enc_to_dig_port(encoder),
		               required_lanes);
4431
	if (crtc_state && crtc_state->hw.active)
4432 4433 4434 4435 4436 4437 4438 4439
		intel_update_active_dpll(state, crtc, encoder);
}

static void
intel_ddi_update_complete(struct intel_atomic_state *state,
			  struct intel_encoder *encoder,
			  struct intel_crtc *crtc)
{
	intel_tc_port_put_link(enc_to_dig_port(encoder));
}

static void
intel_ddi_pre_pll_enable(struct intel_atomic_state *state,
			 struct intel_encoder *encoder,
			 const struct intel_crtc_state *crtc_state,
			 const struct drm_connector_state *conn_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
	enum phy phy = intel_port_to_phy(dev_priv, encoder->port);
	bool is_tc_port = intel_phy_is_tc(dev_priv, phy);

	if (is_tc_port)
		intel_tc_port_get_link(dig_port, crtc_state->lane_count);

	if (intel_crtc_has_dp_encoder(crtc_state) || is_tc_port) {
		drm_WARN_ON(&dev_priv->drm, dig_port->aux_wakeref);
		dig_port->aux_wakeref =
			intel_display_power_get(dev_priv,
						intel_ddi_main_link_aux_domain(dig_port));
	}

	if (is_tc_port && dig_port->tc_mode != TC_PORT_TBT_ALT)
		/*
		 * Program the lane count for static/dynamic connections on
		 * Type-C ports.  Skip this step for TBT.
		 */
		intel_tc_port_set_fia_lane_count(dig_port, crtc_state->lane_count);
	else if (IS_GEN9_LP(dev_priv))
		bxt_ddi_phy_set_lane_optim_mask(encoder,
						crtc_state->lane_lat_optim_mask);
}

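/*
 * Put the port back into a known state before link retraining: if the DP
 * transport and DDI buffer are currently enabled, disable them with
 * training pattern 1 selected and wait for the buffer to idle, then
 * re-enable the transport (SST or MST mode as appropriate) and the buffer
 * and wait for it to become active again.
 */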
static void intel_ddi_prepare_link_retrain(struct intel_dp *intel_dp,
					   const struct intel_crtc_state *crtc_state)
{
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	enum port port = encoder->port;
	u32 dp_tp_ctl, ddi_buf_ctl;
	bool wait = false;

	dp_tp_ctl = intel_de_read(dev_priv, dp_tp_ctl_reg(encoder, crtc_state));

	if (dp_tp_ctl & DP_TP_CTL_ENABLE) {
		ddi_buf_ctl = intel_de_read(dev_priv, DDI_BUF_CTL(port));
		if (ddi_buf_ctl & DDI_BUF_CTL_ENABLE) {
			intel_de_write(dev_priv, DDI_BUF_CTL(port),
				       ddi_buf_ctl & ~DDI_BUF_CTL_ENABLE);
			wait = true;
		}

		dp_tp_ctl &= ~(DP_TP_CTL_ENABLE | DP_TP_CTL_LINK_TRAIN_MASK);
		dp_tp_ctl |= DP_TP_CTL_LINK_TRAIN_PAT1;
		intel_de_write(dev_priv, dp_tp_ctl_reg(encoder, crtc_state), dp_tp_ctl);
		intel_de_posting_read(dev_priv, dp_tp_ctl_reg(encoder, crtc_state));

		if (wait)
			intel_wait_ddi_buf_idle(dev_priv, port);
	}

	dp_tp_ctl = DP_TP_CTL_ENABLE | DP_TP_CTL_LINK_TRAIN_PAT1;
	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP_MST)) {
		dp_tp_ctl |= DP_TP_CTL_MODE_MST;
	} else {
		dp_tp_ctl |= DP_TP_CTL_MODE_SST;
		if (drm_dp_enhanced_frame_cap(intel_dp->dpcd))
			dp_tp_ctl |= DP_TP_CTL_ENHANCED_FRAME_ENABLE;
	}
	intel_de_write(dev_priv, dp_tp_ctl_reg(encoder, crtc_state), dp_tp_ctl);
	intel_de_posting_read(dev_priv, dp_tp_ctl_reg(encoder, crtc_state));

	intel_dp->DP |= DDI_BUF_CTL_ENABLE;
	intel_de_write(dev_priv, DDI_BUF_CTL(port), intel_dp->DP);
	intel_de_posting_read(dev_priv, DDI_BUF_CTL(port));

	intel_wait_ddi_buf_active(dev_priv, port);
}

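/*
 * Select the requested DP training pattern (or normal pixel transmission)
 * in the DP_TP_CTL register.
 */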
static void intel_ddi_set_link_train(struct intel_dp *intel_dp,
				     const struct intel_crtc_state *crtc_state,
				     u8 dp_train_pat)
{
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u32 temp;

	temp = intel_de_read(dev_priv, dp_tp_ctl_reg(encoder, crtc_state));

	temp &= ~DP_TP_CTL_LINK_TRAIN_MASK;
	switch (intel_dp_training_pattern_symbol(dp_train_pat)) {
	case DP_TRAINING_PATTERN_DISABLE:
		temp |= DP_TP_CTL_LINK_TRAIN_NORMAL;
		break;
	case DP_TRAINING_PATTERN_1:
		temp |= DP_TP_CTL_LINK_TRAIN_PAT1;
		break;
	case DP_TRAINING_PATTERN_2:
		temp |= DP_TP_CTL_LINK_TRAIN_PAT2;
		break;
	case DP_TRAINING_PATTERN_3:
		temp |= DP_TP_CTL_LINK_TRAIN_PAT3;
		break;
	case DP_TRAINING_PATTERN_4:
		temp |= DP_TP_CTL_LINK_TRAIN_PAT4;
		break;
	}

	intel_de_write(dev_priv, dp_tp_ctl_reg(encoder, crtc_state), temp);
}

static void intel_ddi_set_idle_link_train(struct intel_dp *intel_dp,
					  const struct intel_crtc_state *crtc_state)
{
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	enum port port = encoder->port;
	u32 val;

	val = intel_de_read(dev_priv, dp_tp_ctl_reg(encoder, crtc_state));
	val &= ~DP_TP_CTL_LINK_TRAIN_MASK;
	val |= DP_TP_CTL_LINK_TRAIN_IDLE;
	intel_de_write(dev_priv, dp_tp_ctl_reg(encoder, crtc_state), val);

	/*
	 * Until TGL on PORT_A we can have only eDP in SST mode. There the only
	 * reason we need to set idle transmission mode is to work around a HW
	 * issue where we enable the pipe while not in idle link-training mode.
	 * In this case there is a requirement to wait for a minimum number of
	 * idle patterns to be sent.
	 */
	if (port == PORT_A && INTEL_GEN(dev_priv) < 12)
		return;

	if (intel_de_wait_for_set(dev_priv,
				  dp_tp_status_reg(encoder, crtc_state),
				  DP_TP_STATUS_IDLE_DONE, 1))
		drm_err(&dev_priv->drm,
			"Timed out waiting for DP idle patterns\n");
}

static bool intel_ddi_is_audio_enabled(struct drm_i915_private *dev_priv,
				       enum transcoder cpu_transcoder)
{
	if (cpu_transcoder == TRANSCODER_EDP)
		return false;

	if (!intel_display_power_is_enabled(dev_priv, POWER_DOMAIN_AUDIO))
		return false;

	return intel_de_read(dev_priv, HSW_AUD_PIN_ELD_CP_VLD) &
		AUDIO_OUTPUT_ENABLE(cpu_transcoder);
}

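/*
 * Port clocks above 594 MHz (port_clock is in kHz) need an elevated
 * voltage level; how much higher depends on the platform.
 */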
void intel_ddi_compute_min_voltage_level(struct drm_i915_private *dev_priv,
					 struct intel_crtc_state *crtc_state)
{
	if (INTEL_GEN(dev_priv) >= 12 && crtc_state->port_clock > 594000)
		crtc_state->min_voltage_level = 2;
	else if (IS_JSL_EHL(dev_priv) && crtc_state->port_clock > 594000)
		crtc_state->min_voltage_level = 3;
	else if (INTEL_GEN(dev_priv) >= 11 && crtc_state->port_clock > 594000)
		crtc_state->min_voltage_level = 1;
	else if (IS_CANNONLAKE(dev_priv) && crtc_state->port_clock > 594000)
		crtc_state->min_voltage_level = 2;
}

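/*
 * Read back the port sync master transcoder for the given transcoder, or
 * INVALID_TRANSCODER if port sync mode is not enabled on it.
 */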
static enum transcoder bdw_transcoder_master_readout(struct drm_i915_private *dev_priv,
						     enum transcoder cpu_transcoder)
{
	u32 master_select;

	if (INTEL_GEN(dev_priv) >= 11) {
		u32 ctl2 = intel_de_read(dev_priv, TRANS_DDI_FUNC_CTL2(cpu_transcoder));

		if ((ctl2 & PORT_SYNC_MODE_ENABLE) == 0)
			return INVALID_TRANSCODER;

		master_select = REG_FIELD_GET(PORT_SYNC_MODE_MASTER_SELECT_MASK, ctl2);
	} else {
		u32 ctl = intel_de_read(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder));

		if ((ctl & TRANS_DDI_PORT_SYNC_ENABLE) == 0)
			return INVALID_TRANSCODER;

		master_select = REG_FIELD_GET(TRANS_DDI_PORT_SYNC_MASTER_SELECT_MASK, ctl);
	}

	if (master_select == 0)
		return TRANSCODER_EDP;
	else
		return master_select - 1;
}

static void bdw_get_trans_port_sync_config(struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev);
	u32 transcoders = BIT(TRANSCODER_A) | BIT(TRANSCODER_B) |
		BIT(TRANSCODER_C) | BIT(TRANSCODER_D);
	enum transcoder cpu_transcoder;

	crtc_state->master_transcoder =
		bdw_transcoder_master_readout(dev_priv, crtc_state->cpu_transcoder);

	for_each_cpu_transcoder_masked(dev_priv, cpu_transcoder, transcoders) {
		enum intel_display_power_domain power_domain;
		intel_wakeref_t trans_wakeref;

		power_domain = POWER_DOMAIN_TRANSCODER(cpu_transcoder);
		trans_wakeref = intel_display_power_get_if_enabled(dev_priv,
								   power_domain);

		if (!trans_wakeref)
			continue;

		if (bdw_transcoder_master_readout(dev_priv, cpu_transcoder) ==
		    crtc_state->cpu_transcoder)
			crtc_state->sync_mode_slaves_mask |= BIT(cpu_transcoder);

		intel_display_power_put(dev_priv, power_domain, trans_wakeref);
	}

	drm_WARN_ON(&dev_priv->drm,
		    crtc_state->master_transcoder != INVALID_TRANSCODER &&
		    crtc_state->sync_mode_slaves_mask);
}

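/*
 * Read back the transcoder state from TRANS_DDI_FUNC_CTL: sync polarities,
 * bpc, the output type (HDMI/DVI, FDI, DP SST or DP MST), the lane count
 * and the associated infoframe/FEC state.
 */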
static void intel_ddi_read_func_ctl(struct intel_encoder *encoder,
				    struct intel_crtc_state *pipe_config)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_crtc *intel_crtc = to_intel_crtc(pipe_config->uapi.crtc);
	enum transcoder cpu_transcoder = pipe_config->cpu_transcoder;
	struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
	u32 temp, flags = 0;

	temp = intel_de_read(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder));
	if (temp & TRANS_DDI_PHSYNC)
		flags |= DRM_MODE_FLAG_PHSYNC;
	else
		flags |= DRM_MODE_FLAG_NHSYNC;
	if (temp & TRANS_DDI_PVSYNC)
		flags |= DRM_MODE_FLAG_PVSYNC;
	else
		flags |= DRM_MODE_FLAG_NVSYNC;

	pipe_config->hw.adjusted_mode.flags |= flags;

	switch (temp & TRANS_DDI_BPC_MASK) {
	case TRANS_DDI_BPC_6:
		pipe_config->pipe_bpp = 18;
		break;
	case TRANS_DDI_BPC_8:
		pipe_config->pipe_bpp = 24;
		break;
	case TRANS_DDI_BPC_10:
		pipe_config->pipe_bpp = 30;
		break;
	case TRANS_DDI_BPC_12:
		pipe_config->pipe_bpp = 36;
		break;
	default:
		break;
	}

	switch (temp & TRANS_DDI_MODE_SELECT_MASK) {
	case TRANS_DDI_MODE_SELECT_HDMI:
		pipe_config->has_hdmi_sink = true;

		pipe_config->infoframes.enable |=
			intel_hdmi_infoframes_enabled(encoder, pipe_config);

		if (pipe_config->infoframes.enable)
			pipe_config->has_infoframe = true;

		if (temp & TRANS_DDI_HDMI_SCRAMBLING)
			pipe_config->hdmi_scrambling = true;
		if (temp & TRANS_DDI_HIGH_TMDS_CHAR_RATE)
			pipe_config->hdmi_high_tmds_clock_ratio = true;
		fallthrough;
	case TRANS_DDI_MODE_SELECT_DVI:
		pipe_config->output_types |= BIT(INTEL_OUTPUT_HDMI);
		pipe_config->lane_count = 4;
		break;
	case TRANS_DDI_MODE_SELECT_FDI:
		pipe_config->output_types |= BIT(INTEL_OUTPUT_ANALOG);
		break;
	case TRANS_DDI_MODE_SELECT_DP_SST:
		if (encoder->type == INTEL_OUTPUT_EDP)
			pipe_config->output_types |= BIT(INTEL_OUTPUT_EDP);
		else
			pipe_config->output_types |= BIT(INTEL_OUTPUT_DP);
		pipe_config->lane_count =
			((temp & DDI_PORT_WIDTH_MASK) >> DDI_PORT_WIDTH_SHIFT) + 1;
		intel_dp_get_m_n(intel_crtc, pipe_config);

		if (INTEL_GEN(dev_priv) >= 11) {
			i915_reg_t dp_tp_ctl = dp_tp_ctl_reg(encoder, pipe_config);

			pipe_config->fec_enable =
				intel_de_read(dev_priv, dp_tp_ctl) & DP_TP_CTL_FEC_ENABLE;

			drm_dbg_kms(&dev_priv->drm,
				    "[ENCODER:%d:%s] Fec status: %u\n",
				    encoder->base.base.id, encoder->base.name,
				    pipe_config->fec_enable);
		}

		if (dig_port->lspcon.active && dig_port->dp.has_hdmi_sink)
			pipe_config->infoframes.enable |=
				intel_lspcon_infoframes_enabled(encoder, pipe_config);
		else
			pipe_config->infoframes.enable |=
				intel_hdmi_infoframes_enabled(encoder, pipe_config);
		break;
	case TRANS_DDI_MODE_SELECT_DP_MST:
		pipe_config->output_types |= BIT(INTEL_OUTPUT_DP_MST);
		pipe_config->lane_count =
			((temp & DDI_PORT_WIDTH_MASK) >> DDI_PORT_WIDTH_SHIFT) + 1;

		if (INTEL_GEN(dev_priv) >= 12)
			pipe_config->mst_master_transcoder =
					REG_FIELD_GET(TRANS_DDI_MST_TRANSPORT_SELECT_MASK, temp);

		intel_dp_get_m_n(intel_crtc, pipe_config);

		pipe_config->infoframes.enable |=
			intel_hdmi_infoframes_enabled(encoder, pipe_config);
		break;
	default:
		break;
	}
}

void intel_ddi_get_config(struct intel_encoder *encoder,
			  struct intel_crtc_state *pipe_config)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	enum transcoder cpu_transcoder = pipe_config->cpu_transcoder;

	/* XXX: DSI transcoder paranoia */
	if (drm_WARN_ON(&dev_priv->drm, transcoder_is_dsi(cpu_transcoder)))
		return;

	if (pipe_config->bigjoiner_slave) {
		/* read out pipe settings from master */
		enum transcoder save = pipe_config->cpu_transcoder;

		/* Our own transcoder needs to be disabled when reading it in intel_ddi_read_func_ctl() */
		WARN_ON(pipe_config->output_types);
		pipe_config->cpu_transcoder = (enum transcoder)pipe_config->bigjoiner_linked_crtc->pipe;
		intel_ddi_read_func_ctl(encoder, pipe_config);
		pipe_config->cpu_transcoder = save;
	} else {
		intel_ddi_read_func_ctl(encoder, pipe_config);
	}

	pipe_config->has_audio =
		intel_ddi_is_audio_enabled(dev_priv, cpu_transcoder);

	if (encoder->type == INTEL_OUTPUT_EDP && dev_priv->vbt.edp.bpp &&
	    pipe_config->pipe_bpp > dev_priv->vbt.edp.bpp) {
		/*
		 * This is a big fat ugly hack.
		 *
		 * Some machines in UEFI boot mode provide us a VBT that has 18
		 * bpp and 1.62 GHz link bandwidth for eDP, which for reasons
		 * unknown we fail to light up. Yet the same BIOS boots up with
		 * 24 bpp and 2.7 GHz link. Use the same bpp as the BIOS uses as
		 * max, not what it tells us to use.
		 *
		 * Note: This will still be broken if the eDP panel is not lit
		 * up by the BIOS, and thus we can't get the mode at module
		 * load.
		 */
		drm_dbg_kms(&dev_priv->drm,
			    "pipe has %d bpp for eDP panel, overriding BIOS-provided max %d bpp\n",
			    pipe_config->pipe_bpp, dev_priv->vbt.edp.bpp);
		dev_priv->vbt.edp.bpp = pipe_config->pipe_bpp;
	}

	if (!pipe_config->bigjoiner_slave)
		intel_ddi_clock_get(encoder, pipe_config);

	if (IS_GEN9_LP(dev_priv))
		pipe_config->lane_lat_optim_mask =
			bxt_ddi_phy_get_lane_lat_optim_mask(encoder);

	intel_ddi_compute_min_voltage_level(dev_priv, pipe_config);

	intel_hdmi_read_gcp_infoframe(encoder, pipe_config);

	intel_read_infoframe(encoder, pipe_config,
			     HDMI_INFOFRAME_TYPE_AVI,
			     &pipe_config->infoframes.avi);
	intel_read_infoframe(encoder, pipe_config,
			     HDMI_INFOFRAME_TYPE_SPD,
			     &pipe_config->infoframes.spd);
	intel_read_infoframe(encoder, pipe_config,
			     HDMI_INFOFRAME_TYPE_VENDOR,
			     &pipe_config->infoframes.hdmi);
	intel_read_infoframe(encoder, pipe_config,
			     HDMI_INFOFRAME_TYPE_DRM,
			     &pipe_config->infoframes.drm);

	if (INTEL_GEN(dev_priv) >= 8)
		bdw_get_trans_port_sync_config(pipe_config);

	intel_read_dp_sdp(encoder, pipe_config, HDMI_PACKET_TYPE_GAMUT_METADATA);
	intel_read_dp_sdp(encoder, pipe_config, DP_SDP_VSC);
}

static void intel_ddi_sync_state(struct intel_encoder *encoder,
				 const struct intel_crtc_state *crtc_state)
{
	if (intel_crtc_has_dp_encoder(crtc_state))
		intel_dp_sync_state(encoder, crtc_state);
}

static bool intel_ddi_initial_fastset_check(struct intel_encoder *encoder,
					    struct intel_crtc_state *crtc_state)
{
	if (intel_crtc_has_dp_encoder(crtc_state))
		return intel_dp_initial_fastset_check(encoder, crtc_state);

	return true;
}

static enum intel_output_type
intel_ddi_compute_output_type(struct intel_encoder *encoder,
			      struct intel_crtc_state *crtc_state,
			      struct drm_connector_state *conn_state)
{
	switch (conn_state->connector->connector_type) {
	case DRM_MODE_CONNECTOR_HDMIA:
		return INTEL_OUTPUT_HDMI;
	case DRM_MODE_CONNECTOR_eDP:
		return INTEL_OUTPUT_EDP;
	case DRM_MODE_CONNECTOR_DisplayPort:
		return INTEL_OUTPUT_DP;
	default:
		MISSING_CASE(conn_state->connector->connector_type);
		return INTEL_OUTPUT_UNUSED;
	}
}

static int intel_ddi_compute_config(struct intel_encoder *encoder,
				    struct intel_crtc_state *pipe_config,
				    struct drm_connector_state *conn_state)
{
	struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	enum port port = encoder->port;
	int ret;

	if (HAS_TRANSCODER(dev_priv, TRANSCODER_EDP) && port == PORT_A)
		pipe_config->cpu_transcoder = TRANSCODER_EDP;

	if (intel_crtc_has_type(pipe_config, INTEL_OUTPUT_HDMI)) {
		ret = intel_hdmi_compute_config(encoder, pipe_config, conn_state);
	} else {
		ret = intel_dp_compute_config(encoder, pipe_config, conn_state);
	}

	if (ret)
		return ret;

	if (IS_HASWELL(dev_priv) && crtc->pipe == PIPE_A &&
	    pipe_config->cpu_transcoder == TRANSCODER_EDP)
		pipe_config->pch_pfit.force_thru =
			pipe_config->pch_pfit.enabled ||
			pipe_config->crc_enabled;

	if (IS_GEN9_LP(dev_priv))
		pipe_config->lane_lat_optim_mask =
			bxt_ddi_phy_calc_lane_lat_optim_mask(pipe_config->lane_count);

	intel_ddi_compute_min_voltage_level(dev_priv, pipe_config);

	return 0;
}

static bool mode_equal(const struct drm_display_mode *mode1,
		       const struct drm_display_mode *mode2)
{
	return drm_mode_match(mode1, mode2,
			      DRM_MODE_MATCH_TIMINGS |
			      DRM_MODE_MATCH_FLAGS |
			      DRM_MODE_MATCH_3D_FLAGS) &&
		mode1->clock == mode2->clock; /* we want an exact match */
}

static bool m_n_equal(const struct intel_link_m_n *m_n_1,
		      const struct intel_link_m_n *m_n_2)
{
	return m_n_1->tu == m_n_2->tu &&
		m_n_1->gmch_m == m_n_2->gmch_m &&
		m_n_1->gmch_n == m_n_2->gmch_n &&
		m_n_1->link_m == m_n_2->link_m &&
		m_n_1->link_n == m_n_2->link_n;
}

static bool crtcs_port_sync_compatible(const struct intel_crtc_state *crtc_state1,
				       const struct intel_crtc_state *crtc_state2)
{
	return crtc_state1->hw.active && crtc_state2->hw.active &&
		crtc_state1->output_types == crtc_state2->output_types &&
		crtc_state1->output_format == crtc_state2->output_format &&
		crtc_state1->lane_count == crtc_state2->lane_count &&
		crtc_state1->port_clock == crtc_state2->port_clock &&
		mode_equal(&crtc_state1->hw.adjusted_mode,
			   &crtc_state2->hw.adjusted_mode) &&
		m_n_equal(&crtc_state1->dp_m_n, &crtc_state2->dp_m_n);
}

static u8
intel_ddi_port_sync_transcoders(const struct intel_crtc_state *ref_crtc_state,
				int tile_group_id)
{
	struct drm_connector *connector;
	const struct drm_connector_state *conn_state;
	struct drm_i915_private *dev_priv = to_i915(ref_crtc_state->uapi.crtc->dev);
	struct intel_atomic_state *state =
		to_intel_atomic_state(ref_crtc_state->uapi.state);
	u8 transcoders = 0;
	int i;

	/*
	 * We don't enable port sync on BDW due to missing w/as and
	 * due to not having adjusted the modeset sequence appropriately.
	 */
	if (INTEL_GEN(dev_priv) < 9)
		return 0;

	if (!intel_crtc_has_type(ref_crtc_state, INTEL_OUTPUT_DP))
		return 0;

	for_each_new_connector_in_state(&state->base, connector, conn_state, i) {
		struct intel_crtc *crtc = to_intel_crtc(conn_state->crtc);
		const struct intel_crtc_state *crtc_state;

		if (!crtc)
			continue;

		if (!connector->has_tile ||
		    connector->tile_group->id !=
		    tile_group_id)
			continue;
		crtc_state = intel_atomic_get_new_crtc_state(state,
							     crtc);
		if (!crtcs_port_sync_compatible(ref_crtc_state,
						crtc_state))
			continue;
		transcoders |= BIT(crtc_state->cpu_transcoder);
	}

	return transcoders;
}

static int intel_ddi_compute_config_late(struct intel_encoder *encoder,
					 struct intel_crtc_state *crtc_state,
					 struct drm_connector_state *conn_state)
{
	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
	struct drm_connector *connector = conn_state->connector;
	u8 port_sync_transcoders = 0;

	drm_dbg_kms(&i915->drm, "[ENCODER:%d:%s] [CRTC:%d:%s]",
		    encoder->base.base.id, encoder->base.name,
		    crtc_state->uapi.crtc->base.id, crtc_state->uapi.crtc->name);

	if (connector->has_tile)
		port_sync_transcoders = intel_ddi_port_sync_transcoders(crtc_state,
									connector->tile_group->id);

	/*
	 * EDP transcoders cannot be enslaved;
	 * always make them a master when present.
	 */
	if (port_sync_transcoders & BIT(TRANSCODER_EDP))
		crtc_state->master_transcoder = TRANSCODER_EDP;
	else
		crtc_state->master_transcoder = ffs(port_sync_transcoders) - 1;

	if (crtc_state->master_transcoder == crtc_state->cpu_transcoder) {
		crtc_state->master_transcoder = INVALID_TRANSCODER;
		crtc_state->sync_mode_slaves_mask =
			port_sync_transcoders & ~BIT(crtc_state->cpu_transcoder);
	}

	return 0;
}

static void intel_ddi_encoder_destroy(struct drm_encoder *encoder)
{
	struct intel_digital_port *dig_port = enc_to_dig_port(to_intel_encoder(encoder));

	intel_dp_encoder_flush_work(encoder);

	drm_encoder_cleanup(encoder);
	kfree(dig_port);
}

static const struct drm_encoder_funcs intel_ddi_funcs = {
	.reset = intel_dp_encoder_reset,
	.destroy = intel_ddi_encoder_destroy,
};

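/*
 * Create the DP connector for a DDI port and hook up the DDI specific
 * link retraining, training pattern and signal level callbacks.
 */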
static struct intel_connector *
intel_ddi_init_dp_connector(struct intel_digital_port *dig_port)
{
	struct drm_i915_private *dev_priv = to_i915(dig_port->base.base.dev);
	struct intel_connector *connector;
	enum port port = dig_port->base.port;

	connector = intel_connector_alloc();
	if (!connector)
		return NULL;

	dig_port->dp.output_reg = DDI_BUF_CTL(port);
	dig_port->dp.prepare_link_retrain = intel_ddi_prepare_link_retrain;
	dig_port->dp.set_link_train = intel_ddi_set_link_train;
	dig_port->dp.set_idle_link_train = intel_ddi_set_idle_link_train;

	if (INTEL_GEN(dev_priv) >= 12)
		dig_port->dp.set_signal_levels = tgl_set_signal_levels;
	else if (INTEL_GEN(dev_priv) >= 11)
		dig_port->dp.set_signal_levels = icl_set_signal_levels;
	else if (IS_CANNONLAKE(dev_priv))
		dig_port->dp.set_signal_levels = cnl_set_signal_levels;
	else if (IS_GEN9_LP(dev_priv))
		dig_port->dp.set_signal_levels = bxt_set_signal_levels;
	else
		dig_port->dp.set_signal_levels = hsw_set_signal_levels;

	dig_port->dp.voltage_max = intel_ddi_dp_voltage_max;
	dig_port->dp.preemph_max = intel_ddi_dp_preemph_max;

	if (!intel_dp_init_connector(dig_port, connector)) {
		kfree(connector);
		return NULL;
	}

	return connector;
}

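/*
 * Force a full modeset on the given CRTC by committing an atomic state
 * with connectors_changed set.
 */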
static int modeset_pipe(struct drm_crtc *crtc,
			struct drm_modeset_acquire_ctx *ctx)
{
	struct drm_atomic_state *state;
	struct drm_crtc_state *crtc_state;
	int ret;

	state = drm_atomic_state_alloc(crtc->dev);
	if (!state)
		return -ENOMEM;

	state->acquire_ctx = ctx;

	crtc_state = drm_atomic_get_crtc_state(state, crtc);
	if (IS_ERR(crtc_state)) {
		ret = PTR_ERR(crtc_state);
		goto out;
	}

	crtc_state->connectors_changed = true;

	ret = drm_atomic_commit(state);
out:
	drm_atomic_state_put(state);

	return ret;
}

static int intel_hdmi_reset_link(struct intel_encoder *encoder,
				 struct drm_modeset_acquire_ctx *ctx)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_hdmi *hdmi = enc_to_intel_hdmi(encoder);
	struct intel_connector *connector = hdmi->attached_connector;
	struct i2c_adapter *adapter =
		intel_gmbus_get_adapter(dev_priv, hdmi->ddc_bus);
	struct drm_connector_state *conn_state;
	struct intel_crtc_state *crtc_state;
	struct intel_crtc *crtc;
	u8 config;
	int ret;

	if (!connector || connector->base.status != connector_status_connected)
		return 0;

	ret = drm_modeset_lock(&dev_priv->drm.mode_config.connection_mutex,
			       ctx);
	if (ret)
		return ret;

	conn_state = connector->base.state;

	crtc = to_intel_crtc(conn_state->crtc);
	if (!crtc)
		return 0;

	ret = drm_modeset_lock(&crtc->base.mutex, ctx);
	if (ret)
		return ret;

	crtc_state = to_intel_crtc_state(crtc->base.state);

	drm_WARN_ON(&dev_priv->drm,
		    !intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI));

	if (!crtc_state->hw.active)
		return 0;

	if (!crtc_state->hdmi_high_tmds_clock_ratio &&
	    !crtc_state->hdmi_scrambling)
		return 0;

	if (conn_state->commit &&
	    !try_wait_for_completion(&conn_state->commit->hw_done))
		return 0;

	ret = drm_scdc_readb(adapter, SCDC_TMDS_CONFIG, &config);
	if (ret < 0) {
		drm_err(&dev_priv->drm, "Failed to read TMDS config: %d\n",
			ret);
		return 0;
	}

	if (!!(config & SCDC_TMDS_BIT_CLOCK_RATIO_BY_40) ==
	    crtc_state->hdmi_high_tmds_clock_ratio &&
	    !!(config & SCDC_SCRAMBLING_ENABLE) ==
	    crtc_state->hdmi_scrambling)
		return 0;

	/*
	 * HDMI 2.0 says that one should not send scrambled data
	 * prior to configuring the sink scrambling, and that
	 * TMDS clock/data transmission should be suspended when
	 * changing the TMDS clock rate in the sink. So let's
	 * just do a full modeset here, even though some sinks
	 * would be perfectly happy if we were to just reconfigure
	 * the SCDC settings on the fly.
	 */
	return modeset_pipe(&crtc->base, ctx);
}

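/*
 * Hotplug handler for DDI ports: run the common hotplug handling, then
 * retrain the DP link or reset the HDMI link under the modeset locks,
 * retrying detection for sinks that are slow to come up (see below).
 */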
static enum intel_hotplug_state
intel_ddi_hotplug(struct intel_encoder *encoder,
		  struct intel_connector *connector)
{
	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
	struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
	enum phy phy = intel_port_to_phy(i915, encoder->port);
	bool is_tc = intel_phy_is_tc(i915, phy);
	struct drm_modeset_acquire_ctx ctx;
	enum intel_hotplug_state state;
	int ret;

	state = intel_encoder_hotplug(encoder, connector);

	drm_modeset_acquire_init(&ctx, 0);

	for (;;) {
		if (connector->base.connector_type == DRM_MODE_CONNECTOR_HDMIA)
			ret = intel_hdmi_reset_link(encoder, &ctx);
		else
			ret = intel_dp_retrain_link(encoder, &ctx);

		if (ret == -EDEADLK) {
			drm_modeset_backoff(&ctx);
			continue;
		}

		break;
	}

	drm_modeset_drop_locks(&ctx);
	drm_modeset_acquire_fini(&ctx);
	drm_WARN(encoder->base.dev, ret,
		 "Acquiring modeset locks failed with %i\n", ret);

	/*
	 * Unpowered type-c dongles can take some time to boot and become
	 * responsive, so give those dongles some time to power up and then
	 * retry the probe.
	 *
	 * On many platforms the HDMI live state signal is known to be
	 * unreliable, so we can't use it to detect if a sink is connected or
	 * not. Instead we detect if it's connected based on whether we can
	 * read the EDID or not. That in turn has a problem during disconnect,
	 * since the HPD interrupt may be raised before the DDC lines get
	 * disconnected (due to how the required length of DDC vs. HPD
	 * connector pins are specified) and so we'll still be able to get a
	 * valid EDID. To solve this schedule another detection cycle if this
	 * time around we didn't detect any change in the sink's connection
	 * status.
	 *
	 * Type-c connectors which get their HPD signal deasserted then
	 * reasserted, without unplugging/replugging the sink from the
	 * connector, introduce a delay until the AUX channel communication
	 * becomes functional. Retry the detection for 5 seconds on type-c
	 * connectors to account for this delay.
5245
	 */
5246 5247
	if (state == INTEL_HOTPLUG_UNCHANGED &&
	    connector->hotplug_retries < (is_tc ? 5 : 1) &&
5248 5249 5250
	    !dig_port->dp.is_mst)
		state = INTEL_HOTPLUG_RETRY;

5251
	return state;
5252 5253
}

5254 5255 5256
static bool lpt_digital_port_connected(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
5257
	u32 bit = dev_priv->hotplug.pch_hpd[encoder->hpd_pin];
5258 5259 5260 5261 5262 5263 5264

	return intel_de_read(dev_priv, SDEISR) & bit;
}

static bool hsw_digital_port_connected(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
5265
	u32 bit = dev_priv->hotplug.hpd[encoder->hpd_pin];
5266

5267
	return intel_de_read(dev_priv, DEISR) & bit;
5268 5269 5270 5271 5272
}

static bool bdw_digital_port_connected(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
5273
	u32 bit = dev_priv->hotplug.hpd[encoder->hpd_pin];
5274 5275 5276 5277

	return intel_de_read(dev_priv, GEN8_DE_PORT_ISR) & bit;
}

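/* Create and initialize the HDMI connector for a DDI port. */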
static struct intel_connector *
intel_ddi_init_hdmi_connector(struct intel_digital_port *dig_port)
{
	struct intel_connector *connector;
	enum port port = dig_port->base.port;

	connector = intel_connector_alloc();
	if (!connector)
		return NULL;

	dig_port->hdmi.hdmi_reg = DDI_BUF_CTL(port);
	intel_hdmi_init_connector(dig_port, connector);

	return connector;
}

static bool intel_ddi_a_force_4_lanes(struct intel_digital_port *dig_port)
{
	struct drm_i915_private *dev_priv = to_i915(dig_port->base.base.dev);

	if (dig_port->base.port != PORT_A)
		return false;

	if (dig_port->saved_port_bits & DDI_A_4_LANES)
		return false;

	/* Broxton/Geminilake: Bspec says that DDI_A_4_LANES is the only
	 *                     supported configuration
	 */
	if (IS_GEN9_LP(dev_priv))
		return true;

	/* Cannonlake: Most of SKUs don't support DDI_E, and the only
	 *             one who does also have a full A/E split called
	 *             DDI_F what makes DDI_E useless. However for this
	 *             case let's trust VBT info.
	 */
	if (IS_CANNONLAKE(dev_priv) &&
	    !intel_bios_is_port_present(dev_priv, PORT_E))
		return true;

	return false;
}

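/*
 * Number of lanes usable on this port: always 4 on gen11+, while on older
 * platforms DDI A and E may share lanes depending on DDI_A_4_LANES.
 */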
static int
intel_ddi_max_lanes(struct intel_digital_port *dig_port)
{
	struct drm_i915_private *dev_priv = to_i915(dig_port->base.base.dev);
	enum port port = dig_port->base.port;
	int max_lanes = 4;

	if (INTEL_GEN(dev_priv) >= 11)
		return max_lanes;

	if (port == PORT_A || port == PORT_E) {
		if (intel_de_read(dev_priv, DDI_BUF_CTL(PORT_A)) & DDI_A_4_LANES)
			max_lanes = port == PORT_A ? 4 : 0;
		else
			/* Both A and E share 2 lanes */
			max_lanes = 2;
	}

	/*
	 * Some BIOS might fail to set this bit on port A if eDP
	 * wasn't lit up at boot.  Force this bit set when needed
	 * so we use the proper lane count for our calculations.
	 */
	if (intel_ddi_a_force_4_lanes(dig_port)) {
		drm_dbg_kms(&dev_priv->drm,
			    "Forcing DDI_A_4_LANES for port A\n");
		dig_port->saved_port_bits |= DDI_A_4_LANES;
		max_lanes = 4;
	}

	return max_lanes;
}

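/* Check whether the HTI (aka HDPORT) agent claimed this PHY at boot. */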
static bool hti_uses_phy(struct drm_i915_private *i915, enum phy phy)
{
	return i915->hti_state & HDPORT_ENABLED &&
	       i915->hti_state & HDPORT_DDI_USED(phy);
}

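/* Platform specific mappings from DDI port to hotplug (HPD) pin follow. */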
static enum hpd_pin dg1_hpd_pin(struct drm_i915_private *dev_priv,
				enum port port)
{
	if (port >= PORT_TC1)
		return HPD_PORT_C + port - PORT_TC1;
	else
		return HPD_PORT_A + port - PORT_A;
}

static enum hpd_pin tgl_hpd_pin(struct drm_i915_private *dev_priv,
				enum port port)
{
	if (port >= PORT_TC1)
		return HPD_PORT_TC1 + port - PORT_TC1;
	else
		return HPD_PORT_A + port - PORT_A;
}

static enum hpd_pin rkl_hpd_pin(struct drm_i915_private *dev_priv,
				enum port port)
{
	if (HAS_PCH_TGP(dev_priv))
		return tgl_hpd_pin(dev_priv, port);

	if (port >= PORT_TC1)
		return HPD_PORT_C + port - PORT_TC1;
	else
		return HPD_PORT_A + port - PORT_A;
}

static enum hpd_pin icl_hpd_pin(struct drm_i915_private *dev_priv,
				enum port port)
{
	if (port >= PORT_C)
		return HPD_PORT_TC1 + port - PORT_C;
	else
		return HPD_PORT_A + port - PORT_A;
}

static enum hpd_pin ehl_hpd_pin(struct drm_i915_private *dev_priv,
				enum port port)
{
	if (port == PORT_D)
		return HPD_PORT_A;

	if (HAS_PCH_MCC(dev_priv))
		return icl_hpd_pin(dev_priv, port);

	return HPD_PORT_A + port - PORT_A;
}

static enum hpd_pin cnl_hpd_pin(struct drm_i915_private *dev_priv,
				enum port port)
{
	if (port == PORT_F)
		return HPD_PORT_E;

	return HPD_PORT_A + port - PORT_A;
}

#define port_tc_name(port) ((port) - PORT_TC1 + '1')
#define tc_port_name(tc_port) ((tc_port) - TC_PORT_1 + '1')

void intel_ddi_init(struct drm_i915_private *dev_priv, enum port port)
{
	struct intel_digital_port *dig_port;
	struct intel_encoder *encoder;
	bool init_hdmi, init_dp;
	enum phy phy = intel_port_to_phy(dev_priv, port);

	/*
	 * On platforms with HTI (aka HDPORT), if it's enabled at boot it may
	 * have taken over some of the PHYs and made them unavailable to the
	 * driver.  In that case we should skip initializing the corresponding
	 * outputs.
	 */
	if (hti_uses_phy(dev_priv, phy)) {
		drm_dbg_kms(&dev_priv->drm, "PORT %c / PHY %c reserved by HTI\n",
			    port_name(port), phy_name(phy));
		return;
	}

	init_hdmi = intel_bios_port_supports_dvi(dev_priv, port) ||
		intel_bios_port_supports_hdmi(dev_priv, port);
	init_dp = intel_bios_port_supports_dp(dev_priv, port);

	if (intel_bios_is_lspcon_present(dev_priv, port)) {
		/*
		 * An lspcon device needs to be driven with a DP connector
		 * and a special detection sequence. So make sure DP
		 * is initialized before lspcon.
		 */
		init_dp = true;
		init_hdmi = false;
		drm_dbg_kms(&dev_priv->drm, "VBT says port %c has lspcon\n",
			    port_name(port));
	}

	if (!init_dp && !init_hdmi) {
		drm_dbg_kms(&dev_priv->drm,
			    "VBT says port %c is not DVI/HDMI/DP compatible, respect it\n",
			    port_name(port));
		return;
	}

	dig_port = kzalloc(sizeof(*dig_port), GFP_KERNEL);
	if (!dig_port)
		return;

	encoder = &dig_port->base;

	if (INTEL_GEN(dev_priv) >= 12) {
		enum tc_port tc_port = intel_port_to_tc(dev_priv, port);

		drm_encoder_init(&dev_priv->drm, &encoder->base, &intel_ddi_funcs,
				 DRM_MODE_ENCODER_TMDS,
				 "DDI %s%c/PHY %s%c",
				 port >= PORT_TC1 ? "TC" : "",
				 port >= PORT_TC1 ? port_tc_name(port) : port_name(port),
				 tc_port != TC_PORT_NONE ? "TC" : "",
				 tc_port != TC_PORT_NONE ? tc_port_name(tc_port) : phy_name(phy));
	} else if (INTEL_GEN(dev_priv) >= 11) {
		enum tc_port tc_port = intel_port_to_tc(dev_priv, port);

		drm_encoder_init(&dev_priv->drm, &encoder->base, &intel_ddi_funcs,
				 DRM_MODE_ENCODER_TMDS,
				 "DDI %c%s/PHY %s%c",
				 port_name(port),
				 port >= PORT_C ? " (TC)" : "",
				 tc_port != TC_PORT_NONE ? "TC" : "",
				 tc_port != TC_PORT_NONE ? tc_port_name(tc_port) : phy_name(phy));
	} else {
		drm_encoder_init(&dev_priv->drm, &encoder->base, &intel_ddi_funcs,
				 DRM_MODE_ENCODER_TMDS,
				 "DDI %c/PHY %c", port_name(port),  phy_name(phy));
	}

	mutex_init(&dig_port->hdcp_mutex);
	dig_port->num_hdcp_streams = 0;

	encoder->hotplug = intel_ddi_hotplug;
	encoder->compute_output_type = intel_ddi_compute_output_type;
	encoder->compute_config = intel_ddi_compute_config;
	encoder->compute_config_late = intel_ddi_compute_config_late;
	encoder->enable = intel_enable_ddi;
	encoder->pre_pll_enable = intel_ddi_pre_pll_enable;
	encoder->pre_enable = intel_ddi_pre_enable;
	encoder->disable = intel_disable_ddi;
	encoder->post_disable = intel_ddi_post_disable;
	encoder->update_pipe = intel_ddi_update_pipe;
	encoder->get_hw_state = intel_ddi_get_hw_state;
	encoder->get_config = intel_ddi_get_config;
	encoder->sync_state = intel_ddi_sync_state;
	encoder->initial_fastset_check = intel_ddi_initial_fastset_check;
	encoder->suspend = intel_dp_encoder_suspend;
	encoder->shutdown = intel_dp_encoder_shutdown;
	encoder->get_power_domains = intel_ddi_get_power_domains;

	encoder->type = INTEL_OUTPUT_DDI;
	encoder->power_domain = intel_port_to_power_domain(port);
	encoder->port = port;
	encoder->cloneable = 0;
	encoder->pipe_mask = ~0;

	if (IS_DG1(dev_priv))
		encoder->hpd_pin = dg1_hpd_pin(dev_priv, port);
	else if (IS_ROCKETLAKE(dev_priv))
		encoder->hpd_pin = rkl_hpd_pin(dev_priv, port);
	else if (INTEL_GEN(dev_priv) >= 12)
		encoder->hpd_pin = tgl_hpd_pin(dev_priv, port);
	else if (IS_JSL_EHL(dev_priv))
		encoder->hpd_pin = ehl_hpd_pin(dev_priv, port);
	else if (IS_GEN(dev_priv, 11))
		encoder->hpd_pin = icl_hpd_pin(dev_priv, port);
	else if (IS_GEN(dev_priv, 10))
		encoder->hpd_pin = cnl_hpd_pin(dev_priv, port);
	else
		encoder->hpd_pin = intel_hpd_pin_default(dev_priv, port);

	if (INTEL_GEN(dev_priv) >= 11)
		dig_port->saved_port_bits =
			intel_de_read(dev_priv, DDI_BUF_CTL(port))
			& DDI_BUF_PORT_REVERSAL;
	else
		dig_port->saved_port_bits =
			intel_de_read(dev_priv, DDI_BUF_CTL(port))
			& (DDI_BUF_PORT_REVERSAL | DDI_A_4_LANES);

	dig_port->dp.output_reg = INVALID_MMIO_REG;
	dig_port->max_lanes = intel_ddi_max_lanes(dig_port);
	dig_port->aux_ch = intel_bios_port_aux_ch(dev_priv, port);

	if (intel_phy_is_tc(dev_priv, phy)) {
		bool is_legacy =
			!intel_bios_port_supports_typec_usb(dev_priv, port) &&
			!intel_bios_port_supports_tbt(dev_priv, port);

		intel_tc_port_init(dig_port, is_legacy);

		encoder->update_prepare = intel_ddi_update_prepare;
		encoder->update_complete = intel_ddi_update_complete;
	}

	drm_WARN_ON(&dev_priv->drm, port > PORT_I);
	dig_port->ddi_io_power_domain = POWER_DOMAIN_PORT_DDI_A_IO +
					      port - PORT_A;

	if (init_dp) {
		if (!intel_ddi_init_dp_connector(dig_port))
			goto err;

		dig_port->hpd_pulse = intel_dp_hpd_pulse;
	}

	/* In theory we don't need the encoder->type check, but leave it just in
	 * case we have some really bad VBTs... */
	if (encoder->type != INTEL_OUTPUT_EDP && init_hdmi) {
		if (!intel_ddi_init_hdmi_connector(dig_port))
			goto err;
	}

	if (INTEL_GEN(dev_priv) >= 11) {
		if (intel_phy_is_tc(dev_priv, phy))
			dig_port->connected = intel_tc_port_connected;
		else
			dig_port->connected = lpt_digital_port_connected;
	} else if (INTEL_GEN(dev_priv) >= 8) {
		if (port == PORT_A || IS_GEN9_LP(dev_priv))
			dig_port->connected = bdw_digital_port_connected;
		else
			dig_port->connected = lpt_digital_port_connected;
	} else {
		if (port == PORT_A)
			dig_port->connected = hsw_digital_port_connected;
		else
			dig_port->connected = lpt_digital_port_connected;
	}

	intel_infoframe_init(dig_port);

	return;

err:
	drm_encoder_cleanup(&encoder->base);
	kfree(dig_port);
}