/*
 * Copyright © 2012 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 *
 * Authors:
 *    Eugeni Dodonov <eugeni.dodonov@intel.com>
 *
 */

#include <drm/drm_scdc_helper.h>

#include "i915_drv.h"
#include "i915_trace.h"
#include "intel_audio.h"
#include "intel_combo_phy.h"
#include "intel_connector.h"
#include "intel_ddi.h"
#include "intel_display_types.h"
#include "intel_dp.h"
#include "intel_dp_mst.h"
#include "intel_dp_link_training.h"
#include "intel_dpio_phy.h"
#include "intel_dsi.h"
#include "intel_fifo_underrun.h"
#include "intel_gmbus.h"
#include "intel_hdcp.h"
#include "intel_hdmi.h"
#include "intel_hotplug.h"
#include "intel_lspcon.h"
#include "intel_panel.h"
#include "intel_pps.h"
#include "intel_psr.h"
#include "intel_sprite.h"
#include "intel_tc.h"
#include "intel_vdsc.h"

struct ddi_buf_trans {
	u32 trans1;	/* balance leg enable, de-emph level */
	u32 trans2;	/* vref sel, vswing */
	u8 i_boost;	/* SKL: I_boost; valid: 0x0, 0x1, 0x3, 0x7 */
};

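/*
 * Map a buffer translation table index to the corresponding DP link training
 * signal level (voltage swing | pre-emphasis) encoding.
 */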
static const u8 index_to_dp_signal_levels[] = {
	[0] = DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_0,
	[1] = DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_1,
	[2] = DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_2,
	[3] = DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_3,
	[4] = DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_0,
	[5] = DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_1,
	[6] = DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_2,
	[7] = DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_0,
	[8] = DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_1,
	[9] = DP_TRAIN_VOLTAGE_SWING_LEVEL_3 | DP_TRAIN_PRE_EMPH_LEVEL_0,
};

/* HDMI/DVI modes ignore everything but the last 2 items. So we share
 * them for both DP and FDI transports, allowing those ports to
 * automatically adapt to HDMI connections as well
 */
static const struct ddi_buf_trans hsw_ddi_translations_dp[] = {
	{ 0x00FFFFFF, 0x0006000E, 0x0 },
	{ 0x00D75FFF, 0x0005000A, 0x0 },
	{ 0x00C30FFF, 0x00040006, 0x0 },
	{ 0x80AAAFFF, 0x000B0000, 0x0 },
	{ 0x00FFFFFF, 0x0005000A, 0x0 },
	{ 0x00D75FFF, 0x000C0004, 0x0 },
	{ 0x80C30FFF, 0x000B0000, 0x0 },
	{ 0x00FFFFFF, 0x00040006, 0x0 },
	{ 0x80D75FFF, 0x000B0000, 0x0 },
};

static const struct ddi_buf_trans hsw_ddi_translations_fdi[] = {
	{ 0x00FFFFFF, 0x0007000E, 0x0 },
	{ 0x00D75FFF, 0x000F000A, 0x0 },
	{ 0x00C30FFF, 0x00060006, 0x0 },
	{ 0x00AAAFFF, 0x001E0000, 0x0 },
	{ 0x00FFFFFF, 0x000F000A, 0x0 },
	{ 0x00D75FFF, 0x00160004, 0x0 },
	{ 0x00C30FFF, 0x001E0000, 0x0 },
	{ 0x00FFFFFF, 0x00060006, 0x0 },
	{ 0x00D75FFF, 0x001E0000, 0x0 },
};

static const struct ddi_buf_trans hsw_ddi_translations_hdmi[] = {
					/* Idx	NT mV d	T mV d	db	*/
	{ 0x00FFFFFF, 0x0006000E, 0x0 },/* 0:	400	400	0	*/
	{ 0x00E79FFF, 0x000E000C, 0x0 },/* 1:	400	500	2	*/
	{ 0x00D75FFF, 0x0005000A, 0x0 },/* 2:	400	600	3.5	*/
	{ 0x00FFFFFF, 0x0005000A, 0x0 },/* 3:	600	600	0	*/
	{ 0x00E79FFF, 0x001D0007, 0x0 },/* 4:	600	750	2	*/
	{ 0x00D75FFF, 0x000C0004, 0x0 },/* 5:	600	900	3.5	*/
	{ 0x00FFFFFF, 0x00040006, 0x0 },/* 6:	800	800	0	*/
	{ 0x80E79FFF, 0x00030002, 0x0 },/* 7:	800	1000	2	*/
	{ 0x00FFFFFF, 0x00140005, 0x0 },/* 8:	850	850	0	*/
	{ 0x00FFFFFF, 0x000C0004, 0x0 },/* 9:	900	900	0	*/
	{ 0x00FFFFFF, 0x001C0003, 0x0 },/* 10:	950	950	0	*/
	{ 0x80FFFFFF, 0x00030002, 0x0 },/* 11:	1000	1000	0	*/
};

static const struct ddi_buf_trans bdw_ddi_translations_edp[] = {
	{ 0x00FFFFFF, 0x00000012, 0x0 },
	{ 0x00EBAFFF, 0x00020011, 0x0 },
	{ 0x00C71FFF, 0x0006000F, 0x0 },
	{ 0x00AAAFFF, 0x000E000A, 0x0 },
	{ 0x00FFFFFF, 0x00020011, 0x0 },
	{ 0x00DB6FFF, 0x0005000F, 0x0 },
	{ 0x00BEEFFF, 0x000A000C, 0x0 },
	{ 0x00FFFFFF, 0x0005000F, 0x0 },
	{ 0x00DB6FFF, 0x000A000C, 0x0 },
};

static const struct ddi_buf_trans bdw_ddi_translations_dp[] = {
	{ 0x00FFFFFF, 0x0007000E, 0x0 },
	{ 0x00D75FFF, 0x000E000A, 0x0 },
	{ 0x00BEFFFF, 0x00140006, 0x0 },
	{ 0x80B2CFFF, 0x001B0002, 0x0 },
	{ 0x00FFFFFF, 0x000E000A, 0x0 },
	{ 0x00DB6FFF, 0x00160005, 0x0 },
	{ 0x80C71FFF, 0x001A0002, 0x0 },
	{ 0x00F7DFFF, 0x00180004, 0x0 },
	{ 0x80D75FFF, 0x001B0002, 0x0 },
};

static const struct ddi_buf_trans bdw_ddi_translations_fdi[] = {
	{ 0x00FFFFFF, 0x0001000E, 0x0 },
	{ 0x00D75FFF, 0x0004000A, 0x0 },
	{ 0x00C30FFF, 0x00070006, 0x0 },
	{ 0x00AAAFFF, 0x000C0000, 0x0 },
	{ 0x00FFFFFF, 0x0004000A, 0x0 },
	{ 0x00D75FFF, 0x00090004, 0x0 },
	{ 0x00C30FFF, 0x000C0000, 0x0 },
	{ 0x00FFFFFF, 0x00070006, 0x0 },
	{ 0x00D75FFF, 0x000C0000, 0x0 },
};

static const struct ddi_buf_trans bdw_ddi_translations_hdmi[] = {
					/* Idx	NT mV d	T mV df	db	*/
	{ 0x00FFFFFF, 0x0007000E, 0x0 },/* 0:	400	400	0	*/
	{ 0x00D75FFF, 0x000E000A, 0x0 },/* 1:	400	600	3.5	*/
	{ 0x00BEFFFF, 0x00140006, 0x0 },/* 2:	400	800	6	*/
	{ 0x00FFFFFF, 0x0009000D, 0x0 },/* 3:	450	450	0	*/
	{ 0x00FFFFFF, 0x000E000A, 0x0 },/* 4:	600	600	0	*/
	{ 0x00D7FFFF, 0x00140006, 0x0 },/* 5:	600	800	2.5	*/
	{ 0x80CB2FFF, 0x001B0002, 0x0 },/* 6:	600	1000	4.5	*/
	{ 0x00FFFFFF, 0x00140006, 0x0 },/* 7:	800	800	0	*/
	{ 0x80E79FFF, 0x001B0002, 0x0 },/* 8:	800	1000	2	*/
	{ 0x80FFFFFF, 0x001B0002, 0x0 },/* 9:	1000	1000	0	*/
};

/* Skylake H and S */
static const struct ddi_buf_trans skl_ddi_translations_dp[] = {
	{ 0x00002016, 0x000000A0, 0x0 },
	{ 0x00005012, 0x0000009B, 0x0 },
	{ 0x00007011, 0x00000088, 0x0 },
	{ 0x80009010, 0x000000C0, 0x1 },
	{ 0x00002016, 0x0000009B, 0x0 },
	{ 0x00005012, 0x00000088, 0x0 },
	{ 0x80007011, 0x000000C0, 0x1 },
	{ 0x00002016, 0x000000DF, 0x0 },
	{ 0x80005012, 0x000000C0, 0x1 },
};

/* Skylake U */
static const struct ddi_buf_trans skl_u_ddi_translations_dp[] = {
	{ 0x0000201B, 0x000000A2, 0x0 },
	{ 0x00005012, 0x00000088, 0x0 },
	{ 0x80007011, 0x000000CD, 0x1 },
	{ 0x80009010, 0x000000C0, 0x1 },
	{ 0x0000201B, 0x0000009D, 0x0 },
	{ 0x80005012, 0x000000C0, 0x1 },
	{ 0x80007011, 0x000000C0, 0x1 },
	{ 0x00002016, 0x00000088, 0x0 },
	{ 0x80005012, 0x000000C0, 0x1 },
};

/* Skylake Y */
static const struct ddi_buf_trans skl_y_ddi_translations_dp[] = {
	{ 0x00000018, 0x000000A2, 0x0 },
	{ 0x00005012, 0x00000088, 0x0 },
	{ 0x80007011, 0x000000CD, 0x3 },
	{ 0x80009010, 0x000000C0, 0x3 },
	{ 0x00000018, 0x0000009D, 0x0 },
	{ 0x80005012, 0x000000C0, 0x3 },
	{ 0x80007011, 0x000000C0, 0x3 },
	{ 0x00000018, 0x00000088, 0x0 },
	{ 0x80005012, 0x000000C0, 0x3 },
};

/* Kabylake H and S */
static const struct ddi_buf_trans kbl_ddi_translations_dp[] = {
	{ 0x00002016, 0x000000A0, 0x0 },
	{ 0x00005012, 0x0000009B, 0x0 },
	{ 0x00007011, 0x00000088, 0x0 },
	{ 0x80009010, 0x000000C0, 0x1 },
	{ 0x00002016, 0x0000009B, 0x0 },
	{ 0x00005012, 0x00000088, 0x0 },
	{ 0x80007011, 0x000000C0, 0x1 },
	{ 0x00002016, 0x00000097, 0x0 },
	{ 0x80005012, 0x000000C0, 0x1 },
};

/* Kabylake U */
static const struct ddi_buf_trans kbl_u_ddi_translations_dp[] = {
	{ 0x0000201B, 0x000000A1, 0x0 },
	{ 0x00005012, 0x00000088, 0x0 },
	{ 0x80007011, 0x000000CD, 0x3 },
	{ 0x80009010, 0x000000C0, 0x3 },
	{ 0x0000201B, 0x0000009D, 0x0 },
	{ 0x80005012, 0x000000C0, 0x3 },
	{ 0x80007011, 0x000000C0, 0x3 },
	{ 0x00002016, 0x0000004F, 0x0 },
	{ 0x80005012, 0x000000C0, 0x3 },
};

/* Kabylake Y */
static const struct ddi_buf_trans kbl_y_ddi_translations_dp[] = {
	{ 0x00001017, 0x000000A1, 0x0 },
	{ 0x00005012, 0x00000088, 0x0 },
	{ 0x80007011, 0x000000CD, 0x3 },
	{ 0x8000800F, 0x000000C0, 0x3 },
	{ 0x00001017, 0x0000009D, 0x0 },
	{ 0x80005012, 0x000000C0, 0x3 },
	{ 0x80007011, 0x000000C0, 0x3 },
	{ 0x00001017, 0x0000004C, 0x0 },
	{ 0x80005012, 0x000000C0, 0x3 },
};

/*
 * Skylake/Kabylake H and S
 * eDP 1.4 low vswing translation parameters
 */
static const struct ddi_buf_trans skl_ddi_translations_edp[] = {
	{ 0x00000018, 0x000000A8, 0x0 },
	{ 0x00004013, 0x000000A9, 0x0 },
	{ 0x00007011, 0x000000A2, 0x0 },
	{ 0x00009010, 0x0000009C, 0x0 },
	{ 0x00000018, 0x000000A9, 0x0 },
	{ 0x00006013, 0x000000A2, 0x0 },
	{ 0x00007011, 0x000000A6, 0x0 },
	{ 0x00000018, 0x000000AB, 0x0 },
	{ 0x00007013, 0x0000009F, 0x0 },
	{ 0x00000018, 0x000000DF, 0x0 },
};

/*
 * Skylake/Kabylake U
 * eDP 1.4 low vswing translation parameters
 */
static const struct ddi_buf_trans skl_u_ddi_translations_edp[] = {
	{ 0x00000018, 0x000000A8, 0x0 },
	{ 0x00004013, 0x000000A9, 0x0 },
	{ 0x00007011, 0x000000A2, 0x0 },
	{ 0x00009010, 0x0000009C, 0x0 },
	{ 0x00000018, 0x000000A9, 0x0 },
	{ 0x00006013, 0x000000A2, 0x0 },
	{ 0x00007011, 0x000000A6, 0x0 },
	{ 0x00002016, 0x000000AB, 0x0 },
	{ 0x00005013, 0x0000009F, 0x0 },
	{ 0x00000018, 0x000000DF, 0x0 },
};

/*
 * Skylake/Kabylake Y
 * eDP 1.4 low vswing translation parameters
 */
static const struct ddi_buf_trans skl_y_ddi_translations_edp[] = {
	{ 0x00000018, 0x000000A8, 0x0 },
	{ 0x00004013, 0x000000AB, 0x0 },
	{ 0x00007011, 0x000000A4, 0x0 },
	{ 0x00009010, 0x000000DF, 0x0 },
	{ 0x00000018, 0x000000AA, 0x0 },
	{ 0x00006013, 0x000000A4, 0x0 },
	{ 0x00007011, 0x0000009D, 0x0 },
	{ 0x00000018, 0x000000A0, 0x0 },
	{ 0x00006012, 0x000000DF, 0x0 },
	{ 0x00000018, 0x0000008A, 0x0 },
};

/* Skylake/Kabylake U, H and S */
static const struct ddi_buf_trans skl_ddi_translations_hdmi[] = {
	{ 0x00000018, 0x000000AC, 0x0 },
	{ 0x00005012, 0x0000009D, 0x0 },
	{ 0x00007011, 0x00000088, 0x0 },
	{ 0x00000018, 0x000000A1, 0x0 },
	{ 0x00000018, 0x00000098, 0x0 },
	{ 0x00004013, 0x00000088, 0x0 },
	{ 0x80006012, 0x000000CD, 0x1 },
	{ 0x00000018, 0x000000DF, 0x0 },
	{ 0x80003015, 0x000000CD, 0x1 },	/* Default */
	{ 0x80003015, 0x000000C0, 0x1 },
	{ 0x80000018, 0x000000C0, 0x1 },
};

/* Skylake/Kabylake Y */
static const struct ddi_buf_trans skl_y_ddi_translations_hdmi[] = {
	{ 0x00000018, 0x000000A1, 0x0 },
	{ 0x00005012, 0x000000DF, 0x0 },
	{ 0x80007011, 0x000000CB, 0x3 },
	{ 0x00000018, 0x000000A4, 0x0 },
	{ 0x00000018, 0x0000009D, 0x0 },
	{ 0x00004013, 0x00000080, 0x0 },
	{ 0x80006013, 0x000000C0, 0x3 },
	{ 0x00000018, 0x0000008A, 0x0 },
	{ 0x80003015, 0x000000C0, 0x3 },	/* Default */
	{ 0x80003015, 0x000000C0, 0x3 },
	{ 0x80000018, 0x000000C0, 0x3 },
};

struct bxt_ddi_buf_trans {
	u8 margin;	/* swing value */
	u8 scale;	/* scale value */
	u8 enable;	/* scale enable */
	u8 deemphasis;
};

static const struct bxt_ddi_buf_trans bxt_ddi_translations_dp[] = {
					/* Idx	NT mV diff	db  */
	{ 52,  0x9A, 0, 128, },	/* 0:	400		0   */
	{ 78,  0x9A, 0, 85,  },	/* 1:	400		3.5 */
	{ 104, 0x9A, 0, 64,  },	/* 2:	400		6   */
	{ 154, 0x9A, 0, 43,  },	/* 3:	400		9.5 */
	{ 77,  0x9A, 0, 128, },	/* 4:	600		0   */
	{ 116, 0x9A, 0, 85,  },	/* 5:	600		3.5 */
	{ 154, 0x9A, 0, 64,  },	/* 6:	600		6   */
	{ 102, 0x9A, 0, 128, },	/* 7:	800		0   */
	{ 154, 0x9A, 0, 85,  },	/* 8:	800		3.5 */
	{ 154, 0x9A, 1, 128, },	/* 9:	1200		0   */
};

static const struct bxt_ddi_buf_trans bxt_ddi_translations_edp[] = {
					/* Idx	NT mV diff	db  */
	{ 26, 0, 0, 128, },	/* 0:	200		0   */
	{ 38, 0, 0, 112, },	/* 1:	200		1.5 */
	{ 48, 0, 0, 96,  },	/* 2:	200		4   */
	{ 54, 0, 0, 69,  },	/* 3:	200		6   */
	{ 32, 0, 0, 128, },	/* 4:	250		0   */
	{ 48, 0, 0, 104, },	/* 5:	250		1.5 */
	{ 54, 0, 0, 85,  },	/* 6:	250		4   */
	{ 43, 0, 0, 128, },	/* 7:	300		0   */
	{ 54, 0, 0, 101, },	/* 8:	300		1.5 */
	{ 48, 0, 0, 128, },	/* 9:	300		0   */
};

/* BSpec has 2 recommended values - entries 0 and 8.
 * Using the entry with higher vswing.
 */
static const struct bxt_ddi_buf_trans bxt_ddi_translations_hdmi[] = {
					/* Idx	NT mV diff	db  */
	{ 52,  0x9A, 0, 128, },	/* 0:	400		0   */
	{ 52,  0x9A, 0, 85,  },	/* 1:	400		3.5 */
	{ 52,  0x9A, 0, 64,  },	/* 2:	400		6   */
	{ 42,  0x9A, 0, 43,  },	/* 3:	400		9.5 */
	{ 77,  0x9A, 0, 128, },	/* 4:	600		0   */
	{ 77,  0x9A, 0, 85,  },	/* 5:	600		3.5 */
	{ 77,  0x9A, 0, 64,  },	/* 6:	600		6   */
	{ 102, 0x9A, 0, 128, },	/* 7:	800		0   */
	{ 102, 0x9A, 0, 85,  },	/* 8:	800		3.5 */
	{ 154, 0x9A, 1, 128, },	/* 9:	1200		0   */
};

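/*
 * Combo PHY (CNL and later) vswing programming values: DW2 swing select,
 * DW7 N scalar and DW4 cursor/post-cursor coefficients.
 */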
struct cnl_ddi_buf_trans {
	u8 dw2_swing_sel;
	u8 dw7_n_scalar;
	u8 dw4_cursor_coeff;
	u8 dw4_post_cursor_2;
	u8 dw4_post_cursor_1;
};

/* Voltage Swing Programming for VccIO 0.85V for DP */
static const struct cnl_ddi_buf_trans cnl_ddi_translations_dp_0_85V[] = {
						/* NT mV Trans mV db    */
	{ 0xA, 0x5D, 0x3F, 0x00, 0x00 },	/* 350   350      0.0   */
	{ 0xA, 0x6A, 0x38, 0x00, 0x07 },	/* 350   500      3.1   */
	{ 0xB, 0x7A, 0x32, 0x00, 0x0D },	/* 350   700      6.0   */
	{ 0x6, 0x7C, 0x2D, 0x00, 0x12 },	/* 350   900      8.2   */
	{ 0xA, 0x69, 0x3F, 0x00, 0x00 },	/* 500   500      0.0   */
	{ 0xB, 0x7A, 0x36, 0x00, 0x09 },	/* 500   700      2.9   */
	{ 0x6, 0x7C, 0x30, 0x00, 0x0F },	/* 500   900      5.1   */
	{ 0xB, 0x7D, 0x3C, 0x00, 0x03 },	/* 650   725      0.9   */
	{ 0x6, 0x7C, 0x34, 0x00, 0x0B },	/* 600   900      3.5   */
	{ 0x6, 0x7B, 0x3F, 0x00, 0x00 },	/* 900   900      0.0   */
};

/* Voltage Swing Programming for VccIO 0.85V for HDMI */
static const struct cnl_ddi_buf_trans cnl_ddi_translations_hdmi_0_85V[] = {
						/* NT mV Trans mV db    */
	{ 0xA, 0x60, 0x3F, 0x00, 0x00 },	/* 450   450      0.0   */
	{ 0xB, 0x73, 0x36, 0x00, 0x09 },	/* 450   650      3.2   */
	{ 0x6, 0x7F, 0x31, 0x00, 0x0E },	/* 450   850      5.5   */
	{ 0xB, 0x73, 0x3F, 0x00, 0x00 },	/* 650   650      0.0   */
	{ 0x6, 0x7F, 0x37, 0x00, 0x08 },	/* 650   850      2.3   */
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 850   850      0.0   */
	{ 0x6, 0x7F, 0x35, 0x00, 0x0A },	/* 600   850      3.0   */
};

/* Voltage Swing Programming for VccIO 0.85V for eDP */
static const struct cnl_ddi_buf_trans cnl_ddi_translations_edp_0_85V[] = {
						/* NT mV Trans mV db    */
	{ 0xA, 0x66, 0x3A, 0x00, 0x05 },	/* 384   500      2.3   */
	{ 0x0, 0x7F, 0x38, 0x00, 0x07 },	/* 153   200      2.3   */
	{ 0x8, 0x7F, 0x38, 0x00, 0x07 },	/* 192   250      2.3   */
	{ 0x1, 0x7F, 0x38, 0x00, 0x07 },	/* 230   300      2.3   */
	{ 0x9, 0x7F, 0x38, 0x00, 0x07 },	/* 269   350      2.3   */
	{ 0xA, 0x66, 0x3C, 0x00, 0x03 },	/* 446   500      1.0   */
	{ 0xB, 0x70, 0x3C, 0x00, 0x03 },	/* 460   600      2.3   */
	{ 0xC, 0x75, 0x3C, 0x00, 0x03 },	/* 537   700      2.3   */
	{ 0x2, 0x7F, 0x3F, 0x00, 0x00 },	/* 400   400      0.0   */
};

/* Voltage Swing Programming for VccIO 0.95V for DP */
static const struct cnl_ddi_buf_trans cnl_ddi_translations_dp_0_95V[] = {
						/* NT mV Trans mV db    */
	{ 0xA, 0x5D, 0x3F, 0x00, 0x00 },	/* 350   350      0.0   */
	{ 0xA, 0x6A, 0x38, 0x00, 0x07 },	/* 350   500      3.1   */
	{ 0xB, 0x7A, 0x32, 0x00, 0x0D },	/* 350   700      6.0   */
	{ 0x6, 0x7C, 0x2D, 0x00, 0x12 },	/* 350   900      8.2   */
	{ 0xA, 0x69, 0x3F, 0x00, 0x00 },	/* 500   500      0.0   */
	{ 0xB, 0x7A, 0x36, 0x00, 0x09 },	/* 500   700      2.9   */
	{ 0x6, 0x7C, 0x30, 0x00, 0x0F },	/* 500   900      5.1   */
	{ 0xB, 0x7D, 0x3C, 0x00, 0x03 },	/* 650   725      0.9   */
	{ 0x6, 0x7C, 0x34, 0x00, 0x0B },	/* 600   900      3.5   */
	{ 0x6, 0x7B, 0x3F, 0x00, 0x00 },	/* 900   900      0.0   */
};

/* Voltage Swing Programming for VccIO 0.95V for HDMI */
static const struct cnl_ddi_buf_trans cnl_ddi_translations_hdmi_0_95V[] = {
						/* NT mV Trans mV db    */
	{ 0xA, 0x5C, 0x3F, 0x00, 0x00 },	/* 400   400      0.0   */
	{ 0xB, 0x69, 0x37, 0x00, 0x08 },	/* 400   600      3.5   */
	{ 0x5, 0x76, 0x31, 0x00, 0x0E },	/* 400   800      6.0   */
	{ 0xA, 0x5E, 0x3F, 0x00, 0x00 },	/* 450   450      0.0   */
	{ 0xB, 0x69, 0x3F, 0x00, 0x00 },	/* 600   600      0.0   */
	{ 0xB, 0x79, 0x35, 0x00, 0x0A },	/* 600   850      3.0   */
	{ 0x6, 0x7D, 0x32, 0x00, 0x0D },	/* 600   1000     4.4   */
	{ 0x5, 0x76, 0x3F, 0x00, 0x00 },	/* 800   800      0.0   */
	{ 0x6, 0x7D, 0x39, 0x00, 0x06 },	/* 800   1000     1.9   */
	{ 0x6, 0x7F, 0x39, 0x00, 0x06 },	/* 850   1050     1.8   */
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 1050  1050     0.0   */
};

/* Voltage Swing Programming for VccIO 0.95V for eDP */
static const struct cnl_ddi_buf_trans cnl_ddi_translations_edp_0_95V[] = {
						/* NT mV Trans mV db    */
	{ 0xA, 0x61, 0x3A, 0x00, 0x05 },	/* 384   500      2.3   */
	{ 0x0, 0x7F, 0x38, 0x00, 0x07 },	/* 153   200      2.3   */
	{ 0x8, 0x7F, 0x38, 0x00, 0x07 },	/* 192   250      2.3   */
	{ 0x1, 0x7F, 0x38, 0x00, 0x07 },	/* 230   300      2.3   */
	{ 0x9, 0x7F, 0x38, 0x00, 0x07 },	/* 269   350      2.3   */
	{ 0xA, 0x61, 0x3C, 0x00, 0x03 },	/* 446   500      1.0   */
	{ 0xB, 0x68, 0x39, 0x00, 0x06 },	/* 460   600      2.3   */
	{ 0xC, 0x6E, 0x39, 0x00, 0x06 },	/* 537   700      2.3   */
	{ 0x4, 0x7F, 0x3A, 0x00, 0x05 },	/* 460   600      2.3   */
	{ 0x2, 0x7F, 0x3F, 0x00, 0x00 },	/* 400   400      0.0   */
};

/* Voltage Swing Programming for VccIO 1.05V for DP */
static const struct cnl_ddi_buf_trans cnl_ddi_translations_dp_1_05V[] = {
						/* NT mV Trans mV db    */
	{ 0xA, 0x58, 0x3F, 0x00, 0x00 },	/* 400   400      0.0   */
	{ 0xB, 0x64, 0x37, 0x00, 0x08 },	/* 400   600      3.5   */
	{ 0x5, 0x70, 0x31, 0x00, 0x0E },	/* 400   800      6.0   */
	{ 0x6, 0x7F, 0x2C, 0x00, 0x13 },	/* 400   1050     8.4   */
	{ 0xB, 0x64, 0x3F, 0x00, 0x00 },	/* 600   600      0.0   */
	{ 0x5, 0x73, 0x35, 0x00, 0x0A },	/* 600   850      3.0   */
	{ 0x6, 0x7F, 0x30, 0x00, 0x0F },	/* 550   1050     5.6   */
	{ 0x5, 0x76, 0x3E, 0x00, 0x01 },	/* 850   900      0.5   */
	{ 0x6, 0x7F, 0x36, 0x00, 0x09 },	/* 750   1050     2.9   */
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 1050  1050     0.0   */
};

/* Voltage Swing Programming for VccIO 1.05V for HDMI */
static const struct cnl_ddi_buf_trans cnl_ddi_translations_hdmi_1_05V[] = {
						/* NT mV Trans mV db    */
	{ 0xA, 0x58, 0x3F, 0x00, 0x00 },	/* 400   400      0.0   */
	{ 0xB, 0x64, 0x37, 0x00, 0x08 },	/* 400   600      3.5   */
	{ 0x5, 0x70, 0x31, 0x00, 0x0E },	/* 400   800      6.0   */
	{ 0xA, 0x5B, 0x3F, 0x00, 0x00 },	/* 450   450      0.0   */
	{ 0xB, 0x64, 0x3F, 0x00, 0x00 },	/* 600   600      0.0   */
	{ 0x5, 0x73, 0x35, 0x00, 0x0A },	/* 600   850      3.0   */
	{ 0x6, 0x7C, 0x32, 0x00, 0x0D },	/* 600   1000     4.4   */
	{ 0x5, 0x70, 0x3F, 0x00, 0x00 },	/* 800   800      0.0   */
	{ 0x6, 0x7C, 0x39, 0x00, 0x06 },	/* 800   1000     1.9   */
	{ 0x6, 0x7F, 0x39, 0x00, 0x06 },	/* 850   1050     1.8   */
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 1050  1050     0.0   */
};

/* Voltage Swing Programming for VccIO 1.05V for eDP */
static const struct cnl_ddi_buf_trans cnl_ddi_translations_edp_1_05V[] = {
						/* NT mV Trans mV db    */
	{ 0xA, 0x5E, 0x3A, 0x00, 0x05 },	/* 384   500      2.3   */
	{ 0x0, 0x7F, 0x38, 0x00, 0x07 },	/* 153   200      2.3   */
	{ 0x8, 0x7F, 0x38, 0x00, 0x07 },	/* 192   250      2.3   */
	{ 0x1, 0x7F, 0x38, 0x00, 0x07 },	/* 230   300      2.3   */
	{ 0x9, 0x7F, 0x38, 0x00, 0x07 },	/* 269   350      2.3   */
	{ 0xA, 0x5E, 0x3C, 0x00, 0x03 },	/* 446   500      1.0   */
	{ 0xB, 0x64, 0x39, 0x00, 0x06 },	/* 460   600      2.3   */
	{ 0xE, 0x6A, 0x39, 0x00, 0x06 },	/* 537   700      2.3   */
	{ 0x2, 0x7F, 0x3F, 0x00, 0x00 },	/* 400   400      0.0   */
};

/* icl_combo_phy_ddi_translations */
static const struct cnl_ddi_buf_trans icl_combo_phy_ddi_translations_dp_hbr2[] = {
						/* NT mV Trans mV db    */
	{ 0xA, 0x35, 0x3F, 0x00, 0x00 },	/* 350   350      0.0   */
	{ 0xA, 0x4F, 0x37, 0x00, 0x08 },	/* 350   500      3.1   */
	{ 0xC, 0x71, 0x2F, 0x00, 0x10 },	/* 350   700      6.0   */
	{ 0x6, 0x7F, 0x2B, 0x00, 0x14 },	/* 350   900      8.2   */
	{ 0xA, 0x4C, 0x3F, 0x00, 0x00 },	/* 500   500      0.0   */
	{ 0xC, 0x73, 0x34, 0x00, 0x0B },	/* 500   700      2.9   */
	{ 0x6, 0x7F, 0x2F, 0x00, 0x10 },	/* 500   900      5.1   */
	{ 0xC, 0x6C, 0x3C, 0x00, 0x03 },	/* 650   700      0.6   */
	{ 0x6, 0x7F, 0x35, 0x00, 0x0A },	/* 600   900      3.5   */
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 900   900      0.0   */
};

static const struct cnl_ddi_buf_trans icl_combo_phy_ddi_translations_edp_hbr2[] = {
						/* NT mV Trans mV db    */
	{ 0x0, 0x7F, 0x3F, 0x00, 0x00 },	/* 200   200      0.0   */
	{ 0x8, 0x7F, 0x38, 0x00, 0x07 },	/* 200   250      1.9   */
	{ 0x1, 0x7F, 0x33, 0x00, 0x0C },	/* 200   300      3.5   */
	{ 0x9, 0x7F, 0x31, 0x00, 0x0E },	/* 200   350      4.9   */
	{ 0x8, 0x7F, 0x3F, 0x00, 0x00 },	/* 250   250      0.0   */
	{ 0x1, 0x7F, 0x38, 0x00, 0x07 },	/* 250   300      1.6   */
	{ 0x9, 0x7F, 0x35, 0x00, 0x0A },	/* 250   350      2.9   */
	{ 0x1, 0x7F, 0x3F, 0x00, 0x00 },	/* 300   300      0.0   */
	{ 0x9, 0x7F, 0x38, 0x00, 0x07 },	/* 300   350      1.3   */
	{ 0x9, 0x7F, 0x3F, 0x00, 0x00 },	/* 350   350      0.0   */
};

static const struct cnl_ddi_buf_trans icl_combo_phy_ddi_translations_edp_hbr3[] = {
						/* NT mV Trans mV db    */
	{ 0xA, 0x35, 0x3F, 0x00, 0x00 },	/* 350   350      0.0   */
	{ 0xA, 0x4F, 0x37, 0x00, 0x08 },	/* 350   500      3.1   */
	{ 0xC, 0x71, 0x2F, 0x00, 0x10 },	/* 350   700      6.0   */
	{ 0x6, 0x7F, 0x2B, 0x00, 0x14 },	/* 350   900      8.2   */
	{ 0xA, 0x4C, 0x3F, 0x00, 0x00 },	/* 500   500      0.0   */
	{ 0xC, 0x73, 0x34, 0x00, 0x0B },	/* 500   700      2.9   */
	{ 0x6, 0x7F, 0x2F, 0x00, 0x10 },	/* 500   900      5.1   */
	{ 0xC, 0x6C, 0x3C, 0x00, 0x03 },	/* 650   700      0.6   */
	{ 0x6, 0x7F, 0x35, 0x00, 0x0A },	/* 600   900      3.5   */
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 900   900      0.0   */
};

static const struct cnl_ddi_buf_trans icl_combo_phy_ddi_translations_hdmi[] = {
						/* NT mV Trans mV db    */
	{ 0xA, 0x60, 0x3F, 0x00, 0x00 },	/* 450   450      0.0   */
	{ 0xB, 0x73, 0x36, 0x00, 0x09 },	/* 450   650      3.2   */
	{ 0x6, 0x7F, 0x31, 0x00, 0x0E },	/* 450   850      5.5   */
	{ 0xB, 0x73, 0x3F, 0x00, 0x00 },	/* 650   650      0.0   ALS */
	{ 0x6, 0x7F, 0x37, 0x00, 0x08 },	/* 650   850      2.3   */
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 850   850      0.0   */
	{ 0x6, 0x7F, 0x35, 0x00, 0x0A },	/* 600   850      3.0   */
};

static const struct cnl_ddi_buf_trans ehl_combo_phy_ddi_translations_dp[] = {
						/* NT mV Trans mV db    */
	{ 0xA, 0x33, 0x3F, 0x00, 0x00 },	/* 350   350      0.0   */
	{ 0xA, 0x47, 0x36, 0x00, 0x09 },	/* 350   500      3.1   */
	{ 0xC, 0x64, 0x34, 0x00, 0x0B },	/* 350   700      6.0   */
	{ 0x6, 0x7F, 0x30, 0x00, 0x0F },	/* 350   900      8.2   */
	{ 0xA, 0x46, 0x3F, 0x00, 0x00 },	/* 500   500      0.0   */
	{ 0xC, 0x64, 0x38, 0x00, 0x07 },	/* 500   700      2.9   */
	{ 0x6, 0x7F, 0x32, 0x00, 0x0D },	/* 500   900      5.1   */
	{ 0xC, 0x61, 0x3F, 0x00, 0x00 },	/* 650   700      0.6   */
	{ 0x6, 0x7F, 0x38, 0x00, 0x07 },	/* 600   900      3.5   */
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 900   900      0.0   */
};

static const struct cnl_ddi_buf_trans jsl_combo_phy_ddi_translations_edp_hbr[] = {
						/* NT mV Trans mV db    */
	{ 0x8, 0x7F, 0x3F, 0x00, 0x00 },        /* 200   200      0.0   */
	{ 0x8, 0x7F, 0x38, 0x00, 0x07 },        /* 200   250      1.9   */
	{ 0x1, 0x7F, 0x33, 0x00, 0x0C },        /* 200   300      3.5   */
	{ 0xA, 0x35, 0x36, 0x00, 0x09 },        /* 200   350      4.9   */
	{ 0x8, 0x7F, 0x3F, 0x00, 0x00 },        /* 250   250      0.0   */
	{ 0x1, 0x7F, 0x38, 0x00, 0x07 },        /* 250   300      1.6   */
	{ 0xA, 0x35, 0x35, 0x00, 0x0A },        /* 250   350      2.9   */
	{ 0x1, 0x7F, 0x3F, 0x00, 0x00 },        /* 300   300      0.0   */
	{ 0xA, 0x35, 0x38, 0x00, 0x07 },        /* 300   350      1.3   */
	{ 0xA, 0x35, 0x3F, 0x00, 0x00 },        /* 350   350      0.0   */
};

static const struct cnl_ddi_buf_trans jsl_combo_phy_ddi_translations_edp_hbr2[] = {
						/* NT mV Trans mV db    */
	{ 0x8, 0x7F, 0x3F, 0x00, 0x00 },        /* 200   200      0.0   */
	{ 0x8, 0x7F, 0x3F, 0x00, 0x00 },        /* 200   250      1.9   */
	{ 0x1, 0x7F, 0x3D, 0x00, 0x02 },        /* 200   300      3.5   */
	{ 0xA, 0x35, 0x38, 0x00, 0x07 },        /* 200   350      4.9   */
	{ 0x8, 0x7F, 0x3F, 0x00, 0x00 },        /* 250   250      0.0   */
	{ 0x1, 0x7F, 0x3F, 0x00, 0x00 },        /* 250   300      1.6   */
	{ 0xA, 0x35, 0x3A, 0x00, 0x05 },        /* 250   350      2.9   */
	{ 0x1, 0x7F, 0x3F, 0x00, 0x00 },        /* 300   300      0.0   */
	{ 0xA, 0x35, 0x38, 0x00, 0x07 },        /* 300   350      1.3   */
	{ 0xA, 0x35, 0x3F, 0x00, 0x00 },        /* 350   350      0.0   */
};

static const struct cnl_ddi_buf_trans dg1_combo_phy_ddi_translations_dp_rbr_hbr[] = {
						/* NT mV Trans mV db    */
	{ 0xA, 0x32, 0x3F, 0x00, 0x00 },	/* 350   350      0.0   */
	{ 0xA, 0x48, 0x35, 0x00, 0x0A },	/* 350   500      3.1   */
	{ 0xC, 0x63, 0x2F, 0x00, 0x10 },	/* 350   700      6.0   */
	{ 0x6, 0x7F, 0x2C, 0x00, 0x13 },	/* 350   900      8.2   */
	{ 0xA, 0x43, 0x3F, 0x00, 0x00 },	/* 500   500      0.0   */
	{ 0xC, 0x60, 0x36, 0x00, 0x09 },	/* 500   700      2.9   */
	{ 0x6, 0x7F, 0x30, 0x00, 0x0F },	/* 500   900      5.1   */
	{ 0xC, 0x60, 0x3F, 0x00, 0x00 },	/* 650   700      0.6   */
	{ 0x6, 0x7F, 0x37, 0x00, 0x08 },	/* 600   900      3.5   */
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 900   900      0.0   */
};

static const struct cnl_ddi_buf_trans dg1_combo_phy_ddi_translations_dp_hbr2_hbr3[] = {
						/* NT mV Trans mV db    */
	{ 0xA, 0x32, 0x3F, 0x00, 0x00 },	/* 350   350      0.0   */
	{ 0xA, 0x48, 0x35, 0x00, 0x0A },	/* 350   500      3.1   */
	{ 0xC, 0x63, 0x2F, 0x00, 0x10 },	/* 350   700      6.0   */
	{ 0x6, 0x7F, 0x2C, 0x00, 0x13 },	/* 350   900      8.2   */
	{ 0xA, 0x43, 0x3F, 0x00, 0x00 },	/* 500   500      0.0   */
	{ 0xC, 0x60, 0x36, 0x00, 0x09 },	/* 500   700      2.9   */
	{ 0x6, 0x7F, 0x30, 0x00, 0x0F },	/* 500   900      5.1   */
	{ 0xC, 0x58, 0x3F, 0x00, 0x00 },	/* 650   700      0.6   */
	{ 0x6, 0x7F, 0x35, 0x00, 0x0A },	/* 600   900      3.5   */
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 900   900      0.0   */
};

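/*
 * MG PHY (ICL Type-C) vswing programming values: CRI TX de-emphasis
 * override register fields.
 */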
struct icl_mg_phy_ddi_buf_trans {
	u32 cri_txdeemph_override_11_6;
	u32 cri_txdeemph_override_5_0;
	u32 cri_txdeemph_override_17_12;
};

static const struct icl_mg_phy_ddi_buf_trans icl_mg_phy_ddi_translations_rbr_hbr[] = {
				/* Voltage swing  pre-emphasis */
	{ 0x18, 0x00, 0x00 },	/* 0              0   */
	{ 0x1D, 0x00, 0x05 },	/* 0              1   */
	{ 0x24, 0x00, 0x0C },	/* 0              2   */
	{ 0x2B, 0x00, 0x14 },	/* 0              3   */
	{ 0x21, 0x00, 0x00 },	/* 1              0   */
	{ 0x2B, 0x00, 0x08 },	/* 1              1   */
	{ 0x30, 0x00, 0x0F },	/* 1              2   */
	{ 0x31, 0x00, 0x03 },	/* 2              0   */
	{ 0x34, 0x00, 0x0B },	/* 2              1   */
	{ 0x3F, 0x00, 0x00 },	/* 3              0   */
};

static const struct icl_mg_phy_ddi_buf_trans icl_mg_phy_ddi_translations_hbr2_hbr3[] = {
				/* Voltage swing  pre-emphasis */
	{ 0x18, 0x00, 0x00 },	/* 0              0   */
	{ 0x1D, 0x00, 0x05 },	/* 0              1   */
	{ 0x24, 0x00, 0x0C },	/* 0              2   */
	{ 0x2B, 0x00, 0x14 },	/* 0              3   */
	{ 0x26, 0x00, 0x00 },	/* 1              0   */
	{ 0x2C, 0x00, 0x07 },	/* 1              1   */
	{ 0x33, 0x00, 0x0C },	/* 1              2   */
	{ 0x2E, 0x00, 0x00 },	/* 2              0   */
	{ 0x36, 0x00, 0x09 },	/* 2              1   */
	{ 0x3F, 0x00, 0x00 },	/* 3              0   */
};

static const struct icl_mg_phy_ddi_buf_trans icl_mg_phy_ddi_translations_hdmi[] = {
				/* HDMI Preset	VS	Pre-emph */
	{ 0x1A, 0x0, 0x0 },	/* 1		400mV	0dB */
	{ 0x20, 0x0, 0x0 },	/* 2		500mV	0dB */
	{ 0x29, 0x0, 0x0 },	/* 3		650mV	0dB */
	{ 0x32, 0x0, 0x0 },	/* 4		800mV	0dB */
	{ 0x3F, 0x0, 0x0 },	/* 5		1000mV	0dB */
	{ 0x3A, 0x0, 0x5 },	/* 6		Full	-1.5 dB */
	{ 0x39, 0x0, 0x6 },	/* 7		Full	-1.8 dB */
	{ 0x38, 0x0, 0x7 },	/* 8		Full	-2 dB */
	{ 0x37, 0x0, 0x8 },	/* 9		Full	-2.5 dB */
	{ 0x36, 0x0, 0x9 },	/* 10		Full	-3 dB */
};

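/*
 * Dekel PHY (TGL Type-C) vswing programming values: vswing, pre-shoot and
 * de-emphasis control.
 */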
struct tgl_dkl_phy_ddi_buf_trans {
	u32 dkl_vswing_control;
	u32 dkl_preshoot_control;
	u32 dkl_de_emphasis_control;
};

static const struct tgl_dkl_phy_ddi_buf_trans tgl_dkl_phy_dp_ddi_trans[] = {
				/* VS	pre-emp	Non-trans mV	Pre-emph dB */
	{ 0x7, 0x0, 0x00 },	/* 0	0	400mV		0 dB */
	{ 0x5, 0x0, 0x05 },	/* 0	1	400mV		3.5 dB */
	{ 0x2, 0x0, 0x0B },	/* 0	2	400mV		6 dB */
	{ 0x0, 0x0, 0x18 },	/* 0	3	400mV		9.5 dB */
	{ 0x5, 0x0, 0x00 },	/* 1	0	600mV		0 dB */
	{ 0x2, 0x0, 0x08 },	/* 1	1	600mV		3.5 dB */
	{ 0x0, 0x0, 0x14 },	/* 1	2	600mV		6 dB */
	{ 0x2, 0x0, 0x00 },	/* 2	0	800mV		0 dB */
	{ 0x0, 0x0, 0x0B },	/* 2	1	800mV		3.5 dB */
	{ 0x0, 0x0, 0x00 },	/* 3	0	1200mV		0 dB HDMI default */
};

static const struct tgl_dkl_phy_ddi_buf_trans tgl_dkl_phy_dp_ddi_trans_hbr2[] = {
				/* VS	pre-emp	Non-trans mV	Pre-emph dB */
	{ 0x7, 0x0, 0x00 },	/* 0	0	400mV		0 dB */
	{ 0x5, 0x0, 0x05 },	/* 0	1	400mV		3.5 dB */
	{ 0x2, 0x0, 0x0B },	/* 0	2	400mV		6 dB */
	{ 0x0, 0x0, 0x19 },	/* 0	3	400mV		9.5 dB */
	{ 0x5, 0x0, 0x00 },	/* 1	0	600mV		0 dB */
	{ 0x2, 0x0, 0x08 },	/* 1	1	600mV		3.5 dB */
	{ 0x0, 0x0, 0x14 },	/* 1	2	600mV		6 dB */
	{ 0x2, 0x0, 0x00 },	/* 2	0	800mV		0 dB */
	{ 0x0, 0x0, 0x0B },	/* 2	1	800mV		3.5 dB */
	{ 0x0, 0x0, 0x00 },	/* 3	0	1200mV		0 dB HDMI default */
};

static const struct tgl_dkl_phy_ddi_buf_trans tgl_dkl_phy_hdmi_ddi_trans[] = {
				/* HDMI Preset	VS	Pre-emph */
	{ 0x7, 0x0, 0x0 },	/* 1		400mV	0dB */
	{ 0x6, 0x0, 0x0 },	/* 2		500mV	0dB */
	{ 0x4, 0x0, 0x0 },	/* 3		650mV	0dB */
	{ 0x2, 0x0, 0x0 },	/* 4		800mV	0dB */
	{ 0x0, 0x0, 0x0 },	/* 5		1000mV	0dB */
	{ 0x0, 0x0, 0x5 },	/* 6		Full	-1.5 dB */
	{ 0x0, 0x0, 0x6 },	/* 7		Full	-1.8 dB */
	{ 0x0, 0x0, 0x7 },	/* 8		Full	-2 dB */
	{ 0x0, 0x0, 0x8 },	/* 9		Full	-2.5 dB */
	{ 0x0, 0x0, 0xA },	/* 10		Full	-3 dB */
};

static const struct cnl_ddi_buf_trans tgl_combo_phy_ddi_translations_dp_hbr[] = {
						/* NT mV Trans mV db    */
	{ 0xA, 0x32, 0x3F, 0x00, 0x00 },	/* 350   350      0.0   */
	{ 0xA, 0x4F, 0x37, 0x00, 0x08 },	/* 350   500      3.1   */
	{ 0xC, 0x71, 0x2F, 0x00, 0x10 },	/* 350   700      6.0   */
	{ 0x6, 0x7D, 0x2B, 0x00, 0x14 },	/* 350   900      8.2   */
	{ 0xA, 0x4C, 0x3F, 0x00, 0x00 },	/* 500   500      0.0   */
	{ 0xC, 0x73, 0x34, 0x00, 0x0B },	/* 500   700      2.9   */
	{ 0x6, 0x7F, 0x2F, 0x00, 0x10 },	/* 500   900      5.1   */
	{ 0xC, 0x6C, 0x3C, 0x00, 0x03 },	/* 650   700      0.6   */
	{ 0x6, 0x7F, 0x35, 0x00, 0x0A },	/* 600   900      3.5   */
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 900   900      0.0   */
};

static const struct cnl_ddi_buf_trans tgl_combo_phy_ddi_translations_dp_hbr2[] = {
						/* NT mV Trans mV db    */
	{ 0xA, 0x35, 0x3F, 0x00, 0x00 },	/* 350   350      0.0   */
	{ 0xA, 0x4F, 0x37, 0x00, 0x08 },	/* 350   500      3.1   */
	{ 0xC, 0x63, 0x2F, 0x00, 0x10 },	/* 350   700      6.0   */
	{ 0x6, 0x7F, 0x2B, 0x00, 0x14 },	/* 350   900      8.2   */
	{ 0xA, 0x47, 0x3F, 0x00, 0x00 },	/* 500   500      0.0   */
	{ 0xC, 0x63, 0x34, 0x00, 0x0B },	/* 500   700      2.9   */
	{ 0x6, 0x7F, 0x2F, 0x00, 0x10 },	/* 500   900      5.1   */
	{ 0xC, 0x61, 0x3C, 0x00, 0x03 },	/* 650   700      0.6   */
	{ 0x6, 0x7B, 0x35, 0x00, 0x0A },	/* 600   900      3.5   */
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 900   900      0.0   */
};

static const struct cnl_ddi_buf_trans tgl_uy_combo_phy_ddi_translations_dp_hbr2[] = {
						/* NT mV Trans mV db    */
	{ 0xA, 0x35, 0x3F, 0x00, 0x00 },	/* 350   350      0.0   */
	{ 0xA, 0x4F, 0x36, 0x00, 0x09 },	/* 350   500      3.1   */
	{ 0xC, 0x60, 0x32, 0x00, 0x0D },	/* 350   700      6.0   */
	{ 0xC, 0x7F, 0x2D, 0x00, 0x12 },	/* 350   900      8.2   */
	{ 0xC, 0x47, 0x3F, 0x00, 0x00 },	/* 500   500      0.0   */
	{ 0xC, 0x6F, 0x36, 0x00, 0x09 },	/* 500   700      2.9   */
	{ 0x6, 0x7D, 0x32, 0x00, 0x0D },	/* 500   900      5.1   */
	{ 0x6, 0x60, 0x3C, 0x00, 0x03 },	/* 650   700      0.6   */
	{ 0x6, 0x7F, 0x34, 0x00, 0x0B },	/* 600   900      3.5   */
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 900   900      0.0   */
};

/*
 * Cloned the HOBL entry to comply with the voltage and pre-emphasis entries
 * that DisplayPort specification requires
 */
static const struct cnl_ddi_buf_trans tgl_combo_phy_ddi_translations_edp_hbr2_hobl[] = {
						/* VS	pre-emp	*/
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 0	0	*/
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 0	1	*/
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 0	2	*/
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 0	3	*/
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 1	0	*/
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 1	1	*/
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 1	2	*/
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 2	0	*/
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 2	1	*/
};

static const struct cnl_ddi_buf_trans rkl_combo_phy_ddi_translations_dp_hbr[] = {
						/* NT mV Trans mV db    */
	{ 0xA, 0x2F, 0x3F, 0x00, 0x00 },	/* 350   350      0.0   */
	{ 0xA, 0x4F, 0x37, 0x00, 0x08 },	/* 350   500      3.1   */
	{ 0xC, 0x63, 0x2F, 0x00, 0x10 },	/* 350   700      6.0   */
	{ 0x6, 0x7D, 0x2A, 0x00, 0x15 },	/* 350   900      8.2   */
	{ 0xA, 0x4C, 0x3F, 0x00, 0x00 },	/* 500   500      0.0   */
	{ 0xC, 0x73, 0x34, 0x00, 0x0B },	/* 500   700      2.9   */
	{ 0x6, 0x7F, 0x2F, 0x00, 0x10 },	/* 500   900      5.1   */
	{ 0xC, 0x6E, 0x3E, 0x00, 0x01 },	/* 650   700      0.6   */
	{ 0x6, 0x7F, 0x35, 0x00, 0x0A },	/* 600   900      3.5   */
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 900   900      0.0   */
};

static const struct cnl_ddi_buf_trans rkl_combo_phy_ddi_translations_dp_hbr2_hbr3[] = {
						/* NT mV Trans mV db    */
	{ 0xA, 0x35, 0x3F, 0x00, 0x00 },	/* 350   350      0.0   */
	{ 0xA, 0x50, 0x38, 0x00, 0x07 },	/* 350   500      3.1   */
	{ 0xC, 0x61, 0x33, 0x00, 0x0C },	/* 350   700      6.0   */
	{ 0x6, 0x7F, 0x2E, 0x00, 0x11 },	/* 350   900      8.2   */
	{ 0xA, 0x47, 0x3F, 0x00, 0x00 },	/* 500   500      0.0   */
	{ 0xC, 0x5F, 0x38, 0x00, 0x07 },	/* 500   700      2.9   */
	{ 0x6, 0x7F, 0x2F, 0x00, 0x10 },	/* 500   900      5.1   */
	{ 0xC, 0x5F, 0x3F, 0x00, 0x00 },	/* 650   700      0.6   */
	{ 0x6, 0x7E, 0x36, 0x00, 0x09 },	/* 600   900      3.5   */
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 900   900      0.0   */
};

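/* Check whether @table is the TGL eDP HOBL (hours of battery life) table. */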
static bool is_hobl_buf_trans(const struct cnl_ddi_buf_trans *table)
{
	return table == tgl_combo_phy_ddi_translations_edp_hbr2_hobl;
}

static const struct ddi_buf_trans *
bdw_get_buf_trans_edp(struct intel_encoder *encoder, int *n_entries)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);

	if (dev_priv->vbt.edp.low_vswing) {
		*n_entries = ARRAY_SIZE(bdw_ddi_translations_edp);
		return bdw_ddi_translations_edp;
	} else {
		*n_entries = ARRAY_SIZE(bdw_ddi_translations_dp);
		return bdw_ddi_translations_dp;
	}
}

static const struct ddi_buf_trans *
skl_get_buf_trans_dp(struct intel_encoder *encoder, int *n_entries)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);

	if (IS_SKL_ULX(dev_priv)) {
		*n_entries = ARRAY_SIZE(skl_y_ddi_translations_dp);
		return skl_y_ddi_translations_dp;
	} else if (IS_SKL_ULT(dev_priv)) {
		*n_entries = ARRAY_SIZE(skl_u_ddi_translations_dp);
		return skl_u_ddi_translations_dp;
	} else {
		*n_entries = ARRAY_SIZE(skl_ddi_translations_dp);
		return skl_ddi_translations_dp;
	}
}

static const struct ddi_buf_trans *
kbl_get_buf_trans_dp(struct intel_encoder *encoder, int *n_entries)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);

	if (IS_KBL_ULX(dev_priv) ||
	    IS_CFL_ULX(dev_priv) ||
	    IS_CML_ULX(dev_priv)) {
		*n_entries = ARRAY_SIZE(kbl_y_ddi_translations_dp);
		return kbl_y_ddi_translations_dp;
	} else if (IS_KBL_ULT(dev_priv) ||
		   IS_CFL_ULT(dev_priv) ||
		   IS_CML_ULT(dev_priv)) {
		*n_entries = ARRAY_SIZE(kbl_u_ddi_translations_dp);
		return kbl_u_ddi_translations_dp;
	} else {
		*n_entries = ARRAY_SIZE(kbl_ddi_translations_dp);
		return kbl_ddi_translations_dp;
	}
}

static const struct ddi_buf_trans *
skl_get_buf_trans_edp(struct intel_encoder *encoder, int *n_entries)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);

	if (dev_priv->vbt.edp.low_vswing) {
		if (IS_SKL_ULX(dev_priv) ||
		    IS_KBL_ULX(dev_priv) ||
		    IS_CFL_ULX(dev_priv) ||
		    IS_CML_ULX(dev_priv)) {
			*n_entries = ARRAY_SIZE(skl_y_ddi_translations_edp);
			return skl_y_ddi_translations_edp;
		} else if (IS_SKL_ULT(dev_priv) ||
			   IS_KBL_ULT(dev_priv) ||
			   IS_CFL_ULT(dev_priv) ||
			   IS_CML_ULT(dev_priv)) {
			*n_entries = ARRAY_SIZE(skl_u_ddi_translations_edp);
			return skl_u_ddi_translations_edp;
		} else {
			*n_entries = ARRAY_SIZE(skl_ddi_translations_edp);
			return skl_ddi_translations_edp;
		}
	}

	if (IS_KABYLAKE(dev_priv) ||
	    IS_COFFEELAKE(dev_priv) ||
	    IS_COMETLAKE(dev_priv))
		return kbl_get_buf_trans_dp(encoder, n_entries);
	else
		return skl_get_buf_trans_dp(encoder, n_entries);
}

static const struct ddi_buf_trans *
skl_get_buf_trans_hdmi(struct drm_i915_private *dev_priv, int *n_entries)
{
	if (IS_SKL_ULX(dev_priv) ||
	    IS_KBL_ULX(dev_priv) ||
	    IS_CFL_ULX(dev_priv) ||
	    IS_CML_ULX(dev_priv)) {
		*n_entries = ARRAY_SIZE(skl_y_ddi_translations_hdmi);
		return skl_y_ddi_translations_hdmi;
	} else {
		*n_entries = ARRAY_SIZE(skl_ddi_translations_hdmi);
		return skl_ddi_translations_hdmi;
	}
}

static int skl_buf_trans_num_entries(enum port port, int n_entries)
{
	/* Only DDIA and DDIE can select the 10th register with DP */
	if (port == PORT_A || port == PORT_E)
		return min(n_entries, 10);
	else
		return min(n_entries, 9);
}

static const struct ddi_buf_trans *
intel_ddi_get_buf_trans_dp(struct intel_encoder *encoder, int *n_entries)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);

	if (IS_KABYLAKE(dev_priv) ||
	    IS_COFFEELAKE(dev_priv) ||
	    IS_COMETLAKE(dev_priv)) {
		const struct ddi_buf_trans *ddi_translations =
			kbl_get_buf_trans_dp(encoder, n_entries);
		*n_entries = skl_buf_trans_num_entries(encoder->port, *n_entries);
		return ddi_translations;
	} else if (IS_SKYLAKE(dev_priv)) {
		const struct ddi_buf_trans *ddi_translations =
			skl_get_buf_trans_dp(encoder, n_entries);
		*n_entries = skl_buf_trans_num_entries(encoder->port, *n_entries);
		return ddi_translations;
	} else if (IS_BROADWELL(dev_priv)) {
		*n_entries = ARRAY_SIZE(bdw_ddi_translations_dp);
		return  bdw_ddi_translations_dp;
	} else if (IS_HASWELL(dev_priv)) {
		*n_entries = ARRAY_SIZE(hsw_ddi_translations_dp);
		return hsw_ddi_translations_dp;
	}

	*n_entries = 0;
	return NULL;
}

static const struct ddi_buf_trans *
intel_ddi_get_buf_trans_edp(struct intel_encoder *encoder, int *n_entries)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);

	if (IS_GEN9_BC(dev_priv)) {
		const struct ddi_buf_trans *ddi_translations =
			skl_get_buf_trans_edp(encoder, n_entries);
		*n_entries = skl_buf_trans_num_entries(encoder->port, *n_entries);
		return ddi_translations;
	} else if (IS_BROADWELL(dev_priv)) {
		return bdw_get_buf_trans_edp(encoder, n_entries);
	} else if (IS_HASWELL(dev_priv)) {
		*n_entries = ARRAY_SIZE(hsw_ddi_translations_dp);
		return hsw_ddi_translations_dp;
	}

	*n_entries = 0;
	return NULL;
}

static const struct ddi_buf_trans *
intel_ddi_get_buf_trans_fdi(struct drm_i915_private *dev_priv,
			    int *n_entries)
{
	if (IS_BROADWELL(dev_priv)) {
		*n_entries = ARRAY_SIZE(bdw_ddi_translations_fdi);
		return bdw_ddi_translations_fdi;
	} else if (IS_HASWELL(dev_priv)) {
		*n_entries = ARRAY_SIZE(hsw_ddi_translations_fdi);
		return hsw_ddi_translations_fdi;
	}

	*n_entries = 0;
	return NULL;
}

static const struct ddi_buf_trans *
intel_ddi_get_buf_trans_hdmi(struct intel_encoder *encoder,
			     int *n_entries)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);

	if (IS_GEN9_BC(dev_priv)) {
		return skl_get_buf_trans_hdmi(dev_priv, n_entries);
	} else if (IS_BROADWELL(dev_priv)) {
		*n_entries = ARRAY_SIZE(bdw_ddi_translations_hdmi);
		return bdw_ddi_translations_hdmi;
	} else if (IS_HASWELL(dev_priv)) {
		*n_entries = ARRAY_SIZE(hsw_ddi_translations_hdmi);
		return hsw_ddi_translations_hdmi;
	}

	*n_entries = 0;
	return NULL;
}

static const struct bxt_ddi_buf_trans *
bxt_get_buf_trans_dp(struct intel_encoder *encoder, int *n_entries)
{
	*n_entries = ARRAY_SIZE(bxt_ddi_translations_dp);
	return bxt_ddi_translations_dp;
}

static const struct bxt_ddi_buf_trans *
bxt_get_buf_trans_edp(struct intel_encoder *encoder, int *n_entries)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);

	if (dev_priv->vbt.edp.low_vswing) {
		*n_entries = ARRAY_SIZE(bxt_ddi_translations_edp);
		return bxt_ddi_translations_edp;
	}

	return bxt_get_buf_trans_dp(encoder, n_entries);
}

static const struct bxt_ddi_buf_trans *
bxt_get_buf_trans_hdmi(struct intel_encoder *encoder, int *n_entries)
{
	*n_entries = ARRAY_SIZE(bxt_ddi_translations_hdmi);
	return bxt_ddi_translations_hdmi;
}

static const struct cnl_ddi_buf_trans *
cnl_get_buf_trans_hdmi(struct intel_encoder *encoder, int *n_entries)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u32 voltage = intel_de_read(dev_priv, CNL_PORT_COMP_DW3) & VOLTAGE_INFO_MASK;

	if (voltage == VOLTAGE_INFO_0_85V) {
		*n_entries = ARRAY_SIZE(cnl_ddi_translations_hdmi_0_85V);
		return cnl_ddi_translations_hdmi_0_85V;
	} else if (voltage == VOLTAGE_INFO_0_95V) {
		*n_entries = ARRAY_SIZE(cnl_ddi_translations_hdmi_0_95V);
		return cnl_ddi_translations_hdmi_0_95V;
	} else if (voltage == VOLTAGE_INFO_1_05V) {
		*n_entries = ARRAY_SIZE(cnl_ddi_translations_hdmi_1_05V);
		return cnl_ddi_translations_hdmi_1_05V;
	} else {
		*n_entries = 1; /* shut up gcc */
		MISSING_CASE(voltage);
	}
	return NULL;
}

static const struct cnl_ddi_buf_trans *
cnl_get_buf_trans_dp(struct intel_encoder *encoder, int *n_entries)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u32 voltage = intel_de_read(dev_priv, CNL_PORT_COMP_DW3) & VOLTAGE_INFO_MASK;

	if (voltage == VOLTAGE_INFO_0_85V) {
		*n_entries = ARRAY_SIZE(cnl_ddi_translations_dp_0_85V);
		return cnl_ddi_translations_dp_0_85V;
	} else if (voltage == VOLTAGE_INFO_0_95V) {
		*n_entries = ARRAY_SIZE(cnl_ddi_translations_dp_0_95V);
		return cnl_ddi_translations_dp_0_95V;
	} else if (voltage == VOLTAGE_INFO_1_05V) {
		*n_entries = ARRAY_SIZE(cnl_ddi_translations_dp_1_05V);
		return cnl_ddi_translations_dp_1_05V;
	} else {
		*n_entries = 1; /* shut up gcc */
		MISSING_CASE(voltage);
	}
	return NULL;
}

static const struct cnl_ddi_buf_trans *
cnl_get_buf_trans_edp(struct intel_encoder *encoder, int *n_entries)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u32 voltage = intel_de_read(dev_priv, CNL_PORT_COMP_DW3) & VOLTAGE_INFO_MASK;

	if (dev_priv->vbt.edp.low_vswing) {
		if (voltage == VOLTAGE_INFO_0_85V) {
			*n_entries = ARRAY_SIZE(cnl_ddi_translations_edp_0_85V);
			return cnl_ddi_translations_edp_0_85V;
		} else if (voltage == VOLTAGE_INFO_0_95V) {
			*n_entries = ARRAY_SIZE(cnl_ddi_translations_edp_0_95V);
			return cnl_ddi_translations_edp_0_95V;
		} else if (voltage == VOLTAGE_INFO_1_05V) {
			*n_entries = ARRAY_SIZE(cnl_ddi_translations_edp_1_05V);
			return cnl_ddi_translations_edp_1_05V;
		} else {
			*n_entries = 1; /* shut up gcc */
			MISSING_CASE(voltage);
		}
		return NULL;
	} else {
		return cnl_get_buf_trans_dp(encoder, n_entries);
	}
}

static const struct cnl_ddi_buf_trans *
icl_get_combo_buf_trans_hdmi(struct intel_encoder *encoder,
			     const struct intel_crtc_state *crtc_state,
			     int *n_entries)
{
	*n_entries = ARRAY_SIZE(icl_combo_phy_ddi_translations_hdmi);
	return icl_combo_phy_ddi_translations_hdmi;
}

static const struct cnl_ddi_buf_trans *
icl_get_combo_buf_trans_dp(struct intel_encoder *encoder,
			   const struct intel_crtc_state *crtc_state,
			   int *n_entries)
{
	*n_entries = ARRAY_SIZE(icl_combo_phy_ddi_translations_dp_hbr2);
	return icl_combo_phy_ddi_translations_dp_hbr2;
}

static const struct cnl_ddi_buf_trans *
icl_get_combo_buf_trans_edp(struct intel_encoder *encoder,
			    const struct intel_crtc_state *crtc_state,
			    int *n_entries)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);

	if (crtc_state->port_clock > 540000) {
		*n_entries = ARRAY_SIZE(icl_combo_phy_ddi_translations_edp_hbr3);
		return icl_combo_phy_ddi_translations_edp_hbr3;
	} else if (dev_priv->vbt.edp.low_vswing) {
		*n_entries = ARRAY_SIZE(icl_combo_phy_ddi_translations_edp_hbr2);
		return icl_combo_phy_ddi_translations_edp_hbr2;
	} else if (IS_DG1(dev_priv) && crtc_state->port_clock > 270000) {
		*n_entries = ARRAY_SIZE(dg1_combo_phy_ddi_translations_dp_hbr2_hbr3);
		return dg1_combo_phy_ddi_translations_dp_hbr2_hbr3;
	} else if (IS_DG1(dev_priv)) {
		*n_entries = ARRAY_SIZE(dg1_combo_phy_ddi_translations_dp_rbr_hbr);
		return dg1_combo_phy_ddi_translations_dp_rbr_hbr;
	}

	return icl_get_combo_buf_trans_dp(encoder, crtc_state, n_entries);
}

static const struct cnl_ddi_buf_trans *
icl_get_combo_buf_trans(struct intel_encoder *encoder,
			const struct intel_crtc_state *crtc_state,
			int *n_entries)
{
	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI))
		return icl_get_combo_buf_trans_hdmi(encoder, crtc_state, n_entries);
	else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP))
		return icl_get_combo_buf_trans_edp(encoder, crtc_state, n_entries);
	else
		return icl_get_combo_buf_trans_dp(encoder, crtc_state, n_entries);
}

static const struct icl_mg_phy_ddi_buf_trans *
icl_get_mg_buf_trans_hdmi(struct intel_encoder *encoder,
			  const struct intel_crtc_state *crtc_state,
			  int *n_entries)
{
	*n_entries = ARRAY_SIZE(icl_mg_phy_ddi_translations_hdmi);
	return icl_mg_phy_ddi_translations_hdmi;
}

static const struct icl_mg_phy_ddi_buf_trans *
icl_get_mg_buf_trans_dp(struct intel_encoder *encoder,
			const struct intel_crtc_state *crtc_state,
			int *n_entries)
{
	if (crtc_state->port_clock > 270000) {
		*n_entries = ARRAY_SIZE(icl_mg_phy_ddi_translations_hbr2_hbr3);
		return icl_mg_phy_ddi_translations_hbr2_hbr3;
	} else {
		*n_entries = ARRAY_SIZE(icl_mg_phy_ddi_translations_rbr_hbr);
		return icl_mg_phy_ddi_translations_rbr_hbr;
	}
}

static const struct icl_mg_phy_ddi_buf_trans *
icl_get_mg_buf_trans(struct intel_encoder *encoder,
		     const struct intel_crtc_state *crtc_state,
		     int *n_entries)
{
	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI))
		return icl_get_mg_buf_trans_hdmi(encoder, crtc_state, n_entries);
	else
		return icl_get_mg_buf_trans_dp(encoder, crtc_state, n_entries);
}

static const struct cnl_ddi_buf_trans *
ehl_get_combo_buf_trans_hdmi(struct intel_encoder *encoder,
			     const struct intel_crtc_state *crtc_state,
			     int *n_entries)
{
	*n_entries = ARRAY_SIZE(icl_combo_phy_ddi_translations_hdmi);
	return icl_combo_phy_ddi_translations_hdmi;
}

static const struct cnl_ddi_buf_trans *
ehl_get_combo_buf_trans_dp(struct intel_encoder *encoder,
			   const struct intel_crtc_state *crtc_state,
			   int *n_entries)
{
	*n_entries = ARRAY_SIZE(ehl_combo_phy_ddi_translations_dp);
	return ehl_combo_phy_ddi_translations_dp;
}

static const struct cnl_ddi_buf_trans *
ehl_get_combo_buf_trans_edp(struct intel_encoder *encoder,
			    const struct intel_crtc_state *crtc_state,
			    int *n_entries)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);

	if (dev_priv->vbt.edp.low_vswing) {
		*n_entries = ARRAY_SIZE(icl_combo_phy_ddi_translations_edp_hbr2);
		return icl_combo_phy_ddi_translations_edp_hbr2;
	}

	return ehl_get_combo_buf_trans_dp(encoder, crtc_state, n_entries);
}

static const struct cnl_ddi_buf_trans *
ehl_get_combo_buf_trans(struct intel_encoder *encoder,
			const struct intel_crtc_state *crtc_state,
			int *n_entries)
{
	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI))
		return ehl_get_combo_buf_trans_hdmi(encoder, crtc_state, n_entries);
	else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP))
		return ehl_get_combo_buf_trans_edp(encoder, crtc_state, n_entries);
	else
		return ehl_get_combo_buf_trans_dp(encoder, crtc_state, n_entries);
}

static const struct cnl_ddi_buf_trans *
jsl_get_combo_buf_trans_hdmi(struct intel_encoder *encoder,
			     const struct intel_crtc_state *crtc_state,
			     int *n_entries)
{
	*n_entries = ARRAY_SIZE(icl_combo_phy_ddi_translations_hdmi);
	return icl_combo_phy_ddi_translations_hdmi;
}

static const struct cnl_ddi_buf_trans *
jsl_get_combo_buf_trans_dp(struct intel_encoder *encoder,
			   const struct intel_crtc_state *crtc_state,
			   int *n_entries)
{
	*n_entries = ARRAY_SIZE(icl_combo_phy_ddi_translations_dp_hbr2);
	return icl_combo_phy_ddi_translations_dp_hbr2;
}

static const struct cnl_ddi_buf_trans *
jsl_get_combo_buf_trans_edp(struct intel_encoder *encoder,
			    const struct intel_crtc_state *crtc_state,
			    int *n_entries)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);

	if (dev_priv->vbt.edp.low_vswing) {
		if (crtc_state->port_clock > 270000) {
			*n_entries = ARRAY_SIZE(jsl_combo_phy_ddi_translations_edp_hbr2);
			return jsl_combo_phy_ddi_translations_edp_hbr2;
		} else {
			*n_entries = ARRAY_SIZE(jsl_combo_phy_ddi_translations_edp_hbr);
			return jsl_combo_phy_ddi_translations_edp_hbr;
		}
	}

	return jsl_get_combo_buf_trans_dp(encoder, crtc_state, n_entries);
}

static const struct cnl_ddi_buf_trans *
jsl_get_combo_buf_trans(struct intel_encoder *encoder,
			const struct intel_crtc_state *crtc_state,
			int *n_entries)
{
	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI))
		return jsl_get_combo_buf_trans_hdmi(encoder, crtc_state, n_entries);
	else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP))
		return jsl_get_combo_buf_trans_edp(encoder, crtc_state, n_entries);
	else
		return jsl_get_combo_buf_trans_dp(encoder, crtc_state, n_entries);
}

static const struct cnl_ddi_buf_trans *
tgl_get_combo_buf_trans_hdmi(struct intel_encoder *encoder,
			     const struct intel_crtc_state *crtc_state,
			     int *n_entries)
{
	*n_entries = ARRAY_SIZE(icl_combo_phy_ddi_translations_hdmi);
	return icl_combo_phy_ddi_translations_hdmi;
}

static const struct cnl_ddi_buf_trans *
tgl_get_combo_buf_trans_dp(struct intel_encoder *encoder,
			   const struct intel_crtc_state *crtc_state,
			   int *n_entries)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);

	if (crtc_state->port_clock > 270000) {
		if (IS_ROCKETLAKE(dev_priv)) {
			*n_entries = ARRAY_SIZE(rkl_combo_phy_ddi_translations_dp_hbr2_hbr3);
			return rkl_combo_phy_ddi_translations_dp_hbr2_hbr3;
		} else if (IS_TGL_U(dev_priv) || IS_TGL_Y(dev_priv)) {
			*n_entries = ARRAY_SIZE(tgl_uy_combo_phy_ddi_translations_dp_hbr2);
			return tgl_uy_combo_phy_ddi_translations_dp_hbr2;
		} else {
			*n_entries = ARRAY_SIZE(tgl_combo_phy_ddi_translations_dp_hbr2);
			return tgl_combo_phy_ddi_translations_dp_hbr2;
		}
	} else {
		if (IS_ROCKETLAKE(dev_priv)) {
			*n_entries = ARRAY_SIZE(rkl_combo_phy_ddi_translations_dp_hbr);
			return rkl_combo_phy_ddi_translations_dp_hbr;
		} else {
			*n_entries = ARRAY_SIZE(tgl_combo_phy_ddi_translations_dp_hbr);
			return tgl_combo_phy_ddi_translations_dp_hbr;
		}
	}
}

static const struct cnl_ddi_buf_trans *
tgl_get_combo_buf_trans_edp(struct intel_encoder *encoder,
			    const struct intel_crtc_state *crtc_state,
1349 1350 1351 1352 1353
			    int *n_entries)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);

1354
	if (crtc_state->port_clock > 540000) {
1355 1356 1357 1358 1359 1360 1361 1362 1363 1364
		*n_entries = ARRAY_SIZE(icl_combo_phy_ddi_translations_edp_hbr3);
		return icl_combo_phy_ddi_translations_edp_hbr3;
	} else if (dev_priv->vbt.edp.hobl && !intel_dp->hobl_failed) {
		*n_entries = ARRAY_SIZE(tgl_combo_phy_ddi_translations_edp_hbr2_hobl);
		return tgl_combo_phy_ddi_translations_edp_hbr2_hobl;
	} else if (dev_priv->vbt.edp.low_vswing) {
		*n_entries = ARRAY_SIZE(icl_combo_phy_ddi_translations_edp_hbr2);
		return icl_combo_phy_ddi_translations_edp_hbr2;
	}

1365
	return tgl_get_combo_buf_trans_dp(encoder, crtc_state, n_entries);
1366 1367 1368
}

static const struct cnl_ddi_buf_trans *
1369 1370
tgl_get_combo_buf_trans(struct intel_encoder *encoder,
			const struct intel_crtc_state *crtc_state,
1371 1372
			int *n_entries)
{
1373 1374 1375 1376
	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI))
		return tgl_get_combo_buf_trans_hdmi(encoder, crtc_state, n_entries);
	else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP))
		return tgl_get_combo_buf_trans_edp(encoder, crtc_state, n_entries);
1377
	else
1378
		return tgl_get_combo_buf_trans_dp(encoder, crtc_state, n_entries);
1379 1380
}

1381
static const struct tgl_dkl_phy_ddi_buf_trans *
1382 1383
tgl_get_dkl_buf_trans_hdmi(struct intel_encoder *encoder,
			   const struct intel_crtc_state *crtc_state,
1384 1385 1386 1387 1388 1389 1390
			   int *n_entries)
{
	*n_entries = ARRAY_SIZE(tgl_dkl_phy_hdmi_ddi_trans);
	return tgl_dkl_phy_hdmi_ddi_trans;
}

static const struct tgl_dkl_phy_ddi_buf_trans *
1391 1392
tgl_get_dkl_buf_trans_dp(struct intel_encoder *encoder,
			 const struct intel_crtc_state *crtc_state,
1393
			 int *n_entries)
1394
{
1395
	if (crtc_state->port_clock > 270000) {
1396 1397
		*n_entries = ARRAY_SIZE(tgl_dkl_phy_dp_ddi_trans_hbr2);
		return tgl_dkl_phy_dp_ddi_trans_hbr2;
1398 1399 1400
	} else {
		*n_entries = ARRAY_SIZE(tgl_dkl_phy_dp_ddi_trans);
		return tgl_dkl_phy_dp_ddi_trans;
1401
	}
1402
}
1403

1404
static const struct tgl_dkl_phy_ddi_buf_trans *
1405 1406
tgl_get_dkl_buf_trans(struct intel_encoder *encoder,
		      const struct intel_crtc_state *crtc_state,
1407 1408
		      int *n_entries)
{
1409 1410
	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI))
		return tgl_get_dkl_buf_trans_hdmi(encoder, crtc_state, n_entries);
1411
	else
1412
		return tgl_get_dkl_buf_trans_dp(encoder, crtc_state, n_entries);
1413 1414
}

1415 1416
static int intel_ddi_hdmi_level(struct intel_encoder *encoder,
				const struct intel_crtc_state *crtc_state)
1417
{
1418
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
1419
	int n_entries, level, default_entry;
1420
	enum phy phy = intel_port_to_phy(dev_priv, encoder->port);
1421

1422 1423
	if (INTEL_GEN(dev_priv) >= 12) {
		if (intel_phy_is_combo(dev_priv, phy))
1424
			tgl_get_combo_buf_trans_hdmi(encoder, crtc_state, &n_entries);
1425
		else
1426
			tgl_get_dkl_buf_trans_hdmi(encoder, crtc_state, &n_entries);
1427 1428
		default_entry = n_entries - 1;
	} else if (INTEL_GEN(dev_priv) == 11) {
1429
		if (intel_phy_is_combo(dev_priv, phy))
1430
			icl_get_combo_buf_trans_hdmi(encoder, crtc_state, &n_entries);
1431
		else
1432
			icl_get_mg_buf_trans_hdmi(encoder, crtc_state, &n_entries);
1433 1434
		default_entry = n_entries - 1;
	} else if (IS_CANNONLAKE(dev_priv)) {
1435
		cnl_get_buf_trans_hdmi(encoder, &n_entries);
1436
		default_entry = n_entries - 1;
1437
	} else if (IS_GEN9_LP(dev_priv)) {
1438
		bxt_get_buf_trans_hdmi(encoder, &n_entries);
1439
		default_entry = n_entries - 1;
1440
	} else if (IS_GEN9_BC(dev_priv)) {
1441
		intel_ddi_get_buf_trans_hdmi(encoder, &n_entries);
1442
		default_entry = 8;
1443
	} else if (IS_BROADWELL(dev_priv)) {
1444
		intel_ddi_get_buf_trans_hdmi(encoder, &n_entries);
1445
		default_entry = 7;
1446
	} else if (IS_HASWELL(dev_priv)) {
1447
		intel_ddi_get_buf_trans_hdmi(encoder, &n_entries);
1448
		default_entry = 6;
1449
	} else {
1450
		drm_WARN(&dev_priv->drm, 1, "ddi translation table missing\n");
1451
		return 0;
1452 1453
	}

1454
	if (drm_WARN_ON_ONCE(&dev_priv->drm, n_entries == 0))
1455
		return 0;
1456

1457 1458
	level = intel_bios_hdmi_level_shift(encoder);
	if (level < 0)
1459 1460
		level = default_entry;

1461
	if (drm_WARN_ON_ONCE(&dev_priv->drm, level >= n_entries))
1462
		level = n_entries - 1;
1463

1464
	return level;
1465 1466
}

1467 1468
/*
 * Starting with Haswell, DDI port buffers must be programmed with correct
1469 1470
 * values in advance. This function programs the correct values for
 * DP/eDP/FDI use cases.
1471
 */
1472 1473
static void intel_prepare_dp_ddi_buffers(struct intel_encoder *encoder,
					 const struct intel_crtc_state *crtc_state)
1474
{
1475
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
1476
	u32 iboost_bit = 0;
1477
	int i, n_entries;
1478
	enum port port = encoder->port;
1479
	const struct ddi_buf_trans *ddi_translations;
1480

1481 1482 1483 1484
	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_ANALOG))
		ddi_translations = intel_ddi_get_buf_trans_fdi(dev_priv,
							       &n_entries);
	else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP))
1485
		ddi_translations = intel_ddi_get_buf_trans_edp(encoder,
1486
							       &n_entries);
1487
	else
1488
		ddi_translations = intel_ddi_get_buf_trans_dp(encoder,
1489
							      &n_entries);
1490

1491
	/* If we're boosting the current, set bit 31 of trans1 */
1492
	if (IS_GEN9_BC(dev_priv) && intel_bios_dp_boost_level(encoder))
1493
		iboost_bit = DDI_BUF_BALANCE_LEG_ENABLE;
1494

1495
	for (i = 0; i < n_entries; i++) {
1496 1497 1498 1499
		intel_de_write(dev_priv, DDI_BUF_TRANS_LO(port, i),
			       ddi_translations[i].trans1 | iboost_bit);
		intel_de_write(dev_priv, DDI_BUF_TRANS_HI(port, i),
			       ddi_translations[i].trans2);
1500
	}
1501 1502 1503 1504 1505 1506 1507
}

/*
 * Starting with Haswell, DDI port buffers must be programmed with correct
 * values in advance. This function programs the correct values for
 * HDMI/DVI use cases.
 */
1508
static void intel_prepare_hdmi_ddi_buffers(struct intel_encoder *encoder,
1509
					   int level)
1510 1511 1512
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u32 iboost_bit = 0;
1513
	int n_entries;
1514
	enum port port = encoder->port;
1515
	const struct ddi_buf_trans *ddi_translations;
1516

1517
	ddi_translations = intel_ddi_get_buf_trans_hdmi(encoder, &n_entries);
1518

1519
	if (drm_WARN_ON_ONCE(&dev_priv->drm, !ddi_translations))
1520
		return;
1521
	if (drm_WARN_ON_ONCE(&dev_priv->drm, level >= n_entries))
1522
		level = n_entries - 1;
1523

1524
	/* If we're boosting the current, set bit 31 of trans1 */
1525
	if (IS_GEN9_BC(dev_priv) && intel_bios_hdmi_boost_level(encoder))
1526
		iboost_bit = DDI_BUF_BALANCE_LEG_ENABLE;
1527

1528
	/* Entry 9 is for HDMI: */
1529 1530 1531 1532
	intel_de_write(dev_priv, DDI_BUF_TRANS_LO(port, 9),
		       ddi_translations[level].trans1 | iboost_bit);
	intel_de_write(dev_priv, DDI_BUF_TRANS_HI(port, 9),
		       ddi_translations[level].trans2);
1533 1534
}

1535 1536 1537
static void intel_wait_ddi_buf_idle(struct drm_i915_private *dev_priv,
				    enum port port)
{
1538 1539 1540
	if (IS_BROXTON(dev_priv)) {
		udelay(16);
		return;
1541
	}
1542 1543 1544 1545 1546

	if (wait_for_us((intel_de_read(dev_priv, DDI_BUF_CTL(port)) &
			 DDI_BUF_IS_IDLE), 8))
		drm_err(&dev_priv->drm, "Timeout waiting for DDI BUF %c to get idle\n",
			port_name(port));
1547
}
1548

1549 1550 1551 1552 1553 1554 1555 1556 1557 1558 1559 1560 1561 1562 1563
static void intel_wait_ddi_buf_active(struct drm_i915_private *dev_priv,
				      enum port port)
{
	/* Wait > 518 usecs for DDI_BUF_CTL to be non idle */
	if (INTEL_GEN(dev_priv) < 10 && !IS_GEMINILAKE(dev_priv)) {
		usleep_range(518, 1000);
		return;
	}

	if (wait_for_us(!(intel_de_read(dev_priv, DDI_BUF_CTL(port)) &
			  DDI_BUF_IS_IDLE), 500))
		drm_err(&dev_priv->drm, "Timeout waiting for DDI BUF %c to get active\n",
			port_name(port));
}

1564
static u32 hsw_pll_to_ddi_pll_sel(const struct intel_shared_dpll *pll)
1565
{
1566
	switch (pll->info->id) {
1567 1568 1569 1570 1571 1572 1573 1574 1575 1576 1577 1578 1579
	case DPLL_ID_WRPLL1:
		return PORT_CLK_SEL_WRPLL1;
	case DPLL_ID_WRPLL2:
		return PORT_CLK_SEL_WRPLL2;
	case DPLL_ID_SPLL:
		return PORT_CLK_SEL_SPLL;
	case DPLL_ID_LCPLL_810:
		return PORT_CLK_SEL_LCPLL_810;
	case DPLL_ID_LCPLL_1350:
		return PORT_CLK_SEL_LCPLL_1350;
	case DPLL_ID_LCPLL_2700:
		return PORT_CLK_SEL_LCPLL_2700;
	default:
1580
		MISSING_CASE(pll->info->id);
1581 1582 1583 1584
		return PORT_CLK_SEL_NONE;
	}
}

1585
static u32 icl_pll_to_ddi_clk_sel(struct intel_encoder *encoder,
1586
				  const struct intel_crtc_state *crtc_state)
1587
{
1588 1589
	const struct intel_shared_dpll *pll = crtc_state->shared_dpll;
	int clock = crtc_state->port_clock;
1590 1591 1592 1593
	const enum intel_dpll_id id = pll->info->id;

	switch (id) {
	default:
1594 1595 1596 1597
		/*
		 * DPLL_ID_ICL_DPLL0 and DPLL_ID_ICL_DPLL1 should not be used
		 * here, so do warn if this get passed in
		 */
1598 1599
		MISSING_CASE(id);
		return DDI_CLK_SEL_NONE;
1600 1601 1602 1603 1604 1605 1606 1607 1608 1609 1610 1611
	case DPLL_ID_ICL_TBTPLL:
		switch (clock) {
		case 162000:
			return DDI_CLK_SEL_TBT_162;
		case 270000:
			return DDI_CLK_SEL_TBT_270;
		case 540000:
			return DDI_CLK_SEL_TBT_540;
		case 810000:
			return DDI_CLK_SEL_TBT_810;
		default:
			MISSING_CASE(clock);
1612
			return DDI_CLK_SEL_NONE;
1613
		}
1614 1615 1616 1617
	case DPLL_ID_ICL_MGPLL1:
	case DPLL_ID_ICL_MGPLL2:
	case DPLL_ID_ICL_MGPLL3:
	case DPLL_ID_ICL_MGPLL4:
1618 1619
	case DPLL_ID_TGL_MGPLL5:
	case DPLL_ID_TGL_MGPLL6:
1620 1621 1622 1623
		return DDI_CLK_SEL_MG;
	}
}

1624 1625 1626 1627 1628 1629 1630 1631 1632
/* Starting with Haswell, different DDI ports can work in FDI mode for
 * connection to the PCH-located connectors. For this, it is necessary to train
 * both the DDI port and PCH receiver for the desired DDI buffer settings.
 *
 * The recommended port to work in FDI mode is DDI E, which we use here. Also,
 * please note that when FDI mode is active on DDI E, it shares 2 lines with
 * DDI A (which is used for eDP)
 */

1633
void hsw_fdi_link_train(struct intel_encoder *encoder,
1634
			const struct intel_crtc_state *crtc_state)
1635
{
1636 1637
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
1638
	u32 temp, i, rx_ctl_val, ddi_pll_sel;
1639

1640
	intel_prepare_dp_ddi_buffers(encoder, crtc_state);
1641

1642 1643 1644 1645
	/* Set the FDI_RX_MISC pwrdn lanes and the 2 workarounds listed at the
	 * mode set "sequence for CRT port" document:
	 * - TP1 to TP2 time with the default value
	 * - FDI delay to 90h
1646 1647
	 *
	 * WaFDIAutoLinkSetTimingOverrride:hsw
1648
	 */
1649 1650
	intel_de_write(dev_priv, FDI_RX_MISC(PIPE_A),
		       FDI_RX_PWRDN_LANE1_VAL(2) | FDI_RX_PWRDN_LANE0_VAL(2) | FDI_RX_TP1_TO_TP2_48 | FDI_RX_FDI_DELAY_90);
1651 1652

	/* Enable the PCH Receiver FDI PLL */
1653
	rx_ctl_val = dev_priv->fdi_rx_config | FDI_RX_ENHANCE_FRAME_ENABLE |
1654
		     FDI_RX_PLL_ENABLE |
1655
		     FDI_DP_PORT_WIDTH(crtc_state->fdi_lanes);
1656 1657
	intel_de_write(dev_priv, FDI_RX_CTL(PIPE_A), rx_ctl_val);
	intel_de_posting_read(dev_priv, FDI_RX_CTL(PIPE_A));
1658 1659 1660 1661
	udelay(220);

	/* Switch from Rawclk to PCDclk */
	rx_ctl_val |= FDI_PCDCLK;
1662
	intel_de_write(dev_priv, FDI_RX_CTL(PIPE_A), rx_ctl_val);
1663 1664

	/* Configure Port Clock Select */
1665
	ddi_pll_sel = hsw_pll_to_ddi_pll_sel(crtc_state->shared_dpll);
1666
	intel_de_write(dev_priv, PORT_CLK_SEL(PORT_E), ddi_pll_sel);
1667
	drm_WARN_ON(&dev_priv->drm, ddi_pll_sel != PORT_CLK_SEL_SPLL);
1668 1669 1670

	/* Start the training iterating through available voltages and emphasis,
	 * testing each value twice. */
1671
	for (i = 0; i < ARRAY_SIZE(hsw_ddi_translations_fdi) * 2; i++) {
1672
		/* Configure DP_TP_CTL with auto-training */
1673
		intel_de_write(dev_priv, DP_TP_CTL(PORT_E),
1674 1675 1676 1677
			       DP_TP_CTL_FDI_AUTOTRAIN |
			       DP_TP_CTL_ENHANCED_FRAME_ENABLE |
			       DP_TP_CTL_LINK_TRAIN_PAT1 |
			       DP_TP_CTL_ENABLE);
1678

1679 1680 1681 1682
		/* Configure and enable DDI_BUF_CTL for DDI E with next voltage.
		 * DDI E does not support port reversal, the functionality is
		 * achieved on the PCH side in FDI_RX_CTL, so no need to set the
		 * port reversal bit */
1683 1684 1685
		intel_de_write(dev_priv, DDI_BUF_CTL(PORT_E),
			       DDI_BUF_CTL_ENABLE | ((crtc_state->fdi_lanes - 1) << 1) | DDI_BUF_TRANS_SELECT(i / 2));
		intel_de_posting_read(dev_priv, DDI_BUF_CTL(PORT_E));
1686 1687 1688

		udelay(600);

1689
		/* Program PCH FDI Receiver TU */
1690
		intel_de_write(dev_priv, FDI_RX_TUSIZE1(PIPE_A), TU_SIZE(64));
1691 1692 1693

		/* Enable PCH FDI Receiver with auto-training */
		rx_ctl_val |= FDI_RX_ENABLE | FDI_LINK_TRAIN_AUTO;
1694 1695
		intel_de_write(dev_priv, FDI_RX_CTL(PIPE_A), rx_ctl_val);
		intel_de_posting_read(dev_priv, FDI_RX_CTL(PIPE_A));
1696 1697 1698 1699 1700

		/* Wait for FDI receiver lane calibration */
		udelay(30);

		/* Unset FDI_RX_MISC pwrdn lanes */
1701
		temp = intel_de_read(dev_priv, FDI_RX_MISC(PIPE_A));
1702
		temp &= ~(FDI_RX_PWRDN_LANE1_MASK | FDI_RX_PWRDN_LANE0_MASK);
1703 1704
		intel_de_write(dev_priv, FDI_RX_MISC(PIPE_A), temp);
		intel_de_posting_read(dev_priv, FDI_RX_MISC(PIPE_A));
1705 1706 1707

		/* Wait for FDI auto training time */
		udelay(5);
1708

1709
		temp = intel_de_read(dev_priv, DP_TP_STATUS(PORT_E));
1710
		if (temp & DP_TP_STATUS_AUTOTRAIN_DONE) {
1711 1712
			drm_dbg_kms(&dev_priv->drm,
				    "FDI link training done on step %d\n", i);
1713 1714
			break;
		}
1715

1716 1717 1718 1719 1720
		/*
		 * Leave things enabled even if we failed to train FDI.
		 * Results in less fireworks from the state checker.
		 */
		if (i == ARRAY_SIZE(hsw_ddi_translations_fdi) * 2 - 1) {
1721
			drm_err(&dev_priv->drm, "FDI link training failed!\n");
1722
			break;
1723
		}
1724

1725
		rx_ctl_val &= ~FDI_RX_ENABLE;
1726 1727
		intel_de_write(dev_priv, FDI_RX_CTL(PIPE_A), rx_ctl_val);
		intel_de_posting_read(dev_priv, FDI_RX_CTL(PIPE_A));
1728

1729
		temp = intel_de_read(dev_priv, DDI_BUF_CTL(PORT_E));
1730
		temp &= ~DDI_BUF_CTL_ENABLE;
1731 1732
		intel_de_write(dev_priv, DDI_BUF_CTL(PORT_E), temp);
		intel_de_posting_read(dev_priv, DDI_BUF_CTL(PORT_E));
1733

1734
		/* Disable DP_TP_CTL and FDI_RX_CTL and retry */
1735
		temp = intel_de_read(dev_priv, DP_TP_CTL(PORT_E));
1736 1737
		temp &= ~(DP_TP_CTL_ENABLE | DP_TP_CTL_LINK_TRAIN_MASK);
		temp |= DP_TP_CTL_LINK_TRAIN_PAT1;
1738 1739
		intel_de_write(dev_priv, DP_TP_CTL(PORT_E), temp);
		intel_de_posting_read(dev_priv, DP_TP_CTL(PORT_E));
1740 1741

		intel_wait_ddi_buf_idle(dev_priv, PORT_E);
1742 1743

		/* Reset FDI_RX_MISC pwrdn lanes */
1744
		temp = intel_de_read(dev_priv, FDI_RX_MISC(PIPE_A));
1745 1746
		temp &= ~(FDI_RX_PWRDN_LANE1_MASK | FDI_RX_PWRDN_LANE0_MASK);
		temp |= FDI_RX_PWRDN_LANE1_VAL(2) | FDI_RX_PWRDN_LANE0_VAL(2);
1747 1748
		intel_de_write(dev_priv, FDI_RX_MISC(PIPE_A), temp);
		intel_de_posting_read(dev_priv, FDI_RX_MISC(PIPE_A));
1749 1750
	}

1751
	/* Enable normal pixel sending for FDI */
1752
	intel_de_write(dev_priv, DP_TP_CTL(PORT_E),
1753 1754 1755 1756
		       DP_TP_CTL_FDI_AUTOTRAIN |
		       DP_TP_CTL_LINK_TRAIN_NORMAL |
		       DP_TP_CTL_ENHANCED_FRAME_ENABLE |
		       DP_TP_CTL_ENABLE);
1757
}
1758

1759 1760
static void intel_ddi_init_dp_buf_reg(struct intel_encoder *encoder,
				      const struct intel_crtc_state *crtc_state)
1761
{
1762
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
1763
	struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
1764

1765
	intel_dp->DP = dig_port->saved_port_bits |
1766
		DDI_BUF_CTL_ENABLE | DDI_BUF_TRANS_SELECT(0);
1767
	intel_dp->DP |= DDI_PORT_WIDTH(crtc_state->lane_count);
1768 1769
}

1770 1771 1772
static int icl_calc_tbt_pll_link(struct drm_i915_private *dev_priv,
				 enum port port)
{
1773
	u32 val = intel_de_read(dev_priv, DDI_CLK_SEL(port)) & DDI_CLK_SEL_MASK;
1774 1775 1776 1777 1778 1779 1780 1781 1782 1783 1784 1785 1786 1787 1788 1789 1790 1791

	switch (val) {
	case DDI_CLK_SEL_NONE:
		return 0;
	case DDI_CLK_SEL_TBT_162:
		return 162000;
	case DDI_CLK_SEL_TBT_270:
		return 270000;
	case DDI_CLK_SEL_TBT_540:
		return 540000;
	case DDI_CLK_SEL_TBT_810:
		return 810000;
	default:
		MISSING_CASE(val);
		return 0;
	}
}

1792 1793 1794 1795 1796 1797 1798
static void ddi_dotclock_get(struct intel_crtc_state *pipe_config)
{
	int dotclock;

	if (pipe_config->has_pch_encoder)
		dotclock = intel_dotclock_calculate(pipe_config->port_clock,
						    &pipe_config->fdi_m_n);
1799
	else if (intel_crtc_has_dp_encoder(pipe_config))
1800 1801
		dotclock = intel_dotclock_calculate(pipe_config->port_clock,
						    &pipe_config->dp_m_n);
1802 1803
	else if (pipe_config->has_hdmi_sink && pipe_config->pipe_bpp > 24)
		dotclock = pipe_config->port_clock * 24 / pipe_config->pipe_bpp;
1804 1805 1806
	else
		dotclock = pipe_config->port_clock;

1807 1808
	if (pipe_config->output_format == INTEL_OUTPUT_FORMAT_YCBCR420 &&
	    !intel_crtc_has_dp_encoder(pipe_config))
1809 1810
		dotclock *= 2;

1811 1812 1813
	if (pipe_config->pixel_multiplier)
		dotclock /= pipe_config->pixel_multiplier;

1814
	pipe_config->hw.adjusted_mode.crtc_clock = dotclock;
1815
}
1816

1817 1818
static void intel_ddi_clock_get(struct intel_encoder *encoder,
				struct intel_crtc_state *pipe_config)
1819
{
1820
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
1821
	enum phy phy = intel_port_to_phy(dev_priv, encoder->port);
1822

1823
	if (intel_phy_is_tc(dev_priv, phy) &&
1824 1825 1826 1827 1828
	    intel_get_shared_dpll_id(dev_priv, pipe_config->shared_dpll) ==
	    DPLL_ID_ICL_TBTPLL)
		pipe_config->port_clock = icl_calc_tbt_pll_link(dev_priv,
								encoder->port);
	else
1829
		pipe_config->port_clock =
1830 1831
			intel_dpll_get_freq(dev_priv, pipe_config->shared_dpll,
					    &pipe_config->dpll_hw_state);
1832 1833

	ddi_dotclock_get(pipe_config);
1834 1835
}

1836 1837
void intel_ddi_set_dp_msa(const struct intel_crtc_state *crtc_state,
			  const struct drm_connector_state *conn_state)
1838
{
1839
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
1840
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
1841
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
1842
	u32 temp;
1843

1844 1845
	if (!intel_crtc_has_dp_encoder(crtc_state))
		return;
J
Jani Nikula 已提交
1846

1847
	drm_WARN_ON(&dev_priv->drm, transcoder_is_dsi(cpu_transcoder));
1848

1849
	temp = DP_MSA_MISC_SYNC_CLOCK;
1850

1851 1852
	switch (crtc_state->pipe_bpp) {
	case 18:
1853
		temp |= DP_MSA_MISC_6_BPC;
1854 1855
		break;
	case 24:
1856
		temp |= DP_MSA_MISC_8_BPC;
1857 1858
		break;
	case 30:
1859
		temp |= DP_MSA_MISC_10_BPC;
1860 1861
		break;
	case 36:
1862
		temp |= DP_MSA_MISC_12_BPC;
1863 1864 1865 1866
		break;
	default:
		MISSING_CASE(crtc_state->pipe_bpp);
		break;
1867
	}
1868

1869
	/* nonsense combination */
1870 1871
	drm_WARN_ON(&dev_priv->drm, crtc_state->limited_color_range &&
		    crtc_state->output_format != INTEL_OUTPUT_FORMAT_RGB);
1872 1873

	if (crtc_state->limited_color_range)
1874
		temp |= DP_MSA_MISC_COLOR_CEA_RGB;
1875

1876 1877 1878
	/*
	 * As per DP 1.2 spec section 2.3.4.3 while sending
	 * YCBCR 444 signals we should program MSA MISC1/0 fields with
1879
	 * colorspace information.
1880 1881
	 */
	if (crtc_state->output_format == INTEL_OUTPUT_FORMAT_YCBCR444)
1882
		temp |= DP_MSA_MISC_COLOR_YCBCR_444_BT709;
1883

1884 1885 1886
	/*
	 * As per DP 1.4a spec section 2.2.4.3 [MSA Field for Indication
	 * of Color Encoding Format and Content Color Gamut] while sending
1887 1888
	 * YCBCR 420, HDR BT.2020 signals we should program MSA MISC1 fields
	 * which indicate VSC SDP for the Pixel Encoding/Colorimetry Format.
1889
	 */
1890
	if (intel_dp_needs_vsc_sdp(crtc_state, conn_state))
1891
		temp |= DP_MSA_MISC_COLOR_VSC_SDP;
1892

1893
	intel_de_write(dev_priv, TRANS_MSA_MISC(cpu_transcoder), temp);
1894 1895
}

1896 1897 1898 1899 1900 1901 1902 1903
static u32 bdw_trans_port_sync_master_select(enum transcoder master_transcoder)
{
	if (master_transcoder == TRANSCODER_EDP)
		return 0;
	else
		return master_transcoder + 1;
}

1904 1905 1906 1907 1908 1909 1910
/*
 * Returns the TRANS_DDI_FUNC_CTL value based on CRTC state.
 *
 * Only intended to be used by intel_ddi_enable_transcoder_func() and
 * intel_ddi_config_transcoder_func().
 */
static u32
1911 1912
intel_ddi_transcoder_func_reg_val_get(struct intel_encoder *encoder,
				      const struct intel_crtc_state *crtc_state)
1913
{
1914
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
1915 1916
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum pipe pipe = crtc->pipe;
1917
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
1918
	enum port port = encoder->port;
1919
	u32 temp;
1920

1921 1922
	/* Enable TRANS_DDI_FUNC_CTL for the pipe to work in HDMI mode */
	temp = TRANS_DDI_FUNC_ENABLE;
1923 1924 1925 1926
	if (INTEL_GEN(dev_priv) >= 12)
		temp |= TGL_TRANS_DDI_SELECT_PORT(port);
	else
		temp |= TRANS_DDI_SELECT_PORT(port);
1927

1928
	switch (crtc_state->pipe_bpp) {
1929
	case 18:
1930
		temp |= TRANS_DDI_BPC_6;
1931 1932
		break;
	case 24:
1933
		temp |= TRANS_DDI_BPC_8;
1934 1935
		break;
	case 30:
1936
		temp |= TRANS_DDI_BPC_10;
1937 1938
		break;
	case 36:
1939
		temp |= TRANS_DDI_BPC_12;
1940 1941
		break;
	default:
1942
		BUG();
1943
	}
1944

1945
	if (crtc_state->hw.adjusted_mode.flags & DRM_MODE_FLAG_PVSYNC)
1946
		temp |= TRANS_DDI_PVSYNC;
1947
	if (crtc_state->hw.adjusted_mode.flags & DRM_MODE_FLAG_PHSYNC)
1948
		temp |= TRANS_DDI_PHSYNC;
1949

1950 1951 1952
	if (cpu_transcoder == TRANSCODER_EDP) {
		switch (pipe) {
		case PIPE_A:
1953 1954 1955 1956
			/* On Haswell, can only use the always-on power well for
			 * eDP when not using the panel fitter, and when not
			 * using motion blur mitigation (which we don't
			 * support). */
1957
			if (crtc_state->pch_pfit.force_thru)
1958 1959 1960
				temp |= TRANS_DDI_EDP_INPUT_A_ONOFF;
			else
				temp |= TRANS_DDI_EDP_INPUT_A_ON;
1961 1962 1963 1964 1965 1966 1967 1968 1969 1970 1971 1972 1973
			break;
		case PIPE_B:
			temp |= TRANS_DDI_EDP_INPUT_B_ONOFF;
			break;
		case PIPE_C:
			temp |= TRANS_DDI_EDP_INPUT_C_ONOFF;
			break;
		default:
			BUG();
			break;
		}
	}

1974
	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) {
1975
		if (crtc_state->has_hdmi_sink)
1976
			temp |= TRANS_DDI_MODE_SELECT_HDMI;
1977
		else
1978
			temp |= TRANS_DDI_MODE_SELECT_DVI;
S
Shashank Sharma 已提交
1979 1980

		if (crtc_state->hdmi_scrambling)
1981
			temp |= TRANS_DDI_HDMI_SCRAMBLING;
S
Shashank Sharma 已提交
1982 1983
		if (crtc_state->hdmi_high_tmds_clock_ratio)
			temp |= TRANS_DDI_HIGH_TMDS_CHAR_RATE;
1984
	} else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_ANALOG)) {
1985
		temp |= TRANS_DDI_MODE_SELECT_FDI;
1986
		temp |= (crtc_state->fdi_lanes - 1) << 1;
1987
	} else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP_MST)) {
1988
		temp |= TRANS_DDI_MODE_SELECT_DP_MST;
1989
		temp |= DDI_PORT_WIDTH(crtc_state->lane_count);
1990

1991 1992 1993 1994
		if (INTEL_GEN(dev_priv) >= 12) {
			enum transcoder master;

			master = crtc_state->mst_master_transcoder;
1995 1996
			drm_WARN_ON(&dev_priv->drm,
				    master == INVALID_TRANSCODER);
1997 1998
			temp |= TRANS_DDI_MST_TRANSPORT_SELECT(master);
		}
1999
	} else {
2000 2001
		temp |= TRANS_DDI_MODE_SELECT_DP_SST;
		temp |= DDI_PORT_WIDTH(crtc_state->lane_count);
2002 2003
	}

2004 2005 2006 2007 2008 2009 2010 2011 2012
	if (IS_GEN_RANGE(dev_priv, 8, 10) &&
	    crtc_state->master_transcoder != INVALID_TRANSCODER) {
		u8 master_select =
			bdw_trans_port_sync_master_select(crtc_state->master_transcoder);

		temp |= TRANS_DDI_PORT_SYNC_ENABLE |
			TRANS_DDI_PORT_SYNC_MASTER_SELECT(master_select);
	}

2013 2014 2015
	return temp;
}

2016 2017
void intel_ddi_enable_transcoder_func(struct intel_encoder *encoder,
				      const struct intel_crtc_state *crtc_state)
2018
{
2019
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
2020 2021
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
2022 2023 2024 2025 2026 2027

	if (INTEL_GEN(dev_priv) >= 11) {
		enum transcoder master_transcoder = crtc_state->master_transcoder;
		u32 ctl2 = 0;

		if (master_transcoder != INVALID_TRANSCODER) {
2028 2029
			u8 master_select =
				bdw_trans_port_sync_master_select(master_transcoder);
2030

2031
			ctl2 |= PORT_SYNC_MODE_ENABLE |
2032
				PORT_SYNC_MODE_MASTER_SELECT(master_select);
2033 2034 2035 2036 2037 2038
		}

		intel_de_write(dev_priv,
			       TRANS_DDI_FUNC_CTL2(cpu_transcoder), ctl2);
	}

2039 2040 2041
	intel_de_write(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder),
		       intel_ddi_transcoder_func_reg_val_get(encoder,
							     crtc_state));
2042 2043 2044 2045 2046 2047 2048
}

/*
 * Same as intel_ddi_enable_transcoder_func(), but it does not set the enable
 * bit.
 */
static void
2049 2050
intel_ddi_config_transcoder_func(struct intel_encoder *encoder,
				 const struct intel_crtc_state *crtc_state)
2051
{
2052
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
2053 2054
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
2055
	u32 ctl;
2056

2057
	ctl = intel_ddi_transcoder_func_reg_val_get(encoder, crtc_state);
2058 2059
	ctl &= ~TRANS_DDI_FUNC_ENABLE;
	intel_de_write(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder), ctl);
2060
}
2061

2062
void intel_ddi_disable_transcoder_func(const struct intel_crtc_state *crtc_state)
2063
{
2064
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
2065 2066
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
2067
	u32 ctl;
2068

2069 2070 2071 2072 2073
	if (INTEL_GEN(dev_priv) >= 11)
		intel_de_write(dev_priv,
			       TRANS_DDI_FUNC_CTL2(cpu_transcoder), 0);

	ctl = intel_de_read(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder));
2074

2075 2076
	drm_WARN_ON(crtc->base.dev, ctl & TRANS_DDI_HDCP_SIGNALLING);

2077
	ctl &= ~TRANS_DDI_FUNC_ENABLE;
2078

2079 2080 2081 2082
	if (IS_GEN_RANGE(dev_priv, 8, 10))
		ctl &= ~(TRANS_DDI_PORT_SYNC_ENABLE |
			 TRANS_DDI_PORT_SYNC_MASTER_SELECT_MASK);

2083
	if (INTEL_GEN(dev_priv) >= 12) {
2084
		if (!intel_dp_mst_is_master_trans(crtc_state)) {
2085
			ctl &= ~(TGL_TRANS_DDI_PORT_MASK |
2086 2087
				 TRANS_DDI_MODE_SELECT_MASK);
		}
2088
	} else {
2089
		ctl &= ~(TRANS_DDI_PORT_MASK | TRANS_DDI_MODE_SELECT_MASK);
2090
	}
2091

2092
	intel_de_write(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder), ctl);
2093 2094 2095

	if (dev_priv->quirks & QUIRK_INCREASE_DDI_DISABLED_TIME &&
	    intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) {
2096 2097
		drm_dbg_kms(&dev_priv->drm,
			    "Quirk Increase DDI disabled time\n");
2098 2099 2100
		/* Quirk time at 100ms for reliable operation */
		msleep(100);
	}
2101 2102
}

2103 2104 2105
int intel_ddi_toggle_hdcp_bits(struct intel_encoder *intel_encoder,
			       enum transcoder cpu_transcoder,
			       bool enable, u32 hdcp_mask)
S
Sean Paul 已提交
2106 2107 2108
{
	struct drm_device *dev = intel_encoder->base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
2109
	intel_wakeref_t wakeref;
S
Sean Paul 已提交
2110
	int ret = 0;
2111
	u32 tmp;
S
Sean Paul 已提交
2112

2113 2114
	wakeref = intel_display_power_get_if_enabled(dev_priv,
						     intel_encoder->power_domain);
2115
	if (drm_WARN_ON(dev, !wakeref))
S
Sean Paul 已提交
2116 2117
		return -ENXIO;

2118
	tmp = intel_de_read(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder));
S
Sean Paul 已提交
2119
	if (enable)
2120
		tmp |= hdcp_mask;
S
Sean Paul 已提交
2121
	else
2122
		tmp &= ~hdcp_mask;
2123
	intel_de_write(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder), tmp);
2124
	intel_display_power_put(dev_priv, intel_encoder->power_domain, wakeref);
S
Sean Paul 已提交
2125 2126 2127
	return ret;
}

2128 2129 2130
bool intel_ddi_connector_get_hw_state(struct intel_connector *intel_connector)
{
	struct drm_device *dev = intel_connector->base.dev;
2131
	struct drm_i915_private *dev_priv = to_i915(dev);
2132
	struct intel_encoder *encoder = intel_attached_encoder(intel_connector);
2133
	int type = intel_connector->base.connector_type;
2134
	enum port port = encoder->port;
2135
	enum transcoder cpu_transcoder;
2136 2137
	intel_wakeref_t wakeref;
	enum pipe pipe = 0;
2138
	u32 tmp;
2139
	bool ret;
2140

2141 2142 2143
	wakeref = intel_display_power_get_if_enabled(dev_priv,
						     encoder->power_domain);
	if (!wakeref)
2144 2145
		return false;

2146
	if (!encoder->get_hw_state(encoder, &pipe)) {
2147 2148 2149
		ret = false;
		goto out;
	}
2150

2151
	if (HAS_TRANSCODER(dev_priv, TRANSCODER_EDP) && port == PORT_A)
2152 2153
		cpu_transcoder = TRANSCODER_EDP;
	else
D
Daniel Vetter 已提交
2154
		cpu_transcoder = (enum transcoder) pipe;
2155

2156
	tmp = intel_de_read(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder));
2157 2158 2159 2160

	switch (tmp & TRANS_DDI_MODE_SELECT_MASK) {
	case TRANS_DDI_MODE_SELECT_HDMI:
	case TRANS_DDI_MODE_SELECT_DVI:
2161 2162
		ret = type == DRM_MODE_CONNECTOR_HDMIA;
		break;
2163 2164

	case TRANS_DDI_MODE_SELECT_DP_SST:
2165 2166 2167 2168
		ret = type == DRM_MODE_CONNECTOR_eDP ||
		      type == DRM_MODE_CONNECTOR_DisplayPort;
		break;

2169 2170 2171
	case TRANS_DDI_MODE_SELECT_DP_MST:
		/* if the transcoder is in MST state then
		 * connector isn't connected */
2172 2173
		ret = false;
		break;
2174 2175

	case TRANS_DDI_MODE_SELECT_FDI:
2176 2177
		ret = type == DRM_MODE_CONNECTOR_VGA;
		break;
2178 2179

	default:
2180 2181
		ret = false;
		break;
2182
	}
2183 2184

out:
2185
	intel_display_power_put(dev_priv, encoder->power_domain, wakeref);
2186 2187

	return ret;
2188 2189
}

2190 2191
static void intel_ddi_get_encoder_pipes(struct intel_encoder *encoder,
					u8 *pipe_mask, bool *is_dp_mst)
2192 2193
{
	struct drm_device *dev = encoder->base.dev;
2194
	struct drm_i915_private *dev_priv = to_i915(dev);
2195
	enum port port = encoder->port;
2196
	intel_wakeref_t wakeref;
2197
	enum pipe p;
2198
	u32 tmp;
2199 2200 2201 2202
	u8 mst_pipe_mask;

	*pipe_mask = 0;
	*is_dp_mst = false;
2203

2204 2205 2206
	wakeref = intel_display_power_get_if_enabled(dev_priv,
						     encoder->power_domain);
	if (!wakeref)
2207
		return;
2208

2209
	tmp = intel_de_read(dev_priv, DDI_BUF_CTL(port));
2210
	if (!(tmp & DDI_BUF_CTL_ENABLE))
2211
		goto out;
2212

2213
	if (HAS_TRANSCODER(dev_priv, TRANSCODER_EDP) && port == PORT_A) {
2214 2215
		tmp = intel_de_read(dev_priv,
				    TRANS_DDI_FUNC_CTL(TRANSCODER_EDP));
2216

2217
		switch (tmp & TRANS_DDI_EDP_INPUT_MASK) {
2218 2219
		default:
			MISSING_CASE(tmp & TRANS_DDI_EDP_INPUT_MASK);
2220
			fallthrough;
2221 2222
		case TRANS_DDI_EDP_INPUT_A_ON:
		case TRANS_DDI_EDP_INPUT_A_ONOFF:
2223
			*pipe_mask = BIT(PIPE_A);
2224 2225
			break;
		case TRANS_DDI_EDP_INPUT_B_ONOFF:
2226
			*pipe_mask = BIT(PIPE_B);
2227 2228
			break;
		case TRANS_DDI_EDP_INPUT_C_ONOFF:
2229
			*pipe_mask = BIT(PIPE_C);
2230 2231 2232
			break;
		}

2233 2234
		goto out;
	}
2235

2236
	mst_pipe_mask = 0;
2237
	for_each_pipe(dev_priv, p) {
2238
		enum transcoder cpu_transcoder = (enum transcoder)p;
2239
		unsigned int port_mask, ddi_select;
2240 2241 2242 2243 2244 2245
		intel_wakeref_t trans_wakeref;

		trans_wakeref = intel_display_power_get_if_enabled(dev_priv,
								   POWER_DOMAIN_TRANSCODER(cpu_transcoder));
		if (!trans_wakeref)
			continue;
2246 2247 2248 2249 2250 2251 2252 2253

		if (INTEL_GEN(dev_priv) >= 12) {
			port_mask = TGL_TRANS_DDI_PORT_MASK;
			ddi_select = TGL_TRANS_DDI_SELECT_PORT(port);
		} else {
			port_mask = TRANS_DDI_PORT_MASK;
			ddi_select = TRANS_DDI_SELECT_PORT(port);
		}
2254

2255 2256
		tmp = intel_de_read(dev_priv,
				    TRANS_DDI_FUNC_CTL(cpu_transcoder));
2257 2258
		intel_display_power_put(dev_priv, POWER_DOMAIN_TRANSCODER(cpu_transcoder),
					trans_wakeref);
2259

2260
		if ((tmp & port_mask) != ddi_select)
2261
			continue;
2262

2263 2264 2265
		if ((tmp & TRANS_DDI_MODE_SELECT_MASK) ==
		    TRANS_DDI_MODE_SELECT_DP_MST)
			mst_pipe_mask |= BIT(p);
2266

2267
		*pipe_mask |= BIT(p);
2268 2269
	}

2270
	if (!*pipe_mask)
2271 2272 2273
		drm_dbg_kms(&dev_priv->drm,
			    "No pipe for [ENCODER:%d:%s] found\n",
			    encoder->base.base.id, encoder->base.name);
2274 2275

	if (!mst_pipe_mask && hweight8(*pipe_mask) > 1) {
2276 2277 2278 2279
		drm_dbg_kms(&dev_priv->drm,
			    "Multiple pipes for [ENCODER:%d:%s] (pipe_mask %02x)\n",
			    encoder->base.base.id, encoder->base.name,
			    *pipe_mask);
2280 2281 2282 2283
		*pipe_mask = BIT(ffs(*pipe_mask) - 1);
	}

	if (mst_pipe_mask && mst_pipe_mask != *pipe_mask)
2284 2285 2286 2287
		drm_dbg_kms(&dev_priv->drm,
			    "Conflicting MST and non-MST state for [ENCODER:%d:%s] (pipe_mask %02x mst_pipe_mask %02x)\n",
			    encoder->base.base.id, encoder->base.name,
			    *pipe_mask, mst_pipe_mask);
2288 2289
	else
		*is_dp_mst = mst_pipe_mask;
2290

2291
out:
2292
	if (*pipe_mask && IS_GEN9_LP(dev_priv)) {
2293
		tmp = intel_de_read(dev_priv, BXT_PHY_CTL(port));
2294 2295
		if ((tmp & (BXT_PHY_CMNLANE_POWERDOWN_ACK |
			    BXT_PHY_LANE_POWERDOWN_ACK |
2296
			    BXT_PHY_LANE_ENABLED)) != BXT_PHY_LANE_ENABLED)
2297 2298 2299
			drm_err(&dev_priv->drm,
				"[ENCODER:%d:%s] enabled but PHY powered down? (PHY_CTL %08x)\n",
				encoder->base.base.id, encoder->base.name, tmp);
2300 2301
	}

2302
	intel_display_power_put(dev_priv, encoder->power_domain, wakeref);
2303
}
2304

2305 2306 2307 2308 2309 2310 2311 2312 2313 2314 2315 2316 2317 2318
bool intel_ddi_get_hw_state(struct intel_encoder *encoder,
			    enum pipe *pipe)
{
	u8 pipe_mask;
	bool is_mst;

	intel_ddi_get_encoder_pipes(encoder, &pipe_mask, &is_mst);

	if (is_mst || !pipe_mask)
		return false;

	*pipe = ffs(pipe_mask) - 1;

	return true;
2319 2320
}

2321
static enum intel_display_power_domain
I
Imre Deak 已提交
2322
intel_ddi_main_link_aux_domain(struct intel_digital_port *dig_port)
2323
{
2324
	/* CNL+ HW requires corresponding AUX IOs to be powered up for PSR with
2325 2326 2327 2328 2329 2330 2331 2332 2333 2334 2335
	 * DC states enabled at the same time, while for driver initiated AUX
	 * transfers we need the same AUX IOs to be powered but with DC states
	 * disabled. Accordingly use the AUX power domain here which leaves DC
	 * states enabled.
	 * However, for non-A AUX ports the corresponding non-EDP transcoders
	 * would have already enabled power well 2 and DC_OFF. This means we can
	 * acquire a wider POWER_DOMAIN_AUX_{B,C,D,F} reference instead of a
	 * specific AUX_IO reference without powering up any extra wells.
	 * Note that PSR is enabled only on Port A even though this function
	 * returns the correct domain for other ports too.
	 */
2336
	return dig_port->aux_ch == AUX_CH_A ? POWER_DOMAIN_AUX_IO_A :
2337
					      intel_aux_power_domain(dig_port);
2338 2339
}

2340 2341
static void intel_ddi_get_power_domains(struct intel_encoder *encoder,
					struct intel_crtc_state *crtc_state)
2342
{
2343
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
2344
	struct intel_digital_port *dig_port;
2345
	enum phy phy = intel_port_to_phy(dev_priv, encoder->port);
2346

2347 2348
	/*
	 * TODO: Add support for MST encoders. Atm, the following should never
2349 2350
	 * happen since fake-MST encoders don't set their get_power_domains()
	 * hook.
2351
	 */
2352 2353
	if (drm_WARN_ON(&dev_priv->drm,
			intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP_MST)))
2354
		return;
2355

2356
	dig_port = enc_to_dig_port(encoder);
2357 2358

	if (!intel_phy_is_tc(dev_priv, phy) ||
2359 2360 2361 2362 2363
	    dig_port->tc_mode != TC_PORT_TBT_ALT) {
		drm_WARN_ON(&dev_priv->drm, dig_port->ddi_io_wakeref);
		dig_port->ddi_io_wakeref = intel_display_power_get(dev_priv,
								   dig_port->ddi_io_power_domain);
	}
2364

2365 2366 2367 2368 2369
	/*
	 * AUX power is only needed for (e)DP mode, and for HDMI mode on TC
	 * ports.
	 */
	if (intel_crtc_has_dp_encoder(crtc_state) ||
2370 2371 2372 2373 2374 2375
	    intel_phy_is_tc(dev_priv, phy)) {
		drm_WARN_ON(&dev_priv->drm, dig_port->aux_wakeref);
		dig_port->aux_wakeref =
			intel_display_power_get(dev_priv,
						intel_ddi_main_link_aux_domain(dig_port));
	}
2376 2377
}

2378 2379
void intel_ddi_enable_pipe_clock(struct intel_encoder *encoder,
				 const struct intel_crtc_state *crtc_state)
2380
{
2381
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
2382
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
2383
	enum port port = encoder->port;
2384
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
2385

2386 2387
	if (cpu_transcoder != TRANSCODER_EDP) {
		if (INTEL_GEN(dev_priv) >= 12)
2388 2389 2390
			intel_de_write(dev_priv,
				       TRANS_CLK_SEL(cpu_transcoder),
				       TGL_TRANS_CLK_SEL_PORT(port));
2391
		else
2392 2393 2394
			intel_de_write(dev_priv,
				       TRANS_CLK_SEL(cpu_transcoder),
				       TRANS_CLK_SEL_PORT(port));
2395
	}
2396 2397
}

2398
void intel_ddi_disable_pipe_clock(const struct intel_crtc_state *crtc_state)
2399
{
2400
	struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev);
2401
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
2402

2403 2404
	if (cpu_transcoder != TRANSCODER_EDP) {
		if (INTEL_GEN(dev_priv) >= 12)
2405 2406 2407
			intel_de_write(dev_priv,
				       TRANS_CLK_SEL(cpu_transcoder),
				       TGL_TRANS_CLK_SEL_DISABLED);
2408
		else
2409 2410 2411
			intel_de_write(dev_priv,
				       TRANS_CLK_SEL(cpu_transcoder),
				       TRANS_CLK_SEL_DISABLED);
2412
	}
2413 2414
}

2415
static void _skl_ddi_set_iboost(struct drm_i915_private *dev_priv,
2416
				enum port port, u8 iboost)
2417
{
2418 2419
	u32 tmp;

2420
	tmp = intel_de_read(dev_priv, DISPIO_CR_TX_BMU_CR0);
2421 2422 2423 2424 2425
	tmp &= ~(BALANCE_LEG_MASK(port) | BALANCE_LEG_DISABLE(port));
	if (iboost)
		tmp |= iboost << BALANCE_LEG_SHIFT(port);
	else
		tmp |= BALANCE_LEG_DISABLE(port);
2426
	intel_de_write(dev_priv, DISPIO_CR_TX_BMU_CR0, tmp);
2427 2428
}

2429
static void skl_ddi_set_iboost(struct intel_encoder *encoder,
2430 2431
			       const struct intel_crtc_state *crtc_state,
			       int level)
2432
{
2433
	struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
2434
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
2435
	u8 iboost;
2436

2437
	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI))
2438
		iboost = intel_bios_hdmi_boost_level(encoder);
2439
	else
2440
		iboost = intel_bios_dp_boost_level(encoder);
2441

2442 2443 2444 2445
	if (iboost == 0) {
		const struct ddi_buf_trans *ddi_translations;
		int n_entries;

2446
		if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI))
2447
			ddi_translations = intel_ddi_get_buf_trans_hdmi(encoder, &n_entries);
2448 2449
		else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP))
			ddi_translations = intel_ddi_get_buf_trans_edp(encoder, &n_entries);
2450
		else
2451
			ddi_translations = intel_ddi_get_buf_trans_dp(encoder, &n_entries);
2452

2453
		if (drm_WARN_ON_ONCE(&dev_priv->drm, !ddi_translations))
2454
			return;
2455
		if (drm_WARN_ON_ONCE(&dev_priv->drm, level >= n_entries))
2456 2457
			level = n_entries - 1;

2458
		iboost = ddi_translations[level].i_boost;
2459 2460 2461 2462
	}

	/* Make sure that the requested I_boost is valid */
	if (iboost && iboost != 0x1 && iboost != 0x3 && iboost != 0x7) {
2463
		drm_err(&dev_priv->drm, "Invalid I_boost value %u\n", iboost);
2464 2465 2466
		return;
	}

2467
	_skl_ddi_set_iboost(dev_priv, encoder->port, iboost);
2468

2469
	if (encoder->port == PORT_A && dig_port->max_lanes == 4)
2470
		_skl_ddi_set_iboost(dev_priv, PORT_E, iboost);
2471 2472
}

2473
static void bxt_ddi_vswing_sequence(struct intel_encoder *encoder,
2474 2475
				    const struct intel_crtc_state *crtc_state,
				    int level)
2476
{
2477
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
2478
	const struct bxt_ddi_buf_trans *ddi_translations;
2479
	enum port port = encoder->port;
2480
	int n_entries;
2481

2482
	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI))
2483
		ddi_translations = bxt_get_buf_trans_hdmi(encoder, &n_entries);
2484
	else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP))
2485
		ddi_translations = bxt_get_buf_trans_edp(encoder, &n_entries);
2486
	else
2487
		ddi_translations = bxt_get_buf_trans_dp(encoder, &n_entries);
2488

2489
	if (drm_WARN_ON_ONCE(&dev_priv->drm, !ddi_translations))
2490
		return;
2491
	if (drm_WARN_ON_ONCE(&dev_priv->drm, level >= n_entries))
2492 2493
		level = n_entries - 1;

2494 2495 2496 2497 2498
	bxt_ddi_phy_set_signal_level(dev_priv, port,
				     ddi_translations[level].margin,
				     ddi_translations[level].scale,
				     ddi_translations[level].enable,
				     ddi_translations[level].deemphasis);
2499 2500
}

2501 2502
static u8 intel_ddi_dp_voltage_max(struct intel_dp *intel_dp,
				   const struct intel_crtc_state *crtc_state)
2503
{
2504
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
2505
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
2506
	enum port port = encoder->port;
2507
	enum phy phy = intel_port_to_phy(dev_priv, port);
2508 2509
	int n_entries;

2510 2511
	if (INTEL_GEN(dev_priv) >= 12) {
		if (intel_phy_is_combo(dev_priv, phy))
2512
			tgl_get_combo_buf_trans(encoder, crtc_state, &n_entries);
2513
		else
2514
			tgl_get_dkl_buf_trans(encoder, crtc_state, &n_entries);
2515
	} else if (INTEL_GEN(dev_priv) == 11) {
2516 2517 2518
		if (IS_PLATFORM(dev_priv, INTEL_JASPERLAKE))
			jsl_get_combo_buf_trans(encoder, crtc_state, &n_entries);
		else if (IS_PLATFORM(dev_priv, INTEL_ELKHARTLAKE))
2519
			ehl_get_combo_buf_trans(encoder, crtc_state, &n_entries);
2520
		else if (intel_phy_is_combo(dev_priv, phy))
2521
			icl_get_combo_buf_trans(encoder, crtc_state, &n_entries);
2522
		else
2523
			icl_get_mg_buf_trans(encoder, crtc_state, &n_entries);
2524
	} else if (IS_CANNONLAKE(dev_priv)) {
2525
		if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP))
2526
			cnl_get_buf_trans_edp(encoder, &n_entries);
R
Rodrigo Vivi 已提交
2527
		else
2528
			cnl_get_buf_trans_dp(encoder, &n_entries);
2529
	} else if (IS_GEN9_LP(dev_priv)) {
2530
		if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP))
2531
			bxt_get_buf_trans_edp(encoder, &n_entries);
2532
		else
2533
			bxt_get_buf_trans_dp(encoder, &n_entries);
R
Rodrigo Vivi 已提交
2534
	} else {
2535
		if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP))
2536
			intel_ddi_get_buf_trans_edp(encoder, &n_entries);
R
Rodrigo Vivi 已提交
2537
		else
2538
			intel_ddi_get_buf_trans_dp(encoder, &n_entries);
R
Rodrigo Vivi 已提交
2539
	}
2540

2541
	if (drm_WARN_ON(&dev_priv->drm, n_entries < 1))
2542
		n_entries = 1;
2543 2544
	if (drm_WARN_ON(&dev_priv->drm,
			n_entries > ARRAY_SIZE(index_to_dp_signal_levels)))
2545 2546 2547 2548 2549 2550
		n_entries = ARRAY_SIZE(index_to_dp_signal_levels);

	return index_to_dp_signal_levels[n_entries - 1] &
		DP_TRAIN_VOLTAGE_SWING_MASK;
}

2551 2552 2553 2554 2555
/*
 * We assume that the full set of pre-emphasis values can be
 * used on all DDI platforms. Should that change we need to
 * rethink this code.
 */
2556
static u8 intel_ddi_dp_preemph_max(struct intel_dp *intel_dp)
2557
{
2558
	return DP_TRAIN_PRE_EMPH_LEVEL_3;
2559 2560
}

2561
static void cnl_ddi_vswing_program(struct intel_encoder *encoder,
2562 2563
				   const struct intel_crtc_state *crtc_state,
				   int level)
2564
{
2565 2566
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	const struct cnl_ddi_buf_trans *ddi_translations;
2567
	enum port port = encoder->port;
2568 2569
	int n_entries, ln;
	u32 val;
2570

2571
	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI))
2572
		ddi_translations = cnl_get_buf_trans_hdmi(encoder, &n_entries);
2573
	else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP))
2574
		ddi_translations = cnl_get_buf_trans_edp(encoder, &n_entries);
2575
	else
2576
		ddi_translations = cnl_get_buf_trans_dp(encoder, &n_entries);
2577

2578
	if (drm_WARN_ON_ONCE(&dev_priv->drm, !ddi_translations))
2579
		return;
2580
	if (drm_WARN_ON_ONCE(&dev_priv->drm, level >= n_entries))
2581 2582 2583
		level = n_entries - 1;

	/* Set PORT_TX_DW5 Scaling Mode Sel to 010b. */
2584
	val = intel_de_read(dev_priv, CNL_PORT_TX_DW5_LN0(port));
2585
	val &= ~SCALING_MODE_SEL_MASK;
2586
	val |= SCALING_MODE_SEL(2);
2587
	intel_de_write(dev_priv, CNL_PORT_TX_DW5_GRP(port), val);
2588 2589

	/* Program PORT_TX_DW2 */
2590
	val = intel_de_read(dev_priv, CNL_PORT_TX_DW2_LN0(port));
2591 2592
	val &= ~(SWING_SEL_LOWER_MASK | SWING_SEL_UPPER_MASK |
		 RCOMP_SCALAR_MASK);
2593 2594 2595 2596
	val |= SWING_SEL_UPPER(ddi_translations[level].dw2_swing_sel);
	val |= SWING_SEL_LOWER(ddi_translations[level].dw2_swing_sel);
	/* Rcomp scalar is fixed as 0x98 for every table entry */
	val |= RCOMP_SCALAR(0x98);
2597
	intel_de_write(dev_priv, CNL_PORT_TX_DW2_GRP(port), val);
2598

2599
	/* Program PORT_TX_DW4 */
2600 2601
	/* We cannot write to GRP. It would overrite individual loadgen */
	for (ln = 0; ln < 4; ln++) {
2602
		val = intel_de_read(dev_priv, CNL_PORT_TX_DW4_LN(ln, port));
2603 2604
		val &= ~(POST_CURSOR_1_MASK | POST_CURSOR_2_MASK |
			 CURSOR_COEFF_MASK);
2605 2606 2607
		val |= POST_CURSOR_1(ddi_translations[level].dw4_post_cursor_1);
		val |= POST_CURSOR_2(ddi_translations[level].dw4_post_cursor_2);
		val |= CURSOR_COEFF(ddi_translations[level].dw4_cursor_coeff);
2608
		intel_de_write(dev_priv, CNL_PORT_TX_DW4_LN(ln, port), val);
2609 2610
	}

2611
	/* Program PORT_TX_DW5 */
2612
	/* All DW5 values are fixed for every table entry */
2613
	val = intel_de_read(dev_priv, CNL_PORT_TX_DW5_LN0(port));
2614
	val &= ~RTERM_SELECT_MASK;
2615 2616
	val |= RTERM_SELECT(6);
	val |= TAP3_DISABLE;
2617
	intel_de_write(dev_priv, CNL_PORT_TX_DW5_GRP(port), val);
2618

2619
	/* Program PORT_TX_DW7 */
2620
	val = intel_de_read(dev_priv, CNL_PORT_TX_DW7_LN0(port));
2621
	val &= ~N_SCALAR_MASK;
2622
	val |= N_SCALAR(ddi_translations[level].dw7_n_scalar);
2623
	intel_de_write(dev_priv, CNL_PORT_TX_DW7_GRP(port), val);
2624 2625
}

2626
static void cnl_ddi_vswing_sequence(struct intel_encoder *encoder,
2627 2628
				    const struct intel_crtc_state *crtc_state,
				    int level)
2629
{
2630
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
2631
	enum port port = encoder->port;
2632
	int width, rate, ln;
2633
	u32 val;
2634

2635 2636
	width = crtc_state->lane_count;
	rate = crtc_state->port_clock;
2637 2638 2639 2640 2641 2642

	/*
	 * 1. If port type is eDP or DP,
	 * set PORT_PCS_DW1 cmnkeeper_enable to 1b,
	 * else clear to 0b.
	 */
2643
	val = intel_de_read(dev_priv, CNL_PORT_PCS_DW1_LN0(port));
2644
	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI))
2645
		val &= ~COMMON_KEEPER_EN;
2646 2647
	else
		val |= COMMON_KEEPER_EN;
2648
	intel_de_write(dev_priv, CNL_PORT_PCS_DW1_GRP(port), val);
2649 2650 2651

	/* 2. Program loadgen select */
	/*
2652 2653 2654 2655
	 * Program PORT_TX_DW4_LN depending on Bit rate and used lanes
	 * <= 6 GHz and 4 lanes (LN0=0, LN1=1, LN2=1, LN3=1)
	 * <= 6 GHz and 1,2 lanes (LN0=0, LN1=1, LN2=1, LN3=0)
	 * > 6 GHz (LN0=0, LN1=0, LN2=0, LN3=0)
2656
	 */
2657
	for (ln = 0; ln <= 3; ln++) {
2658
		val = intel_de_read(dev_priv, CNL_PORT_TX_DW4_LN(ln, port));
2659 2660
		val &= ~LOADGEN_SELECT;

2661 2662
		if ((rate <= 600000 && width == 4 && ln >= 1)  ||
		    (rate <= 600000 && width < 4 && (ln == 1 || ln == 2))) {
2663 2664
			val |= LOADGEN_SELECT;
		}
2665
		intel_de_write(dev_priv, CNL_PORT_TX_DW4_LN(ln, port), val);
2666
	}
2667 2668

	/* 3. Set PORT_CL_DW5 SUS Clock Config to 11b */
2669
	val = intel_de_read(dev_priv, CNL_PORT_CL1CM_DW5);
2670
	val |= SUS_CLOCK_CONFIG;
2671
	intel_de_write(dev_priv, CNL_PORT_CL1CM_DW5, val);
2672 2673

	/* 4. Clear training enable to change swing values */
2674
	val = intel_de_read(dev_priv, CNL_PORT_TX_DW5_LN0(port));
2675
	val &= ~TX_TRAINING_EN;
2676
	intel_de_write(dev_priv, CNL_PORT_TX_DW5_GRP(port), val);
2677 2678

	/* 5. Program swing and de-emphasis */
2679
	cnl_ddi_vswing_program(encoder, crtc_state, level);
2680 2681

	/* 6. Set training enable to trigger update */
2682
	val = intel_de_read(dev_priv, CNL_PORT_TX_DW5_LN0(port));
2683
	val |= TX_TRAINING_EN;
2684
	intel_de_write(dev_priv, CNL_PORT_TX_DW5_GRP(port), val);
2685 2686
}

2687
static void icl_ddi_combo_vswing_program(struct intel_encoder *encoder,
2688 2689
					 const struct intel_crtc_state *crtc_state,
					 int level)
2690
{
2691
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
2692
	const struct cnl_ddi_buf_trans *ddi_translations;
2693
	enum phy phy = intel_port_to_phy(dev_priv, encoder->port);
2694 2695
	int n_entries, ln;
	u32 val;
2696

2697
	if (INTEL_GEN(dev_priv) >= 12)
2698
		ddi_translations = tgl_get_combo_buf_trans(encoder, crtc_state, &n_entries);
2699 2700 2701
	else if (IS_PLATFORM(dev_priv, INTEL_JASPERLAKE))
		ddi_translations = jsl_get_combo_buf_trans(encoder, crtc_state, &n_entries);
	else if (IS_PLATFORM(dev_priv, INTEL_ELKHARTLAKE))
2702
		ddi_translations = ehl_get_combo_buf_trans(encoder, crtc_state, &n_entries);
2703
	else
2704
		ddi_translations = icl_get_combo_buf_trans(encoder, crtc_state, &n_entries);
2705 2706 2707 2708
	if (!ddi_translations)
		return;

	if (level >= n_entries) {
2709 2710 2711
		drm_dbg_kms(&dev_priv->drm,
			    "DDI translation not found for level %d. Using %d instead.",
			    level, n_entries - 1);
2712 2713 2714
		level = n_entries - 1;
	}

2715
	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP)) {
2716 2717 2718 2719 2720 2721 2722 2723
		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);

		val = EDP4K2K_MODE_OVRD_EN | EDP4K2K_MODE_OVRD_OPTIMIZED;
		intel_dp->hobl_active = is_hobl_buf_trans(ddi_translations);
		intel_de_rmw(dev_priv, ICL_PORT_CL_DW10(phy), val,
			     intel_dp->hobl_active ? val : 0);
	}

2724
	/* Set PORT_TX_DW5 */
2725
	val = intel_de_read(dev_priv, ICL_PORT_TX_DW5_LN0(phy));
2726 2727 2728
	val &= ~(SCALING_MODE_SEL_MASK | RTERM_SELECT_MASK |
		  TAP2_DISABLE | TAP3_DISABLE);
	val |= SCALING_MODE_SEL(0x2);
2729
	val |= RTERM_SELECT(0x6);
2730
	val |= TAP3_DISABLE;
2731
	intel_de_write(dev_priv, ICL_PORT_TX_DW5_GRP(phy), val);
2732 2733

	/* Program PORT_TX_DW2 */
2734
	val = intel_de_read(dev_priv, ICL_PORT_TX_DW2_LN0(phy));
2735 2736
	val &= ~(SWING_SEL_LOWER_MASK | SWING_SEL_UPPER_MASK |
		 RCOMP_SCALAR_MASK);
2737 2738
	val |= SWING_SEL_UPPER(ddi_translations[level].dw2_swing_sel);
	val |= SWING_SEL_LOWER(ddi_translations[level].dw2_swing_sel);
2739
	/* Program Rcomp scalar for every table entry */
2740
	val |= RCOMP_SCALAR(0x98);
2741
	intel_de_write(dev_priv, ICL_PORT_TX_DW2_GRP(phy), val);
2742 2743 2744 2745

	/* Program PORT_TX_DW4 */
	/* We cannot write to GRP. It would overwrite individual loadgen. */
	for (ln = 0; ln <= 3; ln++) {
2746
		val = intel_de_read(dev_priv, ICL_PORT_TX_DW4_LN(ln, phy));
2747 2748
		val &= ~(POST_CURSOR_1_MASK | POST_CURSOR_2_MASK |
			 CURSOR_COEFF_MASK);
2749 2750 2751
		val |= POST_CURSOR_1(ddi_translations[level].dw4_post_cursor_1);
		val |= POST_CURSOR_2(ddi_translations[level].dw4_post_cursor_2);
		val |= CURSOR_COEFF(ddi_translations[level].dw4_cursor_coeff);
2752
		intel_de_write(dev_priv, ICL_PORT_TX_DW4_LN(ln, phy), val);
2753
	}
2754 2755

	/* Program PORT_TX_DW7 */
2756
	val = intel_de_read(dev_priv, ICL_PORT_TX_DW7_LN0(phy));
2757 2758
	val &= ~N_SCALAR_MASK;
	val |= N_SCALAR(ddi_translations[level].dw7_n_scalar);
2759
	intel_de_write(dev_priv, ICL_PORT_TX_DW7_GRP(phy), val);
2760 2761 2762
}

static void icl_combo_phy_ddi_vswing_sequence(struct intel_encoder *encoder,
2763 2764
					      const struct intel_crtc_state *crtc_state,
					      int level)
2765 2766
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
2767
	enum phy phy = intel_port_to_phy(dev_priv, encoder->port);
2768
	int width, rate, ln;
2769 2770
	u32 val;

2771 2772
	width = crtc_state->lane_count;
	rate = crtc_state->port_clock;
2773 2774 2775 2776 2777 2778

	/*
	 * 1. If port type is eDP or DP,
	 * set PORT_PCS_DW1 cmnkeeper_enable to 1b,
	 * else clear to 0b.
	 */
2779
	val = intel_de_read(dev_priv, ICL_PORT_PCS_DW1_LN0(phy));
2780
	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI))
2781 2782 2783
		val &= ~COMMON_KEEPER_EN;
	else
		val |= COMMON_KEEPER_EN;
2784
	intel_de_write(dev_priv, ICL_PORT_PCS_DW1_GRP(phy), val);
2785 2786 2787 2788 2789 2790 2791 2792 2793

	/* 2. Program loadgen select */
	/*
	 * Program PORT_TX_DW4_LN depending on Bit rate and used lanes
	 * <= 6 GHz and 4 lanes (LN0=0, LN1=1, LN2=1, LN3=1)
	 * <= 6 GHz and 1,2 lanes (LN0=0, LN1=1, LN2=1, LN3=0)
	 * > 6 GHz (LN0=0, LN1=0, LN2=0, LN3=0)
	 */
	for (ln = 0; ln <= 3; ln++) {
2794
		val = intel_de_read(dev_priv, ICL_PORT_TX_DW4_LN(ln, phy));
2795 2796 2797 2798 2799 2800
		val &= ~LOADGEN_SELECT;

		if ((rate <= 600000 && width == 4 && ln >= 1) ||
		    (rate <= 600000 && width < 4 && (ln == 1 || ln == 2))) {
			val |= LOADGEN_SELECT;
		}
2801
		intel_de_write(dev_priv, ICL_PORT_TX_DW4_LN(ln, phy), val);
2802 2803 2804
	}

	/* 3. Set PORT_CL_DW5 SUS Clock Config to 11b */
2805
	val = intel_de_read(dev_priv, ICL_PORT_CL_DW5(phy));
2806
	val |= SUS_CLOCK_CONFIG;
2807
	intel_de_write(dev_priv, ICL_PORT_CL_DW5(phy), val);
2808 2809

	/* 4. Clear training enable to change swing values */
2810
	val = intel_de_read(dev_priv, ICL_PORT_TX_DW5_LN0(phy));
2811
	val &= ~TX_TRAINING_EN;
2812
	intel_de_write(dev_priv, ICL_PORT_TX_DW5_GRP(phy), val);
2813 2814

	/* 5. Program swing and de-emphasis */
2815
	icl_ddi_combo_vswing_program(encoder, crtc_state, level);
2816 2817

	/* 6. Set training enable to trigger update */
2818
	val = intel_de_read(dev_priv, ICL_PORT_TX_DW5_LN0(phy));
2819
	val |= TX_TRAINING_EN;
2820
	intel_de_write(dev_priv, ICL_PORT_TX_DW5_GRP(phy), val);
2821 2822
}

static void icl_mg_phy_ddi_vswing_sequence(struct intel_encoder *encoder,
					   const struct intel_crtc_state *crtc_state,
					   int level)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	enum tc_port tc_port = intel_port_to_tc(dev_priv, encoder->port);
	const struct icl_mg_phy_ddi_buf_trans *ddi_translations;
	int n_entries, ln;
	u32 val;

	ddi_translations = icl_get_mg_buf_trans(encoder, crtc_state, &n_entries);

	if (level >= n_entries) {
		drm_dbg_kms(&dev_priv->drm,
			    "DDI translation not found for level %d. Using %d instead.",
			    level, n_entries - 1);
		level = n_entries - 1;
	}

	/* Set MG_TX_LINK_PARAMS cri_use_fs32 to 0. */
	for (ln = 0; ln < 2; ln++) {
		val = intel_de_read(dev_priv, MG_TX1_LINK_PARAMS(ln, tc_port));
		val &= ~CRI_USE_FS32;
		intel_de_write(dev_priv, MG_TX1_LINK_PARAMS(ln, tc_port), val);

		val = intel_de_read(dev_priv, MG_TX2_LINK_PARAMS(ln, tc_port));
		val &= ~CRI_USE_FS32;
		intel_de_write(dev_priv, MG_TX2_LINK_PARAMS(ln, tc_port), val);
	}

	/* Program MG_TX_SWINGCTRL with values from vswing table */
	for (ln = 0; ln < 2; ln++) {
		val = intel_de_read(dev_priv, MG_TX1_SWINGCTRL(ln, tc_port));
		val &= ~CRI_TXDEEMPH_OVERRIDE_17_12_MASK;
		val |= CRI_TXDEEMPH_OVERRIDE_17_12(
			ddi_translations[level].cri_txdeemph_override_17_12);
		intel_de_write(dev_priv, MG_TX1_SWINGCTRL(ln, tc_port), val);

		val = intel_de_read(dev_priv, MG_TX2_SWINGCTRL(ln, tc_port));
		val &= ~CRI_TXDEEMPH_OVERRIDE_17_12_MASK;
		val |= CRI_TXDEEMPH_OVERRIDE_17_12(
			ddi_translations[level].cri_txdeemph_override_17_12);
		intel_de_write(dev_priv, MG_TX2_SWINGCTRL(ln, tc_port), val);
	}

	/* Program MG_TX_DRVCTRL with values from vswing table */
	for (ln = 0; ln < 2; ln++) {
		val = intel_de_read(dev_priv, MG_TX1_DRVCTRL(ln, tc_port));
		val &= ~(CRI_TXDEEMPH_OVERRIDE_11_6_MASK |
			 CRI_TXDEEMPH_OVERRIDE_5_0_MASK);
		val |= CRI_TXDEEMPH_OVERRIDE_5_0(
			ddi_translations[level].cri_txdeemph_override_5_0) |
			CRI_TXDEEMPH_OVERRIDE_11_6(
				ddi_translations[level].cri_txdeemph_override_11_6) |
			CRI_TXDEEMPH_OVERRIDE_EN;
		intel_de_write(dev_priv, MG_TX1_DRVCTRL(ln, tc_port), val);

		val = intel_de_read(dev_priv, MG_TX2_DRVCTRL(ln, tc_port));
		val &= ~(CRI_TXDEEMPH_OVERRIDE_11_6_MASK |
			 CRI_TXDEEMPH_OVERRIDE_5_0_MASK);
		val |= CRI_TXDEEMPH_OVERRIDE_5_0(
			ddi_translations[level].cri_txdeemph_override_5_0) |
			CRI_TXDEEMPH_OVERRIDE_11_6(
				ddi_translations[level].cri_txdeemph_override_11_6) |
			CRI_TXDEEMPH_OVERRIDE_EN;
		intel_de_write(dev_priv, MG_TX2_DRVCTRL(ln, tc_port), val);

		/* FIXME: Program CRI_LOADGEN_SEL after the spec is updated */
	}

	/*
	 * Program MG_CLKHUB<LN, port being used> with value from frequency table
	 * In case of Legacy mode on MG PHY, both TX1 and TX2 enabled so use the
	 * values from table for which TX1 and TX2 enabled.
	 */
	for (ln = 0; ln < 2; ln++) {
		val = intel_de_read(dev_priv, MG_CLKHUB(ln, tc_port));
		if (crtc_state->port_clock < 300000)
			val |= CFG_LOW_RATE_LKREN_EN;
		else
			val &= ~CFG_LOW_RATE_LKREN_EN;
		intel_de_write(dev_priv, MG_CLKHUB(ln, tc_port), val);
	}

	/* Program the MG_TX_DCC<LN, port being used> based on the link frequency */
	for (ln = 0; ln < 2; ln++) {
		val = intel_de_read(dev_priv, MG_TX1_DCC(ln, tc_port));
		val &= ~CFG_AMI_CK_DIV_OVERRIDE_VAL_MASK;
		if (crtc_state->port_clock <= 500000) {
			val &= ~CFG_AMI_CK_DIV_OVERRIDE_EN;
		} else {
			val |= CFG_AMI_CK_DIV_OVERRIDE_EN |
				CFG_AMI_CK_DIV_OVERRIDE_VAL(1);
		}
		intel_de_write(dev_priv, MG_TX1_DCC(ln, tc_port), val);

		val = intel_de_read(dev_priv, MG_TX2_DCC(ln, tc_port));
		val &= ~CFG_AMI_CK_DIV_OVERRIDE_VAL_MASK;
		if (crtc_state->port_clock <= 500000) {
			val &= ~CFG_AMI_CK_DIV_OVERRIDE_EN;
		} else {
			val |= CFG_AMI_CK_DIV_OVERRIDE_EN |
				CFG_AMI_CK_DIV_OVERRIDE_VAL(1);
		}
		intel_de_write(dev_priv, MG_TX2_DCC(ln, tc_port), val);
	}

	/* Program MG_TX_PISO_READLOAD with values from vswing table */
	for (ln = 0; ln < 2; ln++) {
		val = intel_de_read(dev_priv,
				    MG_TX1_PISO_READLOAD(ln, tc_port));
		val |= CRI_CALCINIT;
		intel_de_write(dev_priv, MG_TX1_PISO_READLOAD(ln, tc_port),
			       val);

		val = intel_de_read(dev_priv,
				    MG_TX2_PISO_READLOAD(ln, tc_port));
		val |= CRI_CALCINIT;
		intel_de_write(dev_priv, MG_TX2_PISO_READLOAD(ln, tc_port),
			       val);
	}
}

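/* Route the ICL vswing programming to the combo or MG PHY sequence. */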
static void icl_ddi_vswing_sequence(struct intel_encoder *encoder,
				    const struct intel_crtc_state *crtc_state,
				    int level)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	enum phy phy = intel_port_to_phy(dev_priv, encoder->port);

	if (intel_phy_is_combo(dev_priv, phy))
		icl_combo_phy_ddi_vswing_sequence(encoder, crtc_state, level);
	else
		icl_mg_phy_ddi_vswing_sequence(encoder, crtc_state, level);
}

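/*
 * Dekel (Type-C) PHY vswing programming on gen12: write the preshoot,
 * de-emphasis and vswing coefficients from the buf trans table into
 * DKL_TX_DPCNTL0/1 on both lanes and clear DKL_TX_DP20BITMODE.
 */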
static void
tgl_dkl_phy_ddi_vswing_sequence(struct intel_encoder *encoder,
				const struct intel_crtc_state *crtc_state,
				int level)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	enum tc_port tc_port = intel_port_to_tc(dev_priv, encoder->port);
	const struct tgl_dkl_phy_ddi_buf_trans *ddi_translations;
	u32 val, dpcnt_mask, dpcnt_val;
	int n_entries, ln;

	ddi_translations = tgl_get_dkl_buf_trans(encoder, crtc_state, &n_entries);

	if (level >= n_entries)
		level = n_entries - 1;

	dpcnt_mask = (DKL_TX_PRESHOOT_COEFF_MASK |
		      DKL_TX_DE_EMPAHSIS_COEFF_MASK |
		      DKL_TX_VSWING_CONTROL_MASK);
	dpcnt_val = DKL_TX_VSWING_CONTROL(ddi_translations[level].dkl_vswing_control);
	dpcnt_val |= DKL_TX_DE_EMPHASIS_COEFF(ddi_translations[level].dkl_de_emphasis_control);
	dpcnt_val |= DKL_TX_PRESHOOT_COEFF(ddi_translations[level].dkl_preshoot_control);

	for (ln = 0; ln < 2; ln++) {
		intel_de_write(dev_priv, HIP_INDEX_REG(tc_port),
			       HIP_INDEX_VAL(tc_port, ln));

		intel_de_write(dev_priv, DKL_TX_PMD_LANE_SUS(tc_port), 0);

		/* All the registers are RMW */
		val = intel_de_read(dev_priv, DKL_TX_DPCNTL0(tc_port));
		val &= ~dpcnt_mask;
		val |= dpcnt_val;
		intel_de_write(dev_priv, DKL_TX_DPCNTL0(tc_port), val);

		val = intel_de_read(dev_priv, DKL_TX_DPCNTL1(tc_port));
		val &= ~dpcnt_mask;
		val |= dpcnt_val;
		intel_de_write(dev_priv, DKL_TX_DPCNTL1(tc_port), val);

		val = intel_de_read(dev_priv, DKL_TX_DPCNTL2(tc_port));
		val &= ~DKL_TX_DP20BITMODE;
		intel_de_write(dev_priv, DKL_TX_DPCNTL2(tc_port), val);
	}
}

static void tgl_ddi_vswing_sequence(struct intel_encoder *encoder,
				    const struct intel_crtc_state *crtc_state,
				    int level)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	enum phy phy = intel_port_to_phy(dev_priv, encoder->port);

	if (intel_phy_is_combo(dev_priv, phy))
		icl_combo_phy_ddi_vswing_sequence(encoder, crtc_state, level);
	else
		tgl_dkl_phy_ddi_vswing_sequence(encoder, crtc_state, level);
}

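/*
 * Map a DPCD voltage-swing/pre-emphasis combination to its index in
 * index_to_dp_signal_levels[]; unknown combinations warn and fall back
 * to index 0.
 */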
static int translate_signal_level(struct intel_dp *intel_dp,
				  u8 signal_levels)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
	int i;

	for (i = 0; i < ARRAY_SIZE(index_to_dp_signal_levels); i++) {
		if (index_to_dp_signal_levels[i] == signal_levels)
			return i;
	}

	drm_WARN(&i915->drm, 1,
		 "Unsupported voltage swing/pre-emphasis level: 0x%x\n",
		 signal_levels);

	return 0;
}

static int intel_ddi_dp_level(struct intel_dp *intel_dp)
{
	u8 train_set = intel_dp->train_set[0];
	u8 signal_levels = train_set & (DP_TRAIN_VOLTAGE_SWING_MASK |
					DP_TRAIN_PRE_EMPHASIS_MASK);

	return translate_signal_level(intel_dp, signal_levels);
}

static void
tgl_set_signal_levels(struct intel_dp *intel_dp,
		      const struct intel_crtc_state *crtc_state)
{
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	int level = intel_ddi_dp_level(intel_dp);

	tgl_ddi_vswing_sequence(encoder, crtc_state, level);
}

static void
icl_set_signal_levels(struct intel_dp *intel_dp,
		      const struct intel_crtc_state *crtc_state)
{
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	int level = intel_ddi_dp_level(intel_dp);

	icl_ddi_vswing_sequence(encoder, crtc_state, level);
}

static void
cnl_set_signal_levels(struct intel_dp *intel_dp,
		      const struct intel_crtc_state *crtc_state)
{
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	int level = intel_ddi_dp_level(intel_dp);

	cnl_ddi_vswing_sequence(encoder, crtc_state, level);
}

static void
bxt_set_signal_levels(struct intel_dp *intel_dp,
		      const struct intel_crtc_state *crtc_state)
{
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	int level = intel_ddi_dp_level(intel_dp);

	bxt_ddi_vswing_sequence(encoder, crtc_state, level);
}

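/*
 * HSW/BDW/SKL style signal levels: the vswing/pre-emphasis entry is
 * selected directly in DDI_BUF_CTL, with an extra iboost step on gen9 BC.
 */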
static void
hsw_set_signal_levels(struct intel_dp *intel_dp,
		      const struct intel_crtc_state *crtc_state)
{
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	int level = intel_ddi_dp_level(intel_dp);
	enum port port = encoder->port;
	u32 signal_levels;

	signal_levels = DDI_BUF_TRANS_SELECT(level);

	drm_dbg_kms(&dev_priv->drm, "Using signal levels %08x\n",
		    signal_levels);

	intel_dp->DP &= ~DDI_BUF_EMP_MASK;
	intel_dp->DP |= signal_levels;

	if (IS_GEN9_BC(dev_priv))
		skl_ddi_set_iboost(encoder, crtc_state, level);

	intel_de_write(dev_priv, DDI_BUF_CTL(port), intel_dp->DP);
	intel_de_posting_read(dev_priv, DDI_BUF_CTL(port));
}

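/*
 * Return the DPCLKA_CFGCR0 clock-off bit for the given PHY; combo and
 * Type-C PHYs use different bits, and RKL has its own layout.
 */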
static u32 icl_dpclka_cfgcr0_clk_off(struct drm_i915_private *dev_priv,
				     enum phy phy)
{
	if (IS_ROCKETLAKE(dev_priv)) {
		return RKL_DPCLKA_CFGCR0_DDI_CLK_OFF(phy);
	} else if (intel_phy_is_combo(dev_priv, phy)) {
		return ICL_DPCLKA_CFGCR0_DDI_CLK_OFF(phy);
	} else if (intel_phy_is_tc(dev_priv, phy)) {
		enum tc_port tc_port = intel_port_to_tc(dev_priv,
							(enum port)phy);

		return ICL_DPCLKA_CFGCR0_TC_CLK_OFF(tc_port);
	}

	return 0;
}

static void dg1_map_plls_to_ports(struct intel_encoder *encoder,
				  const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_shared_dpll *pll = crtc_state->shared_dpll;
	enum phy phy = intel_port_to_phy(dev_priv, encoder->port);
	u32 val;

	/*
	 * If we fail this, something went very wrong: first 2 PLLs should be
	 * used by first 2 phys and last 2 PLLs by last phys
	 */
	if (drm_WARN_ON(&dev_priv->drm,
			(pll->info->id < DPLL_ID_DG1_DPLL2 && phy >= PHY_C) ||
			(pll->info->id >= DPLL_ID_DG1_DPLL2 && phy < PHY_C)))
		return;

	mutex_lock(&dev_priv->dpll.lock);

	val = intel_de_read(dev_priv, DG1_DPCLKA_CFGCR0(phy));
	drm_WARN_ON(&dev_priv->drm,
		    (val & DG1_DPCLKA_CFGCR0_DDI_CLK_OFF(phy)) == 0);

	val &= ~DG1_DPCLKA_CFGCR0_DDI_CLK_SEL_MASK(phy);
	val |= DG1_DPCLKA_CFGCR0_DDI_CLK_SEL(pll->info->id, phy);
	intel_de_write(dev_priv, DG1_DPCLKA_CFGCR0(phy), val);
	intel_de_posting_read(dev_priv, DG1_DPCLKA_CFGCR0(phy));

	val &= ~DG1_DPCLKA_CFGCR0_DDI_CLK_OFF(phy);
	intel_de_write(dev_priv, DG1_DPCLKA_CFGCR0(phy), val);

	mutex_unlock(&dev_priv->dpll.lock);
}

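/*
 * Select the PLL feeding this port in DPCLKA_CFGCR0 and ungate the DDI
 * clock; the clock is expected to still be gated when we get here.
 */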
static void icl_map_plls_to_ports(struct intel_encoder *encoder,
				  const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_shared_dpll *pll = crtc_state->shared_dpll;
	enum phy phy = intel_port_to_phy(dev_priv, encoder->port);
	u32 val;

	mutex_lock(&dev_priv->dpll.lock);

	val = intel_de_read(dev_priv, ICL_DPCLKA_CFGCR0);
	drm_WARN_ON(&dev_priv->drm,
		    (val & icl_dpclka_cfgcr0_clk_off(dev_priv, phy)) == 0);

	if (intel_phy_is_combo(dev_priv, phy)) {
		u32 mask, sel;

		if (IS_ROCKETLAKE(dev_priv)) {
			mask = RKL_DPCLKA_CFGCR0_DDI_CLK_SEL_MASK(phy);
			sel = RKL_DPCLKA_CFGCR0_DDI_CLK_SEL(pll->info->id, phy);
		} else {
			mask = ICL_DPCLKA_CFGCR0_DDI_CLK_SEL_MASK(phy);
			sel = ICL_DPCLKA_CFGCR0_DDI_CLK_SEL(pll->info->id, phy);
		}

		/*
		 * Even though this register references DDIs, note that we
		 * want to pass the PHY rather than the port (DDI).  For
		 * ICL, port=phy in all cases so it doesn't matter, but for
		 * EHL the bspec notes the following:
		 *
		 *   "DDID clock tied to DDIA clock, so DPCLKA_CFGCR0 DDIA
		 *   Clock Select chooses the PLL for both DDIA and DDID and
		 *   drives port A in all cases."
		 */
		val &= ~mask;
		val |= sel;
		intel_de_write(dev_priv, ICL_DPCLKA_CFGCR0, val);
		intel_de_posting_read(dev_priv, ICL_DPCLKA_CFGCR0);
	}

	val &= ~icl_dpclka_cfgcr0_clk_off(dev_priv, phy);
	intel_de_write(dev_priv, ICL_DPCLKA_CFGCR0, val);

	mutex_unlock(&dev_priv->dpll.lock);
}

static void dg1_unmap_plls_to_ports(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	enum phy phy = intel_port_to_phy(dev_priv, encoder->port);

	mutex_lock(&dev_priv->dpll.lock);

	intel_de_rmw(dev_priv, DG1_DPCLKA_CFGCR0(phy), 0,
		     DG1_DPCLKA_CFGCR0_DDI_CLK_OFF(phy));

	mutex_unlock(&dev_priv->dpll.lock);
}

static void icl_unmap_plls_to_ports(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	enum phy phy = intel_port_to_phy(dev_priv, encoder->port);
	u32 val;

	mutex_lock(&dev_priv->dpll.lock);

	val = intel_de_read(dev_priv, ICL_DPCLKA_CFGCR0);
	val |= icl_dpclka_cfgcr0_clk_off(dev_priv, phy);
	intel_de_write(dev_priv, ICL_DPCLKA_CFGCR0, val);

	mutex_unlock(&dev_priv->dpll.lock);
}

static void dg1_sanitize_port_clk_off(struct drm_i915_private *dev_priv,
				      u32 port_mask, bool ddi_clk_needed)
{
	enum port port;
	u32 val;

	for_each_port_masked(port, port_mask) {
		enum phy phy = intel_port_to_phy(dev_priv, port);
		bool ddi_clk_off;

		val = intel_de_read(dev_priv, DG1_DPCLKA_CFGCR0(phy));
		ddi_clk_off = val & DG1_DPCLKA_CFGCR0_DDI_CLK_OFF(phy);

		if (ddi_clk_needed == !ddi_clk_off)
			continue;

		/*
		 * Punt on the case now where clock is gated, but it would
		 * be needed by the port. Something else is really broken then.
		 */
		if (drm_WARN_ON(&dev_priv->drm, ddi_clk_needed))
			continue;

		drm_notice(&dev_priv->drm,
			   "PHY %c is disabled with an ungated DDI clock, gate it\n",
			   phy_name(phy));
		val |= DG1_DPCLKA_CFGCR0_DDI_CLK_OFF(phy);
		intel_de_write(dev_priv, DG1_DPCLKA_CFGCR0(phy), val);
	}
}

static void icl_sanitize_port_clk_off(struct drm_i915_private *dev_priv,
				      u32 port_mask, bool ddi_clk_needed)
{
	enum port port;
	u32 val;

	val = intel_de_read(dev_priv, ICL_DPCLKA_CFGCR0);
	for_each_port_masked(port, port_mask) {
		enum phy phy = intel_port_to_phy(dev_priv, port);
		bool ddi_clk_off = val & icl_dpclka_cfgcr0_clk_off(dev_priv,
								   phy);

		if (ddi_clk_needed == !ddi_clk_off)
			continue;

		/*
		 * Punt on the case now where clock is gated, but it would
		 * be needed by the port. Something else is really broken then.
		 */
		if (drm_WARN_ON(&dev_priv->drm, ddi_clk_needed))
			continue;

		drm_notice(&dev_priv->drm,
			   "PHY %c is disabled/in DSI mode with an ungated DDI clock, gate it\n",
			   phy_name(phy));
		val |= icl_dpclka_cfgcr0_clk_off(dev_priv, phy);
		intel_de_write(dev_priv, ICL_DPCLKA_CFGCR0, val);
	}
}

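/*
 * Sanitize the clock gating state left by BIOS: gate the DDI clock of
 * every port in this encoder's port mask that does not actually need it.
 */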
void icl_sanitize_encoder_pll_mapping(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u32 port_mask;
	bool ddi_clk_needed;

	/*
	 * In case of DP MST, we sanitize the primary encoder only, not the
	 * virtual ones.
	 */
	if (encoder->type == INTEL_OUTPUT_DP_MST)
		return;

	if (!encoder->base.crtc && intel_encoder_is_dp(encoder)) {
		u8 pipe_mask;
		bool is_mst;

		intel_ddi_get_encoder_pipes(encoder, &pipe_mask, &is_mst);
		/*
		 * In the unlikely case that BIOS enables DP in MST mode, just
		 * warn since our MST HW readout is incomplete.
		 */
		if (drm_WARN_ON(&dev_priv->drm, is_mst))
			return;
	}

	port_mask = BIT(encoder->port);
	ddi_clk_needed = encoder->base.crtc;

	if (encoder->type == INTEL_OUTPUT_DSI) {
		struct intel_encoder *other_encoder;

		port_mask = intel_dsi_encoder_ports(encoder);
		/*
		 * Sanity check that we haven't incorrectly registered another
		 * encoder using any of the ports of this DSI encoder.
		 */
		for_each_intel_encoder(&dev_priv->drm, other_encoder) {
			if (other_encoder == encoder)
				continue;

			if (drm_WARN_ON(&dev_priv->drm,
					port_mask & BIT(other_encoder->port)))
				return;
		}
		/*
		 * For DSI we keep the ddi clocks gated
		 * except during enable/disable sequence.
		 */
		ddi_clk_needed = false;
	}

	if (IS_DG1(dev_priv))
		dg1_sanitize_port_clk_off(dev_priv, port_mask, ddi_clk_needed);
	else
		icl_sanitize_port_clk_off(dev_priv, port_mask, ddi_clk_needed);
}

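/*
 * Map the selected PLL to this DDI and ungate its clock; the register
 * to program (DDI_CLK_SEL, DPCLKA_CFGCR0, DPLL_CTRL2 or PORT_CLK_SEL)
 * depends on the platform generation.
 */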
static void intel_ddi_clk_select(struct intel_encoder *encoder,
				 const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	enum port port = encoder->port;
	enum phy phy = intel_port_to_phy(dev_priv, port);
	u32 val;
	const struct intel_shared_dpll *pll = crtc_state->shared_dpll;

	if (drm_WARN_ON(&dev_priv->drm, !pll))
		return;

	mutex_lock(&dev_priv->dpll.lock);

	if (INTEL_GEN(dev_priv) >= 11) {
		if (!intel_phy_is_combo(dev_priv, phy))
			intel_de_write(dev_priv, DDI_CLK_SEL(port),
				       icl_pll_to_ddi_clk_sel(encoder, crtc_state));
		else if (IS_JSL_EHL(dev_priv) && port >= PORT_C)
			/*
			 * MG does not exist but the programming is required
			 * to ungate DDIC and DDID
			 */
			intel_de_write(dev_priv, DDI_CLK_SEL(port),
				       DDI_CLK_SEL_MG);
	} else if (IS_CANNONLAKE(dev_priv)) {
		/* Configure DPCLKA_CFGCR0 to map the DPLL to the DDI. */
		val = intel_de_read(dev_priv, DPCLKA_CFGCR0);
		val &= ~DPCLKA_CFGCR0_DDI_CLK_SEL_MASK(port);
		val |= DPCLKA_CFGCR0_DDI_CLK_SEL(pll->info->id, port);
		intel_de_write(dev_priv, DPCLKA_CFGCR0, val);

		/*
		 * Configure DPCLKA_CFGCR0 to turn on the clock for the DDI.
		 * This step and the step before must be done with separate
		 * register writes.
		 */
		val = intel_de_read(dev_priv, DPCLKA_CFGCR0);
		val &= ~DPCLKA_CFGCR0_DDI_CLK_OFF(port);
		intel_de_write(dev_priv, DPCLKA_CFGCR0, val);
	} else if (IS_GEN9_BC(dev_priv)) {
		/* DDI -> PLL mapping */
		val = intel_de_read(dev_priv, DPLL_CTRL2);

		val &= ~(DPLL_CTRL2_DDI_CLK_OFF(port) |
			 DPLL_CTRL2_DDI_CLK_SEL_MASK(port));
		val |= (DPLL_CTRL2_DDI_CLK_SEL(pll->info->id, port) |
			DPLL_CTRL2_DDI_SEL_OVERRIDE(port));

		intel_de_write(dev_priv, DPLL_CTRL2, val);

	} else if (INTEL_GEN(dev_priv) < 9) {
		intel_de_write(dev_priv, PORT_CLK_SEL(port),
			       hsw_pll_to_ddi_pll_sel(pll));
	}

	mutex_unlock(&dev_priv->dpll.lock);
}

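/* Gate the DDI clock again, undoing intel_ddi_clk_select(). */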
static void intel_ddi_clk_disable(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	enum port port = encoder->port;
	enum phy phy = intel_port_to_phy(dev_priv, port);

	if (INTEL_GEN(dev_priv) >= 11) {
		if (!intel_phy_is_combo(dev_priv, phy) ||
		    (IS_JSL_EHL(dev_priv) && port >= PORT_C))
			intel_de_write(dev_priv, DDI_CLK_SEL(port),
				       DDI_CLK_SEL_NONE);
	} else if (IS_CANNONLAKE(dev_priv)) {
		intel_de_write(dev_priv, DPCLKA_CFGCR0,
			       intel_de_read(dev_priv, DPCLKA_CFGCR0) | DPCLKA_CFGCR0_DDI_CLK_OFF(port));
	} else if (IS_GEN9_BC(dev_priv)) {
		intel_de_write(dev_priv, DPLL_CTRL2,
			       intel_de_read(dev_priv, DPLL_CTRL2) | DPLL_CTRL2_DDI_CLK_OFF(port));
	} else if (INTEL_GEN(dev_priv) < 9) {
		intel_de_write(dev_priv, PORT_CLK_SEL(port),
			       PORT_CLK_SEL_NONE);
	}
}

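/*
 * Program the x1/x2 DP lane mode in MG_DP_MODE (DKL_DP_MODE on gen12+)
 * for Type-C ports, based on the pin assignment and lane count.  TBT-alt
 * mode needs no programming here.
 */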
static void
icl_program_mg_dp_mode(struct intel_digital_port *dig_port,
		       const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(dig_port->base.base.dev);
	enum tc_port tc_port = intel_port_to_tc(dev_priv, dig_port->base.port);
	u32 ln0, ln1, pin_assignment;
	u8 width;

	if (dig_port->tc_mode == TC_PORT_TBT_ALT)
		return;

	if (INTEL_GEN(dev_priv) >= 12) {
		intel_de_write(dev_priv, HIP_INDEX_REG(tc_port),
			       HIP_INDEX_VAL(tc_port, 0x0));
		ln0 = intel_de_read(dev_priv, DKL_DP_MODE(tc_port));
		intel_de_write(dev_priv, HIP_INDEX_REG(tc_port),
			       HIP_INDEX_VAL(tc_port, 0x1));
		ln1 = intel_de_read(dev_priv, DKL_DP_MODE(tc_port));
	} else {
		ln0 = intel_de_read(dev_priv, MG_DP_MODE(0, tc_port));
		ln1 = intel_de_read(dev_priv, MG_DP_MODE(1, tc_port));
	}

	ln0 &= ~(MG_DP_MODE_CFG_DP_X1_MODE | MG_DP_MODE_CFG_DP_X2_MODE);
	ln1 &= ~(MG_DP_MODE_CFG_DP_X1_MODE | MG_DP_MODE_CFG_DP_X2_MODE);

	/* DPPATC */
	pin_assignment = intel_tc_port_get_pin_assignment_mask(dig_port);
	width = crtc_state->lane_count;

	switch (pin_assignment) {
	case 0x0:
		drm_WARN_ON(&dev_priv->drm,
			    dig_port->tc_mode != TC_PORT_LEGACY);
		if (width == 1) {
			ln1 |= MG_DP_MODE_CFG_DP_X1_MODE;
		} else {
			ln0 |= MG_DP_MODE_CFG_DP_X2_MODE;
			ln1 |= MG_DP_MODE_CFG_DP_X2_MODE;
		}
		break;
	case 0x1:
		if (width == 4) {
			ln0 |= MG_DP_MODE_CFG_DP_X2_MODE;
			ln1 |= MG_DP_MODE_CFG_DP_X2_MODE;
		}
		break;
	case 0x2:
		if (width == 2) {
			ln0 |= MG_DP_MODE_CFG_DP_X2_MODE;
			ln1 |= MG_DP_MODE_CFG_DP_X2_MODE;
		}
		break;
	case 0x3:
	case 0x5:
		if (width == 1) {
			ln0 |= MG_DP_MODE_CFG_DP_X1_MODE;
			ln1 |= MG_DP_MODE_CFG_DP_X1_MODE;
		} else {
			ln0 |= MG_DP_MODE_CFG_DP_X2_MODE;
			ln1 |= MG_DP_MODE_CFG_DP_X2_MODE;
		}
		break;
	case 0x4:
	case 0x6:
		if (width == 1) {
			ln0 |= MG_DP_MODE_CFG_DP_X1_MODE;
			ln1 |= MG_DP_MODE_CFG_DP_X1_MODE;
		} else {
			ln0 |= MG_DP_MODE_CFG_DP_X2_MODE;
			ln1 |= MG_DP_MODE_CFG_DP_X2_MODE;
		}
		break;
	default:
		MISSING_CASE(pin_assignment);
	}

	if (INTEL_GEN(dev_priv) >= 12) {
		intel_de_write(dev_priv, HIP_INDEX_REG(tc_port),
			       HIP_INDEX_VAL(tc_port, 0x0));
		intel_de_write(dev_priv, DKL_DP_MODE(tc_port), ln0);
		intel_de_write(dev_priv, HIP_INDEX_REG(tc_port),
			       HIP_INDEX_VAL(tc_port, 0x1));
		intel_de_write(dev_priv, DKL_DP_MODE(tc_port), ln1);
	} else {
		intel_de_write(dev_priv, MG_DP_MODE(0, tc_port), ln0);
		intel_de_write(dev_priv, MG_DP_MODE(1, tc_port), ln1);
	}
}

static enum transcoder
tgl_dp_tp_transcoder(const struct intel_crtc_state *crtc_state)
{
	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP_MST))
		return crtc_state->mst_master_transcoder;
	else
		return crtc_state->cpu_transcoder;
}

i915_reg_t dp_tp_ctl_reg(struct intel_encoder *encoder,
			 const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);

	if (INTEL_GEN(dev_priv) >= 12)
		return TGL_DP_TP_CTL(tgl_dp_tp_transcoder(crtc_state));
	else
		return DP_TP_CTL(encoder->port);
}

i915_reg_t dp_tp_status_reg(struct intel_encoder *encoder,
			    const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);

	if (INTEL_GEN(dev_priv) >= 12)
		return TGL_DP_TP_STATUS(tgl_dp_tp_transcoder(crtc_state));
	else
		return DP_TP_STATUS(encoder->port);
}

static void intel_dp_sink_set_fec_ready(struct intel_dp *intel_dp,
					const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);

	if (!crtc_state->fec_enable)
		return;

	if (drm_dp_dpcd_writeb(&intel_dp->aux, DP_FEC_CONFIGURATION, DP_FEC_READY) <= 0)
		drm_dbg_kms(&i915->drm,
			    "Failed to set FEC_READY in the sink\n");
}

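/*
 * FEC enable/disable via DP_TP_CTL; both helpers are no-ops unless
 * crtc_state->fec_enable is set.
 */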
static void intel_ddi_enable_fec(struct intel_encoder *encoder,
				 const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp;
	u32 val;

	if (!crtc_state->fec_enable)
		return;

	intel_dp = enc_to_intel_dp(encoder);
	val = intel_de_read(dev_priv, dp_tp_ctl_reg(encoder, crtc_state));
	val |= DP_TP_CTL_FEC_ENABLE;
	intel_de_write(dev_priv, dp_tp_ctl_reg(encoder, crtc_state), val);
}

static void intel_ddi_disable_fec_state(struct intel_encoder *encoder,
					const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp;
	u32 val;

	if (!crtc_state->fec_enable)
		return;

	intel_dp = enc_to_intel_dp(encoder);
	val = intel_de_read(dev_priv, dp_tp_ctl_reg(encoder, crtc_state));
	val &= ~DP_TP_CTL_FEC_ENABLE;
	intel_de_write(dev_priv, dp_tp_ctl_reg(encoder, crtc_state), val);
	intel_de_posting_read(dev_priv, dp_tp_ctl_reg(encoder, crtc_state));
}

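/*
 * Gen12+ DP pre-enable, following the bspec "Enable and Train Display
 * Port" sequence; the numbered comments below map to the bspec steps.
 */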
static void tgl_ddi_pre_enable_dp(struct intel_atomic_state *state,
				  struct intel_encoder *encoder,
				  const struct intel_crtc_state *crtc_state,
				  const struct drm_connector_state *conn_state)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	enum phy phy = intel_port_to_phy(dev_priv, encoder->port);
	struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
	bool is_mst = intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP_MST);
	int level = intel_ddi_dp_level(intel_dp);

	intel_dp_set_link_params(intel_dp,
				 crtc_state->port_clock,
				 crtc_state->lane_count);

	/*
	 * 1. Enable Power Wells
	 *
	 * This was handled at the beginning of intel_atomic_commit_tail(),
	 * before we called down into this function.
	 */

	/* 2. Enable Panel Power if PPS is required */
	intel_pps_on(intel_dp);

	/*
	 * 3. For non-TBT Type-C ports, set FIA lane count
	 * (DFLEXDPSP.DPX4TXLATC)
	 *
	 * This was done before tgl_ddi_pre_enable_dp by
	 * hsw_crtc_enable()->intel_encoders_pre_pll_enable().
	 */

	/*
	 * 4. Enable the port PLL.
	 *
	 * The PLL enabling itself was already done before this function by
	 * hsw_crtc_enable()->intel_enable_shared_dpll().  We need only
	 * configure the PLL to port mapping here.
	 */
	intel_ddi_clk_select(encoder, crtc_state);

	/* 5. If IO power is controlled through PWR_WELL_CTL, Enable IO Power */
	if (!intel_phy_is_tc(dev_priv, phy) ||
	    dig_port->tc_mode != TC_PORT_TBT_ALT) {
		drm_WARN_ON(&dev_priv->drm, dig_port->ddi_io_wakeref);
		dig_port->ddi_io_wakeref = intel_display_power_get(dev_priv,
								   dig_port->ddi_io_power_domain);
	}

	/* 6. Program DP_MODE */
	icl_program_mg_dp_mode(dig_port, crtc_state);

	/*
	 * 7. The rest of the below are substeps under the bspec's "Enable and
	 * Train Display Port" step.  Note that steps that are specific to
	 * MST will be handled by intel_mst_pre_enable_dp() before/after it
	 * calls into this function.  Also intel_mst_pre_enable_dp() only calls
	 * us when active_mst_links==0, so any steps designated for "single
	 * stream or multi-stream master transcoder" can just be performed
	 * unconditionally here.
	 */

	/*
	 * 7.a Configure Transcoder Clock Select to direct the Port clock to the
	 * Transcoder.
	 */
	intel_ddi_enable_pipe_clock(encoder, crtc_state);

	/*
	 * 7.b Configure TRANS_DDI_FUNC_CTL DDI Select, DDI Mode Select & MST
	 * Transport Select
	 */
	intel_ddi_config_transcoder_func(encoder, crtc_state);

	/*
	 * 7.c Configure & enable DP_TP_CTL with link training pattern 1
	 * selected
	 *
	 * This will be handled by the intel_dp_start_link_train() farther
	 * down this function.
	 */

	/* 7.e Configure voltage swing and related IO settings */
	tgl_ddi_vswing_sequence(encoder, crtc_state, level);

	/*
	 * 7.f Combo PHY: Configure PORT_CL_DW10 Static Power Down to power up
	 * the used lanes of the DDI.
	 */
	if (intel_phy_is_combo(dev_priv, phy)) {
		bool lane_reversal =
			dig_port->saved_port_bits & DDI_BUF_PORT_REVERSAL;

		intel_combo_phy_power_up_lanes(dev_priv, phy, false,
					       crtc_state->lane_count,
					       lane_reversal);
	}

	/*
	 * 7.g Configure and enable DDI_BUF_CTL
	 * 7.h Wait for DDI_BUF_CTL DDI Idle Status = 0b (Not Idle), timeout
	 *     after 500 us.
	 *
	 * We only configure what the register value will be here.  Actual
	 * enabling happens during link training farther down.
	 */
	intel_ddi_init_dp_buf_reg(encoder, crtc_state);

	if (!is_mst)
		intel_dp_set_power(intel_dp, DP_SET_POWER_D0);

	intel_dp_configure_protocol_converter(intel_dp, crtc_state);
	intel_dp_sink_set_decompression_state(intel_dp, crtc_state, true);
	/*
	 * DDI FEC: "A source anticipating enabling FEC encoding sets the
	 * FEC_READY bit in the FEC_CONFIGURATION register to 1 before
	 * initiating link training"
	 */
	intel_dp_sink_set_fec_ready(intel_dp, crtc_state);

	intel_dp_check_frl_training(intel_dp);
	intel_dp_pcon_dsc_configure(intel_dp, crtc_state);

	/*
	 * 7.i Follow DisplayPort specification training sequence (see notes for
	 *     failure handling)
	 * 7.j If DisplayPort multi-stream - Set DP_TP_CTL link training to Idle
	 *     Pattern, wait for 5 idle patterns (DP_TP_STATUS Min_Idles_Sent)
	 *     (timeout after 800 us)
	 */
	intel_dp_start_link_train(intel_dp, crtc_state);

	/* 7.k Set DP_TP_CTL link training to Normal */
	if (!is_trans_port_sync_mode(crtc_state))
		intel_dp_stop_link_train(intel_dp, crtc_state);

	/* 7.l Configure and enable FEC if needed */
	intel_ddi_enable_fec(encoder, crtc_state);
	if (!crtc_state->bigjoiner)
		intel_dsc_enable(encoder, crtc_state);
}

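/*
 * DP pre-enable path for pre-gen12 platforms: panel power, clock select,
 * IO power, vswing programming, lane power-up and link training.
 */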
static void hsw_ddi_pre_enable_dp(struct intel_atomic_state *state,
				  struct intel_encoder *encoder,
				  const struct intel_crtc_state *crtc_state,
				  const struct drm_connector_state *conn_state)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	enum port port = encoder->port;
	enum phy phy = intel_port_to_phy(dev_priv, port);
	struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
	bool is_mst = intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP_MST);
	int level = intel_ddi_dp_level(intel_dp);

	if (INTEL_GEN(dev_priv) < 11)
		drm_WARN_ON(&dev_priv->drm,
			    is_mst && (port == PORT_A || port == PORT_E));
	else
		drm_WARN_ON(&dev_priv->drm, is_mst && port == PORT_A);

	intel_dp_set_link_params(intel_dp,
				 crtc_state->port_clock,
				 crtc_state->lane_count);

	intel_pps_on(intel_dp);

	intel_ddi_clk_select(encoder, crtc_state);

	if (!intel_phy_is_tc(dev_priv, phy) ||
	    dig_port->tc_mode != TC_PORT_TBT_ALT) {
		drm_WARN_ON(&dev_priv->drm, dig_port->ddi_io_wakeref);
		dig_port->ddi_io_wakeref = intel_display_power_get(dev_priv,
								   dig_port->ddi_io_power_domain);
	}

	icl_program_mg_dp_mode(dig_port, crtc_state);

	if (INTEL_GEN(dev_priv) >= 11)
		icl_ddi_vswing_sequence(encoder, crtc_state, level);
	else if (IS_CANNONLAKE(dev_priv))
		cnl_ddi_vswing_sequence(encoder, crtc_state, level);
	else if (IS_GEN9_LP(dev_priv))
		bxt_ddi_vswing_sequence(encoder, crtc_state, level);
	else
		intel_prepare_dp_ddi_buffers(encoder, crtc_state);

	if (intel_phy_is_combo(dev_priv, phy)) {
		bool lane_reversal =
			dig_port->saved_port_bits & DDI_BUF_PORT_REVERSAL;

		intel_combo_phy_power_up_lanes(dev_priv, phy, false,
					       crtc_state->lane_count,
					       lane_reversal);
	}

	intel_ddi_init_dp_buf_reg(encoder, crtc_state);
	if (!is_mst)
		intel_dp_set_power(intel_dp, DP_SET_POWER_D0);
	intel_dp_configure_protocol_converter(intel_dp, crtc_state);
	intel_dp_sink_set_decompression_state(intel_dp, crtc_state,
					      true);
	intel_dp_sink_set_fec_ready(intel_dp, crtc_state);
	intel_dp_start_link_train(intel_dp, crtc_state);
	if ((port != PORT_A || INTEL_GEN(dev_priv) >= 9) &&
	    !is_trans_port_sync_mode(crtc_state))
		intel_dp_stop_link_train(intel_dp, crtc_state);

	intel_ddi_enable_fec(encoder, crtc_state);

	if (!is_mst)
		intel_ddi_enable_pipe_clock(encoder, crtc_state);

	if (!crtc_state->bigjoiner)
		intel_dsc_enable(encoder, crtc_state);
}

static void intel_ddi_pre_enable_dp(struct intel_atomic_state *state,
				    struct intel_encoder *encoder,
				    const struct intel_crtc_state *crtc_state,
				    const struct drm_connector_state *conn_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);

	if (INTEL_GEN(dev_priv) >= 12)
		tgl_ddi_pre_enable_dp(state, encoder, crtc_state, conn_state);
	else
		hsw_ddi_pre_enable_dp(state, encoder, crtc_state, conn_state);

	/*
	 * For MST, the MSA is programmed from the MST encoder's pre_enable
	 * callback, after the Virtual Channel has been allocated.
	 */
	if (!intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP_MST)) {
		intel_ddi_set_dp_msa(crtc_state, conn_state);

		intel_dp_set_m_n(crtc_state, M1_N1);
	}
}

static void intel_ddi_pre_enable_hdmi(struct intel_atomic_state *state,
				      struct intel_encoder *encoder,
				      const struct intel_crtc_state *crtc_state,
				      const struct drm_connector_state *conn_state)
{
	struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
	struct intel_hdmi *intel_hdmi = &dig_port->hdmi;
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	int level = intel_ddi_hdmi_level(encoder, crtc_state);

	intel_dp_dual_mode_set_tmds_output(intel_hdmi, true);
	intel_ddi_clk_select(encoder, crtc_state);

	drm_WARN_ON(&dev_priv->drm, dig_port->ddi_io_wakeref);
	dig_port->ddi_io_wakeref = intel_display_power_get(dev_priv,
							   dig_port->ddi_io_power_domain);

	icl_program_mg_dp_mode(dig_port, crtc_state);

	if (INTEL_GEN(dev_priv) >= 12)
		tgl_ddi_vswing_sequence(encoder, crtc_state, level);
	else if (INTEL_GEN(dev_priv) == 11)
		icl_ddi_vswing_sequence(encoder, crtc_state, level);
	else if (IS_CANNONLAKE(dev_priv))
		cnl_ddi_vswing_sequence(encoder, crtc_state, level);
	else if (IS_GEN9_LP(dev_priv))
		bxt_ddi_vswing_sequence(encoder, crtc_state, level);
	else
		intel_prepare_hdmi_ddi_buffers(encoder, level);

	if (IS_GEN9_BC(dev_priv))
		skl_ddi_set_iboost(encoder, crtc_state, level);

	intel_ddi_enable_pipe_clock(encoder, crtc_state);

	dig_port->set_infoframes(encoder,
				 crtc_state->has_infoframe,
				 crtc_state, conn_state);
}

static void intel_ddi_pre_enable(struct intel_atomic_state *state,
				 struct intel_encoder *encoder,
				 const struct intel_crtc_state *crtc_state,
				 const struct drm_connector_state *conn_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum pipe pipe = crtc->pipe;

	/*
	 * When called from DP MST code:
	 * - conn_state will be NULL
	 * - encoder will be the main encoder (ie. mst->primary)
	 * - the main connector associated with this port
	 *   won't be active or linked to a crtc
	 * - crtc_state will be the state of the first stream to
	 *   be activated on this port, and it may not be the same
	 *   stream that will be deactivated last, but each stream
	 *   should have a state that is identical when it comes to
	 *   the DP link parameters
	 */

	drm_WARN_ON(&dev_priv->drm, crtc_state->has_pch_encoder);

	if (IS_DG1(dev_priv))
		dg1_map_plls_to_ports(encoder, crtc_state);
	else if (INTEL_GEN(dev_priv) >= 11)
		icl_map_plls_to_ports(encoder, crtc_state);

	intel_set_cpu_fifo_underrun_reporting(dev_priv, pipe, true);

	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) {
		intel_ddi_pre_enable_hdmi(state, encoder, crtc_state,
					  conn_state);
	} else {
		struct intel_digital_port *dig_port = enc_to_dig_port(encoder);

		intel_ddi_pre_enable_dp(state, encoder, crtc_state,
					conn_state);

		/* FIXME precompute everything properly */
		/* FIXME how do we turn infoframes off again? */
		if (dig_port->lspcon.active && dig_port->dp.has_hdmi_sink)
			dig_port->set_infoframes(encoder,
						 crtc_state->has_infoframe,
						 crtc_state, conn_state);
	}
}

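/*
 * Disable the DDI buffer and put DP_TP_CTL back into training pattern 1,
 * then wait for the buffer to idle if it had been enabled.
 */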
static void intel_disable_ddi_buf(struct intel_encoder *encoder,
				  const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	enum port port = encoder->port;
	bool wait = false;
	u32 val;

	val = intel_de_read(dev_priv, DDI_BUF_CTL(port));
	if (val & DDI_BUF_CTL_ENABLE) {
		val &= ~DDI_BUF_CTL_ENABLE;
		intel_de_write(dev_priv, DDI_BUF_CTL(port), val);
		wait = true;
	}

	if (intel_crtc_has_dp_encoder(crtc_state)) {
		val = intel_de_read(dev_priv, dp_tp_ctl_reg(encoder, crtc_state));
		val &= ~(DP_TP_CTL_ENABLE | DP_TP_CTL_LINK_TRAIN_MASK);
		val |= DP_TP_CTL_LINK_TRAIN_PAT1;
		intel_de_write(dev_priv, dp_tp_ctl_reg(encoder, crtc_state), val);
	}

	/* Disable FEC in DP Sink */
	intel_ddi_disable_fec_state(encoder, crtc_state);

	if (wait)
		intel_wait_ddi_buf_idle(dev_priv, port);
}

static void intel_ddi_post_disable_dp(struct intel_atomic_state *state,
				      struct intel_encoder *encoder,
				      const struct intel_crtc_state *old_crtc_state,
				      const struct drm_connector_state *old_conn_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
	struct intel_dp *intel_dp = &dig_port->dp;
	bool is_mst = intel_crtc_has_type(old_crtc_state,
					  INTEL_OUTPUT_DP_MST);
	enum phy phy = intel_port_to_phy(dev_priv, encoder->port);

	if (!is_mst)
		intel_dp_set_infoframes(encoder, false,
					old_crtc_state, old_conn_state);

	/*
	 * Power down sink before disabling the port, otherwise we end
	 * up getting interrupts from the sink on detecting link loss.
	 */
	intel_dp_set_power(intel_dp, DP_SET_POWER_D3);

	if (INTEL_GEN(dev_priv) >= 12) {
		if (is_mst) {
			enum transcoder cpu_transcoder = old_crtc_state->cpu_transcoder;
			u32 val;

			val = intel_de_read(dev_priv,
					    TRANS_DDI_FUNC_CTL(cpu_transcoder));
			val &= ~(TGL_TRANS_DDI_PORT_MASK |
				 TRANS_DDI_MODE_SELECT_MASK);
			intel_de_write(dev_priv,
				       TRANS_DDI_FUNC_CTL(cpu_transcoder),
				       val);
		}
	} else {
		if (!is_mst)
			intel_ddi_disable_pipe_clock(old_crtc_state);
	}

	intel_disable_ddi_buf(encoder, old_crtc_state);

	/*
	 * From TGL spec: "If single stream or multi-stream master transcoder:
	 * Configure Transcoder Clock select to direct no clock to the
	 * transcoder"
	 */
	if (INTEL_GEN(dev_priv) >= 12)
		intel_ddi_disable_pipe_clock(old_crtc_state);

	intel_pps_vdd_on(intel_dp);
	intel_pps_off(intel_dp);

	if (!intel_phy_is_tc(dev_priv, phy) ||
	    dig_port->tc_mode != TC_PORT_TBT_ALT)
		intel_display_power_put(dev_priv,
					dig_port->ddi_io_power_domain,
					fetch_and_zero(&dig_port->ddi_io_wakeref));

	intel_ddi_clk_disable(encoder);
}

static void intel_ddi_post_disable_hdmi(struct intel_atomic_state *state,
					struct intel_encoder *encoder,
					const struct intel_crtc_state *old_crtc_state,
					const struct drm_connector_state *old_conn_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
	struct intel_hdmi *intel_hdmi = &dig_port->hdmi;

	dig_port->set_infoframes(encoder, false,
				 old_crtc_state, old_conn_state);

	intel_ddi_disable_pipe_clock(old_crtc_state);

	intel_disable_ddi_buf(encoder, old_crtc_state);

	intel_display_power_put(dev_priv,
				dig_port->ddi_io_power_domain,
				fetch_and_zero(&dig_port->ddi_io_wakeref));

	intel_ddi_clk_disable(encoder);

	intel_dp_dual_mode_set_tmds_output(intel_hdmi, false);
}

static void intel_ddi_post_disable(struct intel_atomic_state *state,
				   struct intel_encoder *encoder,
				   const struct intel_crtc_state *old_crtc_state,
				   const struct drm_connector_state *old_conn_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
	enum phy phy = intel_port_to_phy(dev_priv, encoder->port);
	bool is_tc_port = intel_phy_is_tc(dev_priv, phy);

	if (!intel_crtc_has_type(old_crtc_state, INTEL_OUTPUT_DP_MST)) {
		intel_crtc_vblank_off(old_crtc_state);

		intel_disable_pipe(old_crtc_state);

		intel_ddi_disable_transcoder_func(old_crtc_state);

		intel_dsc_disable(old_crtc_state);

		if (INTEL_GEN(dev_priv) >= 9)
			skl_scaler_disable(old_crtc_state);
		else
			ilk_pfit_disable(old_crtc_state);
	}

	if (old_crtc_state->bigjoiner_linked_crtc) {
		struct intel_atomic_state *state =
			to_intel_atomic_state(old_crtc_state->uapi.state);
		struct intel_crtc *slave =
			old_crtc_state->bigjoiner_linked_crtc;
		const struct intel_crtc_state *old_slave_crtc_state =
			intel_atomic_get_old_crtc_state(state, slave);

		intel_crtc_vblank_off(old_slave_crtc_state);
		trace_intel_pipe_disable(slave);

		intel_dsc_disable(old_slave_crtc_state);
		skl_scaler_disable(old_slave_crtc_state);
	}

	/*
	 * When called from DP MST code:
	 * - old_conn_state will be NULL
	 * - encoder will be the main encoder (ie. mst->primary)
	 * - the main connector associated with this port
	 *   won't be active or linked to a crtc
	 * - old_crtc_state will be the state of the last stream to
	 *   be deactivated on this port, and it may not be the same
	 *   stream that was activated last, but each stream
	 *   should have a state that is identical when it comes to
	 *   the DP link parameters
	 */

	if (intel_crtc_has_type(old_crtc_state, INTEL_OUTPUT_HDMI))
		intel_ddi_post_disable_hdmi(state, encoder, old_crtc_state,
					    old_conn_state);
	else
		intel_ddi_post_disable_dp(state, encoder, old_crtc_state,
					  old_conn_state);

	if (IS_DG1(dev_priv))
		dg1_unmap_plls_to_ports(encoder);
	else if (INTEL_GEN(dev_priv) >= 11)
		icl_unmap_plls_to_ports(encoder);

	if (intel_crtc_has_dp_encoder(old_crtc_state) || is_tc_port)
		intel_display_power_put(dev_priv,
					intel_ddi_main_link_aux_domain(dig_port),
					fetch_and_zero(&dig_port->aux_wakeref));

	if (is_tc_port)
		intel_tc_port_put_link(dig_port);
}

void intel_ddi_fdi_post_disable(struct intel_atomic_state *state,
				struct intel_encoder *encoder,
				const struct intel_crtc_state *old_crtc_state,
				const struct drm_connector_state *old_conn_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u32 val;

	/*
	 * Bspec lists this as both step 13 (before DDI_BUF_CTL disable)
	 * and step 18 (after clearing PORT_CLK_SEL). Based on a BUN,
	 * step 13 is the correct place for it. Step 18 is where it was
	 * originally before the BUN.
	 */
	val = intel_de_read(dev_priv, FDI_RX_CTL(PIPE_A));
	val &= ~FDI_RX_ENABLE;
	intel_de_write(dev_priv, FDI_RX_CTL(PIPE_A), val);

	intel_disable_ddi_buf(encoder, old_crtc_state);
	intel_ddi_clk_disable(encoder);

	val = intel_de_read(dev_priv, FDI_RX_MISC(PIPE_A));
	val &= ~(FDI_RX_PWRDN_LANE1_MASK | FDI_RX_PWRDN_LANE0_MASK);
	val |= FDI_RX_PWRDN_LANE1_VAL(2) | FDI_RX_PWRDN_LANE0_VAL(2);
	intel_de_write(dev_priv, FDI_RX_MISC(PIPE_A), val);

	val = intel_de_read(dev_priv, FDI_RX_CTL(PIPE_A));
	val &= ~FDI_PCDCLK;
	intel_de_write(dev_priv, FDI_RX_CTL(PIPE_A), val);

	val = intel_de_read(dev_priv, FDI_RX_CTL(PIPE_A));
	val &= ~FDI_RX_PLL_ENABLE;
	intel_de_write(dev_priv, FDI_RX_CTL(PIPE_A), val);
}

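/*
 * In transcoder port sync mode, stop link training on all slave
 * transcoders tied to this master before stopping it on the master.
 */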
static void trans_port_sync_stop_link_train(struct intel_atomic_state *state,
					    struct intel_encoder *encoder,
					    const struct intel_crtc_state *crtc_state)
{
	const struct drm_connector_state *conn_state;
	struct drm_connector *conn;
	int i;

	if (!crtc_state->sync_mode_slaves_mask)
		return;

	for_each_new_connector_in_state(&state->base, conn, conn_state, i) {
		struct intel_encoder *slave_encoder =
			to_intel_encoder(conn_state->best_encoder);
		struct intel_crtc *slave_crtc = to_intel_crtc(conn_state->crtc);
		const struct intel_crtc_state *slave_crtc_state;

		if (!slave_crtc)
			continue;

		slave_crtc_state =
			intel_atomic_get_new_crtc_state(state, slave_crtc);

		if (slave_crtc_state->master_transcoder !=
		    crtc_state->cpu_transcoder)
			continue;

		intel_dp_stop_link_train(enc_to_intel_dp(slave_encoder),
					 slave_crtc_state);
	}

	usleep_range(200, 400);

	intel_dp_stop_link_train(enc_to_intel_dp(encoder),
				 crtc_state);
}

static void intel_enable_ddi_dp(struct intel_atomic_state *state,
				struct intel_encoder *encoder,
				const struct intel_crtc_state *crtc_state,
				const struct drm_connector_state *conn_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
	enum port port = encoder->port;

	if (port == PORT_A && INTEL_GEN(dev_priv) < 9)
		intel_dp_stop_link_train(intel_dp, crtc_state);

	intel_edp_backlight_on(crtc_state, conn_state);
	intel_psr_enable(intel_dp, crtc_state, conn_state);

	if (!dig_port->lspcon.active || dig_port->dp.has_hdmi_sink)
		intel_dp_set_infoframes(encoder, true, crtc_state, conn_state);

	intel_edp_drrs_enable(intel_dp, crtc_state);

	if (crtc_state->has_audio)
		intel_audio_codec_enable(encoder, crtc_state, conn_state);

	trans_port_sync_stop_link_train(state, encoder, crtc_state);
}

static i915_reg_t
gen9_chicken_trans_reg_by_port(struct drm_i915_private *dev_priv,
			       enum port port)
{
	static const enum transcoder trans[] = {
		[PORT_A] = TRANSCODER_EDP,
		[PORT_B] = TRANSCODER_A,
		[PORT_C] = TRANSCODER_B,
		[PORT_D] = TRANSCODER_C,
		[PORT_E] = TRANSCODER_A,
	};

	drm_WARN_ON(&dev_priv->drm, INTEL_GEN(dev_priv) < 9);

	if (drm_WARN_ON(&dev_priv->drm, port < PORT_A || port > PORT_E))
		port = PORT_A;

	return CHICKEN_TRANS(trans[port]);
}

static void intel_enable_ddi_hdmi(struct intel_atomic_state *state,
				  struct intel_encoder *encoder,
				  const struct intel_crtc_state *crtc_state,
				  const struct drm_connector_state *conn_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
	struct drm_connector *connector = conn_state->connector;
	enum port port = encoder->port;

	if (!intel_hdmi_handle_sink_scrambling(encoder, connector,
					       crtc_state->hdmi_high_tmds_clock_ratio,
					       crtc_state->hdmi_scrambling))
		drm_dbg_kms(&dev_priv->drm,
			    "[CONNECTOR:%d:%s] Failed to configure sink scrambling/TMDS bit clock ratio\n",
			    connector->base.id, connector->name);

	/* Display WA #1143: skl,kbl,cfl */
	if (IS_GEN9_BC(dev_priv)) {
		/*
		 * For some reason these chicken bits have been
		 * stuffed into a transcoder register, even though
		 * the bits affect a specific DDI port rather than
		 * a specific transcoder.
		 */
		i915_reg_t reg = gen9_chicken_trans_reg_by_port(dev_priv, port);
		u32 val;

		val = intel_de_read(dev_priv, reg);

		if (port == PORT_E)
			val |= DDIE_TRAINING_OVERRIDE_ENABLE |
				DDIE_TRAINING_OVERRIDE_VALUE;
		else
			val |= DDI_TRAINING_OVERRIDE_ENABLE |
				DDI_TRAINING_OVERRIDE_VALUE;

		intel_de_write(dev_priv, reg, val);
		intel_de_posting_read(dev_priv, reg);

		udelay(1);

		if (port == PORT_E)
			val &= ~(DDIE_TRAINING_OVERRIDE_ENABLE |
				 DDIE_TRAINING_OVERRIDE_VALUE);
		else
			val &= ~(DDI_TRAINING_OVERRIDE_ENABLE |
				 DDI_TRAINING_OVERRIDE_VALUE);

		intel_de_write(dev_priv, reg, val);
	}

	/* In HDMI/DVI mode, the port width, and swing/emphasis values
	 * are ignored so nothing special needs to be done besides
	 * enabling the port.
	 */
	intel_de_write(dev_priv, DDI_BUF_CTL(port),
		       dig_port->saved_port_bits | DDI_BUF_CTL_ENABLE);

	if (crtc_state->has_audio)
		intel_audio_codec_enable(encoder, crtc_state, conn_state);
}

static void intel_enable_ddi(struct intel_atomic_state *state,
			     struct intel_encoder *encoder,
			     const struct intel_crtc_state *crtc_state,
			     const struct drm_connector_state *conn_state)
{
	drm_WARN_ON(state->base.dev, crtc_state->has_pch_encoder);

	if (!crtc_state->bigjoiner_slave)
		intel_ddi_enable_transcoder_func(encoder, crtc_state);

	intel_enable_pipe(crtc_state);

	intel_crtc_vblank_on(crtc_state);

	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI))
		intel_enable_ddi_hdmi(state, encoder, crtc_state, conn_state);
	else
		intel_enable_ddi_dp(state, encoder, crtc_state, conn_state);

	/* Enable hdcp if it's desired */
	if (conn_state->content_protection ==
	    DRM_MODE_CONTENT_PROTECTION_DESIRED)
		intel_hdcp_enable(to_intel_connector(conn_state->connector),
				  crtc_state,
				  (u8)conn_state->hdcp_content_type);
}

static void intel_disable_ddi_dp(struct intel_atomic_state *state,
				 struct intel_encoder *encoder,
				 const struct intel_crtc_state *old_crtc_state,
				 const struct drm_connector_state *old_conn_state)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);

	intel_dp->link_trained = false;

	if (old_crtc_state->has_audio)
		intel_audio_codec_disable(encoder,
					  old_crtc_state, old_conn_state);

	intel_edp_drrs_disable(intel_dp, old_crtc_state);
	intel_psr_disable(intel_dp, old_crtc_state);
	intel_edp_backlight_off(old_conn_state);
	/* Disable the decompression in DP Sink */
	intel_dp_sink_set_decompression_state(intel_dp, old_crtc_state,
					      false);
}

static void intel_disable_ddi_hdmi(struct intel_atomic_state *state,
				   struct intel_encoder *encoder,
				   const struct intel_crtc_state *old_crtc_state,
				   const struct drm_connector_state *old_conn_state)
{
	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
	struct drm_connector *connector = old_conn_state->connector;

	if (old_crtc_state->has_audio)
		intel_audio_codec_disable(encoder,
					  old_crtc_state, old_conn_state);

	if (!intel_hdmi_handle_sink_scrambling(encoder, connector,
					       false, false))
		drm_dbg_kms(&i915->drm,
			    "[CONNECTOR:%d:%s] Failed to reset sink scrambling/TMDS bit clock ratio\n",
			    connector->base.id, connector->name);
}

static void intel_disable_ddi(struct intel_atomic_state *state,
			      struct intel_encoder *encoder,
			      const struct intel_crtc_state *old_crtc_state,
			      const struct drm_connector_state *old_conn_state)
{
	intel_hdcp_disable(to_intel_connector(old_conn_state->connector));

	if (intel_crtc_has_type(old_crtc_state, INTEL_OUTPUT_HDMI))
		intel_disable_ddi_hdmi(state, encoder, old_crtc_state,
				       old_conn_state);
	else
		intel_disable_ddi_dp(state, encoder, old_crtc_state,
				     old_conn_state);
}

static void intel_ddi_update_pipe_dp(struct intel_atomic_state *state,
				     struct intel_encoder *encoder,
				     const struct intel_crtc_state *crtc_state,
				     const struct drm_connector_state *conn_state)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);

	intel_ddi_set_dp_msa(crtc_state, conn_state);

	intel_psr_update(intel_dp, crtc_state, conn_state);
	intel_dp_set_infoframes(encoder, true, crtc_state, conn_state);
	intel_edp_drrs_update(intel_dp, crtc_state);

	intel_panel_update_backlight(state, encoder, crtc_state, conn_state);
}

void intel_ddi_update_pipe(struct intel_atomic_state *state,
			   struct intel_encoder *encoder,
			   const struct intel_crtc_state *crtc_state,
			   const struct drm_connector_state *conn_state)
{
	if (!intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI) &&
	    !intel_encoder_is_mst(encoder))
		intel_ddi_update_pipe_dp(state, encoder, crtc_state,
					 conn_state);

	intel_hdcp_update_pipe(state, encoder, crtc_state, conn_state);
}

4418 4419 4420 4421 4422 4423 4424 4425 4426
static void
intel_ddi_update_prepare(struct intel_atomic_state *state,
			 struct intel_encoder *encoder,
			 struct intel_crtc *crtc)
{
	struct intel_crtc_state *crtc_state =
		crtc ? intel_atomic_get_new_crtc_state(state, crtc) : NULL;
	int required_lanes = crtc_state ? crtc_state->lane_count : 1;

4427
	drm_WARN_ON(state->base.dev, crtc && crtc->active);
4428

4429 4430
	intel_tc_port_get_link(enc_to_dig_port(encoder),
		               required_lanes);
4431
	if (crtc_state && crtc_state->hw.active)
4432 4433 4434 4435 4436 4437 4438 4439
		intel_update_active_dpll(state, crtc, encoder);
}

static void
intel_ddi_update_complete(struct intel_atomic_state *state,
			  struct intel_encoder *encoder,
			  struct intel_crtc *crtc)
{
	intel_tc_port_put_link(enc_to_dig_port(encoder));
}

static void
intel_ddi_pre_pll_enable(struct intel_atomic_state *state,
			 struct intel_encoder *encoder,
			 const struct intel_crtc_state *crtc_state,
			 const struct drm_connector_state *conn_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
	enum phy phy = intel_port_to_phy(dev_priv, encoder->port);
	bool is_tc_port = intel_phy_is_tc(dev_priv, phy);

	if (is_tc_port)
		intel_tc_port_get_link(dig_port, crtc_state->lane_count);

	if (intel_crtc_has_dp_encoder(crtc_state) || is_tc_port) {
		drm_WARN_ON(&dev_priv->drm, dig_port->aux_wakeref);
		dig_port->aux_wakeref =
			intel_display_power_get(dev_priv,
						intel_ddi_main_link_aux_domain(dig_port));
	}

	if (is_tc_port && dig_port->tc_mode != TC_PORT_TBT_ALT)
		/*
		 * Program the lane count for static/dynamic connections on
		 * Type-C ports.  Skip this step for TBT.
		 */
		intel_tc_port_set_fia_lane_count(dig_port, crtc_state->lane_count);
	else if (IS_GEN9_LP(dev_priv))
		bxt_ddi_phy_set_lane_optim_mask(encoder,
						crtc_state->lane_lat_optim_mask);
}
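/*
 * Bring the port into a known state before link retraining: disable the
 * DDI buffer and transport if they were enabled, force training pattern 1
 * and then re-enable the buffer, waiting for it to become active.
 */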

static void intel_ddi_prepare_link_retrain(struct intel_dp *intel_dp,
					   const struct intel_crtc_state *crtc_state)
{
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	enum port port = encoder->port;
	u32 dp_tp_ctl, ddi_buf_ctl;
	bool wait = false;

	dp_tp_ctl = intel_de_read(dev_priv, dp_tp_ctl_reg(encoder, crtc_state));

	if (dp_tp_ctl & DP_TP_CTL_ENABLE) {
		ddi_buf_ctl = intel_de_read(dev_priv, DDI_BUF_CTL(port));
		if (ddi_buf_ctl & DDI_BUF_CTL_ENABLE) {
			intel_de_write(dev_priv, DDI_BUF_CTL(port),
				       ddi_buf_ctl & ~DDI_BUF_CTL_ENABLE);
			wait = true;
		}

		dp_tp_ctl &= ~(DP_TP_CTL_ENABLE | DP_TP_CTL_LINK_TRAIN_MASK);
		dp_tp_ctl |= DP_TP_CTL_LINK_TRAIN_PAT1;
		intel_de_write(dev_priv, dp_tp_ctl_reg(encoder, crtc_state), dp_tp_ctl);
		intel_de_posting_read(dev_priv, dp_tp_ctl_reg(encoder, crtc_state));

		if (wait)
			intel_wait_ddi_buf_idle(dev_priv, port);
	}

	dp_tp_ctl = DP_TP_CTL_ENABLE | DP_TP_CTL_LINK_TRAIN_PAT1;
	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP_MST)) {
		dp_tp_ctl |= DP_TP_CTL_MODE_MST;
	} else {
		dp_tp_ctl |= DP_TP_CTL_MODE_SST;
		if (drm_dp_enhanced_frame_cap(intel_dp->dpcd))
			dp_tp_ctl |= DP_TP_CTL_ENHANCED_FRAME_ENABLE;
	}
	intel_de_write(dev_priv, dp_tp_ctl_reg(encoder, crtc_state), dp_tp_ctl);
	intel_de_posting_read(dev_priv, dp_tp_ctl_reg(encoder, crtc_state));

	intel_dp->DP |= DDI_BUF_CTL_ENABLE;
	intel_de_write(dev_priv, DDI_BUF_CTL(port), intel_dp->DP);
	intel_de_posting_read(dev_priv, DDI_BUF_CTL(port));

	intel_wait_ddi_buf_active(dev_priv, port);
}

static void intel_ddi_set_link_train(struct intel_dp *intel_dp,
				     const struct intel_crtc_state *crtc_state,
				     u8 dp_train_pat)
{
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u32 temp;

	temp = intel_de_read(dev_priv, dp_tp_ctl_reg(encoder, crtc_state));

	temp &= ~DP_TP_CTL_LINK_TRAIN_MASK;
	switch (intel_dp_training_pattern_symbol(dp_train_pat)) {
	case DP_TRAINING_PATTERN_DISABLE:
		temp |= DP_TP_CTL_LINK_TRAIN_NORMAL;
		break;
	case DP_TRAINING_PATTERN_1:
		temp |= DP_TP_CTL_LINK_TRAIN_PAT1;
		break;
	case DP_TRAINING_PATTERN_2:
		temp |= DP_TP_CTL_LINK_TRAIN_PAT2;
		break;
	case DP_TRAINING_PATTERN_3:
		temp |= DP_TP_CTL_LINK_TRAIN_PAT3;
		break;
	case DP_TRAINING_PATTERN_4:
		temp |= DP_TP_CTL_LINK_TRAIN_PAT4;
		break;
	}

	intel_de_write(dev_priv, dp_tp_ctl_reg(encoder, crtc_state), temp);
}
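/*
 * Switch the transport to idle pattern transmission once link training is
 * done, waiting for the idle-done indication where the hardware provides
 * a usable one.
 */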

static void intel_ddi_set_idle_link_train(struct intel_dp *intel_dp,
					  const struct intel_crtc_state *crtc_state)
{
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	enum port port = encoder->port;
	u32 val;

	val = intel_de_read(dev_priv, dp_tp_ctl_reg(encoder, crtc_state));
	val &= ~DP_TP_CTL_LINK_TRAIN_MASK;
	val |= DP_TP_CTL_LINK_TRAIN_IDLE;
	intel_de_write(dev_priv, dp_tp_ctl_reg(encoder, crtc_state), val);

	/*
	 * Until TGL on PORT_A we can have only eDP in SST mode. There the only
	 * reason we need to set idle transmission mode is to work around a HW
	 * issue where we enable the pipe while not in idle link-training mode.
	 * In this case there is requirement to wait for a minimum number of
	 * idle patterns to be sent.
	 */
	if (port == PORT_A && INTEL_GEN(dev_priv) < 12)
		return;

	if (intel_de_wait_for_set(dev_priv,
				  dp_tp_status_reg(encoder, crtc_state),
				  DP_TP_STATUS_IDLE_DONE, 1))
		drm_err(&dev_priv->drm,
			"Timed out waiting for DP idle patterns\n");
}

static bool intel_ddi_is_audio_enabled(struct drm_i915_private *dev_priv,
				       enum transcoder cpu_transcoder)
{
	if (cpu_transcoder == TRANSCODER_EDP)
		return false;

	if (!intel_display_power_is_enabled(dev_priv, POWER_DOMAIN_AUDIO))
		return false;

	return intel_de_read(dev_priv, HSW_AUD_PIN_ELD_CP_VLD) &
		AUDIO_OUTPUT_ENABLE(cpu_transcoder);
}

void intel_ddi_compute_min_voltage_level(struct drm_i915_private *dev_priv,
					 struct intel_crtc_state *crtc_state)
{
	if (INTEL_GEN(dev_priv) >= 12 && crtc_state->port_clock > 594000)
		crtc_state->min_voltage_level = 2;
	else if (IS_JSL_EHL(dev_priv) && crtc_state->port_clock > 594000)
		crtc_state->min_voltage_level = 3;
	else if (INTEL_GEN(dev_priv) >= 11 && crtc_state->port_clock > 594000)
		crtc_state->min_voltage_level = 1;
	else if (IS_CANNONLAKE(dev_priv) && crtc_state->port_clock > 594000)
		crtc_state->min_voltage_level = 2;
}
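/*
 * Read out which transcoder, if any, is configured as the port sync
 * master of @cpu_transcoder. Returns INVALID_TRANSCODER when port sync is
 * not enabled on the transcoder.
 */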

static enum transcoder bdw_transcoder_master_readout(struct drm_i915_private *dev_priv,
						     enum transcoder cpu_transcoder)
{
	u32 master_select;

	if (INTEL_GEN(dev_priv) >= 11) {
		u32 ctl2 = intel_de_read(dev_priv, TRANS_DDI_FUNC_CTL2(cpu_transcoder));

		if ((ctl2 & PORT_SYNC_MODE_ENABLE) == 0)
			return INVALID_TRANSCODER;

		master_select = REG_FIELD_GET(PORT_SYNC_MODE_MASTER_SELECT_MASK, ctl2);
	} else {
		u32 ctl = intel_de_read(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder));

		if ((ctl & TRANS_DDI_PORT_SYNC_ENABLE) == 0)
			return INVALID_TRANSCODER;

		master_select = REG_FIELD_GET(TRANS_DDI_PORT_SYNC_MASTER_SELECT_MASK, ctl);
	}

	if (master_select == 0)
		return TRANSCODER_EDP;
	else
		return master_select - 1;
}

static void bdw_get_trans_port_sync_config(struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev);
	u32 transcoders = BIT(TRANSCODER_A) | BIT(TRANSCODER_B) |
		BIT(TRANSCODER_C) | BIT(TRANSCODER_D);
	enum transcoder cpu_transcoder;

	crtc_state->master_transcoder =
		bdw_transcoder_master_readout(dev_priv, crtc_state->cpu_transcoder);

	for_each_cpu_transcoder_masked(dev_priv, cpu_transcoder, transcoders) {
		enum intel_display_power_domain power_domain;
		intel_wakeref_t trans_wakeref;

		power_domain = POWER_DOMAIN_TRANSCODER(cpu_transcoder);
		trans_wakeref = intel_display_power_get_if_enabled(dev_priv,
								   power_domain);

		if (!trans_wakeref)
			continue;

		if (bdw_transcoder_master_readout(dev_priv, cpu_transcoder) ==
		    crtc_state->cpu_transcoder)
			crtc_state->sync_mode_slaves_mask |= BIT(cpu_transcoder);

		intel_display_power_put(dev_priv, power_domain, trans_wakeref);
	}

	drm_WARN_ON(&dev_priv->drm,
		    crtc_state->master_transcoder != INVALID_TRANSCODER &&
		    crtc_state->sync_mode_slaves_mask);
}
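/*
 * Decode TRANS_DDI_FUNC_CTL for the current transcoder and fill the
 * corresponding fields of @pipe_config, such as sync flags, pipe bpp,
 * output type, lane count, infoframe and FEC state.
 */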

static void intel_ddi_read_func_ctl(struct intel_encoder *encoder,
				    struct intel_crtc_state *pipe_config)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_crtc *intel_crtc = to_intel_crtc(pipe_config->uapi.crtc);
	enum transcoder cpu_transcoder = pipe_config->cpu_transcoder;
	struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
	u32 temp, flags = 0;

	temp = intel_de_read(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder));
	if (temp & TRANS_DDI_PHSYNC)
		flags |= DRM_MODE_FLAG_PHSYNC;
	else
		flags |= DRM_MODE_FLAG_NHSYNC;
	if (temp & TRANS_DDI_PVSYNC)
		flags |= DRM_MODE_FLAG_PVSYNC;
	else
		flags |= DRM_MODE_FLAG_NVSYNC;

	pipe_config->hw.adjusted_mode.flags |= flags;

	switch (temp & TRANS_DDI_BPC_MASK) {
	case TRANS_DDI_BPC_6:
		pipe_config->pipe_bpp = 18;
		break;
	case TRANS_DDI_BPC_8:
		pipe_config->pipe_bpp = 24;
		break;
	case TRANS_DDI_BPC_10:
		pipe_config->pipe_bpp = 30;
		break;
	case TRANS_DDI_BPC_12:
		pipe_config->pipe_bpp = 36;
		break;
	default:
		break;
	}

	switch (temp & TRANS_DDI_MODE_SELECT_MASK) {
	case TRANS_DDI_MODE_SELECT_HDMI:
		pipe_config->has_hdmi_sink = true;

		pipe_config->infoframes.enable |=
			intel_hdmi_infoframes_enabled(encoder, pipe_config);

		if (pipe_config->infoframes.enable)
			pipe_config->has_infoframe = true;

		if (temp & TRANS_DDI_HDMI_SCRAMBLING)
			pipe_config->hdmi_scrambling = true;
		if (temp & TRANS_DDI_HIGH_TMDS_CHAR_RATE)
			pipe_config->hdmi_high_tmds_clock_ratio = true;
		fallthrough;
	case TRANS_DDI_MODE_SELECT_DVI:
		pipe_config->output_types |= BIT(INTEL_OUTPUT_HDMI);
		pipe_config->lane_count = 4;
		break;
	case TRANS_DDI_MODE_SELECT_FDI:
		pipe_config->output_types |= BIT(INTEL_OUTPUT_ANALOG);
		break;
	case TRANS_DDI_MODE_SELECT_DP_SST:
		if (encoder->type == INTEL_OUTPUT_EDP)
			pipe_config->output_types |= BIT(INTEL_OUTPUT_EDP);
		else
			pipe_config->output_types |= BIT(INTEL_OUTPUT_DP);
		pipe_config->lane_count =
			((temp & DDI_PORT_WIDTH_MASK) >> DDI_PORT_WIDTH_SHIFT) + 1;
		intel_dp_get_m_n(intel_crtc, pipe_config);

		if (INTEL_GEN(dev_priv) >= 11) {
			i915_reg_t dp_tp_ctl = dp_tp_ctl_reg(encoder, pipe_config);

			pipe_config->fec_enable =
				intel_de_read(dev_priv, dp_tp_ctl) & DP_TP_CTL_FEC_ENABLE;

			drm_dbg_kms(&dev_priv->drm,
				    "[ENCODER:%d:%s] Fec status: %u\n",
				    encoder->base.base.id, encoder->base.name,
				    pipe_config->fec_enable);
		}

		if (dig_port->lspcon.active && dig_port->dp.has_hdmi_sink)
			pipe_config->infoframes.enable |=
				intel_lspcon_infoframes_enabled(encoder, pipe_config);
		else
			pipe_config->infoframes.enable |=
				intel_hdmi_infoframes_enabled(encoder, pipe_config);
		break;
	case TRANS_DDI_MODE_SELECT_DP_MST:
		pipe_config->output_types |= BIT(INTEL_OUTPUT_DP_MST);
		pipe_config->lane_count =
			((temp & DDI_PORT_WIDTH_MASK) >> DDI_PORT_WIDTH_SHIFT) + 1;

		if (INTEL_GEN(dev_priv) >= 12)
			pipe_config->mst_master_transcoder =
					REG_FIELD_GET(TRANS_DDI_MST_TRANSPORT_SELECT_MASK, temp);

		intel_dp_get_m_n(intel_crtc, pipe_config);

		pipe_config->infoframes.enable |=
			intel_hdmi_infoframes_enabled(encoder, pipe_config);
		break;
	default:
		break;
	}
}

void intel_ddi_get_config(struct intel_encoder *encoder,
			  struct intel_crtc_state *pipe_config)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	enum transcoder cpu_transcoder = pipe_config->cpu_transcoder;

	/* XXX: DSI transcoder paranoia */
	if (drm_WARN_ON(&dev_priv->drm, transcoder_is_dsi(cpu_transcoder)))
		return;

	if (pipe_config->bigjoiner_slave) {
		/* read out pipe settings from master */
		enum transcoder save = pipe_config->cpu_transcoder;

		/* Our own transcoder needs to be disabled when reading it in intel_ddi_read_func_ctl() */
		WARN_ON(pipe_config->output_types);
		pipe_config->cpu_transcoder = (enum transcoder)pipe_config->bigjoiner_linked_crtc->pipe;
		intel_ddi_read_func_ctl(encoder, pipe_config);
		pipe_config->cpu_transcoder = save;
	} else {
		intel_ddi_read_func_ctl(encoder, pipe_config);
	}

	pipe_config->has_audio =
		intel_ddi_is_audio_enabled(dev_priv, cpu_transcoder);

	if (encoder->type == INTEL_OUTPUT_EDP && dev_priv->vbt.edp.bpp &&
	    pipe_config->pipe_bpp > dev_priv->vbt.edp.bpp) {
		/*
		 * This is a big fat ugly hack.
		 *
		 * Some machines in UEFI boot mode provide us a VBT that has 18
		 * bpp and 1.62 GHz link bandwidth for eDP, which for reasons
		 * unknown we fail to light up. Yet the same BIOS boots up with
		 * 24 bpp and 2.7 GHz link. Use the same bpp as the BIOS uses as
		 * max, not what it tells us to use.
		 *
		 * Note: This will still be broken if the eDP panel is not lit
		 * up by the BIOS, and thus we can't get the mode at module
		 * load.
		 */
		drm_dbg_kms(&dev_priv->drm,
			    "pipe has %d bpp for eDP panel, overriding BIOS-provided max %d bpp\n",
			    pipe_config->pipe_bpp, dev_priv->vbt.edp.bpp);
		dev_priv->vbt.edp.bpp = pipe_config->pipe_bpp;
	}

	if (!pipe_config->bigjoiner_slave)
		intel_ddi_clock_get(encoder, pipe_config);

	if (IS_GEN9_LP(dev_priv))
		pipe_config->lane_lat_optim_mask =
			bxt_ddi_phy_get_lane_lat_optim_mask(encoder);

	intel_ddi_compute_min_voltage_level(dev_priv, pipe_config);

	intel_hdmi_read_gcp_infoframe(encoder, pipe_config);

	intel_read_infoframe(encoder, pipe_config,
			     HDMI_INFOFRAME_TYPE_AVI,
			     &pipe_config->infoframes.avi);
	intel_read_infoframe(encoder, pipe_config,
			     HDMI_INFOFRAME_TYPE_SPD,
			     &pipe_config->infoframes.spd);
	intel_read_infoframe(encoder, pipe_config,
			     HDMI_INFOFRAME_TYPE_VENDOR,
			     &pipe_config->infoframes.hdmi);
	intel_read_infoframe(encoder, pipe_config,
			     HDMI_INFOFRAME_TYPE_DRM,
			     &pipe_config->infoframes.drm);

	if (INTEL_GEN(dev_priv) >= 8)
		bdw_get_trans_port_sync_config(pipe_config);

	intel_read_dp_sdp(encoder, pipe_config, HDMI_PACKET_TYPE_GAMUT_METADATA);
	intel_read_dp_sdp(encoder, pipe_config, DP_SDP_VSC);
}

static void intel_ddi_sync_state(struct intel_encoder *encoder,
				 const struct intel_crtc_state *crtc_state)
{
	if (intel_crtc_has_dp_encoder(crtc_state))
		intel_dp_sync_state(encoder, crtc_state);
}

static bool intel_ddi_initial_fastset_check(struct intel_encoder *encoder,
					    struct intel_crtc_state *crtc_state)
{
	if (intel_crtc_has_dp_encoder(crtc_state))
		return intel_dp_initial_fastset_check(encoder, crtc_state);

	return true;
}

static enum intel_output_type
intel_ddi_compute_output_type(struct intel_encoder *encoder,
			      struct intel_crtc_state *crtc_state,
			      struct drm_connector_state *conn_state)
{
	switch (conn_state->connector->connector_type) {
	case DRM_MODE_CONNECTOR_HDMIA:
		return INTEL_OUTPUT_HDMI;
	case DRM_MODE_CONNECTOR_eDP:
		return INTEL_OUTPUT_EDP;
	case DRM_MODE_CONNECTOR_DisplayPort:
		return INTEL_OUTPUT_DP;
	default:
		MISSING_CASE(conn_state->connector->connector_type);
		return INTEL_OUTPUT_UNUSED;
	}
}

static int intel_ddi_compute_config(struct intel_encoder *encoder,
				    struct intel_crtc_state *pipe_config,
				    struct drm_connector_state *conn_state)
{
	struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	enum port port = encoder->port;
	int ret;

	if (HAS_TRANSCODER(dev_priv, TRANSCODER_EDP) && port == PORT_A)
		pipe_config->cpu_transcoder = TRANSCODER_EDP;

	if (intel_crtc_has_type(pipe_config, INTEL_OUTPUT_HDMI)) {
		ret = intel_hdmi_compute_config(encoder, pipe_config, conn_state);
	} else {
		ret = intel_dp_compute_config(encoder, pipe_config, conn_state);
	}

	if (ret)
		return ret;

	if (IS_HASWELL(dev_priv) && crtc->pipe == PIPE_A &&
	    pipe_config->cpu_transcoder == TRANSCODER_EDP)
		pipe_config->pch_pfit.force_thru =
			pipe_config->pch_pfit.enabled ||
			pipe_config->crc_enabled;

	if (IS_GEN9_LP(dev_priv))
		pipe_config->lane_lat_optim_mask =
			bxt_ddi_phy_calc_lane_lat_optim_mask(pipe_config->lane_count);

	intel_ddi_compute_min_voltage_level(dev_priv, pipe_config);

	return 0;
}

static bool mode_equal(const struct drm_display_mode *mode1,
		       const struct drm_display_mode *mode2)
{
	return drm_mode_match(mode1, mode2,
			      DRM_MODE_MATCH_TIMINGS |
			      DRM_MODE_MATCH_FLAGS |
			      DRM_MODE_MATCH_3D_FLAGS) &&
		mode1->clock == mode2->clock; /* we want an exact match */
}

static bool m_n_equal(const struct intel_link_m_n *m_n_1,
		      const struct intel_link_m_n *m_n_2)
{
	return m_n_1->tu == m_n_2->tu &&
		m_n_1->gmch_m == m_n_2->gmch_m &&
		m_n_1->gmch_n == m_n_2->gmch_n &&
		m_n_1->link_m == m_n_2->link_m &&
		m_n_1->link_n == m_n_2->link_n;
}

static bool crtcs_port_sync_compatible(const struct intel_crtc_state *crtc_state1,
				       const struct intel_crtc_state *crtc_state2)
{
	return crtc_state1->hw.active && crtc_state2->hw.active &&
		crtc_state1->output_types == crtc_state2->output_types &&
		crtc_state1->output_format == crtc_state2->output_format &&
		crtc_state1->lane_count == crtc_state2->lane_count &&
		crtc_state1->port_clock == crtc_state2->port_clock &&
		mode_equal(&crtc_state1->hw.adjusted_mode,
			   &crtc_state2->hw.adjusted_mode) &&
		m_n_equal(&crtc_state1->dp_m_n, &crtc_state2->dp_m_n);
}
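/*
 * Return a mask of the CPU transcoders driving other tiles of the same
 * tile group whose state is port sync compatible with @ref_crtc_state.
 */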

static u8
intel_ddi_port_sync_transcoders(const struct intel_crtc_state *ref_crtc_state,
				int tile_group_id)
{
	struct drm_connector *connector;
	const struct drm_connector_state *conn_state;
	struct drm_i915_private *dev_priv = to_i915(ref_crtc_state->uapi.crtc->dev);
	struct intel_atomic_state *state =
		to_intel_atomic_state(ref_crtc_state->uapi.state);
	u8 transcoders = 0;
	int i;

	/*
	 * We don't enable port sync on BDW due to missing w/as and
	 * due to not having adjusted the modeset sequence appropriately.
	 */
	if (INTEL_GEN(dev_priv) < 9)
		return 0;

	if (!intel_crtc_has_type(ref_crtc_state, INTEL_OUTPUT_DP))
		return 0;

	for_each_new_connector_in_state(&state->base, connector, conn_state, i) {
		struct intel_crtc *crtc = to_intel_crtc(conn_state->crtc);
		const struct intel_crtc_state *crtc_state;

		if (!crtc)
			continue;

		if (!connector->has_tile ||
		    connector->tile_group->id !=
		    tile_group_id)
			continue;
		crtc_state = intel_atomic_get_new_crtc_state(state,
							     crtc);
		if (!crtcs_port_sync_compatible(ref_crtc_state,
						crtc_state))
			continue;
		transcoders |= BIT(crtc_state->cpu_transcoder);
	}

	return transcoders;
}

static int intel_ddi_compute_config_late(struct intel_encoder *encoder,
					 struct intel_crtc_state *crtc_state,
					 struct drm_connector_state *conn_state)
{
	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
	struct drm_connector *connector = conn_state->connector;
	u8 port_sync_transcoders = 0;

	drm_dbg_kms(&i915->drm, "[ENCODER:%d:%s] [CRTC:%d:%s]",
		    encoder->base.base.id, encoder->base.name,
		    crtc_state->uapi.crtc->base.id, crtc_state->uapi.crtc->name);

	if (connector->has_tile)
		port_sync_transcoders = intel_ddi_port_sync_transcoders(crtc_state,
									connector->tile_group->id);

	/*
	 * EDP transcoders cannot be enslaved, so
	 * always make them a master when present.
	 */
	if (port_sync_transcoders & BIT(TRANSCODER_EDP))
		crtc_state->master_transcoder = TRANSCODER_EDP;
	else
		crtc_state->master_transcoder = ffs(port_sync_transcoders) - 1;

	if (crtc_state->master_transcoder == crtc_state->cpu_transcoder) {
		crtc_state->master_transcoder = INVALID_TRANSCODER;
		crtc_state->sync_mode_slaves_mask =
			port_sync_transcoders & ~BIT(crtc_state->cpu_transcoder);
	}

	return 0;
}

static void intel_ddi_encoder_destroy(struct drm_encoder *encoder)
{
	struct intel_digital_port *dig_port = enc_to_dig_port(to_intel_encoder(encoder));

	intel_dp_encoder_flush_work(encoder);

	drm_encoder_cleanup(encoder);
	if (dig_port)
		kfree(dig_port->hdcp_port_data.streams);
	kfree(dig_port);
}

static const struct drm_encoder_funcs intel_ddi_funcs = {
	.reset = intel_dp_encoder_reset,
	.destroy = intel_ddi_encoder_destroy,
};

static struct intel_connector *
intel_ddi_init_dp_connector(struct intel_digital_port *dig_port)
{
	struct drm_i915_private *dev_priv = to_i915(dig_port->base.base.dev);
	struct intel_connector *connector;
	enum port port = dig_port->base.port;

	connector = intel_connector_alloc();
	if (!connector)
		return NULL;

	dig_port->dp.output_reg = DDI_BUF_CTL(port);
	dig_port->dp.prepare_link_retrain = intel_ddi_prepare_link_retrain;
	dig_port->dp.set_link_train = intel_ddi_set_link_train;
	dig_port->dp.set_idle_link_train = intel_ddi_set_idle_link_train;

	if (INTEL_GEN(dev_priv) >= 12)
		dig_port->dp.set_signal_levels = tgl_set_signal_levels;
	else if (INTEL_GEN(dev_priv) >= 11)
		dig_port->dp.set_signal_levels = icl_set_signal_levels;
	else if (IS_CANNONLAKE(dev_priv))
		dig_port->dp.set_signal_levels = cnl_set_signal_levels;
	else if (IS_GEN9_LP(dev_priv))
		dig_port->dp.set_signal_levels = bxt_set_signal_levels;
	else
		dig_port->dp.set_signal_levels = hsw_set_signal_levels;

	dig_port->dp.voltage_max = intel_ddi_dp_voltage_max;
	dig_port->dp.preemph_max = intel_ddi_dp_preemph_max;

	if (!intel_dp_init_connector(dig_port, connector)) {
		kfree(connector);
		return NULL;
	}

	return connector;
}
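/*
 * Force a full modeset of @crtc by marking its connectors as changed in a
 * fresh atomic state and committing it.
 */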

static int modeset_pipe(struct drm_crtc *crtc,
			struct drm_modeset_acquire_ctx *ctx)
{
	struct drm_atomic_state *state;
	struct drm_crtc_state *crtc_state;
	int ret;

	state = drm_atomic_state_alloc(crtc->dev);
	if (!state)
		return -ENOMEM;

	state->acquire_ctx = ctx;

	crtc_state = drm_atomic_get_crtc_state(state, crtc);
	if (IS_ERR(crtc_state)) {
		ret = PTR_ERR(crtc_state);
		goto out;
	}

	crtc_state->connectors_changed = true;

	ret = drm_atomic_commit(state);
out:
	drm_atomic_state_put(state);

	return ret;
}
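/*
 * Re-read the sink's SCDC scrambling/TMDS clock ratio configuration and,
 * if it no longer matches the current crtc state, force a full modeset to
 * bring the HDMI link back up.
 */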

static int intel_hdmi_reset_link(struct intel_encoder *encoder,
				 struct drm_modeset_acquire_ctx *ctx)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_hdmi *hdmi = enc_to_intel_hdmi(encoder);
	struct intel_connector *connector = hdmi->attached_connector;
	struct i2c_adapter *adapter =
		intel_gmbus_get_adapter(dev_priv, hdmi->ddc_bus);
	struct drm_connector_state *conn_state;
	struct intel_crtc_state *crtc_state;
	struct intel_crtc *crtc;
	u8 config;
	int ret;

	if (!connector || connector->base.status != connector_status_connected)
		return 0;

	ret = drm_modeset_lock(&dev_priv->drm.mode_config.connection_mutex,
			       ctx);
	if (ret)
		return ret;

	conn_state = connector->base.state;

	crtc = to_intel_crtc(conn_state->crtc);
	if (!crtc)
		return 0;

	ret = drm_modeset_lock(&crtc->base.mutex, ctx);
	if (ret)
		return ret;

	crtc_state = to_intel_crtc_state(crtc->base.state);

	drm_WARN_ON(&dev_priv->drm,
		    !intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI));

	if (!crtc_state->hw.active)
		return 0;

	if (!crtc_state->hdmi_high_tmds_clock_ratio &&
	    !crtc_state->hdmi_scrambling)
		return 0;

	if (conn_state->commit &&
	    !try_wait_for_completion(&conn_state->commit->hw_done))
		return 0;

	ret = drm_scdc_readb(adapter, SCDC_TMDS_CONFIG, &config);
	if (ret < 0) {
		drm_err(&dev_priv->drm, "Failed to read TMDS config: %d\n",
			ret);
		return 0;
	}

	if (!!(config & SCDC_TMDS_BIT_CLOCK_RATIO_BY_40) ==
	    crtc_state->hdmi_high_tmds_clock_ratio &&
	    !!(config & SCDC_SCRAMBLING_ENABLE) ==
	    crtc_state->hdmi_scrambling)
		return 0;

	/*
	 * HDMI 2.0 says that one should not send scrambled data
	 * prior to configuring the sink scrambling, and that
	 * TMDS clock/data transmission should be suspended when
	 * changing the TMDS clock rate in the sink. So let's
	 * just do a full modeset here, even though some sinks
	 * would be perfectly happy if we were to just reconfigure
	 * the SCDC settings on the fly.
	 */
	return modeset_pipe(&crtc->base, ctx);
}
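/*
 * DDI hotplug handler: run the common hotplug processing and then retrain
 * the DP link or reset HDMI scrambling as needed, scheduling a detection
 * retry for sinks that are slow to come up.
 */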

static enum intel_hotplug_state
intel_ddi_hotplug(struct intel_encoder *encoder,
		  struct intel_connector *connector)
{
	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
	struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
	struct intel_dp *intel_dp = &dig_port->dp;
	enum phy phy = intel_port_to_phy(i915, encoder->port);
	bool is_tc = intel_phy_is_tc(i915, phy);
	struct drm_modeset_acquire_ctx ctx;
	enum intel_hotplug_state state;
	int ret;

	if (intel_dp->compliance.test_active &&
	    intel_dp->compliance.test_type == DP_TEST_LINK_PHY_TEST_PATTERN) {
		intel_dp_phy_test(encoder);
		/* just do the PHY test and nothing else */
		return INTEL_HOTPLUG_UNCHANGED;
	}

	state = intel_encoder_hotplug(encoder, connector);

	drm_modeset_acquire_init(&ctx, 0);

	for (;;) {
		if (connector->base.connector_type == DRM_MODE_CONNECTOR_HDMIA)
			ret = intel_hdmi_reset_link(encoder, &ctx);
		else
			ret = intel_dp_retrain_link(encoder, &ctx);

		if (ret == -EDEADLK) {
			drm_modeset_backoff(&ctx);
			continue;
		}

		break;
	}

	drm_modeset_drop_locks(&ctx);
	drm_modeset_acquire_fini(&ctx);
	drm_WARN(encoder->base.dev, ret,
		 "Acquiring modeset locks failed with %i\n", ret);

	/*
	 * Unpowered type-c dongles can take some time to boot and become
	 * responsive, so give those dongles some time to power up and then
	 * retry the probe.
	 *
	 * On many platforms the HDMI live state signal is known to be
	 * unreliable, so we can't use it to detect if a sink is connected or
	 * not. Instead we detect if it's connected based on whether we can
	 * read the EDID or not. That in turn has a problem during disconnect,
	 * since the HPD interrupt may be raised before the DDC lines get
	 * disconnected (due to how the required length of DDC vs. HPD
	 * connector pins are specified) and so we'll still be able to get a
	 * valid EDID. To solve this schedule another detection cycle if this
	 * time around we didn't detect any change in the sink's connection
	 * status.
	 *
	 * Type-c connectors which get their HPD signal deasserted then
	 * reasserted, without unplugging/replugging the sink from the
	 * connector, introduce a delay until the AUX channel communication
	 * becomes functional. Retry the detection for 5 seconds on type-c
	 * connectors to account for this delay.
5255
	 */
5256 5257
	if (state == INTEL_HOTPLUG_UNCHANGED &&
	    connector->hotplug_retries < (is_tc ? 5 : 1) &&
5258 5259 5260
	    !dig_port->dp.is_mst)
		state = INTEL_HOTPLUG_RETRY;

5261
	return state;
5262 5263
}

5264 5265 5266
static bool lpt_digital_port_connected(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
5267
	u32 bit = dev_priv->hotplug.pch_hpd[encoder->hpd_pin];
5268 5269 5270 5271 5272 5273 5274

	return intel_de_read(dev_priv, SDEISR) & bit;
}

static bool hsw_digital_port_connected(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
5275
	u32 bit = dev_priv->hotplug.hpd[encoder->hpd_pin];
5276

5277
	return intel_de_read(dev_priv, DEISR) & bit;
5278 5279 5280 5281 5282
}

static bool bdw_digital_port_connected(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
5283
	u32 bit = dev_priv->hotplug.hpd[encoder->hpd_pin];
5284 5285 5286 5287

	return intel_de_read(dev_priv, GEN8_DE_PORT_ISR) & bit;
}

static struct intel_connector *
intel_ddi_init_hdmi_connector(struct intel_digital_port *dig_port)
{
	struct intel_connector *connector;
	enum port port = dig_port->base.port;

	connector = intel_connector_alloc();
	if (!connector)
		return NULL;

	dig_port->hdmi.hdmi_reg = DDI_BUF_CTL(port);
	intel_hdmi_init_connector(dig_port, connector);

	return connector;
}
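/*
 * Decide whether DDI A must be treated as a 4 lane port even though the
 * BIOS did not set DDI_A_4_LANES.
 */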

static bool intel_ddi_a_force_4_lanes(struct intel_digital_port *dig_port)
{
	struct drm_i915_private *dev_priv = to_i915(dig_port->base.base.dev);

	if (dig_port->base.port != PORT_A)
		return false;

	if (dig_port->saved_port_bits & DDI_A_4_LANES)
		return false;

	/* Broxton/Geminilake: Bspec says that DDI_A_4_LANES is the only
	 *                     supported configuration
	 */
	if (IS_GEN9_LP(dev_priv))
		return true;

	/* Cannonlake: Most SKUs don't support DDI_E, and the only one
	 *             that does also has a full A/E split called DDI_F,
	 *             which makes DDI_E useless. However, for this case
	 *             let's trust the VBT info.
	 */
	if (IS_CANNONLAKE(dev_priv) &&
	    !intel_bios_is_port_present(dev_priv, PORT_E))
		return true;

	return false;
}
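/*
 * Number of lanes the DDI can use: always 4 on gen11+, otherwise
 * determined by how the lanes are split between DDI A and E, with a fixup
 * for BIOSes that leave DDI_A_4_LANES unset.
 */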

static int
intel_ddi_max_lanes(struct intel_digital_port *dig_port)
{
	struct drm_i915_private *dev_priv = to_i915(dig_port->base.base.dev);
	enum port port = dig_port->base.port;
	int max_lanes = 4;

	if (INTEL_GEN(dev_priv) >= 11)
		return max_lanes;

	if (port == PORT_A || port == PORT_E) {
		if (intel_de_read(dev_priv, DDI_BUF_CTL(PORT_A)) & DDI_A_4_LANES)
			max_lanes = port == PORT_A ? 4 : 0;
		else
			/* Both A and E share 2 lanes */
			max_lanes = 2;
	}

	/*
	 * Some BIOS might fail to set this bit on port A if eDP
	 * wasn't lit up at boot.  Force this bit set when needed
	 * so we use the proper lane count for our calculations.
	 */
	if (intel_ddi_a_force_4_lanes(dig_port)) {
		drm_dbg_kms(&dev_priv->drm,
			    "Forcing DDI_A_4_LANES for port A\n");
		dig_port->saved_port_bits |= DDI_A_4_LANES;
		max_lanes = 4;
	}

	return max_lanes;
}
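/*
 * HTI (aka HDPORT) may have claimed a PHY at boot; such PHYs are not
 * available to the driver.
 */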

static bool hti_uses_phy(struct drm_i915_private *i915, enum phy phy)
{
	return i915->hti_state & HDPORT_ENABLED &&
	       i915->hti_state & HDPORT_DDI_USED(phy);
}
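/* Per-platform mapping from DDI port to HPD pin. */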

static enum hpd_pin dg1_hpd_pin(struct drm_i915_private *dev_priv,
				enum port port)
{
	if (port >= PORT_TC1)
		return HPD_PORT_C + port - PORT_TC1;
	else
		return HPD_PORT_A + port - PORT_A;
}

static enum hpd_pin tgl_hpd_pin(struct drm_i915_private *dev_priv,
				enum port port)
{
	if (port >= PORT_TC1)
		return HPD_PORT_TC1 + port - PORT_TC1;
	else
		return HPD_PORT_A + port - PORT_A;
}

static enum hpd_pin rkl_hpd_pin(struct drm_i915_private *dev_priv,
				enum port port)
{
	if (HAS_PCH_TGP(dev_priv))
		return tgl_hpd_pin(dev_priv, port);

5395 5396
	if (port >= PORT_TC1)
		return HPD_PORT_C + port - PORT_TC1;
5397 5398 5399 5400 5401 5402 5403 5404 5405 5406 5407 5408 5409 5410 5411 5412 5413 5414 5415 5416 5417 5418 5419 5420 5421 5422 5423 5424 5425 5426 5427 5428 5429 5430
	else
		return HPD_PORT_A + port - PORT_A;
}

static enum hpd_pin icl_hpd_pin(struct drm_i915_private *dev_priv,
				enum port port)
{
	if (port >= PORT_C)
		return HPD_PORT_TC1 + port - PORT_C;
	else
		return HPD_PORT_A + port - PORT_A;
}

static enum hpd_pin ehl_hpd_pin(struct drm_i915_private *dev_priv,
				enum port port)
{
	if (port == PORT_D)
		return HPD_PORT_A;

	if (HAS_PCH_MCC(dev_priv))
		return icl_hpd_pin(dev_priv, port);

	return HPD_PORT_A + port - PORT_A;
}

static enum hpd_pin cnl_hpd_pin(struct drm_i915_private *dev_priv,
				enum port port)
{
	if (port == PORT_F)
		return HPD_PORT_E;

	return HPD_PORT_A + port - PORT_A;
}

#define port_tc_name(port) ((port) - PORT_TC1 + '1')
#define tc_port_name(tc_port) ((tc_port) - TC_PORT_1 + '1')
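
/*
 * Register the encoder and its DP and/or HDMI connectors for a single DDI
 * port, based on what the VBT claims the port supports.
 */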

void intel_ddi_init(struct drm_i915_private *dev_priv, enum port port)
{
	struct intel_digital_port *dig_port;
	struct intel_encoder *encoder;
	bool init_hdmi, init_dp;
	enum phy phy = intel_port_to_phy(dev_priv, port);

	/*
	 * On platforms with HTI (aka HDPORT), if it's enabled at boot it may
	 * have taken over some of the PHYs and made them unavailable to the
	 * driver.  In that case we should skip initializing the corresponding
	 * outputs.
	 */
	if (hti_uses_phy(dev_priv, phy)) {
		drm_dbg_kms(&dev_priv->drm, "PORT %c / PHY %c reserved by HTI\n",
			    port_name(port), phy_name(phy));
		return;
	}

	init_hdmi = intel_bios_port_supports_dvi(dev_priv, port) ||
		intel_bios_port_supports_hdmi(dev_priv, port);
	init_dp = intel_bios_port_supports_dp(dev_priv, port);

	if (intel_bios_is_lspcon_present(dev_priv, port)) {
		/*
		 * Lspcon device needs to be driven with DP connector
		 * with special detection sequence. So make sure DP
		 * is initialized before lspcon.
		 */
		init_dp = true;
		init_hdmi = false;
5465 5466
		drm_dbg_kms(&dev_priv->drm, "VBT says port %c has lspcon\n",
			    port_name(port));
5467 5468
	}

5469
	if (!init_dp && !init_hdmi) {
5470 5471 5472
		drm_dbg_kms(&dev_priv->drm,
			    "VBT says port %c is not DVI/HDMI/DP compatible, respect it\n",
			    port_name(port));
5473
		return;
5474
	}
P
Paulo Zanoni 已提交
5475

5476 5477
	dig_port = kzalloc(sizeof(*dig_port), GFP_KERNEL);
	if (!dig_port)
P
Paulo Zanoni 已提交
5478 5479
		return;

5480
	encoder = &dig_port->base;
P
Paulo Zanoni 已提交
5481

5482 5483 5484 5485 5486 5487 5488
	if (INTEL_GEN(dev_priv) >= 12) {
		enum tc_port tc_port = intel_port_to_tc(dev_priv, port);

		drm_encoder_init(&dev_priv->drm, &encoder->base, &intel_ddi_funcs,
				 DRM_MODE_ENCODER_TMDS,
				 "DDI %s%c/PHY %s%c",
				 port >= PORT_TC1 ? "TC" : "",
5489
				 port >= PORT_TC1 ? port_tc_name(port) : port_name(port),
5490
				 tc_port != TC_PORT_NONE ? "TC" : "",
5491
				 tc_port != TC_PORT_NONE ? tc_port_name(tc_port) : phy_name(phy));
5492 5493 5494 5495 5496 5497 5498 5499 5500
	} else if (INTEL_GEN(dev_priv) >= 11) {
		enum tc_port tc_port = intel_port_to_tc(dev_priv, port);

		drm_encoder_init(&dev_priv->drm, &encoder->base, &intel_ddi_funcs,
				 DRM_MODE_ENCODER_TMDS,
				 "DDI %c%s/PHY %s%c",
				 port_name(port),
				 port >= PORT_C ? " (TC)" : "",
				 tc_port != TC_PORT_NONE ? "TC" : "",
5501
				 tc_port != TC_PORT_NONE ? tc_port_name(tc_port) : phy_name(phy));
5502 5503 5504 5505 5506
	} else {
		drm_encoder_init(&dev_priv->drm, &encoder->base, &intel_ddi_funcs,
				 DRM_MODE_ENCODER_TMDS,
				 "DDI %c/PHY %c", port_name(port),  phy_name(phy));
	}
P
5508 5509 5510
	mutex_init(&dig_port->hdcp_mutex);
	dig_port->num_hdcp_streams = 0;

5511 5512 5513
	encoder->hotplug = intel_ddi_hotplug;
	encoder->compute_output_type = intel_ddi_compute_output_type;
	encoder->compute_config = intel_ddi_compute_config;
5514
	encoder->compute_config_late = intel_ddi_compute_config_late;
5515 5516 5517 5518 5519 5520 5521 5522
	encoder->enable = intel_enable_ddi;
	encoder->pre_pll_enable = intel_ddi_pre_pll_enable;
	encoder->pre_enable = intel_ddi_pre_enable;
	encoder->disable = intel_disable_ddi;
	encoder->post_disable = intel_ddi_post_disable;
	encoder->update_pipe = intel_ddi_update_pipe;
	encoder->get_hw_state = intel_ddi_get_hw_state;
	encoder->get_config = intel_ddi_get_config;
5523
	encoder->sync_state = intel_ddi_sync_state;
5524
	encoder->initial_fastset_check = intel_ddi_initial_fastset_check;
5525
	encoder->suspend = intel_dp_encoder_suspend;
5526
	encoder->shutdown = intel_dp_encoder_shutdown;
5527 5528 5529 5530 5531 5532 5533
	encoder->get_power_domains = intel_ddi_get_power_domains;

	encoder->type = INTEL_OUTPUT_DDI;
	encoder->power_domain = intel_port_to_power_domain(port);
	encoder->port = port;
	encoder->cloneable = 0;
	encoder->pipe_mask = ~0;
5534

5535 5536 5537
	if (IS_DG1(dev_priv))
		encoder->hpd_pin = dg1_hpd_pin(dev_priv, port);
	else if (IS_ROCKETLAKE(dev_priv))
5538 5539 5540
		encoder->hpd_pin = rkl_hpd_pin(dev_priv, port);
	else if (INTEL_GEN(dev_priv) >= 12)
		encoder->hpd_pin = tgl_hpd_pin(dev_priv, port);
5541
	else if (IS_JSL_EHL(dev_priv))
5542 5543 5544 5545 5546 5547 5548
		encoder->hpd_pin = ehl_hpd_pin(dev_priv, port);
	else if (IS_GEN(dev_priv, 11))
		encoder->hpd_pin = icl_hpd_pin(dev_priv, port);
	else if (IS_GEN(dev_priv, 10))
		encoder->hpd_pin = cnl_hpd_pin(dev_priv, port);
	else
		encoder->hpd_pin = intel_hpd_pin_default(dev_priv, port);
P
5550
	if (INTEL_GEN(dev_priv) >= 11)
5551 5552 5553
		dig_port->saved_port_bits =
			intel_de_read(dev_priv, DDI_BUF_CTL(port))
			& DDI_BUF_PORT_REVERSAL;
5554
	else
5555 5556 5557
		dig_port->saved_port_bits =
			intel_de_read(dev_priv, DDI_BUF_CTL(port))
			& (DDI_BUF_PORT_REVERSAL | DDI_A_4_LANES);
5558

5559 5560 5561
	dig_port->dp.output_reg = INVALID_MMIO_REG;
	dig_port->max_lanes = intel_ddi_max_lanes(dig_port);
	dig_port->aux_ch = intel_bios_port_aux_ch(dev_priv, port);
P
5563
	if (intel_phy_is_tc(dev_priv, phy)) {
5564 5565 5566
		bool is_legacy =
			!intel_bios_port_supports_typec_usb(dev_priv, port) &&
			!intel_bios_port_supports_tbt(dev_priv, port);
5567

5568
		intel_tc_port_init(dig_port, is_legacy);
5569

5570 5571
		encoder->update_prepare = intel_ddi_update_prepare;
		encoder->update_complete = intel_ddi_update_complete;
5572
	}
5573

5574
	drm_WARN_ON(&dev_priv->drm, port > PORT_I);
5575
	dig_port->ddi_io_power_domain = POWER_DOMAIN_PORT_DDI_A_IO +
5576
					      port - PORT_A;
5577

5578
	if (init_dp) {
5579
		if (!intel_ddi_init_dp_connector(dig_port))
5580
			goto err;
5581

5582
		dig_port->hpd_pulse = intel_dp_hpd_pulse;
5583
	}
5584

5585 5586
	/* In theory we don't need the encoder->type check, but leave it just in
	 * case we have some really bad VBTs... */
5587
	if (encoder->type != INTEL_OUTPUT_EDP && init_hdmi) {
5588
		if (!intel_ddi_init_hdmi_connector(dig_port))
5589
			goto err;
5590
	}
5591

5592 5593
	if (INTEL_GEN(dev_priv) >= 11) {
		if (intel_phy_is_tc(dev_priv, phy))
5594
			dig_port->connected = intel_tc_port_connected;
5595
		else
5596
			dig_port->connected = lpt_digital_port_connected;
5597 5598
	} else if (INTEL_GEN(dev_priv) >= 8) {
		if (port == PORT_A || IS_GEN9_LP(dev_priv))
5599
			dig_port->connected = bdw_digital_port_connected;
5600
		else
5601
			dig_port->connected = lpt_digital_port_connected;
5602
	} else {
5603
		if (port == PORT_A)
5604
			dig_port->connected = hsw_digital_port_connected;
5605
		else
5606
			dig_port->connected = lpt_digital_port_connected;
5607 5608
	}

5609
	intel_infoframe_init(dig_port);
5610

5611 5612 5613
	return;

err:
5614
	drm_encoder_cleanup(&encoder->base);
5615
	kfree(dig_port);
P
5616
}