/*
 * Copyright © 2012 Intel Corporation
 *
 * Permission is hereby granted, free of charge, to any person obtaining a
 * copy of this software and associated documentation files (the "Software"),
 * to deal in the Software without restriction, including without limitation
 * the rights to use, copy, modify, merge, publish, distribute, sublicense,
 * and/or sell copies of the Software, and to permit persons to whom the
 * Software is furnished to do so, subject to the following conditions:
 *
 * The above copyright notice and this permission notice (including the next
 * paragraph) shall be included in all copies or substantial portions of the
 * Software.
 *
 * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
 * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
 * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.  IN NO EVENT SHALL
 * THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
 * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
 * FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
 * IN THE SOFTWARE.
 *
 * Authors:
 *    Eugeni Dodonov <eugeni.dodonov@intel.com>
 *
 */

#include <drm/drm_scdc_helper.h>

#include "i915_drv.h"
#include "intel_audio.h"
#include "intel_combo_phy.h"
#include "intel_connector.h"
#include "intel_ddi.h"
#include "intel_display_types.h"
#include "intel_dp.h"
#include "intel_dp_mst.h"
#include "intel_dp_link_training.h"
#include "intel_dpio_phy.h"
#include "intel_dsi.h"
#include "intel_fifo_underrun.h"
#include "intel_gmbus.h"
#include "intel_hdcp.h"
#include "intel_hdmi.h"
#include "intel_hotplug.h"
#include "intel_lspcon.h"
#include "intel_panel.h"
#include "intel_psr.h"
#include "intel_sprite.h"
#include "intel_tc.h"
#include "intel_vdsc.h"

struct ddi_buf_trans {
	u32 trans1;	/* balance leg enable, de-emph level */
	u32 trans2;	/* vref sel, vswing */
	u8 i_boost;	/* SKL: I_boost; valid: 0x0, 0x1, 0x3, 0x7 */
};

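/* Buf trans table index -> DP training signal levels (vswing | pre-emphasis). */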
static const u8 index_to_dp_signal_levels[] = {
	[0] = DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_0,
	[1] = DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_1,
	[2] = DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_2,
	[3] = DP_TRAIN_VOLTAGE_SWING_LEVEL_0 | DP_TRAIN_PRE_EMPH_LEVEL_3,
	[4] = DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_0,
	[5] = DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_1,
	[6] = DP_TRAIN_VOLTAGE_SWING_LEVEL_1 | DP_TRAIN_PRE_EMPH_LEVEL_2,
	[7] = DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_0,
	[8] = DP_TRAIN_VOLTAGE_SWING_LEVEL_2 | DP_TRAIN_PRE_EMPH_LEVEL_1,
	[9] = DP_TRAIN_VOLTAGE_SWING_LEVEL_3 | DP_TRAIN_PRE_EMPH_LEVEL_0,
};

/* HDMI/DVI modes ignore everything but the last 2 items. So we share
 * them for both DP and FDI transports, allowing those ports to
 * automatically adapt to HDMI connections as well
 */
static const struct ddi_buf_trans hsw_ddi_translations_dp[] = {
	{ 0x00FFFFFF, 0x0006000E, 0x0 },
	{ 0x00D75FFF, 0x0005000A, 0x0 },
	{ 0x00C30FFF, 0x00040006, 0x0 },
	{ 0x80AAAFFF, 0x000B0000, 0x0 },
	{ 0x00FFFFFF, 0x0005000A, 0x0 },
	{ 0x00D75FFF, 0x000C0004, 0x0 },
	{ 0x80C30FFF, 0x000B0000, 0x0 },
	{ 0x00FFFFFF, 0x00040006, 0x0 },
	{ 0x80D75FFF, 0x000B0000, 0x0 },
};

static const struct ddi_buf_trans hsw_ddi_translations_fdi[] = {
	{ 0x00FFFFFF, 0x0007000E, 0x0 },
	{ 0x00D75FFF, 0x000F000A, 0x0 },
	{ 0x00C30FFF, 0x00060006, 0x0 },
	{ 0x00AAAFFF, 0x001E0000, 0x0 },
	{ 0x00FFFFFF, 0x000F000A, 0x0 },
	{ 0x00D75FFF, 0x00160004, 0x0 },
	{ 0x00C30FFF, 0x001E0000, 0x0 },
	{ 0x00FFFFFF, 0x00060006, 0x0 },
	{ 0x00D75FFF, 0x001E0000, 0x0 },
};

static const struct ddi_buf_trans hsw_ddi_translations_hdmi[] = {
					/* Idx	NT mV d	T mV d	db	*/
	{ 0x00FFFFFF, 0x0006000E, 0x0 },/* 0:	400	400	0	*/
	{ 0x00E79FFF, 0x000E000C, 0x0 },/* 1:	400	500	2	*/
	{ 0x00D75FFF, 0x0005000A, 0x0 },/* 2:	400	600	3.5	*/
	{ 0x00FFFFFF, 0x0005000A, 0x0 },/* 3:	600	600	0	*/
	{ 0x00E79FFF, 0x001D0007, 0x0 },/* 4:	600	750	2	*/
	{ 0x00D75FFF, 0x000C0004, 0x0 },/* 5:	600	900	3.5	*/
	{ 0x00FFFFFF, 0x00040006, 0x0 },/* 6:	800	800	0	*/
	{ 0x80E79FFF, 0x00030002, 0x0 },/* 7:	800	1000	2	*/
	{ 0x00FFFFFF, 0x00140005, 0x0 },/* 8:	850	850	0	*/
	{ 0x00FFFFFF, 0x000C0004, 0x0 },/* 9:	900	900	0	*/
	{ 0x00FFFFFF, 0x001C0003, 0x0 },/* 10:	950	950	0	*/
	{ 0x80FFFFFF, 0x00030002, 0x0 },/* 11:	1000	1000	0	*/
};

static const struct ddi_buf_trans bdw_ddi_translations_edp[] = {
	{ 0x00FFFFFF, 0x00000012, 0x0 },
	{ 0x00EBAFFF, 0x00020011, 0x0 },
	{ 0x00C71FFF, 0x0006000F, 0x0 },
	{ 0x00AAAFFF, 0x000E000A, 0x0 },
	{ 0x00FFFFFF, 0x00020011, 0x0 },
	{ 0x00DB6FFF, 0x0005000F, 0x0 },
	{ 0x00BEEFFF, 0x000A000C, 0x0 },
	{ 0x00FFFFFF, 0x0005000F, 0x0 },
	{ 0x00DB6FFF, 0x000A000C, 0x0 },
};

static const struct ddi_buf_trans bdw_ddi_translations_dp[] = {
	{ 0x00FFFFFF, 0x0007000E, 0x0 },
	{ 0x00D75FFF, 0x000E000A, 0x0 },
	{ 0x00BEFFFF, 0x00140006, 0x0 },
	{ 0x80B2CFFF, 0x001B0002, 0x0 },
	{ 0x00FFFFFF, 0x000E000A, 0x0 },
	{ 0x00DB6FFF, 0x00160005, 0x0 },
	{ 0x80C71FFF, 0x001A0002, 0x0 },
	{ 0x00F7DFFF, 0x00180004, 0x0 },
	{ 0x80D75FFF, 0x001B0002, 0x0 },
};

static const struct ddi_buf_trans bdw_ddi_translations_fdi[] = {
	{ 0x00FFFFFF, 0x0001000E, 0x0 },
	{ 0x00D75FFF, 0x0004000A, 0x0 },
	{ 0x00C30FFF, 0x00070006, 0x0 },
	{ 0x00AAAFFF, 0x000C0000, 0x0 },
	{ 0x00FFFFFF, 0x0004000A, 0x0 },
	{ 0x00D75FFF, 0x00090004, 0x0 },
	{ 0x00C30FFF, 0x000C0000, 0x0 },
	{ 0x00FFFFFF, 0x00070006, 0x0 },
	{ 0x00D75FFF, 0x000C0000, 0x0 },
};

static const struct ddi_buf_trans bdw_ddi_translations_hdmi[] = {
					/* Idx	NT mV d	T mV df	db	*/
	{ 0x00FFFFFF, 0x0007000E, 0x0 },/* 0:	400	400	0	*/
	{ 0x00D75FFF, 0x000E000A, 0x0 },/* 1:	400	600	3.5	*/
	{ 0x00BEFFFF, 0x00140006, 0x0 },/* 2:	400	800	6	*/
	{ 0x00FFFFFF, 0x0009000D, 0x0 },/* 3:	450	450	0	*/
	{ 0x00FFFFFF, 0x000E000A, 0x0 },/* 4:	600	600	0	*/
	{ 0x00D7FFFF, 0x00140006, 0x0 },/* 5:	600	800	2.5	*/
	{ 0x80CB2FFF, 0x001B0002, 0x0 },/* 6:	600	1000	4.5	*/
	{ 0x00FFFFFF, 0x00140006, 0x0 },/* 7:	800	800	0	*/
	{ 0x80E79FFF, 0x001B0002, 0x0 },/* 8:	800	1000	2	*/
	{ 0x80FFFFFF, 0x001B0002, 0x0 },/* 9:	1000	1000	0	*/
};

/* Skylake H and S */
static const struct ddi_buf_trans skl_ddi_translations_dp[] = {
	{ 0x00002016, 0x000000A0, 0x0 },
	{ 0x00005012, 0x0000009B, 0x0 },
	{ 0x00007011, 0x00000088, 0x0 },
	{ 0x80009010, 0x000000C0, 0x1 },
	{ 0x00002016, 0x0000009B, 0x0 },
	{ 0x00005012, 0x00000088, 0x0 },
	{ 0x80007011, 0x000000C0, 0x1 },
	{ 0x00002016, 0x000000DF, 0x0 },
	{ 0x80005012, 0x000000C0, 0x1 },
};

/* Skylake U */
static const struct ddi_buf_trans skl_u_ddi_translations_dp[] = {
	{ 0x0000201B, 0x000000A2, 0x0 },
	{ 0x00005012, 0x00000088, 0x0 },
	{ 0x80007011, 0x000000CD, 0x1 },
	{ 0x80009010, 0x000000C0, 0x1 },
	{ 0x0000201B, 0x0000009D, 0x0 },
	{ 0x80005012, 0x000000C0, 0x1 },
	{ 0x80007011, 0x000000C0, 0x1 },
	{ 0x00002016, 0x00000088, 0x0 },
	{ 0x80005012, 0x000000C0, 0x1 },
};

/* Skylake Y */
static const struct ddi_buf_trans skl_y_ddi_translations_dp[] = {
	{ 0x00000018, 0x000000A2, 0x0 },
	{ 0x00005012, 0x00000088, 0x0 },
	{ 0x80007011, 0x000000CD, 0x3 },
	{ 0x80009010, 0x000000C0, 0x3 },
	{ 0x00000018, 0x0000009D, 0x0 },
	{ 0x80005012, 0x000000C0, 0x3 },
	{ 0x80007011, 0x000000C0, 0x3 },
	{ 0x00000018, 0x00000088, 0x0 },
	{ 0x80005012, 0x000000C0, 0x3 },
};

/* Kabylake H and S */
static const struct ddi_buf_trans kbl_ddi_translations_dp[] = {
	{ 0x00002016, 0x000000A0, 0x0 },
	{ 0x00005012, 0x0000009B, 0x0 },
	{ 0x00007011, 0x00000088, 0x0 },
	{ 0x80009010, 0x000000C0, 0x1 },
	{ 0x00002016, 0x0000009B, 0x0 },
	{ 0x00005012, 0x00000088, 0x0 },
	{ 0x80007011, 0x000000C0, 0x1 },
	{ 0x00002016, 0x00000097, 0x0 },
	{ 0x80005012, 0x000000C0, 0x1 },
};

/* Kabylake U */
static const struct ddi_buf_trans kbl_u_ddi_translations_dp[] = {
	{ 0x0000201B, 0x000000A1, 0x0 },
	{ 0x00005012, 0x00000088, 0x0 },
	{ 0x80007011, 0x000000CD, 0x3 },
	{ 0x80009010, 0x000000C0, 0x3 },
	{ 0x0000201B, 0x0000009D, 0x0 },
	{ 0x80005012, 0x000000C0, 0x3 },
	{ 0x80007011, 0x000000C0, 0x3 },
	{ 0x00002016, 0x0000004F, 0x0 },
	{ 0x80005012, 0x000000C0, 0x3 },
};

/* Kabylake Y */
static const struct ddi_buf_trans kbl_y_ddi_translations_dp[] = {
	{ 0x00001017, 0x000000A1, 0x0 },
	{ 0x00005012, 0x00000088, 0x0 },
	{ 0x80007011, 0x000000CD, 0x3 },
	{ 0x8000800F, 0x000000C0, 0x3 },
	{ 0x00001017, 0x0000009D, 0x0 },
	{ 0x80005012, 0x000000C0, 0x3 },
	{ 0x80007011, 0x000000C0, 0x3 },
	{ 0x00001017, 0x0000004C, 0x0 },
	{ 0x80005012, 0x000000C0, 0x3 },
};

/*
 * Skylake/Kabylake H and S
 * eDP 1.4 low vswing translation parameters
 */
static const struct ddi_buf_trans skl_ddi_translations_edp[] = {
	{ 0x00000018, 0x000000A8, 0x0 },
	{ 0x00004013, 0x000000A9, 0x0 },
	{ 0x00007011, 0x000000A2, 0x0 },
	{ 0x00009010, 0x0000009C, 0x0 },
	{ 0x00000018, 0x000000A9, 0x0 },
	{ 0x00006013, 0x000000A2, 0x0 },
	{ 0x00007011, 0x000000A6, 0x0 },
	{ 0x00000018, 0x000000AB, 0x0 },
	{ 0x00007013, 0x0000009F, 0x0 },
	{ 0x00000018, 0x000000DF, 0x0 },
};

/*
 * Skylake/Kabylake U
 * eDP 1.4 low vswing translation parameters
 */
static const struct ddi_buf_trans skl_u_ddi_translations_edp[] = {
	{ 0x00000018, 0x000000A8, 0x0 },
	{ 0x00004013, 0x000000A9, 0x0 },
	{ 0x00007011, 0x000000A2, 0x0 },
	{ 0x00009010, 0x0000009C, 0x0 },
	{ 0x00000018, 0x000000A9, 0x0 },
	{ 0x00006013, 0x000000A2, 0x0 },
	{ 0x00007011, 0x000000A6, 0x0 },
	{ 0x00002016, 0x000000AB, 0x0 },
	{ 0x00005013, 0x0000009F, 0x0 },
	{ 0x00000018, 0x000000DF, 0x0 },
};

/*
 * Skylake/Kabylake Y
 * eDP 1.4 low vswing translation parameters
 */
static const struct ddi_buf_trans skl_y_ddi_translations_edp[] = {
	{ 0x00000018, 0x000000A8, 0x0 },
	{ 0x00004013, 0x000000AB, 0x0 },
	{ 0x00007011, 0x000000A4, 0x0 },
	{ 0x00009010, 0x000000DF, 0x0 },
	{ 0x00000018, 0x000000AA, 0x0 },
	{ 0x00006013, 0x000000A4, 0x0 },
	{ 0x00007011, 0x0000009D, 0x0 },
	{ 0x00000018, 0x000000A0, 0x0 },
	{ 0x00006012, 0x000000DF, 0x0 },
	{ 0x00000018, 0x0000008A, 0x0 },
};

/* Skylake/Kabylake U, H and S */
static const struct ddi_buf_trans skl_ddi_translations_hdmi[] = {
	{ 0x00000018, 0x000000AC, 0x0 },
	{ 0x00005012, 0x0000009D, 0x0 },
	{ 0x00007011, 0x00000088, 0x0 },
	{ 0x00000018, 0x000000A1, 0x0 },
	{ 0x00000018, 0x00000098, 0x0 },
	{ 0x00004013, 0x00000088, 0x0 },
	{ 0x80006012, 0x000000CD, 0x1 },
	{ 0x00000018, 0x000000DF, 0x0 },
	{ 0x80003015, 0x000000CD, 0x1 },	/* Default */
	{ 0x80003015, 0x000000C0, 0x1 },
	{ 0x80000018, 0x000000C0, 0x1 },
};

/* Skylake/Kabylake Y */
static const struct ddi_buf_trans skl_y_ddi_translations_hdmi[] = {
	{ 0x00000018, 0x000000A1, 0x0 },
	{ 0x00005012, 0x000000DF, 0x0 },
	{ 0x80007011, 0x000000CB, 0x3 },
	{ 0x00000018, 0x000000A4, 0x0 },
	{ 0x00000018, 0x0000009D, 0x0 },
	{ 0x00004013, 0x00000080, 0x0 },
	{ 0x80006013, 0x000000C0, 0x3 },
	{ 0x00000018, 0x0000008A, 0x0 },
	{ 0x80003015, 0x000000C0, 0x3 },	/* Default */
	{ 0x80003015, 0x000000C0, 0x3 },
	{ 0x80000018, 0x000000C0, 0x3 },
};

struct bxt_ddi_buf_trans {
	u8 margin;	/* swing value */
	u8 scale;	/* scale value */
	u8 enable;	/* scale enable */
	u8 deemphasis;
};

static const struct bxt_ddi_buf_trans bxt_ddi_translations_dp[] = {
					/* Idx	NT mV diff	db  */
	{ 52,  0x9A, 0, 128, },	/* 0:	400		0   */
	{ 78,  0x9A, 0, 85,  },	/* 1:	400		3.5 */
	{ 104, 0x9A, 0, 64,  },	/* 2:	400		6   */
	{ 154, 0x9A, 0, 43,  },	/* 3:	400		9.5 */
	{ 77,  0x9A, 0, 128, },	/* 4:	600		0   */
	{ 116, 0x9A, 0, 85,  },	/* 5:	600		3.5 */
	{ 154, 0x9A, 0, 64,  },	/* 6:	600		6   */
	{ 102, 0x9A, 0, 128, },	/* 7:	800		0   */
	{ 154, 0x9A, 0, 85,  },	/* 8:	800		3.5 */
	{ 154, 0x9A, 1, 128, },	/* 9:	1200		0   */
};

static const struct bxt_ddi_buf_trans bxt_ddi_translations_edp[] = {
					/* Idx	NT mV diff	db  */
	{ 26, 0, 0, 128, },	/* 0:	200		0   */
	{ 38, 0, 0, 112, },	/* 1:	200		1.5 */
	{ 48, 0, 0, 96,  },	/* 2:	200		4   */
	{ 54, 0, 0, 69,  },	/* 3:	200		6   */
	{ 32, 0, 0, 128, },	/* 4:	250		0   */
	{ 48, 0, 0, 104, },	/* 5:	250		1.5 */
	{ 54, 0, 0, 85,  },	/* 6:	250		4   */
	{ 43, 0, 0, 128, },	/* 7:	300		0   */
	{ 54, 0, 0, 101, },	/* 8:	300		1.5 */
	{ 48, 0, 0, 128, },	/* 9:	300		0   */
};

/* BSpec has 2 recommended values - entries 0 and 8.
 * Using the entry with higher vswing.
 */
static const struct bxt_ddi_buf_trans bxt_ddi_translations_hdmi[] = {
					/* Idx	NT mV diff	db  */
	{ 52,  0x9A, 0, 128, },	/* 0:	400		0   */
	{ 52,  0x9A, 0, 85,  },	/* 1:	400		3.5 */
	{ 52,  0x9A, 0, 64,  },	/* 2:	400		6   */
	{ 42,  0x9A, 0, 43,  },	/* 3:	400		9.5 */
	{ 77,  0x9A, 0, 128, },	/* 4:	600		0   */
	{ 77,  0x9A, 0, 85,  },	/* 5:	600		3.5 */
	{ 77,  0x9A, 0, 64,  },	/* 6:	600		6   */
	{ 102, 0x9A, 0, 128, },	/* 7:	800		0   */
	{ 102, 0x9A, 0, 85,  },	/* 8:	800		3.5 */
	{ 154, 0x9A, 1, 128, },	/* 9:	1200		0   */
};

struct cnl_ddi_buf_trans {
	u8 dw2_swing_sel;
	u8 dw7_n_scalar;
	u8 dw4_cursor_coeff;
	u8 dw4_post_cursor_2;
	u8 dw4_post_cursor_1;
};

/* Voltage Swing Programming for VccIO 0.85V for DP */
static const struct cnl_ddi_buf_trans cnl_ddi_translations_dp_0_85V[] = {
						/* NT mV Trans mV db    */
	{ 0xA, 0x5D, 0x3F, 0x00, 0x00 },	/* 350   350      0.0   */
	{ 0xA, 0x6A, 0x38, 0x00, 0x07 },	/* 350   500      3.1   */
	{ 0xB, 0x7A, 0x32, 0x00, 0x0D },	/* 350   700      6.0   */
	{ 0x6, 0x7C, 0x2D, 0x00, 0x12 },	/* 350   900      8.2   */
	{ 0xA, 0x69, 0x3F, 0x00, 0x00 },	/* 500   500      0.0   */
	{ 0xB, 0x7A, 0x36, 0x00, 0x09 },	/* 500   700      2.9   */
	{ 0x6, 0x7C, 0x30, 0x00, 0x0F },	/* 500   900      5.1   */
	{ 0xB, 0x7D, 0x3C, 0x00, 0x03 },	/* 650   725      0.9   */
	{ 0x6, 0x7C, 0x34, 0x00, 0x0B },	/* 600   900      3.5   */
	{ 0x6, 0x7B, 0x3F, 0x00, 0x00 },	/* 900   900      0.0   */
};

/* Voltage Swing Programming for VccIO 0.85V for HDMI */
static const struct cnl_ddi_buf_trans cnl_ddi_translations_hdmi_0_85V[] = {
						/* NT mV Trans mV db    */
	{ 0xA, 0x60, 0x3F, 0x00, 0x00 },	/* 450   450      0.0   */
	{ 0xB, 0x73, 0x36, 0x00, 0x09 },	/* 450   650      3.2   */
	{ 0x6, 0x7F, 0x31, 0x00, 0x0E },	/* 450   850      5.5   */
	{ 0xB, 0x73, 0x3F, 0x00, 0x00 },	/* 650   650      0.0   */
	{ 0x6, 0x7F, 0x37, 0x00, 0x08 },	/* 650   850      2.3   */
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 850   850      0.0   */
	{ 0x6, 0x7F, 0x35, 0x00, 0x0A },	/* 600   850      3.0   */
};

/* Voltage Swing Programming for VccIO 0.85V for eDP */
static const struct cnl_ddi_buf_trans cnl_ddi_translations_edp_0_85V[] = {
						/* NT mV Trans mV db    */
	{ 0xA, 0x66, 0x3A, 0x00, 0x05 },	/* 384   500      2.3   */
	{ 0x0, 0x7F, 0x38, 0x00, 0x07 },	/* 153   200      2.3   */
	{ 0x8, 0x7F, 0x38, 0x00, 0x07 },	/* 192   250      2.3   */
	{ 0x1, 0x7F, 0x38, 0x00, 0x07 },	/* 230   300      2.3   */
	{ 0x9, 0x7F, 0x38, 0x00, 0x07 },	/* 269   350      2.3   */
	{ 0xA, 0x66, 0x3C, 0x00, 0x03 },	/* 446   500      1.0   */
	{ 0xB, 0x70, 0x3C, 0x00, 0x03 },	/* 460   600      2.3   */
	{ 0xC, 0x75, 0x3C, 0x00, 0x03 },	/* 537   700      2.3   */
	{ 0x2, 0x7F, 0x3F, 0x00, 0x00 },	/* 400   400      0.0   */
};

/* Voltage Swing Programming for VccIO 0.95V for DP */
static const struct cnl_ddi_buf_trans cnl_ddi_translations_dp_0_95V[] = {
						/* NT mV Trans mV db    */
	{ 0xA, 0x5D, 0x3F, 0x00, 0x00 },	/* 350   350      0.0   */
	{ 0xA, 0x6A, 0x38, 0x00, 0x07 },	/* 350   500      3.1   */
	{ 0xB, 0x7A, 0x32, 0x00, 0x0D },	/* 350   700      6.0   */
	{ 0x6, 0x7C, 0x2D, 0x00, 0x12 },	/* 350   900      8.2   */
	{ 0xA, 0x69, 0x3F, 0x00, 0x00 },	/* 500   500      0.0   */
	{ 0xB, 0x7A, 0x36, 0x00, 0x09 },	/* 500   700      2.9   */
	{ 0x6, 0x7C, 0x30, 0x00, 0x0F },	/* 500   900      5.1   */
	{ 0xB, 0x7D, 0x3C, 0x00, 0x03 },	/* 650   725      0.9   */
	{ 0x6, 0x7C, 0x34, 0x00, 0x0B },	/* 600   900      3.5   */
	{ 0x6, 0x7B, 0x3F, 0x00, 0x00 },	/* 900   900      0.0   */
};

/* Voltage Swing Programming for VccIO 0.95V for HDMI */
static const struct cnl_ddi_buf_trans cnl_ddi_translations_hdmi_0_95V[] = {
						/* NT mV Trans mV db    */
	{ 0xA, 0x5C, 0x3F, 0x00, 0x00 },	/* 400   400      0.0   */
	{ 0xB, 0x69, 0x37, 0x00, 0x08 },	/* 400   600      3.5   */
	{ 0x5, 0x76, 0x31, 0x00, 0x0E },	/* 400   800      6.0   */
	{ 0xA, 0x5E, 0x3F, 0x00, 0x00 },	/* 450   450      0.0   */
	{ 0xB, 0x69, 0x3F, 0x00, 0x00 },	/* 600   600      0.0   */
	{ 0xB, 0x79, 0x35, 0x00, 0x0A },	/* 600   850      3.0   */
	{ 0x6, 0x7D, 0x32, 0x00, 0x0D },	/* 600   1000     4.4   */
	{ 0x5, 0x76, 0x3F, 0x00, 0x00 },	/* 800   800      0.0   */
	{ 0x6, 0x7D, 0x39, 0x00, 0x06 },	/* 800   1000     1.9   */
	{ 0x6, 0x7F, 0x39, 0x00, 0x06 },	/* 850   1050     1.8   */
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 1050  1050     0.0   */
};

/* Voltage Swing Programming for VccIO 0.95V for eDP */
static const struct cnl_ddi_buf_trans cnl_ddi_translations_edp_0_95V[] = {
						/* NT mV Trans mV db    */
	{ 0xA, 0x61, 0x3A, 0x00, 0x05 },	/* 384   500      2.3   */
	{ 0x0, 0x7F, 0x38, 0x00, 0x07 },	/* 153   200      2.3   */
	{ 0x8, 0x7F, 0x38, 0x00, 0x07 },	/* 192   250      2.3   */
	{ 0x1, 0x7F, 0x38, 0x00, 0x07 },	/* 230   300      2.3   */
	{ 0x9, 0x7F, 0x38, 0x00, 0x07 },	/* 269   350      2.3   */
	{ 0xA, 0x61, 0x3C, 0x00, 0x03 },	/* 446   500      1.0   */
	{ 0xB, 0x68, 0x39, 0x00, 0x06 },	/* 460   600      2.3   */
	{ 0xC, 0x6E, 0x39, 0x00, 0x06 },	/* 537   700      2.3   */
	{ 0x4, 0x7F, 0x3A, 0x00, 0x05 },	/* 460   600      2.3   */
	{ 0x2, 0x7F, 0x3F, 0x00, 0x00 },	/* 400   400      0.0   */
};

/* Voltage Swing Programming for VccIO 1.05V for DP */
static const struct cnl_ddi_buf_trans cnl_ddi_translations_dp_1_05V[] = {
						/* NT mV Trans mV db    */
	{ 0xA, 0x58, 0x3F, 0x00, 0x00 },	/* 400   400      0.0   */
	{ 0xB, 0x64, 0x37, 0x00, 0x08 },	/* 400   600      3.5   */
	{ 0x5, 0x70, 0x31, 0x00, 0x0E },	/* 400   800      6.0   */
	{ 0x6, 0x7F, 0x2C, 0x00, 0x13 },	/* 400   1050     8.4   */
	{ 0xB, 0x64, 0x3F, 0x00, 0x00 },	/* 600   600      0.0   */
	{ 0x5, 0x73, 0x35, 0x00, 0x0A },	/* 600   850      3.0   */
	{ 0x6, 0x7F, 0x30, 0x00, 0x0F },	/* 550   1050     5.6   */
	{ 0x5, 0x76, 0x3E, 0x00, 0x01 },	/* 850   900      0.5   */
	{ 0x6, 0x7F, 0x36, 0x00, 0x09 },	/* 750   1050     2.9   */
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 1050  1050     0.0   */
};

/* Voltage Swing Programming for VccIO 1.05V for HDMI */
static const struct cnl_ddi_buf_trans cnl_ddi_translations_hdmi_1_05V[] = {
						/* NT mV Trans mV db    */
	{ 0xA, 0x58, 0x3F, 0x00, 0x00 },	/* 400   400      0.0   */
	{ 0xB, 0x64, 0x37, 0x00, 0x08 },	/* 400   600      3.5   */
	{ 0x5, 0x70, 0x31, 0x00, 0x0E },	/* 400   800      6.0   */
	{ 0xA, 0x5B, 0x3F, 0x00, 0x00 },	/* 450   450      0.0   */
	{ 0xB, 0x64, 0x3F, 0x00, 0x00 },	/* 600   600      0.0   */
	{ 0x5, 0x73, 0x35, 0x00, 0x0A },	/* 600   850      3.0   */
	{ 0x6, 0x7C, 0x32, 0x00, 0x0D },	/* 600   1000     4.4   */
	{ 0x5, 0x70, 0x3F, 0x00, 0x00 },	/* 800   800      0.0   */
	{ 0x6, 0x7C, 0x39, 0x00, 0x06 },	/* 800   1000     1.9   */
	{ 0x6, 0x7F, 0x39, 0x00, 0x06 },	/* 850   1050     1.8   */
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 1050  1050     0.0   */
};

/* Voltage Swing Programming for VccIO 1.05V for eDP */
static const struct cnl_ddi_buf_trans cnl_ddi_translations_edp_1_05V[] = {
						/* NT mV Trans mV db    */
	{ 0xA, 0x5E, 0x3A, 0x00, 0x05 },	/* 384   500      2.3   */
	{ 0x0, 0x7F, 0x38, 0x00, 0x07 },	/* 153   200      2.3   */
	{ 0x8, 0x7F, 0x38, 0x00, 0x07 },	/* 192   250      2.3   */
	{ 0x1, 0x7F, 0x38, 0x00, 0x07 },	/* 230   300      2.3   */
	{ 0x9, 0x7F, 0x38, 0x00, 0x07 },	/* 269   350      2.3   */
	{ 0xA, 0x5E, 0x3C, 0x00, 0x03 },	/* 446   500      1.0   */
	{ 0xB, 0x64, 0x39, 0x00, 0x06 },	/* 460   600      2.3   */
	{ 0xE, 0x6A, 0x39, 0x00, 0x06 },	/* 537   700      2.3   */
	{ 0x2, 0x7F, 0x3F, 0x00, 0x00 },	/* 400   400      0.0   */
};

/* icl_combo_phy_ddi_translations */
static const struct cnl_ddi_buf_trans icl_combo_phy_ddi_translations_dp_hbr2[] = {
						/* NT mV Trans mV db    */
	{ 0xA, 0x35, 0x3F, 0x00, 0x00 },	/* 350   350      0.0   */
	{ 0xA, 0x4F, 0x37, 0x00, 0x08 },	/* 350   500      3.1   */
	{ 0xC, 0x71, 0x2F, 0x00, 0x10 },	/* 350   700      6.0   */
	{ 0x6, 0x7F, 0x2B, 0x00, 0x14 },	/* 350   900      8.2   */
	{ 0xA, 0x4C, 0x3F, 0x00, 0x00 },	/* 500   500      0.0   */
	{ 0xC, 0x73, 0x34, 0x00, 0x0B },	/* 500   700      2.9   */
	{ 0x6, 0x7F, 0x2F, 0x00, 0x10 },	/* 500   900      5.1   */
	{ 0xC, 0x6C, 0x3C, 0x00, 0x03 },	/* 650   700      0.6   */
	{ 0x6, 0x7F, 0x35, 0x00, 0x0A },	/* 600   900      3.5   */
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 900   900      0.0   */
};

static const struct cnl_ddi_buf_trans icl_combo_phy_ddi_translations_edp_hbr2[] = {
						/* NT mV Trans mV db    */
	{ 0x0, 0x7F, 0x3F, 0x00, 0x00 },	/* 200   200      0.0   */
	{ 0x8, 0x7F, 0x38, 0x00, 0x07 },	/* 200   250      1.9   */
	{ 0x1, 0x7F, 0x33, 0x00, 0x0C },	/* 200   300      3.5   */
	{ 0x9, 0x7F, 0x31, 0x00, 0x0E },	/* 200   350      4.9   */
	{ 0x8, 0x7F, 0x3F, 0x00, 0x00 },	/* 250   250      0.0   */
	{ 0x1, 0x7F, 0x38, 0x00, 0x07 },	/* 250   300      1.6   */
	{ 0x9, 0x7F, 0x35, 0x00, 0x0A },	/* 250   350      2.9   */
	{ 0x1, 0x7F, 0x3F, 0x00, 0x00 },	/* 300   300      0.0   */
	{ 0x9, 0x7F, 0x38, 0x00, 0x07 },	/* 300   350      1.3   */
	{ 0x9, 0x7F, 0x3F, 0x00, 0x00 },	/* 350   350      0.0   */
};

static const struct cnl_ddi_buf_trans icl_combo_phy_ddi_translations_edp_hbr3[] = {
						/* NT mV Trans mV db    */
	{ 0xA, 0x35, 0x3F, 0x00, 0x00 },	/* 350   350      0.0   */
	{ 0xA, 0x4F, 0x37, 0x00, 0x08 },	/* 350   500      3.1   */
	{ 0xC, 0x71, 0x2F, 0x00, 0x10 },	/* 350   700      6.0   */
	{ 0x6, 0x7F, 0x2B, 0x00, 0x14 },	/* 350   900      8.2   */
	{ 0xA, 0x4C, 0x3F, 0x00, 0x00 },	/* 500   500      0.0   */
	{ 0xC, 0x73, 0x34, 0x00, 0x0B },	/* 500   700      2.9   */
	{ 0x6, 0x7F, 0x2F, 0x00, 0x10 },	/* 500   900      5.1   */
	{ 0xC, 0x6C, 0x3C, 0x00, 0x03 },	/* 650   700      0.6   */
	{ 0x6, 0x7F, 0x35, 0x00, 0x0A },	/* 600   900      3.5   */
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 900   900      0.0   */
};

static const struct cnl_ddi_buf_trans icl_combo_phy_ddi_translations_hdmi[] = {
						/* NT mV Trans mV db    */
	{ 0xA, 0x60, 0x3F, 0x00, 0x00 },	/* 450   450      0.0   */
	{ 0xB, 0x73, 0x36, 0x00, 0x09 },	/* 450   650      3.2   */
	{ 0x6, 0x7F, 0x31, 0x00, 0x0E },	/* 450   850      5.5   */
	{ 0xB, 0x73, 0x3F, 0x00, 0x00 },	/* 650   650      0.0   ALS */
	{ 0x6, 0x7F, 0x37, 0x00, 0x08 },	/* 650   850      2.3   */
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 850   850      0.0   */
	{ 0x6, 0x7F, 0x35, 0x00, 0x0A },	/* 600   850      3.0   */
};

static const struct cnl_ddi_buf_trans ehl_combo_phy_ddi_translations_dp[] = {
						/* NT mV Trans mV db    */
	{ 0xA, 0x33, 0x3F, 0x00, 0x00 },	/* 350   350      0.0   */
	{ 0xA, 0x47, 0x36, 0x00, 0x09 },	/* 350   500      3.1   */
	{ 0xC, 0x64, 0x30, 0x00, 0x0F },	/* 350   700      6.0   */
	{ 0x6, 0x7F, 0x2C, 0x00, 0x13 },	/* 350   900      8.2   */
	{ 0xA, 0x46, 0x3F, 0x00, 0x00 },	/* 500   500      0.0   */
	{ 0xC, 0x64, 0x36, 0x00, 0x09 },	/* 500   700      2.9   */
	{ 0x6, 0x7F, 0x30, 0x00, 0x0F },	/* 500   900      5.1   */
	{ 0xC, 0x61, 0x3F, 0x00, 0x00 },	/* 650   700      0.6   */
	{ 0x6, 0x7F, 0x37, 0x00, 0x08 },	/* 600   900      3.5   */
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 900   900      0.0   */
};

struct icl_mg_phy_ddi_buf_trans {
	u32 cri_txdeemph_override_11_6;
	u32 cri_txdeemph_override_5_0;
	u32 cri_txdeemph_override_17_12;
};

static const struct icl_mg_phy_ddi_buf_trans icl_mg_phy_ddi_translations_rbr_hbr[] = {
				/* Voltage swing  pre-emphasis */
	{ 0x18, 0x00, 0x00 },	/* 0              0   */
	{ 0x1D, 0x00, 0x05 },	/* 0              1   */
	{ 0x24, 0x00, 0x0C },	/* 0              2   */
	{ 0x2B, 0x00, 0x14 },	/* 0              3   */
	{ 0x21, 0x00, 0x00 },	/* 1              0   */
	{ 0x2B, 0x00, 0x08 },	/* 1              1   */
	{ 0x30, 0x00, 0x0F },	/* 1              2   */
	{ 0x31, 0x00, 0x03 },	/* 2              0   */
	{ 0x34, 0x00, 0x0B },	/* 2              1   */
	{ 0x3F, 0x00, 0x00 },	/* 3              0   */
};

static const struct icl_mg_phy_ddi_buf_trans icl_mg_phy_ddi_translations_hbr2_hbr3[] = {
				/* Voltage swing  pre-emphasis */
	{ 0x18, 0x00, 0x00 },	/* 0              0   */
	{ 0x1D, 0x00, 0x05 },	/* 0              1   */
	{ 0x24, 0x00, 0x0C },	/* 0              2   */
	{ 0x2B, 0x00, 0x14 },	/* 0              3   */
	{ 0x26, 0x00, 0x00 },	/* 1              0   */
	{ 0x2C, 0x00, 0x07 },	/* 1              1   */
	{ 0x33, 0x00, 0x0C },	/* 1              2   */
	{ 0x2E, 0x00, 0x00 },	/* 2              0   */
	{ 0x36, 0x00, 0x09 },	/* 2              1   */
	{ 0x3F, 0x00, 0x00 },	/* 3              0   */
};

static const struct icl_mg_phy_ddi_buf_trans icl_mg_phy_ddi_translations_hdmi[] = {
				/* HDMI Preset	VS	Pre-emph */
	{ 0x1A, 0x0, 0x0 },	/* 1		400mV	0dB */
	{ 0x20, 0x0, 0x0 },	/* 2		500mV	0dB */
	{ 0x29, 0x0, 0x0 },	/* 3		650mV	0dB */
	{ 0x32, 0x0, 0x0 },	/* 4		800mV	0dB */
	{ 0x3F, 0x0, 0x0 },	/* 5		1000mV	0dB */
	{ 0x3A, 0x0, 0x5 },	/* 6		Full	-1.5 dB */
	{ 0x39, 0x0, 0x6 },	/* 7		Full	-1.8 dB */
	{ 0x38, 0x0, 0x7 },	/* 8		Full	-2 dB */
	{ 0x37, 0x0, 0x8 },	/* 9		Full	-2.5 dB */
	{ 0x36, 0x0, 0x9 },	/* 10		Full	-3 dB */
};

struct tgl_dkl_phy_ddi_buf_trans {
	u32 dkl_vswing_control;
	u32 dkl_preshoot_control;
	u32 dkl_de_emphasis_control;
};

static const struct tgl_dkl_phy_ddi_buf_trans tgl_dkl_phy_dp_ddi_trans[] = {
				/* VS	pre-emp	Non-trans mV	Pre-emph dB */
	{ 0x7, 0x0, 0x00 },	/* 0	0	400mV		0 dB */
	{ 0x5, 0x0, 0x05 },	/* 0	1	400mV		3.5 dB */
	{ 0x2, 0x0, 0x0B },	/* 0	2	400mV		6 dB */
	{ 0x0, 0x0, 0x18 },	/* 0	3	400mV		9.5 dB */
	{ 0x5, 0x0, 0x00 },	/* 1	0	600mV		0 dB */
	{ 0x2, 0x0, 0x08 },	/* 1	1	600mV		3.5 dB */
	{ 0x0, 0x0, 0x14 },	/* 1	2	600mV		6 dB */
	{ 0x2, 0x0, 0x00 },	/* 2	0	800mV		0 dB */
	{ 0x0, 0x0, 0x0B },	/* 2	1	800mV		3.5 dB */
	{ 0x0, 0x0, 0x00 },	/* 3	0	1200mV		0 dB HDMI default */
};

static const struct tgl_dkl_phy_ddi_buf_trans tgl_dkl_phy_dp_ddi_trans_hbr2[] = {
				/* VS	pre-emp	Non-trans mV	Pre-emph dB */
	{ 0x7, 0x0, 0x00 },	/* 0	0	400mV		0 dB */
	{ 0x5, 0x0, 0x05 },	/* 0	1	400mV		3.5 dB */
	{ 0x2, 0x0, 0x0B },	/* 0	2	400mV		6 dB */
	{ 0x0, 0x0, 0x19 },	/* 0	3	400mV		9.5 dB */
	{ 0x5, 0x0, 0x00 },	/* 1	0	600mV		0 dB */
	{ 0x2, 0x0, 0x08 },	/* 1	1	600mV		3.5 dB */
	{ 0x0, 0x0, 0x14 },	/* 1	2	600mV		6 dB */
	{ 0x2, 0x0, 0x00 },	/* 2	0	800mV		0 dB */
	{ 0x0, 0x0, 0x0B },	/* 2	1	800mV		3.5 dB */
	{ 0x0, 0x0, 0x00 },	/* 3	0	1200mV		0 dB HDMI default */
};

static const struct tgl_dkl_phy_ddi_buf_trans tgl_dkl_phy_hdmi_ddi_trans[] = {
				/* HDMI Preset	VS	Pre-emph */
	{ 0x7, 0x0, 0x0 },	/* 1		400mV	0dB */
	{ 0x6, 0x0, 0x0 },	/* 2		500mV	0dB */
	{ 0x4, 0x0, 0x0 },	/* 3		650mV	0dB */
	{ 0x2, 0x0, 0x0 },	/* 4		800mV	0dB */
	{ 0x0, 0x0, 0x0 },	/* 5		1000mV	0dB */
	{ 0x0, 0x0, 0x5 },	/* 6		Full	-1.5 dB */
	{ 0x0, 0x0, 0x6 },	/* 7		Full	-1.8 dB */
	{ 0x0, 0x0, 0x7 },	/* 8		Full	-2 dB */
	{ 0x0, 0x0, 0x8 },	/* 9		Full	-2.5 dB */
	{ 0x0, 0x0, 0xA },	/* 10		Full	-3 dB */
};

static const struct cnl_ddi_buf_trans tgl_combo_phy_ddi_translations_dp_hbr[] = {
						/* NT mV Trans mV db    */
	{ 0xA, 0x32, 0x3F, 0x00, 0x00 },	/* 350   350      0.0   */
	{ 0xA, 0x4F, 0x37, 0x00, 0x08 },	/* 350   500      3.1   */
	{ 0xC, 0x71, 0x2F, 0x00, 0x10 },	/* 350   700      6.0   */
	{ 0x6, 0x7D, 0x2B, 0x00, 0x14 },	/* 350   900      8.2   */
	{ 0xA, 0x4C, 0x3F, 0x00, 0x00 },	/* 500   500      0.0   */
	{ 0xC, 0x73, 0x34, 0x00, 0x0B },	/* 500   700      2.9   */
	{ 0x6, 0x7F, 0x2F, 0x00, 0x10 },	/* 500   900      5.1   */
	{ 0xC, 0x6C, 0x3C, 0x00, 0x03 },	/* 650   700      0.6   */
	{ 0x6, 0x7F, 0x35, 0x00, 0x0A },	/* 600   900      3.5   */
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 900   900      0.0   */
};

static const struct cnl_ddi_buf_trans tgl_combo_phy_ddi_translations_dp_hbr2[] = {
						/* NT mV Trans mV db    */
	{ 0xA, 0x35, 0x3F, 0x00, 0x00 },	/* 350   350      0.0   */
	{ 0xA, 0x4F, 0x37, 0x00, 0x08 },	/* 350   500      3.1   */
	{ 0xC, 0x63, 0x2F, 0x00, 0x10 },	/* 350   700      6.0   */
	{ 0x6, 0x7F, 0x2B, 0x00, 0x14 },	/* 350   900      8.2   */
	{ 0xA, 0x47, 0x3F, 0x00, 0x00 },	/* 500   500      0.0   */
	{ 0xC, 0x63, 0x34, 0x00, 0x0B },	/* 500   700      2.9   */
	{ 0x6, 0x7F, 0x2F, 0x00, 0x10 },	/* 500   900      5.1   */
	{ 0xC, 0x61, 0x3C, 0x00, 0x03 },	/* 650   700      0.6   */
	{ 0x6, 0x7B, 0x35, 0x00, 0x0A },	/* 600   900      3.5   */
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 900   900      0.0   */
};

/*
 * Cloned the HOBL entry to comply with the voltage and pre-emphasis entries
 * that DisplayPort specification requires
 */
static const struct cnl_ddi_buf_trans tgl_combo_phy_ddi_translations_edp_hbr2_hobl[] = {
						/* VS	pre-emp	*/
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 0	0	*/
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 0	1	*/
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 0	2	*/
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 0	3	*/
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 1	0	*/
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 1	1	*/
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 1	2	*/
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 2	0	*/
	{ 0x6, 0x7F, 0x3F, 0x00, 0x00 },	/* 2	1	*/
};

static bool is_hobl_buf_trans(const struct cnl_ddi_buf_trans *table)
{
	return table == tgl_combo_phy_ddi_translations_edp_hbr2_hobl;
}

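/*
 * BDW eDP: use the low vswing eDP table when the VBT requests it,
 * otherwise fall back to the regular DP table.
 */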
static const struct ddi_buf_trans *
bdw_get_buf_trans_edp(struct intel_encoder *encoder, int *n_entries)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);

	if (dev_priv->vbt.edp.low_vswing) {
		*n_entries = ARRAY_SIZE(bdw_ddi_translations_edp);
		return bdw_ddi_translations_edp;
	} else {
		*n_entries = ARRAY_SIZE(bdw_ddi_translations_dp);
		return bdw_ddi_translations_dp;
	}
}

static const struct ddi_buf_trans *
skl_get_buf_trans_dp(struct intel_encoder *encoder, int *n_entries)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);

	if (IS_SKL_ULX(dev_priv)) {
		*n_entries = ARRAY_SIZE(skl_y_ddi_translations_dp);
		return skl_y_ddi_translations_dp;
	} else if (IS_SKL_ULT(dev_priv)) {
		*n_entries = ARRAY_SIZE(skl_u_ddi_translations_dp);
		return skl_u_ddi_translations_dp;
	} else {
		*n_entries = ARRAY_SIZE(skl_ddi_translations_dp);
		return skl_ddi_translations_dp;
	}
}

static const struct ddi_buf_trans *
kbl_get_buf_trans_dp(struct intel_encoder *encoder, int *n_entries)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);

	if (IS_KBL_ULX(dev_priv) ||
	    IS_CFL_ULX(dev_priv) ||
	    IS_CML_ULX(dev_priv)) {
		*n_entries = ARRAY_SIZE(kbl_y_ddi_translations_dp);
		return kbl_y_ddi_translations_dp;
	} else if (IS_KBL_ULT(dev_priv) ||
		   IS_CFL_ULT(dev_priv) ||
		   IS_CML_ULT(dev_priv)) {
		*n_entries = ARRAY_SIZE(kbl_u_ddi_translations_dp);
		return kbl_u_ddi_translations_dp;
	} else {
		*n_entries = ARRAY_SIZE(kbl_ddi_translations_dp);
		return kbl_ddi_translations_dp;
	}
}

static const struct ddi_buf_trans *
skl_get_buf_trans_edp(struct intel_encoder *encoder, int *n_entries)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);

	if (dev_priv->vbt.edp.low_vswing) {
		if (IS_SKL_ULX(dev_priv) ||
		    IS_KBL_ULX(dev_priv) ||
		    IS_CFL_ULX(dev_priv) ||
		    IS_CML_ULX(dev_priv)) {
			*n_entries = ARRAY_SIZE(skl_y_ddi_translations_edp);
			return skl_y_ddi_translations_edp;
		} else if (IS_SKL_ULT(dev_priv) ||
			   IS_KBL_ULT(dev_priv) ||
			   IS_CFL_ULT(dev_priv) ||
			   IS_CML_ULT(dev_priv)) {
			*n_entries = ARRAY_SIZE(skl_u_ddi_translations_edp);
			return skl_u_ddi_translations_edp;
		} else {
			*n_entries = ARRAY_SIZE(skl_ddi_translations_edp);
			return skl_ddi_translations_edp;
		}
	}

	if (IS_KABYLAKE(dev_priv) ||
	    IS_COFFEELAKE(dev_priv) ||
	    IS_COMETLAKE(dev_priv))
		return kbl_get_buf_trans_dp(encoder, n_entries);
	else
		return skl_get_buf_trans_dp(encoder, n_entries);
}

static const struct ddi_buf_trans *
skl_get_buf_trans_hdmi(struct drm_i915_private *dev_priv, int *n_entries)
{
	if (IS_SKL_ULX(dev_priv) ||
	    IS_KBL_ULX(dev_priv) ||
	    IS_CFL_ULX(dev_priv) ||
	    IS_CML_ULX(dev_priv)) {
		*n_entries = ARRAY_SIZE(skl_y_ddi_translations_hdmi);
		return skl_y_ddi_translations_hdmi;
	} else {
		*n_entries = ARRAY_SIZE(skl_ddi_translations_hdmi);
		return skl_ddi_translations_hdmi;
	}
}

static int skl_buf_trans_num_entries(enum port port, int n_entries)
{
	/* Only DDIA and DDIE can select the 10th register with DP */
	if (port == PORT_A || port == PORT_E)
		return min(n_entries, 10);
	else
		return min(n_entries, 9);
}

static const struct ddi_buf_trans *
intel_ddi_get_buf_trans_dp(struct intel_encoder *encoder, int *n_entries)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);

	if (IS_KABYLAKE(dev_priv) ||
	    IS_COFFEELAKE(dev_priv) ||
	    IS_COMETLAKE(dev_priv)) {
		const struct ddi_buf_trans *ddi_translations =
			kbl_get_buf_trans_dp(encoder, n_entries);
		*n_entries = skl_buf_trans_num_entries(encoder->port, *n_entries);
		return ddi_translations;
	} else if (IS_SKYLAKE(dev_priv)) {
		const struct ddi_buf_trans *ddi_translations =
			skl_get_buf_trans_dp(encoder, n_entries);
		*n_entries = skl_buf_trans_num_entries(encoder->port, *n_entries);
		return ddi_translations;
	} else if (IS_BROADWELL(dev_priv)) {
		*n_entries = ARRAY_SIZE(bdw_ddi_translations_dp);
		return  bdw_ddi_translations_dp;
	} else if (IS_HASWELL(dev_priv)) {
		*n_entries = ARRAY_SIZE(hsw_ddi_translations_dp);
		return hsw_ddi_translations_dp;
	}

	*n_entries = 0;
	return NULL;
}

static const struct ddi_buf_trans *
intel_ddi_get_buf_trans_edp(struct intel_encoder *encoder, int *n_entries)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);

	if (IS_GEN9_BC(dev_priv)) {
		const struct ddi_buf_trans *ddi_translations =
			skl_get_buf_trans_edp(encoder, n_entries);
		*n_entries = skl_buf_trans_num_entries(encoder->port, *n_entries);
		return ddi_translations;
	} else if (IS_BROADWELL(dev_priv)) {
		return bdw_get_buf_trans_edp(encoder, n_entries);
	} else if (IS_HASWELL(dev_priv)) {
		*n_entries = ARRAY_SIZE(hsw_ddi_translations_dp);
		return hsw_ddi_translations_dp;
	}

	*n_entries = 0;
	return NULL;
}

static const struct ddi_buf_trans *
intel_ddi_get_buf_trans_fdi(struct drm_i915_private *dev_priv,
			    int *n_entries)
{
	if (IS_BROADWELL(dev_priv)) {
		*n_entries = ARRAY_SIZE(bdw_ddi_translations_fdi);
		return bdw_ddi_translations_fdi;
	} else if (IS_HASWELL(dev_priv)) {
		*n_entries = ARRAY_SIZE(hsw_ddi_translations_fdi);
		return hsw_ddi_translations_fdi;
	}

	*n_entries = 0;
	return NULL;
}

static const struct ddi_buf_trans *
intel_ddi_get_buf_trans_hdmi(struct intel_encoder *encoder,
			     int *n_entries)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);

	if (IS_GEN9_BC(dev_priv)) {
		return skl_get_buf_trans_hdmi(dev_priv, n_entries);
	} else if (IS_BROADWELL(dev_priv)) {
		*n_entries = ARRAY_SIZE(bdw_ddi_translations_hdmi);
		return bdw_ddi_translations_hdmi;
	} else if (IS_HASWELL(dev_priv)) {
		*n_entries = ARRAY_SIZE(hsw_ddi_translations_hdmi);
		return hsw_ddi_translations_hdmi;
	}

	*n_entries = 0;
	return NULL;
}

static const struct bxt_ddi_buf_trans *
bxt_get_buf_trans_dp(struct intel_encoder *encoder, int *n_entries)
{
	*n_entries = ARRAY_SIZE(bxt_ddi_translations_dp);
	return bxt_ddi_translations_dp;
}

static const struct bxt_ddi_buf_trans *
bxt_get_buf_trans_edp(struct intel_encoder *encoder, int *n_entries)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);

	if (dev_priv->vbt.edp.low_vswing) {
		*n_entries = ARRAY_SIZE(bxt_ddi_translations_edp);
		return bxt_ddi_translations_edp;
	}

	return bxt_get_buf_trans_dp(encoder, n_entries);
}

static const struct bxt_ddi_buf_trans *
bxt_get_buf_trans_hdmi(struct intel_encoder *encoder, int *n_entries)
{
	*n_entries = ARRAY_SIZE(bxt_ddi_translations_hdmi);
	return bxt_ddi_translations_hdmi;
}

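/*
 * CNL buf trans tables are keyed on the VccIO voltage reported in
 * CNL_PORT_COMP_DW3.
 */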
static const struct cnl_ddi_buf_trans *
cnl_get_buf_trans_hdmi(struct intel_encoder *encoder, int *n_entries)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u32 voltage = intel_de_read(dev_priv, CNL_PORT_COMP_DW3) & VOLTAGE_INFO_MASK;

	if (voltage == VOLTAGE_INFO_0_85V) {
		*n_entries = ARRAY_SIZE(cnl_ddi_translations_hdmi_0_85V);
		return cnl_ddi_translations_hdmi_0_85V;
	} else if (voltage == VOLTAGE_INFO_0_95V) {
		*n_entries = ARRAY_SIZE(cnl_ddi_translations_hdmi_0_95V);
		return cnl_ddi_translations_hdmi_0_95V;
	} else if (voltage == VOLTAGE_INFO_1_05V) {
		*n_entries = ARRAY_SIZE(cnl_ddi_translations_hdmi_1_05V);
		return cnl_ddi_translations_hdmi_1_05V;
	} else {
		*n_entries = 1; /* shut up gcc */
		MISSING_CASE(voltage);
	}
	return NULL;
}

static const struct cnl_ddi_buf_trans *
cnl_get_buf_trans_dp(struct intel_encoder *encoder, int *n_entries)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u32 voltage = intel_de_read(dev_priv, CNL_PORT_COMP_DW3) & VOLTAGE_INFO_MASK;

	if (voltage == VOLTAGE_INFO_0_85V) {
		*n_entries = ARRAY_SIZE(cnl_ddi_translations_dp_0_85V);
		return cnl_ddi_translations_dp_0_85V;
	} else if (voltage == VOLTAGE_INFO_0_95V) {
		*n_entries = ARRAY_SIZE(cnl_ddi_translations_dp_0_95V);
		return cnl_ddi_translations_dp_0_95V;
	} else if (voltage == VOLTAGE_INFO_1_05V) {
		*n_entries = ARRAY_SIZE(cnl_ddi_translations_dp_1_05V);
		return cnl_ddi_translations_dp_1_05V;
	} else {
		*n_entries = 1; /* shut up gcc */
		MISSING_CASE(voltage);
	}
	return NULL;
}

static const struct cnl_ddi_buf_trans *
cnl_get_buf_trans_edp(struct intel_encoder *encoder, int *n_entries)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u32 voltage = intel_de_read(dev_priv, CNL_PORT_COMP_DW3) & VOLTAGE_INFO_MASK;

	if (dev_priv->vbt.edp.low_vswing) {
		if (voltage == VOLTAGE_INFO_0_85V) {
			*n_entries = ARRAY_SIZE(cnl_ddi_translations_edp_0_85V);
			return cnl_ddi_translations_edp_0_85V;
		} else if (voltage == VOLTAGE_INFO_0_95V) {
			*n_entries = ARRAY_SIZE(cnl_ddi_translations_edp_0_95V);
			return cnl_ddi_translations_edp_0_95V;
		} else if (voltage == VOLTAGE_INFO_1_05V) {
			*n_entries = ARRAY_SIZE(cnl_ddi_translations_edp_1_05V);
			return cnl_ddi_translations_edp_1_05V;
		} else {
			*n_entries = 1; /* shut up gcc */
			MISSING_CASE(voltage);
		}
		return NULL;
	} else {
		return cnl_get_buf_trans_dp(encoder, n_entries);
	}
}

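/*
 * ICL combo PHY: HDMI has its own table, eDP above HBR2 rates uses the
 * HBR3 table and low vswing eDP uses the HBR2 eDP table; everything else
 * falls back to the DP HBR2 table.
 */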
static const struct cnl_ddi_buf_trans *
icl_get_combo_buf_trans(struct intel_encoder *encoder, int type, int rate,
			int *n_entries)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);

	if (type == INTEL_OUTPUT_HDMI) {
		*n_entries = ARRAY_SIZE(icl_combo_phy_ddi_translations_hdmi);
		return icl_combo_phy_ddi_translations_hdmi;
	} else if (rate > 540000 && type == INTEL_OUTPUT_EDP) {
		*n_entries = ARRAY_SIZE(icl_combo_phy_ddi_translations_edp_hbr3);
		return icl_combo_phy_ddi_translations_edp_hbr3;
	} else if (type == INTEL_OUTPUT_EDP && dev_priv->vbt.edp.low_vswing) {
		*n_entries = ARRAY_SIZE(icl_combo_phy_ddi_translations_edp_hbr2);
		return icl_combo_phy_ddi_translations_edp_hbr2;
	}

	*n_entries = ARRAY_SIZE(icl_combo_phy_ddi_translations_dp_hbr2);
	return icl_combo_phy_ddi_translations_dp_hbr2;
}

static const struct icl_mg_phy_ddi_buf_trans *
icl_get_mg_buf_trans(struct intel_encoder *encoder, int type, int rate,
		     int *n_entries)
{
	if (type == INTEL_OUTPUT_HDMI) {
		*n_entries = ARRAY_SIZE(icl_mg_phy_ddi_translations_hdmi);
		return icl_mg_phy_ddi_translations_hdmi;
	} else if (rate > 270000) {
		*n_entries = ARRAY_SIZE(icl_mg_phy_ddi_translations_hbr2_hbr3);
		return icl_mg_phy_ddi_translations_hbr2_hbr3;
	}

	*n_entries = ARRAY_SIZE(icl_mg_phy_ddi_translations_rbr_hbr);
	return icl_mg_phy_ddi_translations_rbr_hbr;
}

static const struct cnl_ddi_buf_trans *
ehl_get_combo_buf_trans(struct intel_encoder *encoder, int type, int rate,
			int *n_entries)
{
	if (type != INTEL_OUTPUT_HDMI && type != INTEL_OUTPUT_EDP) {
		*n_entries = ARRAY_SIZE(ehl_combo_phy_ddi_translations_dp);
		return ehl_combo_phy_ddi_translations_dp;
	}

	return icl_get_combo_buf_trans(encoder, type, rate, n_entries);
}

static const struct cnl_ddi_buf_trans *
tgl_get_combo_buf_trans(struct intel_encoder *encoder, int type, int rate,
			int *n_entries)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);

	if (type == INTEL_OUTPUT_EDP && dev_priv->vbt.edp.hobl) {
		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);

		if (!intel_dp->hobl_failed && rate <= 540000) {
			/* Same table applies to TGL, RKL and DG1 */
			*n_entries = ARRAY_SIZE(tgl_combo_phy_ddi_translations_edp_hbr2_hobl);
			return tgl_combo_phy_ddi_translations_edp_hbr2_hobl;
		}
	}

	if (type == INTEL_OUTPUT_HDMI || type == INTEL_OUTPUT_EDP) {
		return icl_get_combo_buf_trans(encoder, type, rate, n_entries);
	} else if (rate > 270000) {
		*n_entries = ARRAY_SIZE(tgl_combo_phy_ddi_translations_dp_hbr2);
		return tgl_combo_phy_ddi_translations_dp_hbr2;
	}

	*n_entries = ARRAY_SIZE(tgl_combo_phy_ddi_translations_dp_hbr);
	return tgl_combo_phy_ddi_translations_dp_hbr;
}

static const struct tgl_dkl_phy_ddi_buf_trans *
tgl_get_dkl_buf_trans(struct intel_encoder *encoder, int type, int rate,
		      int *n_entries)
{
	if (type == INTEL_OUTPUT_HDMI) {
		*n_entries = ARRAY_SIZE(tgl_dkl_phy_hdmi_ddi_trans);
		return tgl_dkl_phy_hdmi_ddi_trans;
	} else if (rate > 270000) {
		*n_entries = ARRAY_SIZE(tgl_dkl_phy_dp_ddi_trans_hbr2);
		return tgl_dkl_phy_dp_ddi_trans_hbr2;
	}

	*n_entries = ARRAY_SIZE(tgl_dkl_phy_dp_ddi_trans);
	return tgl_dkl_phy_dp_ddi_trans;
}

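/*
 * Select the HDMI buf trans level: prefer the level shift from the VBT,
 * otherwise use the per-platform default entry.
 */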
static int intel_ddi_hdmi_level(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	int n_entries, level, default_entry;
	enum phy phy = intel_port_to_phy(dev_priv, encoder->port);

	if (INTEL_GEN(dev_priv) >= 12) {
		if (intel_phy_is_combo(dev_priv, phy))
			tgl_get_combo_buf_trans(encoder, INTEL_OUTPUT_HDMI,
						0, &n_entries);
		else
			tgl_get_dkl_buf_trans(encoder, INTEL_OUTPUT_HDMI, 0,
					      &n_entries);
		default_entry = n_entries - 1;
	} else if (INTEL_GEN(dev_priv) == 11) {
		if (intel_phy_is_combo(dev_priv, phy))
			icl_get_combo_buf_trans(encoder, INTEL_OUTPUT_HDMI,
						0, &n_entries);
		else
			icl_get_mg_buf_trans(encoder, INTEL_OUTPUT_HDMI, 0,
					     &n_entries);
		default_entry = n_entries - 1;
	} else if (IS_CANNONLAKE(dev_priv)) {
		cnl_get_buf_trans_hdmi(encoder, &n_entries);
		default_entry = n_entries - 1;
	} else if (IS_GEN9_LP(dev_priv)) {
		bxt_get_buf_trans_hdmi(encoder, &n_entries);
		default_entry = n_entries - 1;
	} else if (IS_GEN9_BC(dev_priv)) {
		intel_ddi_get_buf_trans_hdmi(encoder, &n_entries);
		default_entry = 8;
	} else if (IS_BROADWELL(dev_priv)) {
		intel_ddi_get_buf_trans_hdmi(encoder, &n_entries);
		default_entry = 7;
	} else if (IS_HASWELL(dev_priv)) {
		intel_ddi_get_buf_trans_hdmi(encoder, &n_entries);
		default_entry = 6;
	} else {
		drm_WARN(&dev_priv->drm, 1, "ddi translation table missing\n");
		return 0;
	}

	if (drm_WARN_ON_ONCE(&dev_priv->drm, n_entries == 0))
		return 0;

	level = intel_bios_hdmi_level_shift(encoder);
	if (level < 0)
		level = default_entry;

	if (drm_WARN_ON_ONCE(&dev_priv->drm, level >= n_entries))
		level = n_entries - 1;

	return level;
}

/*
 * Starting with Haswell, DDI port buffers must be programmed with correct
 * values in advance. This function programs the correct values for
 * DP/eDP/FDI use cases.
 */
static void intel_prepare_dp_ddi_buffers(struct intel_encoder *encoder,
					 const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u32 iboost_bit = 0;
	int i, n_entries;
	enum port port = encoder->port;
	const struct ddi_buf_trans *ddi_translations;

	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_ANALOG))
		ddi_translations = intel_ddi_get_buf_trans_fdi(dev_priv,
							       &n_entries);
	else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_EDP))
		ddi_translations = intel_ddi_get_buf_trans_edp(encoder,
							       &n_entries);
	else
		ddi_translations = intel_ddi_get_buf_trans_dp(encoder,
							      &n_entries);

	/* If we're boosting the current, set bit 31 of trans1 */
	if (IS_GEN9_BC(dev_priv) && intel_bios_dp_boost_level(encoder))
		iboost_bit = DDI_BUF_BALANCE_LEG_ENABLE;

	for (i = 0; i < n_entries; i++) {
		intel_de_write(dev_priv, DDI_BUF_TRANS_LO(port, i),
			       ddi_translations[i].trans1 | iboost_bit);
		intel_de_write(dev_priv, DDI_BUF_TRANS_HI(port, i),
			       ddi_translations[i].trans2);
	}
}

/*
 * Starting with Haswell, DDI port buffers must be programmed with correct
 * values in advance. This function programs the correct values for
 * HDMI/DVI use cases.
 */
static void intel_prepare_hdmi_ddi_buffers(struct intel_encoder *encoder,
					   int level)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u32 iboost_bit = 0;
	int n_entries;
	enum port port = encoder->port;
	const struct ddi_buf_trans *ddi_translations;

	ddi_translations = intel_ddi_get_buf_trans_hdmi(encoder, &n_entries);

	if (drm_WARN_ON_ONCE(&dev_priv->drm, !ddi_translations))
		return;
	if (drm_WARN_ON_ONCE(&dev_priv->drm, level >= n_entries))
		level = n_entries - 1;

	/* If we're boosting the current, set bit 31 of trans1 */
	if (IS_GEN9_BC(dev_priv) && intel_bios_hdmi_boost_level(encoder))
		iboost_bit = DDI_BUF_BALANCE_LEG_ENABLE;

	/* Entry 9 is for HDMI: */
	intel_de_write(dev_priv, DDI_BUF_TRANS_LO(port, 9),
		       ddi_translations[level].trans1 | iboost_bit);
	intel_de_write(dev_priv, DDI_BUF_TRANS_HI(port, 9),
		       ddi_translations[level].trans2);
}

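/* Poll DDI_BUF_CTL for the idle bit; Broxton just waits out a fixed delay. */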
static void intel_wait_ddi_buf_idle(struct drm_i915_private *dev_priv,
				    enum port port)
{
	if (IS_BROXTON(dev_priv)) {
		udelay(16);
		return;
	}

	if (wait_for_us((intel_de_read(dev_priv, DDI_BUF_CTL(port)) &
			 DDI_BUF_IS_IDLE), 8))
		drm_err(&dev_priv->drm, "Timeout waiting for DDI BUF %c to get idle\n",
			port_name(port));
}

static void intel_wait_ddi_buf_active(struct drm_i915_private *dev_priv,
				      enum port port)
{
	/* Wait > 518 usecs for DDI_BUF_CTL to be non idle */
	if (INTEL_GEN(dev_priv) < 10 && !IS_GEMINILAKE(dev_priv)) {
		usleep_range(518, 1000);
		return;
	}

	if (wait_for_us(!(intel_de_read(dev_priv, DDI_BUF_CTL(port)) &
			  DDI_BUF_IS_IDLE), 500))
		drm_err(&dev_priv->drm, "Timeout waiting for DDI BUF %c to get active\n",
			port_name(port));
}

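/* Map a HSW/BDW shared DPLL to the corresponding PORT_CLK_SEL value. */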
static u32 hsw_pll_to_ddi_pll_sel(const struct intel_shared_dpll *pll)
{
	switch (pll->info->id) {
	case DPLL_ID_WRPLL1:
		return PORT_CLK_SEL_WRPLL1;
	case DPLL_ID_WRPLL2:
		return PORT_CLK_SEL_WRPLL2;
	case DPLL_ID_SPLL:
		return PORT_CLK_SEL_SPLL;
	case DPLL_ID_LCPLL_810:
		return PORT_CLK_SEL_LCPLL_810;
	case DPLL_ID_LCPLL_1350:
		return PORT_CLK_SEL_LCPLL_1350;
	case DPLL_ID_LCPLL_2700:
		return PORT_CLK_SEL_LCPLL_2700;
	default:
		MISSING_CASE(pll->info->id);
		return PORT_CLK_SEL_NONE;
	}
}

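/*
 * Map the selected DPLL to a DDI_CLK_SEL value; for the TBT PLL the
 * selection also depends on the port clock.
 */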
static u32 icl_pll_to_ddi_clk_sel(struct intel_encoder *encoder,
				  const struct intel_crtc_state *crtc_state)
{
	const struct intel_shared_dpll *pll = crtc_state->shared_dpll;
	int clock = crtc_state->port_clock;
	const enum intel_dpll_id id = pll->info->id;

	switch (id) {
	default:
		/*
		 * DPLL_ID_ICL_DPLL0 and DPLL_ID_ICL_DPLL1 should not be used
		 * here, so do warn if this gets passed in
		 */
		MISSING_CASE(id);
		return DDI_CLK_SEL_NONE;
	case DPLL_ID_ICL_TBTPLL:
		switch (clock) {
		case 162000:
			return DDI_CLK_SEL_TBT_162;
		case 270000:
			return DDI_CLK_SEL_TBT_270;
		case 540000:
			return DDI_CLK_SEL_TBT_540;
		case 810000:
			return DDI_CLK_SEL_TBT_810;
		default:
			MISSING_CASE(clock);
			return DDI_CLK_SEL_NONE;
		}
	case DPLL_ID_ICL_MGPLL1:
	case DPLL_ID_ICL_MGPLL2:
	case DPLL_ID_ICL_MGPLL3:
	case DPLL_ID_ICL_MGPLL4:
	case DPLL_ID_TGL_MGPLL5:
	case DPLL_ID_TGL_MGPLL6:
		return DDI_CLK_SEL_MG;
	}
}

/* Starting with Haswell, different DDI ports can work in FDI mode for
 * connection to the PCH-located connectors. For this, it is necessary to train
 * both the DDI port and PCH receiver for the desired DDI buffer settings.
 *
 * The recommended port to work in FDI mode is DDI E, which we use here. Also,
 * please note that when FDI mode is active on DDI E, it shares 2 lines with
 * DDI A (which is used for eDP)
 */

void hsw_fdi_link_train(struct intel_encoder *encoder,
			const struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	u32 temp, i, rx_ctl_val, ddi_pll_sel;

	intel_prepare_dp_ddi_buffers(encoder, crtc_state);

	/* Set the FDI_RX_MISC pwrdn lanes and the 2 workarounds listed at the
	 * mode set "sequence for CRT port" document:
	 * - TP1 to TP2 time with the default value
	 * - FDI delay to 90h
	 *
	 * WaFDIAutoLinkSetTimingOverrride:hsw
	 */
	intel_de_write(dev_priv, FDI_RX_MISC(PIPE_A),
		       FDI_RX_PWRDN_LANE1_VAL(2) | FDI_RX_PWRDN_LANE0_VAL(2) | FDI_RX_TP1_TO_TP2_48 | FDI_RX_FDI_DELAY_90);

	/* Enable the PCH Receiver FDI PLL */
	rx_ctl_val = dev_priv->fdi_rx_config | FDI_RX_ENHANCE_FRAME_ENABLE |
		     FDI_RX_PLL_ENABLE |
		     FDI_DP_PORT_WIDTH(crtc_state->fdi_lanes);
	intel_de_write(dev_priv, FDI_RX_CTL(PIPE_A), rx_ctl_val);
	intel_de_posting_read(dev_priv, FDI_RX_CTL(PIPE_A));
	udelay(220);

	/* Switch from Rawclk to PCDclk */
	rx_ctl_val |= FDI_PCDCLK;
	intel_de_write(dev_priv, FDI_RX_CTL(PIPE_A), rx_ctl_val);

	/* Configure Port Clock Select */
	ddi_pll_sel = hsw_pll_to_ddi_pll_sel(crtc_state->shared_dpll);
	intel_de_write(dev_priv, PORT_CLK_SEL(PORT_E), ddi_pll_sel);
	drm_WARN_ON(&dev_priv->drm, ddi_pll_sel != PORT_CLK_SEL_SPLL);

	/* Start the training iterating through available voltages and emphasis,
	 * testing each value twice. */
	for (i = 0; i < ARRAY_SIZE(hsw_ddi_translations_fdi) * 2; i++) {
		/* Configure DP_TP_CTL with auto-training */
		intel_de_write(dev_priv, DP_TP_CTL(PORT_E),
			       DP_TP_CTL_FDI_AUTOTRAIN |
			       DP_TP_CTL_ENHANCED_FRAME_ENABLE |
			       DP_TP_CTL_LINK_TRAIN_PAT1 |
			       DP_TP_CTL_ENABLE);

		/* Configure and enable DDI_BUF_CTL for DDI E with next voltage.
		 * DDI E does not support port reversal, the functionality is
		 * achieved on the PCH side in FDI_RX_CTL, so no need to set the
		 * port reversal bit */
		intel_de_write(dev_priv, DDI_BUF_CTL(PORT_E),
			       DDI_BUF_CTL_ENABLE | ((crtc_state->fdi_lanes - 1) << 1) | DDI_BUF_TRANS_SELECT(i / 2));
		intel_de_posting_read(dev_priv, DDI_BUF_CTL(PORT_E));

		udelay(600);

		/* Program PCH FDI Receiver TU */
		intel_de_write(dev_priv, FDI_RX_TUSIZE1(PIPE_A), TU_SIZE(64));

		/* Enable PCH FDI Receiver with auto-training */
		rx_ctl_val |= FDI_RX_ENABLE | FDI_LINK_TRAIN_AUTO;
		intel_de_write(dev_priv, FDI_RX_CTL(PIPE_A), rx_ctl_val);
		intel_de_posting_read(dev_priv, FDI_RX_CTL(PIPE_A));

		/* Wait for FDI receiver lane calibration */
		udelay(30);

		/* Unset FDI_RX_MISC pwrdn lanes */
		temp = intel_de_read(dev_priv, FDI_RX_MISC(PIPE_A));
		temp &= ~(FDI_RX_PWRDN_LANE1_MASK | FDI_RX_PWRDN_LANE0_MASK);
		intel_de_write(dev_priv, FDI_RX_MISC(PIPE_A), temp);
		intel_de_posting_read(dev_priv, FDI_RX_MISC(PIPE_A));

		/* Wait for FDI auto training time */
		udelay(5);
1410

1411
		temp = intel_de_read(dev_priv, DP_TP_STATUS(PORT_E));
1412
		if (temp & DP_TP_STATUS_AUTOTRAIN_DONE) {
1413 1414
			drm_dbg_kms(&dev_priv->drm,
				    "FDI link training done on step %d\n", i);
1415 1416
			break;
		}
1417

1418 1419 1420 1421 1422
		/*
		 * Leave things enabled even if we failed to train FDI.
		 * Results in less fireworks from the state checker.
		 */
		if (i == ARRAY_SIZE(hsw_ddi_translations_fdi) * 2 - 1) {
1423
			drm_err(&dev_priv->drm, "FDI link training failed!\n");
1424
			break;
1425
		}
1426

1427
		rx_ctl_val &= ~FDI_RX_ENABLE;
1428 1429
		intel_de_write(dev_priv, FDI_RX_CTL(PIPE_A), rx_ctl_val);
		intel_de_posting_read(dev_priv, FDI_RX_CTL(PIPE_A));
1430

1431
		temp = intel_de_read(dev_priv, DDI_BUF_CTL(PORT_E));
1432
		temp &= ~DDI_BUF_CTL_ENABLE;
1433 1434
		intel_de_write(dev_priv, DDI_BUF_CTL(PORT_E), temp);
		intel_de_posting_read(dev_priv, DDI_BUF_CTL(PORT_E));
1435

1436
		/* Disable DP_TP_CTL and FDI_RX_CTL and retry */
1437
		temp = intel_de_read(dev_priv, DP_TP_CTL(PORT_E));
1438 1439
		temp &= ~(DP_TP_CTL_ENABLE | DP_TP_CTL_LINK_TRAIN_MASK);
		temp |= DP_TP_CTL_LINK_TRAIN_PAT1;
1440 1441
		intel_de_write(dev_priv, DP_TP_CTL(PORT_E), temp);
		intel_de_posting_read(dev_priv, DP_TP_CTL(PORT_E));
1442 1443

		intel_wait_ddi_buf_idle(dev_priv, PORT_E);
1444 1445

		/* Reset FDI_RX_MISC pwrdn lanes */
1446
		temp = intel_de_read(dev_priv, FDI_RX_MISC(PIPE_A));
1447 1448
		temp &= ~(FDI_RX_PWRDN_LANE1_MASK | FDI_RX_PWRDN_LANE0_MASK);
		temp |= FDI_RX_PWRDN_LANE1_VAL(2) | FDI_RX_PWRDN_LANE0_VAL(2);
1449 1450
		intel_de_write(dev_priv, FDI_RX_MISC(PIPE_A), temp);
		intel_de_posting_read(dev_priv, FDI_RX_MISC(PIPE_A));
1451 1452
	}

1453
	/* Enable normal pixel sending for FDI */
1454
	intel_de_write(dev_priv, DP_TP_CTL(PORT_E),
1455 1456 1457 1458
		       DP_TP_CTL_FDI_AUTOTRAIN |
		       DP_TP_CTL_LINK_TRAIN_NORMAL |
		       DP_TP_CTL_ENHANCED_FRAME_ENABLE |
		       DP_TP_CTL_ENABLE);
1459
}
1460

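/*
 * Compute the initial DDI_BUF_CTL value for this port in intel_dp->DP:
 * the BIOS-saved port bits, buffer translation entry 0 and the configured
 * lane count. The actual register write happens later in the enable path.
 */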
static void intel_ddi_init_dp_buf_reg(struct intel_encoder *encoder)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	struct intel_digital_port *dig_port = enc_to_dig_port(encoder);

	intel_dp->DP = dig_port->saved_port_bits |
		DDI_BUF_CTL_ENABLE | DDI_BUF_TRANS_SELECT(0);
	intel_dp->DP |= DDI_PORT_WIDTH(intel_dp->lane_count);
}

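/*
 * Look up the link clock (in kHz) that the Thunderbolt PLL provides on this
 * port, based on the DDI_CLK_SEL value currently programmed for it.
 */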
static int icl_calc_tbt_pll_link(struct drm_i915_private *dev_priv,
				 enum port port)
{
	u32 val = intel_de_read(dev_priv, DDI_CLK_SEL(port)) & DDI_CLK_SEL_MASK;

	switch (val) {
	case DDI_CLK_SEL_NONE:
		return 0;
	case DDI_CLK_SEL_TBT_162:
		return 162000;
	case DDI_CLK_SEL_TBT_270:
		return 270000;
	case DDI_CLK_SEL_TBT_540:
		return 540000;
	case DDI_CLK_SEL_TBT_810:
		return 810000;
	default:
		MISSING_CASE(val);
		return 0;
	}
}

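/*
 * Derive the pipe pixel clock from the already computed port clock: via the
 * FDI or DP M/N values where applicable, with corrections for deep color
 * HDMI, YCbCr 4:2:0 output and the pixel multiplier.
 */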
static void ddi_dotclock_get(struct intel_crtc_state *pipe_config)
{
	int dotclock;

	if (pipe_config->has_pch_encoder)
		dotclock = intel_dotclock_calculate(pipe_config->port_clock,
						    &pipe_config->fdi_m_n);
	else if (intel_crtc_has_dp_encoder(pipe_config))
		dotclock = intel_dotclock_calculate(pipe_config->port_clock,
						    &pipe_config->dp_m_n);
	else if (pipe_config->has_hdmi_sink && pipe_config->pipe_bpp > 24)
		dotclock = pipe_config->port_clock * 24 / pipe_config->pipe_bpp;
	else
		dotclock = pipe_config->port_clock;

	if (pipe_config->output_format == INTEL_OUTPUT_FORMAT_YCBCR420 &&
	    !intel_crtc_has_dp_encoder(pipe_config))
		dotclock *= 2;

	if (pipe_config->pixel_multiplier)
		dotclock /= pipe_config->pixel_multiplier;

	pipe_config->hw.adjusted_mode.crtc_clock = dotclock;
}

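/*
 * Read back the port clock: from the TBT PLL selection for Type-C ports on
 * the TBT PLL, otherwise from the shared DPLL, then derive the dotclock.
 */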
static void intel_ddi_clock_get(struct intel_encoder *encoder,
				struct intel_crtc_state *pipe_config)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	enum phy phy = intel_port_to_phy(dev_priv, encoder->port);

	if (intel_phy_is_tc(dev_priv, phy) &&
	    intel_get_shared_dpll_id(dev_priv, pipe_config->shared_dpll) ==
	    DPLL_ID_ICL_TBTPLL)
		pipe_config->port_clock = icl_calc_tbt_pll_link(dev_priv,
								encoder->port);
	else
		pipe_config->port_clock =
			intel_dpll_get_freq(dev_priv, pipe_config->shared_dpll);

	ddi_dotclock_get(pipe_config);
}

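/*
 * Program TRANS_MSA_MISC for DP: bits per colour, limited/full RGB range and
 * the colorimetry indication (YCbCr 4:4:4 or VSC SDP based) carried in the
 * Main Stream Attribute data.
 */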
void intel_ddi_set_dp_msa(const struct intel_crtc_state *crtc_state,
			  const struct drm_connector_state *conn_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
	u32 temp;

	if (!intel_crtc_has_dp_encoder(crtc_state))
		return;

	drm_WARN_ON(&dev_priv->drm, transcoder_is_dsi(cpu_transcoder));

	temp = DP_MSA_MISC_SYNC_CLOCK;

	switch (crtc_state->pipe_bpp) {
	case 18:
		temp |= DP_MSA_MISC_6_BPC;
		break;
	case 24:
		temp |= DP_MSA_MISC_8_BPC;
		break;
	case 30:
		temp |= DP_MSA_MISC_10_BPC;
		break;
	case 36:
		temp |= DP_MSA_MISC_12_BPC;
		break;
	default:
		MISSING_CASE(crtc_state->pipe_bpp);
		break;
	}

	/* nonsense combination */
	drm_WARN_ON(&dev_priv->drm, crtc_state->limited_color_range &&
		    crtc_state->output_format != INTEL_OUTPUT_FORMAT_RGB);

	if (crtc_state->limited_color_range)
		temp |= DP_MSA_MISC_COLOR_CEA_RGB;

	/*
	 * As per DP 1.2 spec section 2.3.4.3 while sending
	 * YCBCR 444 signals we should program MSA MISC1/0 fields with
	 * colorspace information.
	 */
	if (crtc_state->output_format == INTEL_OUTPUT_FORMAT_YCBCR444)
		temp |= DP_MSA_MISC_COLOR_YCBCR_444_BT709;

	/*
	 * As per DP 1.4a spec section 2.2.4.3 [MSA Field for Indication
	 * of Color Encoding Format and Content Color Gamut] while sending
	 * YCBCR 420, HDR BT.2020 signals we should program MSA MISC1 fields
	 * which indicate VSC SDP for the Pixel Encoding/Colorimetry Format.
	 */
	if (intel_dp_needs_vsc_sdp(crtc_state, conn_state))
		temp |= DP_MSA_MISC_COLOR_VSC_SDP;

	intel_de_write(dev_priv, TRANS_MSA_MISC(cpu_transcoder), temp);
}

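/*
 * Encode a transcoder for the port sync MASTER_SELECT fields: 0 selects the
 * EDP transcoder, otherwise the transcoder index plus one.
 */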
static u32 bdw_trans_port_sync_master_select(enum transcoder master_transcoder)
{
	if (master_transcoder == TRANSCODER_EDP)
		return 0;
	else
		return master_transcoder + 1;
}

/*
 * Returns the TRANS_DDI_FUNC_CTL value based on CRTC state.
 *
 * Only intended to be used by intel_ddi_enable_transcoder_func() and
 * intel_ddi_config_transcoder_func().
 */
static u32
intel_ddi_transcoder_func_reg_val_get(struct intel_encoder *encoder,
				      const struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum pipe pipe = crtc->pipe;
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
	enum port port = encoder->port;
	u32 temp;

	/* Enable TRANS_DDI_FUNC_CTL for the pipe to work in HDMI mode */
	temp = TRANS_DDI_FUNC_ENABLE;
	if (INTEL_GEN(dev_priv) >= 12)
		temp |= TGL_TRANS_DDI_SELECT_PORT(port);
	else
		temp |= TRANS_DDI_SELECT_PORT(port);

	switch (crtc_state->pipe_bpp) {
	case 18:
		temp |= TRANS_DDI_BPC_6;
		break;
	case 24:
		temp |= TRANS_DDI_BPC_8;
		break;
	case 30:
		temp |= TRANS_DDI_BPC_10;
		break;
	case 36:
		temp |= TRANS_DDI_BPC_12;
		break;
	default:
		BUG();
	}

	if (crtc_state->hw.adjusted_mode.flags & DRM_MODE_FLAG_PVSYNC)
		temp |= TRANS_DDI_PVSYNC;
	if (crtc_state->hw.adjusted_mode.flags & DRM_MODE_FLAG_PHSYNC)
		temp |= TRANS_DDI_PHSYNC;

	if (cpu_transcoder == TRANSCODER_EDP) {
		switch (pipe) {
		case PIPE_A:
			/* On Haswell, can only use the always-on power well for
			 * eDP when not using the panel fitter, and when not
			 * using motion blur mitigation (which we don't
			 * support). */
			if (crtc_state->pch_pfit.force_thru)
				temp |= TRANS_DDI_EDP_INPUT_A_ONOFF;
			else
				temp |= TRANS_DDI_EDP_INPUT_A_ON;
			break;
		case PIPE_B:
			temp |= TRANS_DDI_EDP_INPUT_B_ONOFF;
			break;
		case PIPE_C:
			temp |= TRANS_DDI_EDP_INPUT_C_ONOFF;
			break;
		default:
			BUG();
			break;
		}
	}

	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) {
		if (crtc_state->has_hdmi_sink)
			temp |= TRANS_DDI_MODE_SELECT_HDMI;
		else
			temp |= TRANS_DDI_MODE_SELECT_DVI;

		if (crtc_state->hdmi_scrambling)
			temp |= TRANS_DDI_HDMI_SCRAMBLING;
		if (crtc_state->hdmi_high_tmds_clock_ratio)
			temp |= TRANS_DDI_HIGH_TMDS_CHAR_RATE;
	} else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_ANALOG)) {
		temp |= TRANS_DDI_MODE_SELECT_FDI;
		temp |= (crtc_state->fdi_lanes - 1) << 1;
	} else if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP_MST)) {
		temp |= TRANS_DDI_MODE_SELECT_DP_MST;
		temp |= DDI_PORT_WIDTH(crtc_state->lane_count);

		if (INTEL_GEN(dev_priv) >= 12) {
			enum transcoder master;

			master = crtc_state->mst_master_transcoder;
			drm_WARN_ON(&dev_priv->drm,
				    master == INVALID_TRANSCODER);
			temp |= TRANS_DDI_MST_TRANSPORT_SELECT(master);
		}
	} else {
		temp |= TRANS_DDI_MODE_SELECT_DP_SST;
		temp |= DDI_PORT_WIDTH(crtc_state->lane_count);
	}

	if (IS_GEN_RANGE(dev_priv, 8, 10) &&
	    crtc_state->master_transcoder != INVALID_TRANSCODER) {
		u8 master_select =
			bdw_trans_port_sync_master_select(crtc_state->master_transcoder);

		temp |= TRANS_DDI_PORT_SYNC_ENABLE |
			TRANS_DDI_PORT_SYNC_MASTER_SELECT(master_select);
	}

	return temp;
}

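/*
 * Program and enable TRANS_DDI_FUNC_CTL for this transcoder; on gen11+ the
 * port sync master selection is written to TRANS_DDI_FUNC_CTL2 first.
 */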
void intel_ddi_enable_transcoder_func(struct intel_encoder *encoder,
				      const struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;

	if (INTEL_GEN(dev_priv) >= 11) {
		enum transcoder master_transcoder = crtc_state->master_transcoder;
		u32 ctl2 = 0;

		if (master_transcoder != INVALID_TRANSCODER) {
			u8 master_select =
				bdw_trans_port_sync_master_select(master_transcoder);

			ctl2 |= PORT_SYNC_MODE_ENABLE |
				PORT_SYNC_MODE_MASTER_SELECT(master_select);
		}

		intel_de_write(dev_priv,
			       TRANS_DDI_FUNC_CTL2(cpu_transcoder), ctl2);
	}

	intel_de_write(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder),
		       intel_ddi_transcoder_func_reg_val_get(encoder,
							     crtc_state));
}

/*
 * Same as intel_ddi_enable_transcoder_func(), but it does not set the enable
 * bit.
 */
static void
intel_ddi_config_transcoder_func(struct intel_encoder *encoder,
				 const struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
	u32 ctl;

	ctl = intel_ddi_transcoder_func_reg_val_get(encoder, crtc_state);
	ctl &= ~TRANS_DDI_FUNC_ENABLE;
	intel_de_write(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder), ctl);
}

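/*
 * Disable TRANS_DDI_FUNC_CTL for this transcoder. The port and mode select
 * fields are cleared as well, except on gen12+ while this transcoder is
 * still the MST master transcoder.
 */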
void intel_ddi_disable_transcoder_func(const struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;
	u32 ctl;

	if (INTEL_GEN(dev_priv) >= 11)
		intel_de_write(dev_priv,
			       TRANS_DDI_FUNC_CTL2(cpu_transcoder), 0);

	ctl = intel_de_read(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder));

	ctl &= ~TRANS_DDI_FUNC_ENABLE;

	if (IS_GEN_RANGE(dev_priv, 8, 10))
		ctl &= ~(TRANS_DDI_PORT_SYNC_ENABLE |
			 TRANS_DDI_PORT_SYNC_MASTER_SELECT_MASK);

	if (INTEL_GEN(dev_priv) >= 12) {
		if (!intel_dp_mst_is_master_trans(crtc_state)) {
			ctl &= ~(TGL_TRANS_DDI_PORT_MASK |
				 TRANS_DDI_MODE_SELECT_MASK);
		}
	} else {
		ctl &= ~(TRANS_DDI_PORT_MASK | TRANS_DDI_MODE_SELECT_MASK);
	}

	intel_de_write(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder), ctl);

	if (dev_priv->quirks & QUIRK_INCREASE_DDI_DISABLED_TIME &&
	    intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) {
		drm_dbg_kms(&dev_priv->drm,
			    "Quirk Increase DDI disabled time\n");
		/* Quirk time at 100ms for reliable operation */
		msleep(100);
	}
}

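/*
 * Toggle the TRANS_DDI_HDCP_SIGNALLING bit for the pipe driven by this
 * encoder, holding a display power reference for the duration of the update.
 */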
int intel_ddi_toggle_hdcp_signalling(struct intel_encoder *intel_encoder,
				     bool enable)
{
	struct drm_device *dev = intel_encoder->base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	intel_wakeref_t wakeref;
	enum pipe pipe = 0;
	int ret = 0;
	u32 tmp;

	wakeref = intel_display_power_get_if_enabled(dev_priv,
						     intel_encoder->power_domain);
	if (drm_WARN_ON(dev, !wakeref))
		return -ENXIO;

	if (drm_WARN_ON(dev,
			!intel_encoder->get_hw_state(intel_encoder, &pipe))) {
		ret = -EIO;
		goto out;
	}

	tmp = intel_de_read(dev_priv, TRANS_DDI_FUNC_CTL(pipe));
	if (enable)
		tmp |= TRANS_DDI_HDCP_SIGNALLING;
	else
		tmp &= ~TRANS_DDI_HDCP_SIGNALLING;
	intel_de_write(dev_priv, TRANS_DDI_FUNC_CTL(pipe), tmp);
out:
	intel_display_power_put(dev_priv, intel_encoder->power_domain, wakeref);
	return ret;
}

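/*
 * Check whether the attached encoder is actively driving this connector by
 * comparing the mode in TRANS_DDI_FUNC_CTL against the connector type.
 */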
bool intel_ddi_connector_get_hw_state(struct intel_connector *intel_connector)
{
	struct drm_device *dev = intel_connector->base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	struct intel_encoder *encoder = intel_attached_encoder(intel_connector);
	int type = intel_connector->base.connector_type;
	enum port port = encoder->port;
	enum transcoder cpu_transcoder;
	intel_wakeref_t wakeref;
	enum pipe pipe = 0;
	u32 tmp;
	bool ret;

	wakeref = intel_display_power_get_if_enabled(dev_priv,
						     encoder->power_domain);
	if (!wakeref)
		return false;

	if (!encoder->get_hw_state(encoder, &pipe)) {
		ret = false;
		goto out;
	}

	if (HAS_TRANSCODER(dev_priv, TRANSCODER_EDP) && port == PORT_A)
		cpu_transcoder = TRANSCODER_EDP;
	else
		cpu_transcoder = (enum transcoder) pipe;

	tmp = intel_de_read(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder));

	switch (tmp & TRANS_DDI_MODE_SELECT_MASK) {
	case TRANS_DDI_MODE_SELECT_HDMI:
	case TRANS_DDI_MODE_SELECT_DVI:
		ret = type == DRM_MODE_CONNECTOR_HDMIA;
		break;

	case TRANS_DDI_MODE_SELECT_DP_SST:
		ret = type == DRM_MODE_CONNECTOR_eDP ||
		      type == DRM_MODE_CONNECTOR_DisplayPort;
		break;

	case TRANS_DDI_MODE_SELECT_DP_MST:
		/* if the transcoder is in MST state then
		 * connector isn't connected */
		ret = false;
		break;

	case TRANS_DDI_MODE_SELECT_FDI:
		ret = type == DRM_MODE_CONNECTOR_VGA;
		break;

	default:
		ret = false;
		break;
	}

out:
	intel_display_power_put(dev_priv, encoder->power_domain, wakeref);

	return ret;
}

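/*
 * Hardware state readout: collect the mask of pipes currently driven by this
 * encoder and note whether any of them is running in DP MST mode.
 */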
static void intel_ddi_get_encoder_pipes(struct intel_encoder *encoder,
					u8 *pipe_mask, bool *is_dp_mst)
{
	struct drm_device *dev = encoder->base.dev;
	struct drm_i915_private *dev_priv = to_i915(dev);
	enum port port = encoder->port;
	intel_wakeref_t wakeref;
	enum pipe p;
	u32 tmp;
	u8 mst_pipe_mask;

	*pipe_mask = 0;
	*is_dp_mst = false;

	wakeref = intel_display_power_get_if_enabled(dev_priv,
						     encoder->power_domain);
	if (!wakeref)
		return;

	tmp = intel_de_read(dev_priv, DDI_BUF_CTL(port));
	if (!(tmp & DDI_BUF_CTL_ENABLE))
		goto out;

	if (HAS_TRANSCODER(dev_priv, TRANSCODER_EDP) && port == PORT_A) {
		tmp = intel_de_read(dev_priv,
				    TRANS_DDI_FUNC_CTL(TRANSCODER_EDP));

		switch (tmp & TRANS_DDI_EDP_INPUT_MASK) {
		default:
			MISSING_CASE(tmp & TRANS_DDI_EDP_INPUT_MASK);
			/* fallthrough */
		case TRANS_DDI_EDP_INPUT_A_ON:
		case TRANS_DDI_EDP_INPUT_A_ONOFF:
			*pipe_mask = BIT(PIPE_A);
			break;
		case TRANS_DDI_EDP_INPUT_B_ONOFF:
			*pipe_mask = BIT(PIPE_B);
			break;
		case TRANS_DDI_EDP_INPUT_C_ONOFF:
			*pipe_mask = BIT(PIPE_C);
			break;
		}

		goto out;
	}

	mst_pipe_mask = 0;
	for_each_pipe(dev_priv, p) {
		enum transcoder cpu_transcoder = (enum transcoder)p;
		unsigned int port_mask, ddi_select;
		intel_wakeref_t trans_wakeref;

		trans_wakeref = intel_display_power_get_if_enabled(dev_priv,
								   POWER_DOMAIN_TRANSCODER(cpu_transcoder));
		if (!trans_wakeref)
			continue;

		if (INTEL_GEN(dev_priv) >= 12) {
			port_mask = TGL_TRANS_DDI_PORT_MASK;
			ddi_select = TGL_TRANS_DDI_SELECT_PORT(port);
		} else {
			port_mask = TRANS_DDI_PORT_MASK;
			ddi_select = TRANS_DDI_SELECT_PORT(port);
		}

		tmp = intel_de_read(dev_priv,
				    TRANS_DDI_FUNC_CTL(cpu_transcoder));
		intel_display_power_put(dev_priv, POWER_DOMAIN_TRANSCODER(cpu_transcoder),
					trans_wakeref);

		if ((tmp & port_mask) != ddi_select)
			continue;

		if ((tmp & TRANS_DDI_MODE_SELECT_MASK) ==
		    TRANS_DDI_MODE_SELECT_DP_MST)
			mst_pipe_mask |= BIT(p);

		*pipe_mask |= BIT(p);
	}

	if (!*pipe_mask)
		drm_dbg_kms(&dev_priv->drm,
			    "No pipe for [ENCODER:%d:%s] found\n",
			    encoder->base.base.id, encoder->base.name);

	if (!mst_pipe_mask && hweight8(*pipe_mask) > 1) {
		drm_dbg_kms(&dev_priv->drm,
			    "Multiple pipes for [ENCODER:%d:%s] (pipe_mask %02x)\n",
			    encoder->base.base.id, encoder->base.name,
			    *pipe_mask);
		*pipe_mask = BIT(ffs(*pipe_mask) - 1);
	}

	if (mst_pipe_mask && mst_pipe_mask != *pipe_mask)
		drm_dbg_kms(&dev_priv->drm,
			    "Conflicting MST and non-MST state for [ENCODER:%d:%s] (pipe_mask %02x mst_pipe_mask %02x)\n",
			    encoder->base.base.id, encoder->base.name,
			    *pipe_mask, mst_pipe_mask);
	else
		*is_dp_mst = mst_pipe_mask;

out:
	if (*pipe_mask && IS_GEN9_LP(dev_priv)) {
		tmp = intel_de_read(dev_priv, BXT_PHY_CTL(port));
		if ((tmp & (BXT_PHY_CMNLANE_POWERDOWN_ACK |
			    BXT_PHY_LANE_POWERDOWN_ACK |
			    BXT_PHY_LANE_ENABLED)) != BXT_PHY_LANE_ENABLED)
			drm_err(&dev_priv->drm,
				"[ENCODER:%d:%s] enabled but PHY powered down? (PHY_CTL %08x)\n",
				encoder->base.base.id, encoder->base.name, tmp);
	}

	intel_display_power_put(dev_priv, encoder->power_domain, wakeref);
}

bool intel_ddi_get_hw_state(struct intel_encoder *encoder,
			    enum pipe *pipe)
{
	u8 pipe_mask;
	bool is_mst;

	intel_ddi_get_encoder_pipes(encoder, &pipe_mask, &is_mst);

	if (is_mst || !pipe_mask)
		return false;

	*pipe = ffs(pipe_mask) - 1;

	return true;
}

static enum intel_display_power_domain
intel_ddi_main_link_aux_domain(struct intel_digital_port *dig_port)
{
	/* CNL+ HW requires corresponding AUX IOs to be powered up for PSR with
	 * DC states enabled at the same time, while for driver initiated AUX
	 * transfers we need the same AUX IOs to be powered but with DC states
	 * disabled. Accordingly use the AUX power domain here which leaves DC
	 * states enabled.
	 * However, for non-A AUX ports the corresponding non-EDP transcoders
	 * would have already enabled power well 2 and DC_OFF. This means we can
	 * acquire a wider POWER_DOMAIN_AUX_{B,C,D,F} reference instead of a
	 * specific AUX_IO reference without powering up any extra wells.
	 * Note that PSR is enabled only on Port A even though this function
	 * returns the correct domain for other ports too.
	 */
	return dig_port->aux_ch == AUX_CH_A ? POWER_DOMAIN_AUX_IO_A :
					      intel_aux_power_domain(dig_port);
}

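/*
 * Grab the power domain references this encoder needs for the given state:
 * DDI IO (except for Type-C ports in TBT-alt mode), main link AUX for DP or
 * Type-C HDMI, and the VDSC power well when DSC is enabled.
 */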
static void intel_ddi_get_power_domains(struct intel_encoder *encoder,
					struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_digital_port *dig_port;
	enum phy phy = intel_port_to_phy(dev_priv, encoder->port);

	/*
	 * TODO: Add support for MST encoders. Atm, the following should never
	 * happen since fake-MST encoders don't set their get_power_domains()
	 * hook.
	 */
	if (drm_WARN_ON(&dev_priv->drm,
			intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP_MST)))
		return;

	dig_port = enc_to_dig_port(encoder);

	if (!intel_phy_is_tc(dev_priv, phy) ||
	    dig_port->tc_mode != TC_PORT_TBT_ALT)
		intel_display_power_get(dev_priv,
					dig_port->ddi_io_power_domain);

	/*
	 * AUX power is only needed for (e)DP mode, and for HDMI mode on TC
	 * ports.
	 */
	if (intel_crtc_has_dp_encoder(crtc_state) ||
	    intel_phy_is_tc(dev_priv, phy))
		intel_display_power_get(dev_priv,
					intel_ddi_main_link_aux_domain(dig_port));

	/*
	 * VDSC power is needed when DSC is enabled
	 */
	if (crtc_state->dsc.compression_enable)
		intel_display_power_get(dev_priv,
					intel_dsc_power_domain(crtc_state));
}

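/*
 * Route the port clock to the transcoder via TRANS_CLK_SEL; the EDP
 * transcoder is skipped here.
 */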
void intel_ddi_enable_pipe_clock(struct intel_encoder *encoder,
				 const struct intel_crtc_state *crtc_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum port port = encoder->port;
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;

	if (cpu_transcoder != TRANSCODER_EDP) {
		if (INTEL_GEN(dev_priv) >= 12)
			intel_de_write(dev_priv,
				       TRANS_CLK_SEL(cpu_transcoder),
				       TGL_TRANS_CLK_SEL_PORT(port));
		else
			intel_de_write(dev_priv,
				       TRANS_CLK_SEL(cpu_transcoder),
				       TRANS_CLK_SEL_PORT(port));
	}
}

void intel_ddi_disable_pipe_clock(const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev);
	enum transcoder cpu_transcoder = crtc_state->cpu_transcoder;

	if (cpu_transcoder != TRANSCODER_EDP) {
		if (INTEL_GEN(dev_priv) >= 12)
			intel_de_write(dev_priv,
				       TRANS_CLK_SEL(cpu_transcoder),
				       TGL_TRANS_CLK_SEL_DISABLED);
		else
			intel_de_write(dev_priv,
				       TRANS_CLK_SEL(cpu_transcoder),
				       TRANS_CLK_SEL_DISABLED);
	}
}

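/*
 * SKL I_boost programming: apply the requested boost value (or disable the
 * balance leg boost) for a port in DISPIO_CR_TX_BMU_CR0.
 */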
static void _skl_ddi_set_iboost(struct drm_i915_private *dev_priv,
				enum port port, u8 iboost)
{
	u32 tmp;

	tmp = intel_de_read(dev_priv, DISPIO_CR_TX_BMU_CR0);
	tmp &= ~(BALANCE_LEG_MASK(port) | BALANCE_LEG_DISABLE(port));
	if (iboost)
		tmp |= iboost << BALANCE_LEG_SHIFT(port);
	else
		tmp |= BALANCE_LEG_DISABLE(port);
	intel_de_write(dev_priv, DISPIO_CR_TX_BMU_CR0, tmp);
}

static void skl_ddi_set_iboost(struct intel_encoder *encoder,
			       int level, enum intel_output_type type)
{
	struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u8 iboost;

	if (type == INTEL_OUTPUT_HDMI)
		iboost = intel_bios_hdmi_boost_level(encoder);
	else
		iboost = intel_bios_dp_boost_level(encoder);

	if (iboost == 0) {
		const struct ddi_buf_trans *ddi_translations;
		int n_entries;

		if (type == INTEL_OUTPUT_HDMI)
			ddi_translations = intel_ddi_get_buf_trans_hdmi(encoder, &n_entries);
		else if (type == INTEL_OUTPUT_EDP)
			ddi_translations = intel_ddi_get_buf_trans_edp(encoder,
								       &n_entries);
		else
			ddi_translations = intel_ddi_get_buf_trans_dp(encoder,
								      &n_entries);

		if (drm_WARN_ON_ONCE(&dev_priv->drm, !ddi_translations))
			return;
		if (drm_WARN_ON_ONCE(&dev_priv->drm, level >= n_entries))
			level = n_entries - 1;

		iboost = ddi_translations[level].i_boost;
	}

	/* Make sure that the requested I_boost is valid */
	if (iboost && iboost != 0x1 && iboost != 0x3 && iboost != 0x7) {
		drm_err(&dev_priv->drm, "Invalid I_boost value %u\n", iboost);
		return;
	}

	_skl_ddi_set_iboost(dev_priv, encoder->port, iboost);

	if (encoder->port == PORT_A && dig_port->max_lanes == 4)
		_skl_ddi_set_iboost(dev_priv, PORT_E, iboost);
}

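/*
 * Program the BXT/GLK PHY signal levels for the requested buffer translation
 * entry, clamping the level to the size of the applicable table.
 */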
static void bxt_ddi_vswing_sequence(struct intel_encoder *encoder,
				    int level, enum intel_output_type type)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	const struct bxt_ddi_buf_trans *ddi_translations;
	enum port port = encoder->port;
	int n_entries;

	if (type == INTEL_OUTPUT_HDMI)
		ddi_translations = bxt_get_buf_trans_hdmi(encoder, &n_entries);
	else if (type == INTEL_OUTPUT_EDP)
		ddi_translations = bxt_get_buf_trans_edp(encoder, &n_entries);
	else
		ddi_translations = bxt_get_buf_trans_dp(encoder, &n_entries);

	if (drm_WARN_ON_ONCE(&dev_priv->drm, !ddi_translations))
		return;
	if (drm_WARN_ON_ONCE(&dev_priv->drm, level >= n_entries))
		level = n_entries - 1;

	bxt_ddi_phy_set_signal_level(dev_priv, port,
				     ddi_translations[level].margin,
				     ddi_translations[level].scale,
				     ddi_translations[level].enable,
				     ddi_translations[level].deemphasis);
}

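/*
 * Report the maximum voltage swing for DP link training, derived from the
 * number of entries in the platform's buffer translation table.
 */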
static u8 intel_ddi_dp_voltage_max(struct intel_dp *intel_dp)
{
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	enum port port = encoder->port;
	enum phy phy = intel_port_to_phy(dev_priv, port);
	int n_entries;

	if (INTEL_GEN(dev_priv) >= 12) {
		if (intel_phy_is_combo(dev_priv, phy))
			tgl_get_combo_buf_trans(encoder, encoder->type,
						intel_dp->link_rate, &n_entries);
		else
			tgl_get_dkl_buf_trans(encoder, encoder->type,
					      intel_dp->link_rate, &n_entries);
	} else if (INTEL_GEN(dev_priv) == 11) {
		if (IS_ELKHARTLAKE(dev_priv))
			ehl_get_combo_buf_trans(encoder, encoder->type,
						intel_dp->link_rate, &n_entries);
		else if (intel_phy_is_combo(dev_priv, phy))
			icl_get_combo_buf_trans(encoder, encoder->type,
						intel_dp->link_rate, &n_entries);
		else
			icl_get_mg_buf_trans(encoder, encoder->type,
					     intel_dp->link_rate, &n_entries);
	} else if (IS_CANNONLAKE(dev_priv)) {
		if (encoder->type == INTEL_OUTPUT_EDP)
			cnl_get_buf_trans_edp(encoder, &n_entries);
		else
			cnl_get_buf_trans_dp(encoder, &n_entries);
	} else if (IS_GEN9_LP(dev_priv)) {
		if (encoder->type == INTEL_OUTPUT_EDP)
			bxt_get_buf_trans_edp(encoder, &n_entries);
		else
			bxt_get_buf_trans_dp(encoder, &n_entries);
	} else {
		if (encoder->type == INTEL_OUTPUT_EDP)
			intel_ddi_get_buf_trans_edp(encoder, &n_entries);
		else
			intel_ddi_get_buf_trans_dp(encoder, &n_entries);
	}

	if (drm_WARN_ON(&dev_priv->drm, n_entries < 1))
		n_entries = 1;
	if (drm_WARN_ON(&dev_priv->drm,
			n_entries > ARRAY_SIZE(index_to_dp_signal_levels)))
		n_entries = ARRAY_SIZE(index_to_dp_signal_levels);

	return index_to_dp_signal_levels[n_entries - 1] &
		DP_TRAIN_VOLTAGE_SWING_MASK;
}

/*
 * We assume that the full set of pre-emphasis values can be
 * used on all DDI platforms. Should that change we need to
 * rethink this code.
 */
static u8 intel_ddi_dp_preemph_max(struct intel_dp *intel_dp)
{
	return DP_TRAIN_PRE_EMPH_LEVEL_3;
}

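/*
 * CNL: program swing and de-emphasis for the selected buffer translation
 * entry into the combo PHY PORT_TX dwords.
 */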
static void cnl_ddi_vswing_program(struct intel_encoder *encoder,
				   int level, enum intel_output_type type)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	const struct cnl_ddi_buf_trans *ddi_translations;
	enum port port = encoder->port;
	int n_entries, ln;
	u32 val;

	if (type == INTEL_OUTPUT_HDMI)
		ddi_translations = cnl_get_buf_trans_hdmi(encoder, &n_entries);
	else if (type == INTEL_OUTPUT_EDP)
		ddi_translations = cnl_get_buf_trans_edp(encoder, &n_entries);
	else
		ddi_translations = cnl_get_buf_trans_dp(encoder, &n_entries);

	if (drm_WARN_ON_ONCE(&dev_priv->drm, !ddi_translations))
		return;
	if (drm_WARN_ON_ONCE(&dev_priv->drm, level >= n_entries))
		level = n_entries - 1;

	/* Set PORT_TX_DW5 Scaling Mode Sel to 010b. */
	val = intel_de_read(dev_priv, CNL_PORT_TX_DW5_LN0(port));
	val &= ~SCALING_MODE_SEL_MASK;
	val |= SCALING_MODE_SEL(2);
	intel_de_write(dev_priv, CNL_PORT_TX_DW5_GRP(port), val);

	/* Program PORT_TX_DW2 */
	val = intel_de_read(dev_priv, CNL_PORT_TX_DW2_LN0(port));
	val &= ~(SWING_SEL_LOWER_MASK | SWING_SEL_UPPER_MASK |
		 RCOMP_SCALAR_MASK);
	val |= SWING_SEL_UPPER(ddi_translations[level].dw2_swing_sel);
	val |= SWING_SEL_LOWER(ddi_translations[level].dw2_swing_sel);
	/* Rcomp scalar is fixed as 0x98 for every table entry */
	val |= RCOMP_SCALAR(0x98);
	intel_de_write(dev_priv, CNL_PORT_TX_DW2_GRP(port), val);

	/* Program PORT_TX_DW4 */
	/* We cannot write to GRP. It would overwrite individual loadgen */
	for (ln = 0; ln < 4; ln++) {
		val = intel_de_read(dev_priv, CNL_PORT_TX_DW4_LN(ln, port));
		val &= ~(POST_CURSOR_1_MASK | POST_CURSOR_2_MASK |
			 CURSOR_COEFF_MASK);
		val |= POST_CURSOR_1(ddi_translations[level].dw4_post_cursor_1);
		val |= POST_CURSOR_2(ddi_translations[level].dw4_post_cursor_2);
		val |= CURSOR_COEFF(ddi_translations[level].dw4_cursor_coeff);
		intel_de_write(dev_priv, CNL_PORT_TX_DW4_LN(ln, port), val);
	}

	/* Program PORT_TX_DW5 */
	/* All DW5 values are fixed for every table entry */
	val = intel_de_read(dev_priv, CNL_PORT_TX_DW5_LN0(port));
	val &= ~RTERM_SELECT_MASK;
	val |= RTERM_SELECT(6);
	val |= TAP3_DISABLE;
	intel_de_write(dev_priv, CNL_PORT_TX_DW5_GRP(port), val);

	/* Program PORT_TX_DW7 */
	val = intel_de_read(dev_priv, CNL_PORT_TX_DW7_LN0(port));
	val &= ~N_SCALAR_MASK;
	val |= N_SCALAR(ddi_translations[level].dw7_n_scalar);
	intel_de_write(dev_priv, CNL_PORT_TX_DW7_GRP(port), val);
}

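/*
 * CNL voltage swing sequence: set up the common keeper and loadgen select,
 * clear the training enable bit, program swing/de-emphasis and then latch
 * the new values by setting training enable again.
 */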
static void cnl_ddi_vswing_sequence(struct intel_encoder *encoder,
				    int level, enum intel_output_type type)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	enum port port = encoder->port;
	int width, rate, ln;
	u32 val;

	if (type == INTEL_OUTPUT_HDMI) {
		width = 4;
		rate = 0; /* Rate is always < 6GHz for HDMI */
	} else {
		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);

		width = intel_dp->lane_count;
		rate = intel_dp->link_rate;
	}

	/*
	 * 1. If port type is eDP or DP,
	 * set PORT_PCS_DW1 cmnkeeper_enable to 1b,
	 * else clear to 0b.
	 */
	val = intel_de_read(dev_priv, CNL_PORT_PCS_DW1_LN0(port));
	if (type != INTEL_OUTPUT_HDMI)
		val |= COMMON_KEEPER_EN;
	else
		val &= ~COMMON_KEEPER_EN;
	intel_de_write(dev_priv, CNL_PORT_PCS_DW1_GRP(port), val);

	/* 2. Program loadgen select */
	/*
	 * Program PORT_TX_DW4_LN depending on Bit rate and used lanes
	 * <= 6 GHz and 4 lanes (LN0=0, LN1=1, LN2=1, LN3=1)
	 * <= 6 GHz and 1,2 lanes (LN0=0, LN1=1, LN2=1, LN3=0)
	 * > 6 GHz (LN0=0, LN1=0, LN2=0, LN3=0)
	 */
	for (ln = 0; ln <= 3; ln++) {
		val = intel_de_read(dev_priv, CNL_PORT_TX_DW4_LN(ln, port));
		val &= ~LOADGEN_SELECT;

		if ((rate <= 600000 && width == 4 && ln >= 1)  ||
		    (rate <= 600000 && width < 4 && (ln == 1 || ln == 2))) {
			val |= LOADGEN_SELECT;
		}
		intel_de_write(dev_priv, CNL_PORT_TX_DW4_LN(ln, port), val);
	}

	/* 3. Set PORT_CL_DW5 SUS Clock Config to 11b */
	val = intel_de_read(dev_priv, CNL_PORT_CL1CM_DW5);
	val |= SUS_CLOCK_CONFIG;
	intel_de_write(dev_priv, CNL_PORT_CL1CM_DW5, val);

	/* 4. Clear training enable to change swing values */
	val = intel_de_read(dev_priv, CNL_PORT_TX_DW5_LN0(port));
	val &= ~TX_TRAINING_EN;
	intel_de_write(dev_priv, CNL_PORT_TX_DW5_GRP(port), val);

	/* 5. Program swing and de-emphasis */
	cnl_ddi_vswing_program(encoder, level, type);

	/* 6. Set training enable to trigger update */
	val = intel_de_read(dev_priv, CNL_PORT_TX_DW5_LN0(port));
	val |= TX_TRAINING_EN;
	intel_de_write(dev_priv, CNL_PORT_TX_DW5_GRP(port), val);
}

static void icl_ddi_combo_vswing_program(struct intel_encoder *encoder,
					 u32 level, int type, int rate)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	enum phy phy = intel_port_to_phy(dev_priv, encoder->port);
	const struct cnl_ddi_buf_trans *ddi_translations = NULL;
	u32 n_entries, val;
	int ln;

	if (INTEL_GEN(dev_priv) >= 12)
		ddi_translations = tgl_get_combo_buf_trans(encoder, type, rate,
							   &n_entries);
	else if (IS_ELKHARTLAKE(dev_priv))
		ddi_translations = ehl_get_combo_buf_trans(encoder, type, rate,
							   &n_entries);
	else
		ddi_translations = icl_get_combo_buf_trans(encoder, type, rate,
							   &n_entries);
	if (!ddi_translations)
		return;

	if (level >= n_entries) {
		drm_dbg_kms(&dev_priv->drm,
			    "DDI translation not found for level %d. Using %d instead.",
			    level, n_entries - 1);
		level = n_entries - 1;
	}

	if (type == INTEL_OUTPUT_EDP) {
		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);

		val = EDP4K2K_MODE_OVRD_EN | EDP4K2K_MODE_OVRD_OPTIMIZED;
		intel_dp->hobl_active = is_hobl_buf_trans(ddi_translations);
		intel_de_rmw(dev_priv, ICL_PORT_CL_DW10(phy), val,
			     intel_dp->hobl_active ? val : 0);
	}

	/* Set PORT_TX_DW5 */
	val = intel_de_read(dev_priv, ICL_PORT_TX_DW5_LN0(phy));
	val &= ~(SCALING_MODE_SEL_MASK | RTERM_SELECT_MASK |
		  TAP2_DISABLE | TAP3_DISABLE);
	val |= SCALING_MODE_SEL(0x2);
	val |= RTERM_SELECT(0x6);
	val |= TAP3_DISABLE;
	intel_de_write(dev_priv, ICL_PORT_TX_DW5_GRP(phy), val);

	/* Program PORT_TX_DW2 */
	val = intel_de_read(dev_priv, ICL_PORT_TX_DW2_LN0(phy));
	val &= ~(SWING_SEL_LOWER_MASK | SWING_SEL_UPPER_MASK |
		 RCOMP_SCALAR_MASK);
	val |= SWING_SEL_UPPER(ddi_translations[level].dw2_swing_sel);
	val |= SWING_SEL_LOWER(ddi_translations[level].dw2_swing_sel);
	/* Program Rcomp scalar for every table entry */
	val |= RCOMP_SCALAR(0x98);
	intel_de_write(dev_priv, ICL_PORT_TX_DW2_GRP(phy), val);

	/* Program PORT_TX_DW4 */
	/* We cannot write to GRP. It would overwrite individual loadgen. */
	for (ln = 0; ln <= 3; ln++) {
		val = intel_de_read(dev_priv, ICL_PORT_TX_DW4_LN(ln, phy));
		val &= ~(POST_CURSOR_1_MASK | POST_CURSOR_2_MASK |
			 CURSOR_COEFF_MASK);
		val |= POST_CURSOR_1(ddi_translations[level].dw4_post_cursor_1);
		val |= POST_CURSOR_2(ddi_translations[level].dw4_post_cursor_2);
		val |= CURSOR_COEFF(ddi_translations[level].dw4_cursor_coeff);
		intel_de_write(dev_priv, ICL_PORT_TX_DW4_LN(ln, phy), val);
	}

	/* Program PORT_TX_DW7 */
	val = intel_de_read(dev_priv, ICL_PORT_TX_DW7_LN0(phy));
	val &= ~N_SCALAR_MASK;
	val |= N_SCALAR(ddi_translations[level].dw7_n_scalar);
	intel_de_write(dev_priv, ICL_PORT_TX_DW7_GRP(phy), val);
}

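/*
 * ICL+ combo PHY voltage swing sequence, mirroring the CNL flow: common
 * keeper, loadgen select, SUS clock config, then swing programming bracketed
 * by clearing and setting the training enable bit.
 */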
static void icl_combo_phy_ddi_vswing_sequence(struct intel_encoder *encoder,
					      u32 level,
					      enum intel_output_type type)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	enum phy phy = intel_port_to_phy(dev_priv, encoder->port);
	int width = 0;
	int rate = 0;
	u32 val;
	int ln = 0;

	if (type == INTEL_OUTPUT_HDMI) {
		width = 4;
		/* Rate is always < 6GHz for HDMI */
	} else {
		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);

		width = intel_dp->lane_count;
		rate = intel_dp->link_rate;
	}

	/*
	 * 1. If port type is eDP or DP,
	 * set PORT_PCS_DW1 cmnkeeper_enable to 1b,
	 * else clear to 0b.
	 */
	val = intel_de_read(dev_priv, ICL_PORT_PCS_DW1_LN0(phy));
	if (type == INTEL_OUTPUT_HDMI)
		val &= ~COMMON_KEEPER_EN;
	else
		val |= COMMON_KEEPER_EN;
	intel_de_write(dev_priv, ICL_PORT_PCS_DW1_GRP(phy), val);

	/* 2. Program loadgen select */
	/*
	 * Program PORT_TX_DW4_LN depending on Bit rate and used lanes
	 * <= 6 GHz and 4 lanes (LN0=0, LN1=1, LN2=1, LN3=1)
	 * <= 6 GHz and 1,2 lanes (LN0=0, LN1=1, LN2=1, LN3=0)
	 * > 6 GHz (LN0=0, LN1=0, LN2=0, LN3=0)
	 */
	for (ln = 0; ln <= 3; ln++) {
		val = intel_de_read(dev_priv, ICL_PORT_TX_DW4_LN(ln, phy));
		val &= ~LOADGEN_SELECT;

		if ((rate <= 600000 && width == 4 && ln >= 1) ||
		    (rate <= 600000 && width < 4 && (ln == 1 || ln == 2))) {
			val |= LOADGEN_SELECT;
		}
		intel_de_write(dev_priv, ICL_PORT_TX_DW4_LN(ln, phy), val);
	}

	/* 3. Set PORT_CL_DW5 SUS Clock Config to 11b */
	val = intel_de_read(dev_priv, ICL_PORT_CL_DW5(phy));
	val |= SUS_CLOCK_CONFIG;
	intel_de_write(dev_priv, ICL_PORT_CL_DW5(phy), val);

	/* 4. Clear training enable to change swing values */
	val = intel_de_read(dev_priv, ICL_PORT_TX_DW5_LN0(phy));
	val &= ~TX_TRAINING_EN;
	intel_de_write(dev_priv, ICL_PORT_TX_DW5_GRP(phy), val);

	/* 5. Program swing and de-emphasis */
	icl_ddi_combo_vswing_program(encoder, level, type, rate);

	/* 6. Set training enable to trigger update */
	val = intel_de_read(dev_priv, ICL_PORT_TX_DW5_LN0(phy));
	val |= TX_TRAINING_EN;
	intel_de_write(dev_priv, ICL_PORT_TX_DW5_GRP(phy), val);
}

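/*
 * ICL MG (Type-C) PHY voltage swing programming: apply the vswing table
 * entry to both TX lanes and set the link-clock dependent fields.
 */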
static void icl_mg_phy_ddi_vswing_sequence(struct intel_encoder *encoder,
					   int link_clock, u32 level,
					   enum intel_output_type type)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	enum tc_port tc_port = intel_port_to_tc(dev_priv, encoder->port);
	const struct icl_mg_phy_ddi_buf_trans *ddi_translations;
	u32 n_entries, val;
	int ln, rate = 0;

	if (type != INTEL_OUTPUT_HDMI) {
		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);

		rate = intel_dp->link_rate;
	}

	ddi_translations = icl_get_mg_buf_trans(encoder, type, rate,
						&n_entries);
	/* The table does not have values for level 3 and level 9. */
	if (level >= n_entries || level == 3 || level == 9) {
		drm_dbg_kms(&dev_priv->drm,
			    "DDI translation not found for level %d. Using %d instead.",
			    level, n_entries - 2);
		level = n_entries - 2;
	}

	/* Set MG_TX_LINK_PARAMS cri_use_fs32 to 0. */
	for (ln = 0; ln < 2; ln++) {
		val = intel_de_read(dev_priv, MG_TX1_LINK_PARAMS(ln, tc_port));
		val &= ~CRI_USE_FS32;
		intel_de_write(dev_priv, MG_TX1_LINK_PARAMS(ln, tc_port), val);

		val = intel_de_read(dev_priv, MG_TX2_LINK_PARAMS(ln, tc_port));
		val &= ~CRI_USE_FS32;
		intel_de_write(dev_priv, MG_TX2_LINK_PARAMS(ln, tc_port), val);
	}

	/* Program MG_TX_SWINGCTRL with values from vswing table */
	for (ln = 0; ln < 2; ln++) {
		val = intel_de_read(dev_priv, MG_TX1_SWINGCTRL(ln, tc_port));
		val &= ~CRI_TXDEEMPH_OVERRIDE_17_12_MASK;
		val |= CRI_TXDEEMPH_OVERRIDE_17_12(
			ddi_translations[level].cri_txdeemph_override_17_12);
		intel_de_write(dev_priv, MG_TX1_SWINGCTRL(ln, tc_port), val);

		val = intel_de_read(dev_priv, MG_TX2_SWINGCTRL(ln, tc_port));
		val &= ~CRI_TXDEEMPH_OVERRIDE_17_12_MASK;
		val |= CRI_TXDEEMPH_OVERRIDE_17_12(
			ddi_translations[level].cri_txdeemph_override_17_12);
		intel_de_write(dev_priv, MG_TX2_SWINGCTRL(ln, tc_port), val);
	}

	/* Program MG_TX_DRVCTRL with values from vswing table */
	for (ln = 0; ln < 2; ln++) {
		val = intel_de_read(dev_priv, MG_TX1_DRVCTRL(ln, tc_port));
		val &= ~(CRI_TXDEEMPH_OVERRIDE_11_6_MASK |
			 CRI_TXDEEMPH_OVERRIDE_5_0_MASK);
		val |= CRI_TXDEEMPH_OVERRIDE_5_0(
			ddi_translations[level].cri_txdeemph_override_5_0) |
			CRI_TXDEEMPH_OVERRIDE_11_6(
				ddi_translations[level].cri_txdeemph_override_11_6) |
			CRI_TXDEEMPH_OVERRIDE_EN;
		intel_de_write(dev_priv, MG_TX1_DRVCTRL(ln, tc_port), val);

		val = intel_de_read(dev_priv, MG_TX2_DRVCTRL(ln, tc_port));
		val &= ~(CRI_TXDEEMPH_OVERRIDE_11_6_MASK |
			 CRI_TXDEEMPH_OVERRIDE_5_0_MASK);
		val |= CRI_TXDEEMPH_OVERRIDE_5_0(
			ddi_translations[level].cri_txdeemph_override_5_0) |
			CRI_TXDEEMPH_OVERRIDE_11_6(
				ddi_translations[level].cri_txdeemph_override_11_6) |
			CRI_TXDEEMPH_OVERRIDE_EN;
		intel_de_write(dev_priv, MG_TX2_DRVCTRL(ln, tc_port), val);

		/* FIXME: Program CRI_LOADGEN_SEL after the spec is updated */
	}

	/*
	 * Program MG_CLKHUB<LN, port being used> with value from frequency table
	 * In case of Legacy mode on MG PHY, both TX1 and TX2 enabled so use the
	 * values from table for which TX1 and TX2 enabled.
	 */
	for (ln = 0; ln < 2; ln++) {
		val = intel_de_read(dev_priv, MG_CLKHUB(ln, tc_port));
		if (link_clock < 300000)
			val |= CFG_LOW_RATE_LKREN_EN;
		else
			val &= ~CFG_LOW_RATE_LKREN_EN;
		intel_de_write(dev_priv, MG_CLKHUB(ln, tc_port), val);
	}

	/* Program the MG_TX_DCC<LN, port being used> based on the link frequency */
	for (ln = 0; ln < 2; ln++) {
		val = intel_de_read(dev_priv, MG_TX1_DCC(ln, tc_port));
		val &= ~CFG_AMI_CK_DIV_OVERRIDE_VAL_MASK;
		if (link_clock <= 500000) {
			val &= ~CFG_AMI_CK_DIV_OVERRIDE_EN;
		} else {
			val |= CFG_AMI_CK_DIV_OVERRIDE_EN |
				CFG_AMI_CK_DIV_OVERRIDE_VAL(1);
		}
		intel_de_write(dev_priv, MG_TX1_DCC(ln, tc_port), val);

		val = intel_de_read(dev_priv, MG_TX2_DCC(ln, tc_port));
		val &= ~CFG_AMI_CK_DIV_OVERRIDE_VAL_MASK;
		if (link_clock <= 500000) {
			val &= ~CFG_AMI_CK_DIV_OVERRIDE_EN;
		} else {
			val |= CFG_AMI_CK_DIV_OVERRIDE_EN |
				CFG_AMI_CK_DIV_OVERRIDE_VAL(1);
		}
		intel_de_write(dev_priv, MG_TX2_DCC(ln, tc_port), val);
	}

	/* Program MG_TX_PISO_READLOAD with values from vswing table */
	for (ln = 0; ln < 2; ln++) {
		val = intel_de_read(dev_priv,
				    MG_TX1_PISO_READLOAD(ln, tc_port));
		val |= CRI_CALCINIT;
		intel_de_write(dev_priv, MG_TX1_PISO_READLOAD(ln, tc_port),
			       val);

		val = intel_de_read(dev_priv,
				    MG_TX2_PISO_READLOAD(ln, tc_port));
		val |= CRI_CALCINIT;
		intel_de_write(dev_priv, MG_TX2_PISO_READLOAD(ln, tc_port),
			       val);
	}
}

static void icl_ddi_vswing_sequence(struct intel_encoder *encoder,
				    int link_clock,
				    u32 level,
				    enum intel_output_type type)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	enum phy phy = intel_port_to_phy(dev_priv, encoder->port);

	if (intel_phy_is_combo(dev_priv, phy))
		icl_combo_phy_ddi_vswing_sequence(encoder, level, type);
	else
		icl_mg_phy_ddi_vswing_sequence(encoder, link_clock, level,
					       type);
}

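/*
 * TGL Dekel (Type-C) PHY voltage swing programming: write the preshoot,
 * de-emphasis and vswing coefficients for the selected level to both PHY
 * lanes via the HIP index register.
 */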
static void
tgl_dkl_phy_ddi_vswing_sequence(struct intel_encoder *encoder, int link_clock,
				u32 level, enum intel_output_type type)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	enum tc_port tc_port = intel_port_to_tc(dev_priv, encoder->port);
	const struct tgl_dkl_phy_ddi_buf_trans *ddi_translations;
	u32 n_entries, val, ln, dpcnt_mask, dpcnt_val;
	int rate = 0;

	if (type == INTEL_OUTPUT_HDMI) {
		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);

		rate = intel_dp->link_rate;
	}

	ddi_translations = tgl_get_dkl_buf_trans(encoder, encoder->type, rate,
						 &n_entries);

	if (level >= n_entries)
		level = n_entries - 1;

	dpcnt_mask = (DKL_TX_PRESHOOT_COEFF_MASK |
		      DKL_TX_DE_EMPAHSIS_COEFF_MASK |
		      DKL_TX_VSWING_CONTROL_MASK);
	dpcnt_val = DKL_TX_VSWING_CONTROL(ddi_translations[level].dkl_vswing_control);
	dpcnt_val |= DKL_TX_DE_EMPHASIS_COEFF(ddi_translations[level].dkl_de_emphasis_control);
	dpcnt_val |= DKL_TX_PRESHOOT_COEFF(ddi_translations[level].dkl_preshoot_control);

	for (ln = 0; ln < 2; ln++) {
		intel_de_write(dev_priv, HIP_INDEX_REG(tc_port),
			       HIP_INDEX_VAL(tc_port, ln));

		intel_de_write(dev_priv, DKL_TX_PMD_LANE_SUS(tc_port), 0);

		/* All the registers are RMW */
		val = intel_de_read(dev_priv, DKL_TX_DPCNTL0(tc_port));
		val &= ~dpcnt_mask;
		val |= dpcnt_val;
		intel_de_write(dev_priv, DKL_TX_DPCNTL0(tc_port), val);

		val = intel_de_read(dev_priv, DKL_TX_DPCNTL1(tc_port));
		val &= ~dpcnt_mask;
		val |= dpcnt_val;
		intel_de_write(dev_priv, DKL_TX_DPCNTL1(tc_port), val);

		val = intel_de_read(dev_priv, DKL_TX_DPCNTL2(tc_port));
		val &= ~DKL_TX_DP20BITMODE;
		intel_de_write(dev_priv, DKL_TX_DPCNTL2(tc_port), val);
	}
}

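/*
 * Dispatch to the combo or Dekel PHY vswing sequence depending on which PHY
 * backs this port on gen12.
 */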
static void tgl_ddi_vswing_sequence(struct intel_encoder *encoder,
				    int link_clock,
				    u32 level,
				    enum intel_output_type type)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	enum phy phy = intel_port_to_phy(dev_priv, encoder->port);

	if (intel_phy_is_combo(dev_priv, phy))
		icl_combo_phy_ddi_vswing_sequence(encoder, level, type);
	else
		tgl_dkl_phy_ddi_vswing_sequence(encoder, link_clock, level, type);
}

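/*
 * Map a DPCD voltage swing/pre-emphasis combination to the index used by the
 * DDI buffer translation tables, warning on unsupported combinations.
 */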
static u32 translate_signal_level(struct intel_dp *intel_dp, int signal_levels)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);
	int i;

	for (i = 0; i < ARRAY_SIZE(index_to_dp_signal_levels); i++) {
		if (index_to_dp_signal_levels[i] == signal_levels)
			return i;
	}

	drm_WARN(&i915->drm, 1,
		 "Unsupported voltage swing/pre-emphasis level: 0x%x\n",
		 signal_levels);

	return 0;
}

static u32 intel_ddi_dp_level(struct intel_dp *intel_dp)
{
	u8 train_set = intel_dp->train_set[0];
	int signal_levels = train_set & (DP_TRAIN_VOLTAGE_SWING_MASK |
					 DP_TRAIN_PRE_EMPHASIS_MASK);

	return translate_signal_level(intel_dp, signal_levels);
}

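/*
 * Per-platform helpers to apply the voltage swing/pre-emphasis level
 * requested by DP link training.
 */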
static void
tgl_set_signal_levels(struct intel_dp *intel_dp)
{
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	int level = intel_ddi_dp_level(intel_dp);

	tgl_ddi_vswing_sequence(encoder, intel_dp->link_rate,
				level, encoder->type);
}

static void
icl_set_signal_levels(struct intel_dp *intel_dp)
{
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	int level = intel_ddi_dp_level(intel_dp);

	icl_ddi_vswing_sequence(encoder, intel_dp->link_rate,
				level, encoder->type);
}

static void
cnl_set_signal_levels(struct intel_dp *intel_dp)
{
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	int level = intel_ddi_dp_level(intel_dp);

	cnl_ddi_vswing_sequence(encoder, level, encoder->type);
}

static void
bxt_set_signal_levels(struct intel_dp *intel_dp)
{
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	int level = intel_ddi_dp_level(intel_dp);

	bxt_ddi_vswing_sequence(encoder, level, encoder->type);
}

static void
hsw_set_signal_levels(struct intel_dp *intel_dp)
{
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	int level = intel_ddi_dp_level(intel_dp);
	enum port port = encoder->port;
	u32 signal_levels;

	signal_levels = DDI_BUF_TRANS_SELECT(level);

	drm_dbg_kms(&dev_priv->drm, "Using signal levels %08x\n",
		    signal_levels);

	intel_dp->DP &= ~DDI_BUF_EMP_MASK;
	intel_dp->DP |= signal_levels;

	if (IS_GEN9_BC(dev_priv))
		skl_ddi_set_iboost(encoder, level, encoder->type);

	intel_de_write(dev_priv, DDI_BUF_CTL(port), intel_dp->DP);
	intel_de_posting_read(dev_priv, DDI_BUF_CTL(port));
}

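/*
 * Return the DPCLKA_CFGCR0 clock-off bit for the given PHY; the bit layout
 * differs between RKL, combo PHYs and Type-C ports.
 */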
static u32 icl_dpclka_cfgcr0_clk_off(struct drm_i915_private *dev_priv,
				     enum phy phy)
{
	if (IS_ROCKETLAKE(dev_priv)) {
		return RKL_DPCLKA_CFGCR0_DDI_CLK_OFF(phy);
	} else if (intel_phy_is_combo(dev_priv, phy)) {
		return ICL_DPCLKA_CFGCR0_DDI_CLK_OFF(phy);
	} else if (intel_phy_is_tc(dev_priv, phy)) {
		enum tc_port tc_port = intel_port_to_tc(dev_priv,
							(enum port)phy);

		return ICL_DPCLKA_CFGCR0_TC_CLK_OFF(tc_port);
	}

	return 0;
}

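/*
 * Map the shared DPLL selected for this CRTC to the encoder's PHY in
 * DPCLKA_CFGCR0 and ungate the DDI clock, under the dpll lock.
 */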
static void icl_map_plls_to_ports(struct intel_encoder *encoder,
				  const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_shared_dpll *pll = crtc_state->shared_dpll;
	enum phy phy = intel_port_to_phy(dev_priv, encoder->port);
	u32 val;

	mutex_lock(&dev_priv->dpll.lock);

	val = intel_de_read(dev_priv, ICL_DPCLKA_CFGCR0);
	drm_WARN_ON(&dev_priv->drm,
		    (val & icl_dpclka_cfgcr0_clk_off(dev_priv, phy)) == 0);

	if (intel_phy_is_combo(dev_priv, phy)) {
		u32 mask, sel;

		if (IS_ROCKETLAKE(dev_priv)) {
			mask = RKL_DPCLKA_CFGCR0_DDI_CLK_SEL_MASK(phy);
			sel = RKL_DPCLKA_CFGCR0_DDI_CLK_SEL(pll->info->id, phy);
		} else {
			mask = ICL_DPCLKA_CFGCR0_DDI_CLK_SEL_MASK(phy);
			sel = ICL_DPCLKA_CFGCR0_DDI_CLK_SEL(pll->info->id, phy);
		}

		/*
		 * Even though this register references DDIs, note that we
		 * want to pass the PHY rather than the port (DDI).  For
		 * ICL, port=phy in all cases so it doesn't matter, but for
		 * EHL the bspec notes the following:
		 *
		 *   "DDID clock tied to DDIA clock, so DPCLKA_CFGCR0 DDIA
		 *   Clock Select chooses the PLL for both DDIA and DDID and
		 *   drives port A in all cases."
		 */
		val &= ~mask;
		val |= sel;
		intel_de_write(dev_priv, ICL_DPCLKA_CFGCR0, val);
		intel_de_posting_read(dev_priv, ICL_DPCLKA_CFGCR0);
	}

	val &= ~icl_dpclka_cfgcr0_clk_off(dev_priv, phy);
	intel_de_write(dev_priv, ICL_DPCLKA_CFGCR0, val);

	mutex_unlock(&dev_priv->dpll.lock);
}

static void icl_unmap_plls_to_ports(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	enum phy phy = intel_port_to_phy(dev_priv, encoder->port);
	u32 val;

	mutex_lock(&dev_priv->dpll.lock);

	val = intel_de_read(dev_priv, ICL_DPCLKA_CFGCR0);
	val |= icl_dpclka_cfgcr0_clk_off(dev_priv, phy);
	intel_de_write(dev_priv, ICL_DPCLKA_CFGCR0, val);

	mutex_unlock(&dev_priv->dpll.lock);
}

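/*
 * Make sure DDI clock gating for the given ports matches whether a clock is
 * actually needed, gating any clock left ungated on an unused port.
 */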
static void icl_sanitize_port_clk_off(struct drm_i915_private *dev_priv,
				      u32 port_mask, bool ddi_clk_needed)
{
	enum port port;
	u32 val;

	val = intel_de_read(dev_priv, ICL_DPCLKA_CFGCR0);
	for_each_port_masked(port, port_mask) {
		enum phy phy = intel_port_to_phy(dev_priv, port);
		bool ddi_clk_off = val & icl_dpclka_cfgcr0_clk_off(dev_priv,
								   phy);

		if (ddi_clk_needed == !ddi_clk_off)
			continue;

		/*
		 * Punt on the case now where clock is gated, but it would
		 * be needed by the port. Something else is really broken then.
		 */
		if (drm_WARN_ON(&dev_priv->drm, ddi_clk_needed))
			continue;

		drm_notice(&dev_priv->drm,
			   "PHY %c is disabled/in DSI mode with an ungated DDI clock, gate it\n",
			   phy_name(phy));
		val |= icl_dpclka_cfgcr0_clk_off(dev_priv, phy);
		intel_de_write(dev_priv, ICL_DPCLKA_CFGCR0, val);
	}
}

2954 2955 2956
void icl_sanitize_encoder_pll_mapping(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
2957 2958
	u32 port_mask;
	bool ddi_clk_needed;
2959 2960 2961 2962 2963 2964 2965 2966 2967 2968 2969 2970 2971 2972 2973 2974 2975

	/*
	 * In case of DP MST, we sanitize the primary encoder only, not the
	 * virtual ones.
	 */
	if (encoder->type == INTEL_OUTPUT_DP_MST)
		return;

	if (!encoder->base.crtc && intel_encoder_is_dp(encoder)) {
		u8 pipe_mask;
		bool is_mst;

		intel_ddi_get_encoder_pipes(encoder, &pipe_mask, &is_mst);
		/*
		 * In the unlikely case that BIOS enables DP in MST mode, just
		 * warn since our MST HW readout is incomplete.
		 */
2976
		if (drm_WARN_ON(&dev_priv->drm, is_mst))
2977 2978
			return;
	}
2979

2980 2981
	port_mask = BIT(encoder->port);
	ddi_clk_needed = encoder->base.crtc;
2982

2983 2984
	if (encoder->type == INTEL_OUTPUT_DSI) {
		struct intel_encoder *other_encoder;
2985

2986 2987 2988 2989 2990 2991 2992 2993 2994
		port_mask = intel_dsi_encoder_ports(encoder);
		/*
		 * Sanity check that we haven't incorrectly registered another
		 * encoder using any of the ports of this DSI encoder.
		 */
		for_each_intel_encoder(&dev_priv->drm, other_encoder) {
			if (other_encoder == encoder)
				continue;

2995 2996
			if (drm_WARN_ON(&dev_priv->drm,
					port_mask & BIT(other_encoder->port)))
2997 2998 2999
				return;
		}
		/*
3000 3001
		 * For DSI we keep the ddi clocks gated
		 * except during enable/disable sequence.
		 */
		ddi_clk_needed = false;
	}

	icl_sanitize_port_clk_off(dev_priv, port_mask, ddi_clk_needed);
}

static void intel_ddi_clk_select(struct intel_encoder *encoder,
				 const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	enum port port = encoder->port;
	enum phy phy = intel_port_to_phy(dev_priv, port);
	u32 val;
	const struct intel_shared_dpll *pll = crtc_state->shared_dpll;

	if (drm_WARN_ON(&dev_priv->drm, !pll))
		return;

	mutex_lock(&dev_priv->dpll.lock);

	if (INTEL_GEN(dev_priv) >= 11) {
		if (!intel_phy_is_combo(dev_priv, phy))
			intel_de_write(dev_priv, DDI_CLK_SEL(port),
				       icl_pll_to_ddi_clk_sel(encoder, crtc_state));
		else if (IS_ELKHARTLAKE(dev_priv) && port >= PORT_C)
			/*
			 * MG does not exist but the programming is required
			 * to ungate DDIC and DDID
			 */
			intel_de_write(dev_priv, DDI_CLK_SEL(port),
				       DDI_CLK_SEL_MG);
	} else if (IS_CANNONLAKE(dev_priv)) {
		/* Configure DPCLKA_CFGCR0 to map the DPLL to the DDI. */
		val = intel_de_read(dev_priv, DPCLKA_CFGCR0);
		val &= ~DPCLKA_CFGCR0_DDI_CLK_SEL_MASK(port);
		val |= DPCLKA_CFGCR0_DDI_CLK_SEL(pll->info->id, port);
		intel_de_write(dev_priv, DPCLKA_CFGCR0, val);

		/*
		 * Configure DPCLKA_CFGCR0 to turn on the clock for the DDI.
		 * This step and the step before must be done with separate
		 * register writes.
		 */
		val = intel_de_read(dev_priv, DPCLKA_CFGCR0);
		val &= ~DPCLKA_CFGCR0_DDI_CLK_OFF(port);
		intel_de_write(dev_priv, DPCLKA_CFGCR0, val);
	} else if (IS_GEN9_BC(dev_priv)) {
		/* DDI -> PLL mapping  */
		val = intel_de_read(dev_priv, DPLL_CTRL2);

		val &= ~(DPLL_CTRL2_DDI_CLK_OFF(port) |
			 DPLL_CTRL2_DDI_CLK_SEL_MASK(port));
		val |= (DPLL_CTRL2_DDI_CLK_SEL(pll->info->id, port) |
			DPLL_CTRL2_DDI_SEL_OVERRIDE(port));

		intel_de_write(dev_priv, DPLL_CTRL2, val);

	} else if (INTEL_GEN(dev_priv) < 9) {
		intel_de_write(dev_priv, PORT_CLK_SEL(port),
			       hsw_pll_to_ddi_pll_sel(pll));
	}

	mutex_unlock(&dev_priv->dpll.lock);
}

static void intel_ddi_clk_disable(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	enum port port = encoder->port;
	enum phy phy = intel_port_to_phy(dev_priv, port);

	if (INTEL_GEN(dev_priv) >= 11) {
		if (!intel_phy_is_combo(dev_priv, phy) ||
		    (IS_ELKHARTLAKE(dev_priv) && port >= PORT_C))
			intel_de_write(dev_priv, DDI_CLK_SEL(port),
				       DDI_CLK_SEL_NONE);
	} else if (IS_CANNONLAKE(dev_priv)) {
		intel_de_write(dev_priv, DPCLKA_CFGCR0,
			       intel_de_read(dev_priv, DPCLKA_CFGCR0) | DPCLKA_CFGCR0_DDI_CLK_OFF(port));
	} else if (IS_GEN9_BC(dev_priv)) {
		intel_de_write(dev_priv, DPLL_CTRL2,
			       intel_de_read(dev_priv, DPLL_CTRL2) | DPLL_CTRL2_DDI_CLK_OFF(port));
	} else if (INTEL_GEN(dev_priv) < 9) {
		intel_de_write(dev_priv, PORT_CLK_SEL(port),
			       PORT_CLK_SEL_NONE);
	}
}

static void
icl_program_mg_dp_mode(struct intel_digital_port *dig_port,
		       const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(dig_port->base.base.dev);
	enum tc_port tc_port = intel_port_to_tc(dev_priv, dig_port->base.port);
	u32 ln0, ln1, pin_assignment;
	u8 width;

	if (dig_port->tc_mode == TC_PORT_TBT_ALT)
		return;

	if (INTEL_GEN(dev_priv) >= 12) {
		intel_de_write(dev_priv, HIP_INDEX_REG(tc_port),
			       HIP_INDEX_VAL(tc_port, 0x0));
		ln0 = intel_de_read(dev_priv, DKL_DP_MODE(tc_port));
		intel_de_write(dev_priv, HIP_INDEX_REG(tc_port),
			       HIP_INDEX_VAL(tc_port, 0x1));
		ln1 = intel_de_read(dev_priv, DKL_DP_MODE(tc_port));
	} else {
		ln0 = intel_de_read(dev_priv, MG_DP_MODE(0, tc_port));
		ln1 = intel_de_read(dev_priv, MG_DP_MODE(1, tc_port));
	}

	ln0 &= ~(MG_DP_MODE_CFG_DP_X1_MODE | MG_DP_MODE_CFG_DP_X2_MODE);
	ln1 &= ~(MG_DP_MODE_CFG_DP_X1_MODE | MG_DP_MODE_CFG_DP_X2_MODE);

	/* DPPATC */
	pin_assignment = intel_tc_port_get_pin_assignment_mask(dig_port);
	width = crtc_state->lane_count;

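	/*
	 * Pin assignment values 0x1-0x6 correspond to the USB Type-C DP alt
	 * mode pin assignments (A-F); 0x0 is only expected on legacy (non alt
	 * mode) ports. Together with the lane count this decides whether each
	 * MG/DKL PHY lane pair is put into x1 or x2 DP mode.
	 */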
	switch (pin_assignment) {
	case 0x0:
		drm_WARN_ON(&dev_priv->drm,
			    dig_port->tc_mode != TC_PORT_LEGACY);
		if (width == 1) {
			ln1 |= MG_DP_MODE_CFG_DP_X1_MODE;
		} else {
			ln0 |= MG_DP_MODE_CFG_DP_X2_MODE;
			ln1 |= MG_DP_MODE_CFG_DP_X2_MODE;
		}
		break;
	case 0x1:
		if (width == 4) {
			ln0 |= MG_DP_MODE_CFG_DP_X2_MODE;
			ln1 |= MG_DP_MODE_CFG_DP_X2_MODE;
		}
		break;
	case 0x2:
		if (width == 2) {
			ln0 |= MG_DP_MODE_CFG_DP_X2_MODE;
			ln1 |= MG_DP_MODE_CFG_DP_X2_MODE;
		}
		break;
	case 0x3:
	case 0x5:
		if (width == 1) {
			ln0 |= MG_DP_MODE_CFG_DP_X1_MODE;
			ln1 |= MG_DP_MODE_CFG_DP_X1_MODE;
		} else {
			ln0 |= MG_DP_MODE_CFG_DP_X2_MODE;
			ln1 |= MG_DP_MODE_CFG_DP_X2_MODE;
		}
		break;
	case 0x4:
	case 0x6:
		if (width == 1) {
			ln0 |= MG_DP_MODE_CFG_DP_X1_MODE;
			ln1 |= MG_DP_MODE_CFG_DP_X1_MODE;
		} else {
			ln0 |= MG_DP_MODE_CFG_DP_X2_MODE;
			ln1 |= MG_DP_MODE_CFG_DP_X2_MODE;
		}
		break;
	default:
		MISSING_CASE(pin_assignment);
	}

	if (INTEL_GEN(dev_priv) >= 12) {
		intel_de_write(dev_priv, HIP_INDEX_REG(tc_port),
			       HIP_INDEX_VAL(tc_port, 0x0));
		intel_de_write(dev_priv, DKL_DP_MODE(tc_port), ln0);
		intel_de_write(dev_priv, HIP_INDEX_REG(tc_port),
			       HIP_INDEX_VAL(tc_port, 0x1));
		intel_de_write(dev_priv, DKL_DP_MODE(tc_port), ln1);
	} else {
		intel_de_write(dev_priv, MG_DP_MODE(0, tc_port), ln0);
		intel_de_write(dev_priv, MG_DP_MODE(1, tc_port), ln1);
	}
}

static void intel_dp_sink_set_fec_ready(struct intel_dp *intel_dp,
					const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *i915 = dp_to_i915(intel_dp);

	if (!crtc_state->fec_enable)
		return;

	if (drm_dp_dpcd_writeb(&intel_dp->aux, DP_FEC_CONFIGURATION, DP_FEC_READY) <= 0)
		drm_dbg_kms(&i915->drm,
			    "Failed to set FEC_READY in the sink\n");
}

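/*
 * Source side FEC enabling: set the FEC Enable bit in DP_TP_CTL and wait
 * for the FEC Enable Live status bit to confirm that FEC has actually
 * started on the link.
 */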
static void intel_ddi_enable_fec(struct intel_encoder *encoder,
				 const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp;
	u32 val;

	if (!crtc_state->fec_enable)
		return;

	intel_dp = enc_to_intel_dp(encoder);
	val = intel_de_read(dev_priv, intel_dp->regs.dp_tp_ctl);
	val |= DP_TP_CTL_FEC_ENABLE;
	intel_de_write(dev_priv, intel_dp->regs.dp_tp_ctl, val);

	if (intel_de_wait_for_set(dev_priv, intel_dp->regs.dp_tp_status,
				  DP_TP_STATUS_FEC_ENABLE_LIVE, 1))
		drm_err(&dev_priv->drm,
			"Timed out waiting for FEC Enable Status\n");
}

static void intel_ddi_disable_fec_state(struct intel_encoder *encoder,
					const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp;
	u32 val;

	if (!crtc_state->fec_enable)
		return;

	intel_dp = enc_to_intel_dp(encoder);
	val = intel_de_read(dev_priv, intel_dp->regs.dp_tp_ctl);
	val &= ~DP_TP_CTL_FEC_ENABLE;
	intel_de_write(dev_priv, intel_dp->regs.dp_tp_ctl, val);
	intel_de_posting_read(dev_priv, intel_dp->regs.dp_tp_ctl);
}

static void tgl_ddi_pre_enable_dp(struct intel_atomic_state *state,
				  struct intel_encoder *encoder,
				  const struct intel_crtc_state *crtc_state,
				  const struct drm_connector_state *conn_state)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	enum phy phy = intel_port_to_phy(dev_priv, encoder->port);
	struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
	bool is_mst = intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP_MST);
	int level = intel_ddi_dp_level(intel_dp);
	enum transcoder transcoder = crtc_state->cpu_transcoder;

	intel_dp_set_link_params(intel_dp, crtc_state->port_clock,
				 crtc_state->lane_count, is_mst);

	intel_dp->regs.dp_tp_ctl = TGL_DP_TP_CTL(transcoder);
	intel_dp->regs.dp_tp_status = TGL_DP_TP_STATUS(transcoder);

	/*
	 * 1. Enable Power Wells
	 *
	 * This was handled at the beginning of intel_atomic_commit_tail(),
	 * before we called down into this function.
	 */

	/* 2. Enable Panel Power if PPS is required */
	intel_edp_panel_on(intel_dp);

	/*
	 * 3. For non-TBT Type-C ports, set FIA lane count
	 * (DFLEXDPSP.DPX4TXLATC)
	 *
	 * This was done before tgl_ddi_pre_enable_dp by
	 * hsw_crtc_enable()->intel_encoders_pre_pll_enable().
	 */

	/*
	 * 4. Enable the port PLL.
	 *
	 * The PLL enabling itself was already done before this function by
	 * hsw_crtc_enable()->intel_enable_shared_dpll().  We need only
	 * configure the PLL to port mapping here.
	 */
	intel_ddi_clk_select(encoder, crtc_state);

	/* 5. If IO power is controlled through PWR_WELL_CTL, Enable IO Power */
	if (!intel_phy_is_tc(dev_priv, phy) ||
	    dig_port->tc_mode != TC_PORT_TBT_ALT)
		intel_display_power_get(dev_priv,
					dig_port->ddi_io_power_domain);

	/* 6. Program DP_MODE */
	icl_program_mg_dp_mode(dig_port, crtc_state);

	/*
	 * 7. The rest of the below are substeps under the bspec's "Enable and
	 * Train Display Port" step.  Note that steps that are specific to
	 * MST will be handled by intel_mst_pre_enable_dp() before/after it
	 * calls into this function.  Also intel_mst_pre_enable_dp() only calls
	 * us when active_mst_links==0, so any steps designated for "single
	 * stream or multi-stream master transcoder" can just be performed
	 * unconditionally here.
	 */

	/*
	 * 7.a Configure Transcoder Clock Select to direct the Port clock to the
	 * Transcoder.
	 */
	intel_ddi_enable_pipe_clock(encoder, crtc_state);

	/*
	 * 7.b Configure TRANS_DDI_FUNC_CTL DDI Select, DDI Mode Select & MST
	 * Transport Select
	 */
	intel_ddi_config_transcoder_func(encoder, crtc_state);

	/*
	 * 7.c Configure & enable DP_TP_CTL with link training pattern 1
	 * selected
	 *
	 * This will be handled by the intel_dp_start_link_train() farther
	 * down this function.
	 */

	/* 7.e Configure voltage swing and related IO settings */
	tgl_ddi_vswing_sequence(encoder, crtc_state->port_clock, level,
				encoder->type);

	/*
	 * 7.f Combo PHY: Configure PORT_CL_DW10 Static Power Down to power up
	 * the used lanes of the DDI.
	 */
	if (intel_phy_is_combo(dev_priv, phy)) {
		bool lane_reversal =
			dig_port->saved_port_bits & DDI_BUF_PORT_REVERSAL;

		intel_combo_phy_power_up_lanes(dev_priv, phy, false,
					       crtc_state->lane_count,
					       lane_reversal);
	}

	/*
	 * 7.g Configure and enable DDI_BUF_CTL
	 * 7.h Wait for DDI_BUF_CTL DDI Idle Status = 0b (Not Idle), timeout
	 *     after 500 us.
	 *
	 * We only configure what the register value will be here.  Actual
	 * enabling happens during link training farther down.
	 */
	intel_ddi_init_dp_buf_reg(encoder);

	if (!is_mst)
		intel_dp_sink_dpms(intel_dp, DRM_MODE_DPMS_ON);

	intel_dp_sink_set_decompression_state(intel_dp, crtc_state, true);
	/*
	 * DDI FEC: the DP spec says a source that anticipates enabling FEC
	 * encoding must set the FEC_READY bit in the sink's FEC_CONFIGURATION
	 * register to 1 before initiating link training.
	 */
	intel_dp_sink_set_fec_ready(intel_dp, crtc_state);

	/*
	 * 7.i Follow DisplayPort specification training sequence (see notes for
	 *     failure handling)
	 * 7.j If DisplayPort multi-stream - Set DP_TP_CTL link training to Idle
	 *     Pattern, wait for 5 idle patterns (DP_TP_STATUS Min_Idles_Sent)
	 *     (timeout after 800 us)
	 */
	intel_dp_start_link_train(intel_dp);

	/* 7.k Set DP_TP_CTL link training to Normal */
	if (!is_trans_port_sync_mode(crtc_state))
		intel_dp_stop_link_train(intel_dp);

	/* 7.l Configure and enable FEC if needed */
	intel_ddi_enable_fec(encoder, crtc_state);
	intel_dsc_enable(encoder, crtc_state);
}

static void hsw_ddi_pre_enable_dp(struct intel_atomic_state *state,
				  struct intel_encoder *encoder,
				  const struct intel_crtc_state *crtc_state,
				  const struct drm_connector_state *conn_state)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	enum port port = encoder->port;
	enum phy phy = intel_port_to_phy(dev_priv, port);
	struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
	bool is_mst = intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP_MST);
	int level = intel_ddi_dp_level(intel_dp);

	if (INTEL_GEN(dev_priv) < 11)
		drm_WARN_ON(&dev_priv->drm,
			    is_mst && (port == PORT_A || port == PORT_E));
	else
		drm_WARN_ON(&dev_priv->drm, is_mst && port == PORT_A);

	intel_dp_set_link_params(intel_dp, crtc_state->port_clock,
				 crtc_state->lane_count, is_mst);

	intel_edp_panel_on(intel_dp);

	intel_ddi_clk_select(encoder, crtc_state);

	if (!intel_phy_is_tc(dev_priv, phy) ||
	    dig_port->tc_mode != TC_PORT_TBT_ALT)
		intel_display_power_get(dev_priv,
					dig_port->ddi_io_power_domain);

	icl_program_mg_dp_mode(dig_port, crtc_state);

	if (INTEL_GEN(dev_priv) >= 11)
		icl_ddi_vswing_sequence(encoder, crtc_state->port_clock,
					level, encoder->type);
	else if (IS_CANNONLAKE(dev_priv))
		cnl_ddi_vswing_sequence(encoder, level, encoder->type);
	else if (IS_GEN9_LP(dev_priv))
		bxt_ddi_vswing_sequence(encoder, level, encoder->type);
	else
		intel_prepare_dp_ddi_buffers(encoder, crtc_state);

	if (intel_phy_is_combo(dev_priv, phy)) {
		bool lane_reversal =
			dig_port->saved_port_bits & DDI_BUF_PORT_REVERSAL;

		intel_combo_phy_power_up_lanes(dev_priv, phy, false,
					       crtc_state->lane_count,
					       lane_reversal);
	}

	intel_ddi_init_dp_buf_reg(encoder);
	if (!is_mst)
		intel_dp_sink_dpms(intel_dp, DRM_MODE_DPMS_ON);
	intel_dp_sink_set_decompression_state(intel_dp, crtc_state,
					      true);
	intel_dp_sink_set_fec_ready(intel_dp, crtc_state);
	intel_dp_start_link_train(intel_dp);
	if ((port != PORT_A || INTEL_GEN(dev_priv) >= 9) &&
	    !is_trans_port_sync_mode(crtc_state))
		intel_dp_stop_link_train(intel_dp);

	intel_ddi_enable_fec(encoder, crtc_state);

	if (!is_mst)
		intel_ddi_enable_pipe_clock(encoder, crtc_state);

	intel_dsc_enable(encoder, crtc_state);
}

static void intel_ddi_pre_enable_dp(struct intel_atomic_state *state,
				    struct intel_encoder *encoder,
				    const struct intel_crtc_state *crtc_state,
				    const struct drm_connector_state *conn_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);

	if (INTEL_GEN(dev_priv) >= 12)
		tgl_ddi_pre_enable_dp(state, encoder, crtc_state, conn_state);
	else
		hsw_ddi_pre_enable_dp(state, encoder, crtc_state, conn_state);

	/* MST will call a setting of MSA after an allocating of Virtual Channel
	 * from MST encoder pre_enable callback.
	 */
	if (!intel_crtc_has_type(crtc_state, INTEL_OUTPUT_DP_MST)) {
		intel_ddi_set_dp_msa(crtc_state, conn_state);

		intel_dp_set_m_n(crtc_state, M1_N1);
	}
}

static void intel_ddi_pre_enable_hdmi(struct intel_atomic_state *state,
				      struct intel_encoder *encoder,
				      const struct intel_crtc_state *crtc_state,
				      const struct drm_connector_state *conn_state)
{
	struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
	struct intel_hdmi *intel_hdmi = &dig_port->hdmi;
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	int level = intel_ddi_hdmi_level(encoder);

	intel_dp_dual_mode_set_tmds_output(intel_hdmi, true);
	intel_ddi_clk_select(encoder, crtc_state);

	intel_display_power_get(dev_priv, dig_port->ddi_io_power_domain);

	icl_program_mg_dp_mode(dig_port, crtc_state);

	if (INTEL_GEN(dev_priv) >= 12)
		tgl_ddi_vswing_sequence(encoder, crtc_state->port_clock,
					level, INTEL_OUTPUT_HDMI);
	else if (INTEL_GEN(dev_priv) == 11)
		icl_ddi_vswing_sequence(encoder, crtc_state->port_clock,
					level, INTEL_OUTPUT_HDMI);
	else if (IS_CANNONLAKE(dev_priv))
		cnl_ddi_vswing_sequence(encoder, level, INTEL_OUTPUT_HDMI);
	else if (IS_GEN9_LP(dev_priv))
		bxt_ddi_vswing_sequence(encoder, level, INTEL_OUTPUT_HDMI);
	else
		intel_prepare_hdmi_ddi_buffers(encoder, level);

	if (IS_GEN9_BC(dev_priv))
		skl_ddi_set_iboost(encoder, level, INTEL_OUTPUT_HDMI);

	intel_ddi_enable_pipe_clock(encoder, crtc_state);

	dig_port->set_infoframes(encoder,
				 crtc_state->has_infoframe,
				 crtc_state, conn_state);
}

static void intel_ddi_pre_enable(struct intel_atomic_state *state,
				 struct intel_encoder *encoder,
				 const struct intel_crtc_state *crtc_state,
				 const struct drm_connector_state *conn_state)
{
	struct intel_crtc *crtc = to_intel_crtc(crtc_state->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(crtc->base.dev);
	enum pipe pipe = crtc->pipe;

	/*
	 * When called from DP MST code:
	 * - conn_state will be NULL
	 * - encoder will be the main encoder (ie. mst->primary)
	 * - the main connector associated with this port
	 *   won't be active or linked to a crtc
	 * - crtc_state will be the state of the first stream to
	 *   be activated on this port, and it may not be the same
	 *   stream that will be deactivated last, but each stream
	 *   should have a state that is identical when it comes to
	 *   the DP link parameters
	 */

	drm_WARN_ON(&dev_priv->drm, crtc_state->has_pch_encoder);

	if (INTEL_GEN(dev_priv) >= 11)
		icl_map_plls_to_ports(encoder, crtc_state);

	intel_set_cpu_fifo_underrun_reporting(dev_priv, pipe, true);

	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI)) {
		intel_ddi_pre_enable_hdmi(state, encoder, crtc_state,
					  conn_state);
	} else {
		struct intel_lspcon *lspcon =
				enc_to_intel_lspcon(encoder);

		intel_ddi_pre_enable_dp(state, encoder, crtc_state,
					conn_state);
		if (lspcon->active) {
			struct intel_digital_port *dig_port =
					enc_to_dig_port(encoder);

			dig_port->set_infoframes(encoder,
						 crtc_state->has_infoframe,
						 crtc_state, conn_state);
		}
	}
}

static void intel_disable_ddi_buf(struct intel_encoder *encoder,
				  const struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	enum port port = encoder->port;
	bool wait = false;
	u32 val;

	val = intel_de_read(dev_priv, DDI_BUF_CTL(port));
	if (val & DDI_BUF_CTL_ENABLE) {
		val &= ~DDI_BUF_CTL_ENABLE;
		intel_de_write(dev_priv, DDI_BUF_CTL(port), val);
		wait = true;
	}

	if (intel_crtc_has_dp_encoder(crtc_state)) {
		struct intel_dp *intel_dp = enc_to_intel_dp(encoder);

		val = intel_de_read(dev_priv, intel_dp->regs.dp_tp_ctl);
		val &= ~(DP_TP_CTL_ENABLE | DP_TP_CTL_LINK_TRAIN_MASK);
		val |= DP_TP_CTL_LINK_TRAIN_PAT1;
		intel_de_write(dev_priv, intel_dp->regs.dp_tp_ctl, val);
	}

	/* Disable FEC in DP Sink */
	intel_ddi_disable_fec_state(encoder, crtc_state);

	if (wait)
		intel_wait_ddi_buf_idle(dev_priv, port);
}

static void intel_ddi_post_disable_dp(struct intel_atomic_state *state,
				      struct intel_encoder *encoder,
				      const struct intel_crtc_state *old_crtc_state,
				      const struct drm_connector_state *old_conn_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
	struct intel_dp *intel_dp = &dig_port->dp;
	bool is_mst = intel_crtc_has_type(old_crtc_state,
					  INTEL_OUTPUT_DP_MST);
	enum phy phy = intel_port_to_phy(dev_priv, encoder->port);

	if (!is_mst)
		intel_dp_set_infoframes(encoder, false,
					old_crtc_state, old_conn_state);

	/*
	 * Power down sink before disabling the port, otherwise we end
	 * up getting interrupts from the sink on detecting link loss.
	 */
	intel_dp_sink_dpms(intel_dp, DRM_MODE_DPMS_OFF);

	if (INTEL_GEN(dev_priv) >= 12) {
		if (is_mst) {
			enum transcoder cpu_transcoder = old_crtc_state->cpu_transcoder;
			u32 val;

			val = intel_de_read(dev_priv,
					    TRANS_DDI_FUNC_CTL(cpu_transcoder));
			val &= ~(TGL_TRANS_DDI_PORT_MASK |
				 TRANS_DDI_MODE_SELECT_MASK);
			intel_de_write(dev_priv,
				       TRANS_DDI_FUNC_CTL(cpu_transcoder),
				       val);
		}
	} else {
		if (!is_mst)
			intel_ddi_disable_pipe_clock(old_crtc_state);
	}

	intel_disable_ddi_buf(encoder, old_crtc_state);

	/*
	 * From TGL spec: "If single stream or multi-stream master transcoder:
	 * Configure Transcoder Clock select to direct no clock to the
	 * transcoder"
	 */
	if (INTEL_GEN(dev_priv) >= 12)
		intel_ddi_disable_pipe_clock(old_crtc_state);

	intel_edp_panel_vdd_on(intel_dp);
	intel_edp_panel_off(intel_dp);

	if (!intel_phy_is_tc(dev_priv, phy) ||
	    dig_port->tc_mode != TC_PORT_TBT_ALT)
		intel_display_power_put_unchecked(dev_priv,
						  dig_port->ddi_io_power_domain);

	intel_ddi_clk_disable(encoder);
}

static void intel_ddi_post_disable_hdmi(struct intel_atomic_state *state,
					struct intel_encoder *encoder,
					const struct intel_crtc_state *old_crtc_state,
					const struct drm_connector_state *old_conn_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
	struct intel_hdmi *intel_hdmi = &dig_port->hdmi;

	dig_port->set_infoframes(encoder, false,
				 old_crtc_state, old_conn_state);

	intel_ddi_disable_pipe_clock(old_crtc_state);

	intel_disable_ddi_buf(encoder, old_crtc_state);

	intel_display_power_put_unchecked(dev_priv,
					  dig_port->ddi_io_power_domain);

	intel_ddi_clk_disable(encoder);

	intel_dp_dual_mode_set_tmds_output(intel_hdmi, false);
}

static void intel_ddi_post_disable(struct intel_atomic_state *state,
				   struct intel_encoder *encoder,
				   const struct intel_crtc_state *old_crtc_state,
				   const struct drm_connector_state *old_conn_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
	enum phy phy = intel_port_to_phy(dev_priv, encoder->port);
	bool is_tc_port = intel_phy_is_tc(dev_priv, phy);

	if (!intel_crtc_has_type(old_crtc_state, INTEL_OUTPUT_DP_MST)) {
		intel_crtc_vblank_off(old_crtc_state);

		intel_disable_pipe(old_crtc_state);

		intel_ddi_disable_transcoder_func(old_crtc_state);

		intel_dsc_disable(old_crtc_state);

		if (INTEL_GEN(dev_priv) >= 9)
			skl_scaler_disable(old_crtc_state);
		else
			ilk_pfit_disable(old_crtc_state);
	}

	/*
	 * When called from DP MST code:
	 * - old_conn_state will be NULL
	 * - encoder will be the main encoder (ie. mst->primary)
	 * - the main connector associated with this port
	 *   won't be active or linked to a crtc
	 * - old_crtc_state will be the state of the last stream to
	 *   be deactivated on this port, and it may not be the same
	 *   stream that was activated last, but each stream
	 *   should have a state that is identical when it comes to
	 *   the DP link parameters
	 */

	if (intel_crtc_has_type(old_crtc_state, INTEL_OUTPUT_HDMI))
		intel_ddi_post_disable_hdmi(state, encoder, old_crtc_state,
					    old_conn_state);
	else
		intel_ddi_post_disable_dp(state, encoder, old_crtc_state,
					  old_conn_state);

	if (INTEL_GEN(dev_priv) >= 11)
		icl_unmap_plls_to_ports(encoder);

	if (intel_crtc_has_dp_encoder(old_crtc_state) || is_tc_port)
		intel_display_power_put_unchecked(dev_priv,
						  intel_ddi_main_link_aux_domain(dig_port));

	if (is_tc_port)
		intel_tc_port_put_link(dig_port);
}

void intel_ddi_fdi_post_disable(struct intel_atomic_state *state,
				struct intel_encoder *encoder,
				const struct intel_crtc_state *old_crtc_state,
				const struct drm_connector_state *old_conn_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u32 val;

	/*
	 * Bspec lists this as both step 13 (before DDI_BUF_CTL disable)
	 * and step 18 (after clearing PORT_CLK_SEL). Based on a BUN,
	 * step 13 is the correct place for it. Step 18 is where it was
	 * originally before the BUN.
	 */
	val = intel_de_read(dev_priv, FDI_RX_CTL(PIPE_A));
	val &= ~FDI_RX_ENABLE;
	intel_de_write(dev_priv, FDI_RX_CTL(PIPE_A), val);

	intel_disable_ddi_buf(encoder, old_crtc_state);
	intel_ddi_clk_disable(encoder);

	val = intel_de_read(dev_priv, FDI_RX_MISC(PIPE_A));
	val &= ~(FDI_RX_PWRDN_LANE1_MASK | FDI_RX_PWRDN_LANE0_MASK);
	val |= FDI_RX_PWRDN_LANE1_VAL(2) | FDI_RX_PWRDN_LANE0_VAL(2);
	intel_de_write(dev_priv, FDI_RX_MISC(PIPE_A), val);

	val = intel_de_read(dev_priv, FDI_RX_CTL(PIPE_A));
	val &= ~FDI_PCDCLK;
	intel_de_write(dev_priv, FDI_RX_CTL(PIPE_A), val);

	val = intel_de_read(dev_priv, FDI_RX_CTL(PIPE_A));
	val &= ~FDI_RX_PLL_ENABLE;
	intel_de_write(dev_priv, FDI_RX_CTL(PIPE_A), val);
}

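/*
 * In port sync mode, switch every slave transcoder's link to the normal
 * (non training) pattern first, wait briefly for them to settle, and only
 * then do the same for the master transcoder driven by @encoder.
 */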
static void trans_port_sync_stop_link_train(struct intel_atomic_state *state,
					    struct intel_encoder *encoder,
					    const struct intel_crtc_state *crtc_state)
{
	const struct drm_connector_state *conn_state;
	struct drm_connector *conn;
	int i;

	if (!crtc_state->sync_mode_slaves_mask)
		return;

	for_each_new_connector_in_state(&state->base, conn, conn_state, i) {
		struct intel_encoder *slave_encoder =
			to_intel_encoder(conn_state->best_encoder);
		struct intel_crtc *slave_crtc = to_intel_crtc(conn_state->crtc);
		const struct intel_crtc_state *slave_crtc_state;

		if (!slave_crtc)
			continue;

		slave_crtc_state =
			intel_atomic_get_new_crtc_state(state, slave_crtc);

		if (slave_crtc_state->master_transcoder !=
		    crtc_state->cpu_transcoder)
			continue;

		intel_dp_stop_link_train(enc_to_intel_dp(slave_encoder));
	}

	usleep_range(200, 400);

	intel_dp_stop_link_train(enc_to_intel_dp(encoder));
}

static void intel_enable_ddi_dp(struct intel_atomic_state *state,
				struct intel_encoder *encoder,
				const struct intel_crtc_state *crtc_state,
				const struct drm_connector_state *conn_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	enum port port = encoder->port;

	if (port == PORT_A && INTEL_GEN(dev_priv) < 9)
		intel_dp_stop_link_train(intel_dp);

	intel_edp_backlight_on(crtc_state, conn_state);
	intel_psr_enable(intel_dp, crtc_state, conn_state);
	intel_dp_set_infoframes(encoder, true, crtc_state, conn_state);
	intel_edp_drrs_enable(intel_dp, crtc_state);

	if (crtc_state->has_audio)
		intel_audio_codec_enable(encoder, crtc_state, conn_state);

	trans_port_sync_stop_link_train(state, encoder, crtc_state);
}

static i915_reg_t
gen9_chicken_trans_reg_by_port(struct drm_i915_private *dev_priv,
			       enum port port)
{
	static const enum transcoder trans[] = {
		[PORT_A] = TRANSCODER_EDP,
		[PORT_B] = TRANSCODER_A,
		[PORT_C] = TRANSCODER_B,
		[PORT_D] = TRANSCODER_C,
		[PORT_E] = TRANSCODER_A,
	};

	drm_WARN_ON(&dev_priv->drm, INTEL_GEN(dev_priv) < 9);

	if (drm_WARN_ON(&dev_priv->drm, port < PORT_A || port > PORT_E))
		port = PORT_A;

	return CHICKEN_TRANS(trans[port]);
}

static void intel_enable_ddi_hdmi(struct intel_atomic_state *state,
				  struct intel_encoder *encoder,
				  const struct intel_crtc_state *crtc_state,
				  const struct drm_connector_state *conn_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
	struct drm_connector *connector = conn_state->connector;
	enum port port = encoder->port;

	if (!intel_hdmi_handle_sink_scrambling(encoder, connector,
					       crtc_state->hdmi_high_tmds_clock_ratio,
					       crtc_state->hdmi_scrambling))
		drm_dbg_kms(&dev_priv->drm,
			    "[CONNECTOR:%d:%s] Failed to configure sink scrambling/TMDS bit clock ratio\n",
			    connector->base.id, connector->name);

	/* Display WA #1143: skl,kbl,cfl */
	if (IS_GEN9_BC(dev_priv)) {
		/*
		 * For some reason these chicken bits have been
		 * stuffed into a transcoder register, even though
		 * the bits affect a specific DDI port rather than
		 * a specific transcoder.
		 */
		i915_reg_t reg = gen9_chicken_trans_reg_by_port(dev_priv, port);
		u32 val;

		val = intel_de_read(dev_priv, reg);

		if (port == PORT_E)
			val |= DDIE_TRAINING_OVERRIDE_ENABLE |
				DDIE_TRAINING_OVERRIDE_VALUE;
		else
			val |= DDI_TRAINING_OVERRIDE_ENABLE |
				DDI_TRAINING_OVERRIDE_VALUE;

		intel_de_write(dev_priv, reg, val);
		intel_de_posting_read(dev_priv, reg);

		udelay(1);

		if (port == PORT_E)
			val &= ~(DDIE_TRAINING_OVERRIDE_ENABLE |
				 DDIE_TRAINING_OVERRIDE_VALUE);
		else
			val &= ~(DDI_TRAINING_OVERRIDE_ENABLE |
				 DDI_TRAINING_OVERRIDE_VALUE);

		intel_de_write(dev_priv, reg, val);
	}

	/* In HDMI/DVI mode, the port width, and swing/emphasis values
	 * are ignored so nothing special needs to be done besides
	 * enabling the port.
	 */
	intel_de_write(dev_priv, DDI_BUF_CTL(port),
		       dig_port->saved_port_bits | DDI_BUF_CTL_ENABLE);

	if (crtc_state->has_audio)
		intel_audio_codec_enable(encoder, crtc_state, conn_state);
}

static void intel_enable_ddi(struct intel_atomic_state *state,
			     struct intel_encoder *encoder,
			     const struct intel_crtc_state *crtc_state,
			     const struct drm_connector_state *conn_state)
{
	drm_WARN_ON(state->base.dev, crtc_state->has_pch_encoder);

	intel_ddi_enable_transcoder_func(encoder, crtc_state);

	intel_enable_pipe(crtc_state);

	intel_crtc_vblank_on(crtc_state);

	if (intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI))
		intel_enable_ddi_hdmi(state, encoder, crtc_state, conn_state);
	else
		intel_enable_ddi_dp(state, encoder, crtc_state, conn_state);

	/* Enable hdcp if it's desired */
	if (conn_state->content_protection ==
	    DRM_MODE_CONTENT_PROTECTION_DESIRED)
		intel_hdcp_enable(to_intel_connector(conn_state->connector),
				  crtc_state->cpu_transcoder,
				  (u8)conn_state->hdcp_content_type);
}

static void intel_disable_ddi_dp(struct intel_atomic_state *state,
				 struct intel_encoder *encoder,
				 const struct intel_crtc_state *old_crtc_state,
				 const struct drm_connector_state *old_conn_state)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);

	intel_dp->link_trained = false;

	if (old_crtc_state->has_audio)
		intel_audio_codec_disable(encoder,
					  old_crtc_state, old_conn_state);

	intel_edp_drrs_disable(intel_dp, old_crtc_state);
	intel_psr_disable(intel_dp, old_crtc_state);
	intel_edp_backlight_off(old_conn_state);
	/* Disable the decompression in DP Sink */
	intel_dp_sink_set_decompression_state(intel_dp, old_crtc_state,
					      false);
}

static void intel_disable_ddi_hdmi(struct intel_atomic_state *state,
				   struct intel_encoder *encoder,
				   const struct intel_crtc_state *old_crtc_state,
				   const struct drm_connector_state *old_conn_state)
{
	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
	struct drm_connector *connector = old_conn_state->connector;

	if (old_crtc_state->has_audio)
		intel_audio_codec_disable(encoder,
					  old_crtc_state, old_conn_state);

	if (!intel_hdmi_handle_sink_scrambling(encoder, connector,
					       false, false))
		drm_dbg_kms(&i915->drm,
			    "[CONNECTOR:%d:%s] Failed to reset sink scrambling/TMDS bit clock ratio\n",
			    connector->base.id, connector->name);
}

static void intel_disable_ddi(struct intel_atomic_state *state,
			      struct intel_encoder *encoder,
			      const struct intel_crtc_state *old_crtc_state,
			      const struct drm_connector_state *old_conn_state)
{
	intel_hdcp_disable(to_intel_connector(old_conn_state->connector));

	if (intel_crtc_has_type(old_crtc_state, INTEL_OUTPUT_HDMI))
		intel_disable_ddi_hdmi(state, encoder, old_crtc_state,
				       old_conn_state);
	else
		intel_disable_ddi_dp(state, encoder, old_crtc_state,
				     old_conn_state);
}

static void intel_ddi_update_pipe_dp(struct intel_atomic_state *state,
				     struct intel_encoder *encoder,
				     const struct intel_crtc_state *crtc_state,
				     const struct drm_connector_state *conn_state)
{
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);

	intel_ddi_set_dp_msa(crtc_state, conn_state);

	intel_psr_update(intel_dp, crtc_state, conn_state);
	intel_dp_set_infoframes(encoder, true, crtc_state, conn_state);
	intel_edp_drrs_enable(intel_dp, crtc_state);

	intel_panel_update_backlight(state, encoder, crtc_state, conn_state);
}

static void intel_ddi_update_pipe(struct intel_atomic_state *state,
				  struct intel_encoder *encoder,
				  const struct intel_crtc_state *crtc_state,
				  const struct drm_connector_state *conn_state)
{

	if (!intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI))
		intel_ddi_update_pipe_dp(state, encoder, crtc_state,
					 conn_state);

	intel_hdcp_update_pipe(state, encoder, crtc_state, conn_state);
}

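/*
 * Called before updating an inactive crtc on a Type-C port: grab a
 * reference on the TC port link for the required lane count and, if the
 * crtc is to become active, refresh which DPLL is used for the port's
 * current TC mode. The link reference is dropped again in
 * intel_ddi_update_complete().
 */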
static void
intel_ddi_update_prepare(struct intel_atomic_state *state,
			 struct intel_encoder *encoder,
			 struct intel_crtc *crtc)
{
	struct intel_crtc_state *crtc_state =
		crtc ? intel_atomic_get_new_crtc_state(state, crtc) : NULL;
	int required_lanes = crtc_state ? crtc_state->lane_count : 1;

	drm_WARN_ON(state->base.dev, crtc && crtc->active);

	intel_tc_port_get_link(enc_to_dig_port(encoder),
		               required_lanes);
	if (crtc_state && crtc_state->hw.active)
		intel_update_active_dpll(state, crtc, encoder);
}

static void
intel_ddi_update_complete(struct intel_atomic_state *state,
			  struct intel_encoder *encoder,
			  struct intel_crtc *crtc)
{
	intel_tc_port_put_link(enc_to_dig_port(encoder));
}

static void
intel_ddi_pre_pll_enable(struct intel_atomic_state *state,
			 struct intel_encoder *encoder,
			 const struct intel_crtc_state *crtc_state,
			 const struct drm_connector_state *conn_state)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
	enum phy phy = intel_port_to_phy(dev_priv, encoder->port);
	bool is_tc_port = intel_phy_is_tc(dev_priv, phy);

	if (is_tc_port)
		intel_tc_port_get_link(dig_port, crtc_state->lane_count);

	if (intel_crtc_has_dp_encoder(crtc_state) || is_tc_port)
		intel_display_power_get(dev_priv,
					intel_ddi_main_link_aux_domain(dig_port));

	if (is_tc_port && dig_port->tc_mode != TC_PORT_TBT_ALT)
		/*
		 * Program the lane count for static/dynamic connections on
		 * Type-C ports.  Skip this step for TBT.
		 */
		intel_tc_port_set_fia_lane_count(dig_port, crtc_state->lane_count);
	else if (IS_GEN9_LP(dev_priv))
		bxt_ddi_phy_set_lane_optim_mask(encoder,
						crtc_state->lane_lat_optim_mask);
}

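/*
 * Prepare the port for link retraining: if the link is currently enabled,
 * disable DDI_BUF_CTL, put DP_TP_CTL back into training pattern 1 and wait
 * for the buffer to go idle, then re-enable DP_TP_CTL and DDI_BUF_CTL with
 * training pattern 1 selected so training can restart from scratch.
 */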
static void intel_ddi_prepare_link_retrain(struct intel_dp *intel_dp)
{
	struct intel_digital_port *dig_port = dp_to_dig_port(intel_dp);
	struct drm_i915_private *dev_priv = to_i915(dig_port->base.base.dev);
	enum port port = dig_port->base.port;
	u32 dp_tp_ctl, ddi_buf_ctl;
	bool wait = false;

	dp_tp_ctl = intel_de_read(dev_priv, intel_dp->regs.dp_tp_ctl);

	if (dp_tp_ctl & DP_TP_CTL_ENABLE) {
		ddi_buf_ctl = intel_de_read(dev_priv, DDI_BUF_CTL(port));
		if (ddi_buf_ctl & DDI_BUF_CTL_ENABLE) {
			intel_de_write(dev_priv, DDI_BUF_CTL(port),
				       ddi_buf_ctl & ~DDI_BUF_CTL_ENABLE);
			wait = true;
		}

		dp_tp_ctl &= ~(DP_TP_CTL_ENABLE | DP_TP_CTL_LINK_TRAIN_MASK);
		dp_tp_ctl |= DP_TP_CTL_LINK_TRAIN_PAT1;
		intel_de_write(dev_priv, intel_dp->regs.dp_tp_ctl, dp_tp_ctl);
		intel_de_posting_read(dev_priv, intel_dp->regs.dp_tp_ctl);

		if (wait)
			intel_wait_ddi_buf_idle(dev_priv, port);
	}

	dp_tp_ctl = DP_TP_CTL_ENABLE | DP_TP_CTL_LINK_TRAIN_PAT1;
	if (intel_dp->link_mst)
		dp_tp_ctl |= DP_TP_CTL_MODE_MST;
	else {
		dp_tp_ctl |= DP_TP_CTL_MODE_SST;
		if (drm_dp_enhanced_frame_cap(intel_dp->dpcd))
			dp_tp_ctl |= DP_TP_CTL_ENHANCED_FRAME_ENABLE;
	}
	intel_de_write(dev_priv, intel_dp->regs.dp_tp_ctl, dp_tp_ctl);
	intel_de_posting_read(dev_priv, intel_dp->regs.dp_tp_ctl);

	intel_dp->DP |= DDI_BUF_CTL_ENABLE;
	intel_de_write(dev_priv, DDI_BUF_CTL(port), intel_dp->DP);
	intel_de_posting_read(dev_priv, DDI_BUF_CTL(port));

	intel_wait_ddi_buf_active(dev_priv, port);
}

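/*
 * Mirror the training pattern being requested from the sink via DPCD in
 * the source's DP_TP_CTL link training field.
 */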
static void intel_ddi_set_link_train(struct intel_dp *intel_dp,
				     u8 dp_train_pat)
{
	struct drm_i915_private *dev_priv = dp_to_i915(intel_dp);
	u8 train_pat_mask = drm_dp_training_pattern_mask(intel_dp->dpcd);
	u32 temp;

	temp = intel_de_read(dev_priv, intel_dp->regs.dp_tp_ctl);

	temp &= ~DP_TP_CTL_LINK_TRAIN_MASK;
	switch (dp_train_pat & train_pat_mask) {
	case DP_TRAINING_PATTERN_DISABLE:
		temp |= DP_TP_CTL_LINK_TRAIN_NORMAL;
		break;
	case DP_TRAINING_PATTERN_1:
		temp |= DP_TP_CTL_LINK_TRAIN_PAT1;
		break;
	case DP_TRAINING_PATTERN_2:
		temp |= DP_TP_CTL_LINK_TRAIN_PAT2;
		break;
	case DP_TRAINING_PATTERN_3:
		temp |= DP_TP_CTL_LINK_TRAIN_PAT3;
		break;
	case DP_TRAINING_PATTERN_4:
		temp |= DP_TP_CTL_LINK_TRAIN_PAT4;
		break;
	}

	intel_de_write(dev_priv, intel_dp->regs.dp_tp_ctl, temp);
}

static void intel_ddi_set_idle_link_train(struct intel_dp *intel_dp)
{
	struct intel_encoder *encoder = &dp_to_dig_port(intel_dp)->base;
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	enum port port = encoder->port;
	u32 val;

	val = intel_de_read(dev_priv, intel_dp->regs.dp_tp_ctl);
	val &= ~DP_TP_CTL_LINK_TRAIN_MASK;
	val |= DP_TP_CTL_LINK_TRAIN_IDLE;
	intel_de_write(dev_priv, intel_dp->regs.dp_tp_ctl, val);

	/*
	 * Until TGL on PORT_A we can have only eDP in SST mode. There the only
	 * reason we need to set idle transmission mode is to work around a HW
	 * issue where we enable the pipe while not in idle link-training mode.
	 * In this case there is requirement to wait for a minimum number of
	 * idle patterns to be sent.
	 */
	if (port == PORT_A && INTEL_GEN(dev_priv) < 12)
		return;

	if (intel_de_wait_for_set(dev_priv, intel_dp->regs.dp_tp_status,
				  DP_TP_STATUS_IDLE_DONE, 1))
		drm_err(&dev_priv->drm,
			"Timed out waiting for DP idle patterns\n");
}

static bool intel_ddi_is_audio_enabled(struct drm_i915_private *dev_priv,
				       enum transcoder cpu_transcoder)
{
	if (cpu_transcoder == TRANSCODER_EDP)
		return false;

	if (!intel_display_power_is_enabled(dev_priv, POWER_DOMAIN_AUDIO))
		return false;

	return intel_de_read(dev_priv, HSW_AUD_PIN_ELD_CP_VLD) &
		AUDIO_OUTPUT_ENABLE(cpu_transcoder);
}

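/*
 * Port clocks above 594 MHz need a higher display voltage level on recent
 * platforms; record the minimum level here so the cdclk/voltage code can
 * take it into account.
 */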
void intel_ddi_compute_min_voltage_level(struct drm_i915_private *dev_priv,
					 struct intel_crtc_state *crtc_state)
{
	if (INTEL_GEN(dev_priv) >= 12 && crtc_state->port_clock > 594000)
		crtc_state->min_voltage_level = 2;
	else if (IS_ELKHARTLAKE(dev_priv) && crtc_state->port_clock > 594000)
		crtc_state->min_voltage_level = 3;
	else if (INTEL_GEN(dev_priv) >= 11 && crtc_state->port_clock > 594000)
		crtc_state->min_voltage_level = 1;
	else if (IS_CANNONLAKE(dev_priv) && crtc_state->port_clock > 594000)
		crtc_state->min_voltage_level = 2;
}

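/*
 * Read out which transcoder, if any, acts as the port sync master of
 * @cpu_transcoder; returns INVALID_TRANSCODER when port sync is not
 * enabled on it.
 */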
static enum transcoder bdw_transcoder_master_readout(struct drm_i915_private *dev_priv,
						     enum transcoder cpu_transcoder)
{
	u32 master_select;

	if (INTEL_GEN(dev_priv) >= 11) {
		u32 ctl2 = intel_de_read(dev_priv, TRANS_DDI_FUNC_CTL2(cpu_transcoder));

		if ((ctl2 & PORT_SYNC_MODE_ENABLE) == 0)
			return INVALID_TRANSCODER;

		master_select = REG_FIELD_GET(PORT_SYNC_MODE_MASTER_SELECT_MASK, ctl2);
	} else {
		u32 ctl = intel_de_read(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder));

		if ((ctl & TRANS_DDI_PORT_SYNC_ENABLE) == 0)
			return INVALID_TRANSCODER;

		master_select = REG_FIELD_GET(TRANS_DDI_PORT_SYNC_MASTER_SELECT_MASK, ctl);
	}

	if (master_select == 0)
		return TRANSCODER_EDP;
	else
		return master_select - 1;
}

static void bdw_get_trans_port_sync_config(struct intel_crtc_state *crtc_state)
{
	struct drm_i915_private *dev_priv = to_i915(crtc_state->uapi.crtc->dev);
	u32 transcoders = BIT(TRANSCODER_A) | BIT(TRANSCODER_B) |
		BIT(TRANSCODER_C) | BIT(TRANSCODER_D);
	enum transcoder cpu_transcoder;

	crtc_state->master_transcoder =
		bdw_transcoder_master_readout(dev_priv, crtc_state->cpu_transcoder);

	for_each_cpu_transcoder_masked(dev_priv, cpu_transcoder, transcoders) {
		enum intel_display_power_domain power_domain;
		intel_wakeref_t trans_wakeref;

		power_domain = POWER_DOMAIN_TRANSCODER(cpu_transcoder);
		trans_wakeref = intel_display_power_get_if_enabled(dev_priv,
								   power_domain);

		if (!trans_wakeref)
			continue;

		if (bdw_transcoder_master_readout(dev_priv, cpu_transcoder) ==
		    crtc_state->cpu_transcoder)
			crtc_state->sync_mode_slaves_mask |= BIT(cpu_transcoder);

		intel_display_power_put(dev_priv, power_domain, trans_wakeref);
	}

	drm_WARN_ON(&dev_priv->drm,
		    crtc_state->master_transcoder != INVALID_TRANSCODER &&
		    crtc_state->sync_mode_slaves_mask);
}

void intel_ddi_get_config(struct intel_encoder *encoder,
			  struct intel_crtc_state *pipe_config)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_crtc *intel_crtc = to_intel_crtc(pipe_config->uapi.crtc);
	enum transcoder cpu_transcoder = pipe_config->cpu_transcoder;
	struct intel_dp *intel_dp = enc_to_intel_dp(encoder);
	u32 temp, flags = 0;

	/* XXX: DSI transcoder paranoia */
	if (drm_WARN_ON(&dev_priv->drm, transcoder_is_dsi(cpu_transcoder)))
		return;

	intel_dsc_get_config(encoder, pipe_config);

	temp = intel_de_read(dev_priv, TRANS_DDI_FUNC_CTL(cpu_transcoder));
	if (temp & TRANS_DDI_PHSYNC)
		flags |= DRM_MODE_FLAG_PHSYNC;
	else
		flags |= DRM_MODE_FLAG_NHSYNC;
	if (temp & TRANS_DDI_PVSYNC)
		flags |= DRM_MODE_FLAG_PVSYNC;
	else
		flags |= DRM_MODE_FLAG_NVSYNC;

	pipe_config->hw.adjusted_mode.flags |= flags;

	switch (temp & TRANS_DDI_BPC_MASK) {
	case TRANS_DDI_BPC_6:
		pipe_config->pipe_bpp = 18;
		break;
	case TRANS_DDI_BPC_8:
		pipe_config->pipe_bpp = 24;
		break;
	case TRANS_DDI_BPC_10:
		pipe_config->pipe_bpp = 30;
		break;
	case TRANS_DDI_BPC_12:
		pipe_config->pipe_bpp = 36;
		break;
	default:
		break;
	}

	switch (temp & TRANS_DDI_MODE_SELECT_MASK) {
	case TRANS_DDI_MODE_SELECT_HDMI:
		pipe_config->has_hdmi_sink = true;

		pipe_config->infoframes.enable |=
			intel_hdmi_infoframes_enabled(encoder, pipe_config);

		if (pipe_config->infoframes.enable)
			pipe_config->has_infoframe = true;

		if (temp & TRANS_DDI_HDMI_SCRAMBLING)
			pipe_config->hdmi_scrambling = true;
		if (temp & TRANS_DDI_HIGH_TMDS_CHAR_RATE)
			pipe_config->hdmi_high_tmds_clock_ratio = true;
		/* fall through */
	case TRANS_DDI_MODE_SELECT_DVI:
		pipe_config->output_types |= BIT(INTEL_OUTPUT_HDMI);
		pipe_config->lane_count = 4;
		break;
	case TRANS_DDI_MODE_SELECT_FDI:
		pipe_config->output_types |= BIT(INTEL_OUTPUT_ANALOG);
		break;
	case TRANS_DDI_MODE_SELECT_DP_SST:
		if (encoder->type == INTEL_OUTPUT_EDP)
			pipe_config->output_types |= BIT(INTEL_OUTPUT_EDP);
		else
			pipe_config->output_types |= BIT(INTEL_OUTPUT_DP);
		pipe_config->lane_count =
			((temp & DDI_PORT_WIDTH_MASK) >> DDI_PORT_WIDTH_SHIFT) + 1;
		intel_dp_get_m_n(intel_crtc, pipe_config);

		if (INTEL_GEN(dev_priv) >= 11) {
			i915_reg_t dp_tp_ctl;

			if (IS_GEN(dev_priv, 11))
				dp_tp_ctl = DP_TP_CTL(encoder->port);
			else
				dp_tp_ctl = TGL_DP_TP_CTL(pipe_config->cpu_transcoder);

			pipe_config->fec_enable =
				intel_de_read(dev_priv, dp_tp_ctl) & DP_TP_CTL_FEC_ENABLE;

			drm_dbg_kms(&dev_priv->drm,
				    "[ENCODER:%d:%s] Fec status: %u\n",
				    encoder->base.base.id, encoder->base.name,
				    pipe_config->fec_enable);
		}

		pipe_config->infoframes.enable |=
			intel_hdmi_infoframes_enabled(encoder, pipe_config);

		break;
	case TRANS_DDI_MODE_SELECT_DP_MST:
		pipe_config->output_types |= BIT(INTEL_OUTPUT_DP_MST);
		pipe_config->lane_count =
			((temp & DDI_PORT_WIDTH_MASK) >> DDI_PORT_WIDTH_SHIFT) + 1;

		if (INTEL_GEN(dev_priv) >= 12)
			pipe_config->mst_master_transcoder =
					REG_FIELD_GET(TRANS_DDI_MST_TRANSPORT_SELECT_MASK, temp);

		intel_dp_get_m_n(intel_crtc, pipe_config);

		pipe_config->infoframes.enable |=
			intel_hdmi_infoframes_enabled(encoder, pipe_config);
		break;
	default:
		break;
	}

	if (INTEL_GEN(dev_priv) >= 12) {
		enum transcoder transcoder =
			intel_dp_mst_is_slave_trans(pipe_config) ?
			pipe_config->mst_master_transcoder :
			pipe_config->cpu_transcoder;

		intel_dp->regs.dp_tp_ctl = TGL_DP_TP_CTL(transcoder);
		intel_dp->regs.dp_tp_status = TGL_DP_TP_STATUS(transcoder);
	}

	pipe_config->has_audio =
		intel_ddi_is_audio_enabled(dev_priv, cpu_transcoder);

	if (encoder->type == INTEL_OUTPUT_EDP && dev_priv->vbt.edp.bpp &&
	    pipe_config->pipe_bpp > dev_priv->vbt.edp.bpp) {
		/*
		 * This is a big fat ugly hack.
		 *
		 * Some machines in UEFI boot mode provide us a VBT that has 18
		 * bpp and 1.62 GHz link bandwidth for eDP, which for reasons
		 * unknown we fail to light up. Yet the same BIOS boots up with
		 * 24 bpp and 2.7 GHz link. Use the same bpp as the BIOS uses as
		 * max, not what it tells us to use.
		 *
		 * Note: This will still be broken if the eDP panel is not lit
		 * up by the BIOS, and thus we can't get the mode at module
		 * load.
		 */
		drm_dbg_kms(&dev_priv->drm,
			    "pipe has %d bpp for eDP panel, overriding BIOS-provided max %d bpp\n",
			    pipe_config->pipe_bpp, dev_priv->vbt.edp.bpp);
		dev_priv->vbt.edp.bpp = pipe_config->pipe_bpp;
	}

	intel_ddi_clock_get(encoder, pipe_config);

	if (IS_GEN9_LP(dev_priv))
		pipe_config->lane_lat_optim_mask =
			bxt_ddi_phy_get_lane_lat_optim_mask(encoder);

	intel_ddi_compute_min_voltage_level(dev_priv, pipe_config);

	intel_hdmi_read_gcp_infoframe(encoder, pipe_config);

	intel_read_infoframe(encoder, pipe_config,
			     HDMI_INFOFRAME_TYPE_AVI,
			     &pipe_config->infoframes.avi);
	intel_read_infoframe(encoder, pipe_config,
			     HDMI_INFOFRAME_TYPE_SPD,
			     &pipe_config->infoframes.spd);
	intel_read_infoframe(encoder, pipe_config,
			     HDMI_INFOFRAME_TYPE_VENDOR,
			     &pipe_config->infoframes.hdmi);
	intel_read_infoframe(encoder, pipe_config,
			     HDMI_INFOFRAME_TYPE_DRM,
			     &pipe_config->infoframes.drm);

	if (INTEL_GEN(dev_priv) >= 8)
		bdw_get_trans_port_sync_config(pipe_config);

	intel_read_dp_sdp(encoder, pipe_config, HDMI_PACKET_TYPE_GAMUT_METADATA);
	intel_read_dp_sdp(encoder, pipe_config, DP_SDP_VSC);
}

static enum intel_output_type
intel_ddi_compute_output_type(struct intel_encoder *encoder,
			      struct intel_crtc_state *crtc_state,
			      struct drm_connector_state *conn_state)
{
	switch (conn_state->connector->connector_type) {
	case DRM_MODE_CONNECTOR_HDMIA:
		return INTEL_OUTPUT_HDMI;
	case DRM_MODE_CONNECTOR_eDP:
		return INTEL_OUTPUT_EDP;
	case DRM_MODE_CONNECTOR_DisplayPort:
		return INTEL_OUTPUT_DP;
	default:
		MISSING_CASE(conn_state->connector->connector_type);
		return INTEL_OUTPUT_UNUSED;
	}
}

static int intel_ddi_compute_config(struct intel_encoder *encoder,
				    struct intel_crtc_state *pipe_config,
				    struct drm_connector_state *conn_state)
{
	struct intel_crtc *crtc = to_intel_crtc(pipe_config->uapi.crtc);
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	enum port port = encoder->port;
	int ret;

	if (HAS_TRANSCODER(dev_priv, TRANSCODER_EDP) && port == PORT_A)
		pipe_config->cpu_transcoder = TRANSCODER_EDP;

	if (intel_crtc_has_type(pipe_config, INTEL_OUTPUT_HDMI)) {
		ret = intel_hdmi_compute_config(encoder, pipe_config, conn_state);
	} else {
		ret = intel_dp_compute_config(encoder, pipe_config, conn_state);
	}

	if (ret)
		return ret;

	if (IS_HASWELL(dev_priv) && crtc->pipe == PIPE_A &&
	    pipe_config->cpu_transcoder == TRANSCODER_EDP)
		pipe_config->pch_pfit.force_thru =
			pipe_config->pch_pfit.enabled ||
			pipe_config->crc_enabled;

	if (IS_GEN9_LP(dev_priv))
		pipe_config->lane_lat_optim_mask =
			bxt_ddi_phy_calc_lane_lat_optim_mask(pipe_config->lane_count);

	intel_ddi_compute_min_voltage_level(dev_priv, pipe_config);

	return 0;
}

static bool mode_equal(const struct drm_display_mode *mode1,
		       const struct drm_display_mode *mode2)
{
	return drm_mode_match(mode1, mode2,
			      DRM_MODE_MATCH_TIMINGS |
			      DRM_MODE_MATCH_FLAGS |
			      DRM_MODE_MATCH_3D_FLAGS) &&
		mode1->clock == mode2->clock; /* we want an exact match */
}

static bool m_n_equal(const struct intel_link_m_n *m_n_1,
		      const struct intel_link_m_n *m_n_2)
{
	return m_n_1->tu == m_n_2->tu &&
		m_n_1->gmch_m == m_n_2->gmch_m &&
		m_n_1->gmch_n == m_n_2->gmch_n &&
		m_n_1->link_m == m_n_2->link_m &&
		m_n_1->link_n == m_n_2->link_n;
}

static bool crtcs_port_sync_compatible(const struct intel_crtc_state *crtc_state1,
				       const struct intel_crtc_state *crtc_state2)
{
	return crtc_state1->hw.active && crtc_state2->hw.active &&
		crtc_state1->output_types == crtc_state2->output_types &&
		crtc_state1->output_format == crtc_state2->output_format &&
		crtc_state1->lane_count == crtc_state2->lane_count &&
		crtc_state1->port_clock == crtc_state2->port_clock &&
		mode_equal(&crtc_state1->hw.adjusted_mode,
			   &crtc_state2->hw.adjusted_mode) &&
		m_n_equal(&crtc_state1->dp_m_n, &crtc_state2->dp_m_n);
}

static u8
intel_ddi_port_sync_transcoders(const struct intel_crtc_state *ref_crtc_state,
				int tile_group_id)
{
	struct drm_connector *connector;
	const struct drm_connector_state *conn_state;
	struct drm_i915_private *dev_priv = to_i915(ref_crtc_state->uapi.crtc->dev);
	struct intel_atomic_state *state =
		to_intel_atomic_state(ref_crtc_state->uapi.state);
	u8 transcoders = 0;
	int i;

	/*
	 * We don't enable port sync on BDW due to missing w/as and
	 * due to not having adjusted the modeset sequence appropriately.
	 */
	if (INTEL_GEN(dev_priv) < 9)
		return 0;

	if (!intel_crtc_has_type(ref_crtc_state, INTEL_OUTPUT_DP))
		return 0;

	for_each_new_connector_in_state(&state->base, connector, conn_state, i) {
		struct intel_crtc *crtc = to_intel_crtc(conn_state->crtc);
		const struct intel_crtc_state *crtc_state;

		if (!crtc)
			continue;

		if (!connector->has_tile ||
		    connector->tile_group->id !=
		    tile_group_id)
			continue;
		crtc_state = intel_atomic_get_new_crtc_state(state,
							     crtc);
		if (!crtcs_port_sync_compatible(ref_crtc_state,
						crtc_state))
			continue;
		transcoders |= BIT(crtc_state->cpu_transcoder);
	}

	return transcoders;
}

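/*
 * Late compute_config step: assign port sync master/slave roles for tiled
 * displays. The eDP transcoder (which cannot be a slave) or the lowest
 * numbered transcoder in the tile group becomes the master; the master's
 * state records the remaining transcoders in sync_mode_slaves_mask.
 */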
static int intel_ddi_compute_config_late(struct intel_encoder *encoder,
					 struct intel_crtc_state *crtc_state,
					 struct drm_connector_state *conn_state)
{
	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
	struct drm_connector *connector = conn_state->connector;
	u8 port_sync_transcoders = 0;

	drm_dbg_kms(&i915->drm, "[ENCODER:%d:%s] [CRTC:%d:%s]",
		    encoder->base.base.id, encoder->base.name,
		    crtc_state->uapi.crtc->base.id, crtc_state->uapi.crtc->name);

	if (connector->has_tile)
		port_sync_transcoders = intel_ddi_port_sync_transcoders(crtc_state,
									connector->tile_group->id);

	/*
	 * EDP transcoders cannot be enslaved, so always make them a master
	 * when present.
	 */
	if (port_sync_transcoders & BIT(TRANSCODER_EDP))
		crtc_state->master_transcoder = TRANSCODER_EDP;
	else
		crtc_state->master_transcoder = ffs(port_sync_transcoders) - 1;

	if (crtc_state->master_transcoder == crtc_state->cpu_transcoder) {
		crtc_state->master_transcoder = INVALID_TRANSCODER;
		crtc_state->sync_mode_slaves_mask =
			port_sync_transcoders & ~BIT(crtc_state->cpu_transcoder);
	}

	return 0;
}

static void intel_ddi_encoder_destroy(struct drm_encoder *encoder)
{
	struct intel_digital_port *dig_port = enc_to_dig_port(to_intel_encoder(encoder));

	intel_dp_encoder_flush_work(encoder);

	drm_encoder_cleanup(encoder);
	kfree(dig_port);
}

static const struct drm_encoder_funcs intel_ddi_funcs = {
	.reset = intel_dp_encoder_reset,
	.destroy = intel_ddi_encoder_destroy,
};

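/*
 * Allocate and initialize the DP connector for @dig_port, wiring up the
 * platform specific link training and signal level callbacks.
 */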
static struct intel_connector *
intel_ddi_init_dp_connector(struct intel_digital_port *dig_port)
{
	struct drm_i915_private *dev_priv = to_i915(dig_port->base.base.dev);
	struct intel_connector *connector;
	enum port port = dig_port->base.port;

	connector = intel_connector_alloc();
	if (!connector)
		return NULL;

	dig_port->dp.output_reg = DDI_BUF_CTL(port);
	dig_port->dp.prepare_link_retrain = intel_ddi_prepare_link_retrain;
	dig_port->dp.set_link_train = intel_ddi_set_link_train;
	dig_port->dp.set_idle_link_train = intel_ddi_set_idle_link_train;

	if (INTEL_GEN(dev_priv) >= 12)
		dig_port->dp.set_signal_levels = tgl_set_signal_levels;
	else if (INTEL_GEN(dev_priv) >= 11)
		dig_port->dp.set_signal_levels = icl_set_signal_levels;
	else if (IS_CANNONLAKE(dev_priv))
		dig_port->dp.set_signal_levels = cnl_set_signal_levels;
	else if (IS_GEN9_LP(dev_priv))
		dig_port->dp.set_signal_levels = bxt_set_signal_levels;
	else
		dig_port->dp.set_signal_levels = hsw_set_signal_levels;

	dig_port->dp.voltage_max = intel_ddi_dp_voltage_max;
	dig_port->dp.preemph_max = intel_ddi_dp_preemph_max;

	if (INTEL_GEN(dev_priv) < 12) {
		dig_port->dp.regs.dp_tp_ctl = DP_TP_CTL(port);
		dig_port->dp.regs.dp_tp_status = DP_TP_STATUS(port);
	}

	if (!intel_dp_init_connector(dig_port, connector)) {
		kfree(connector);
		return NULL;
	}

	return connector;
}

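/*
 * Force a full modeset of @crtc by flagging its connectors as changed in a
 * fresh atomic state and committing it.
 */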
static int modeset_pipe(struct drm_crtc *crtc,
			struct drm_modeset_acquire_ctx *ctx)
{
	struct drm_atomic_state *state;
	struct drm_crtc_state *crtc_state;
	int ret;

	state = drm_atomic_state_alloc(crtc->dev);
	if (!state)
		return -ENOMEM;

	state->acquire_ctx = ctx;

	crtc_state = drm_atomic_get_crtc_state(state, crtc);
	if (IS_ERR(crtc_state)) {
		ret = PTR_ERR(crtc_state);
		goto out;
	}

	crtc_state->connectors_changed = true;

	ret = drm_atomic_commit(state);
out:
	drm_atomic_state_put(state);

	return ret;
}

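/*
 * Check whether the sink's SCDC TMDS clock ratio and scrambling configuration
 * still match what the active CRTC state expects, and force a full modeset to
 * bring them back in sync if they don't.
 */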
static int intel_hdmi_reset_link(struct intel_encoder *encoder,
				 struct drm_modeset_acquire_ctx *ctx)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	struct intel_hdmi *hdmi = enc_to_intel_hdmi(encoder);
	struct intel_connector *connector = hdmi->attached_connector;
	struct i2c_adapter *adapter =
		intel_gmbus_get_adapter(dev_priv, hdmi->ddc_bus);
	struct drm_connector_state *conn_state;
	struct intel_crtc_state *crtc_state;
	struct intel_crtc *crtc;
	u8 config;
	int ret;

	if (!connector || connector->base.status != connector_status_connected)
		return 0;

	ret = drm_modeset_lock(&dev_priv->drm.mode_config.connection_mutex,
			       ctx);
	if (ret)
		return ret;

	conn_state = connector->base.state;

	crtc = to_intel_crtc(conn_state->crtc);
	if (!crtc)
		return 0;

	ret = drm_modeset_lock(&crtc->base.mutex, ctx);
	if (ret)
		return ret;

	crtc_state = to_intel_crtc_state(crtc->base.state);

	drm_WARN_ON(&dev_priv->drm,
		    !intel_crtc_has_type(crtc_state, INTEL_OUTPUT_HDMI));

	if (!crtc_state->hw.active)
		return 0;

	if (!crtc_state->hdmi_high_tmds_clock_ratio &&
	    !crtc_state->hdmi_scrambling)
		return 0;

	if (conn_state->commit &&
	    !try_wait_for_completion(&conn_state->commit->hw_done))
		return 0;

	ret = drm_scdc_readb(adapter, SCDC_TMDS_CONFIG, &config);
	if (ret < 0) {
		drm_err(&dev_priv->drm, "Failed to read TMDS config: %d\n",
			ret);
		return 0;
	}

	if (!!(config & SCDC_TMDS_BIT_CLOCK_RATIO_BY_40) ==
	    crtc_state->hdmi_high_tmds_clock_ratio &&
	    !!(config & SCDC_SCRAMBLING_ENABLE) ==
	    crtc_state->hdmi_scrambling)
		return 0;

	/*
	 * HDMI 2.0 says that one should not send scrambled data
	 * prior to configuring the sink scrambling, and that
	 * TMDS clock/data transmission should be suspended when
	 * changing the TMDS clock rate in the sink. So let's
	 * just do a full modeset here, even though some sinks
	 * would be perfectly happy if we were to just reconfigure
	 * the SCDC settings on the fly.
	 */
	return modeset_pipe(&crtc->base, ctx);
}

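/*
 * DDI hotplug handler: do the generic hotplug processing, retrain the DP link
 * (or reset the HDMI link) under the modeset locks, and schedule a detection
 * retry when the connection status appears unchanged; see the comment below
 * for why the retry is needed.
 */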
static enum intel_hotplug_state
intel_ddi_hotplug(struct intel_encoder *encoder,
		  struct intel_connector *connector)
{
	struct drm_i915_private *i915 = to_i915(encoder->base.dev);
	struct intel_digital_port *dig_port = enc_to_dig_port(encoder);
	enum phy phy = intel_port_to_phy(i915, encoder->port);
	bool is_tc = intel_phy_is_tc(i915, phy);
	struct drm_modeset_acquire_ctx ctx;
	enum intel_hotplug_state state;
	int ret;

	state = intel_encoder_hotplug(encoder, connector);

	drm_modeset_acquire_init(&ctx, 0);

	for (;;) {
		if (connector->base.connector_type == DRM_MODE_CONNECTOR_HDMIA)
			ret = intel_hdmi_reset_link(encoder, &ctx);
		else
			ret = intel_dp_retrain_link(encoder, &ctx);

		if (ret == -EDEADLK) {
			drm_modeset_backoff(&ctx);
			continue;
		}

		break;
	}

	drm_modeset_drop_locks(&ctx);
	drm_modeset_acquire_fini(&ctx);
	drm_WARN(encoder->base.dev, ret,
		 "Acquiring modeset locks failed with %i\n", ret);

	/*
	 * Unpowered type-c dongles can take some time to boot and become
	 * responsive, so give them some time to power up and then retry
	 * the probe.
	 *
	 * On many platforms the HDMI live state signal is known to be
	 * unreliable, so we can't use it to detect if a sink is connected or
	 * not. Instead we detect if it's connected based on whether we can
	 * read the EDID or not. That in turn has a problem during disconnect,
	 * since the HPD interrupt may be raised before the DDC lines get
	 * disconnected (due to how the required length of DDC vs. HPD
	 * connector pins are specified) and so we'll still be able to get a
	 * valid EDID. To solve this, schedule another detection cycle if this
	 * time around we didn't detect any change in the sink's connection
	 * status.
	 *
	 * Type-c connectors which get their HPD signal deasserted then
	 * reasserted, without unplugging/replugging the sink from the
	 * connector, introduce a delay until the AUX channel communication
	 * becomes functional. Retry the detection for 5 seconds on type-c
	 * connectors to account for this delay.
	 */
	if (state == INTEL_HOTPLUG_UNCHANGED &&
	    connector->hotplug_retries < (is_tc ? 5 : 1) &&
	    !dig_port->dp.is_mst)
		state = INTEL_HOTPLUG_RETRY;

	return state;
}

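/*
 * Live status checks: each helper reads this port's hotplug bit from the ISR
 * register that is valid on the given platform (SDEISR for PCH ports, DEISR
 * on HSW, GEN8_DE_PORT_ISR on BDW and later).
 */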
static bool lpt_digital_port_connected(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u32 bit = dev_priv->hotplug.pch_hpd[encoder->hpd_pin];

	return intel_de_read(dev_priv, SDEISR) & bit;
}

static bool hsw_digital_port_connected(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u32 bit = dev_priv->hotplug.hpd[encoder->hpd_pin];

	return intel_de_read(dev_priv, DEISR) & bit;
}

static bool bdw_digital_port_connected(struct intel_encoder *encoder)
{
	struct drm_i915_private *dev_priv = to_i915(encoder->base.dev);
	u32 bit = dev_priv->hotplug.hpd[encoder->hpd_pin];

	return intel_de_read(dev_priv, GEN8_DE_PORT_ISR) & bit;
}

static struct intel_connector *
intel_ddi_init_hdmi_connector(struct intel_digital_port *dig_port)
{
	struct intel_connector *connector;
	enum port port = dig_port->base.port;

	connector = intel_connector_alloc();
	if (!connector)
		return NULL;

	dig_port->hdmi.hdmi_reg = DDI_BUF_CTL(port);
	intel_hdmi_init_connector(dig_port, connector);

	return connector;
}

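/*
 * Decide whether DDI A must be treated as a 4 lane port even though the BIOS
 * did not set DDI_A_4_LANES in DDI_BUF_CTL.
 */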
static bool intel_ddi_a_force_4_lanes(struct intel_digital_port *dig_port)
{
	struct drm_i915_private *dev_priv = to_i915(dig_port->base.base.dev);

	if (dig_port->base.port != PORT_A)
		return false;

	if (dig_port->saved_port_bits & DDI_A_4_LANES)
		return false;

	/* Broxton/Geminilake: Bspec says that DDI_A_4_LANES is the only
	 *                     supported configuration
	 */
	if (IS_GEN9_LP(dev_priv))
		return true;

	/* Cannonlake: Most SKUs don't support DDI_E, and the only one
	 *             that does also has a full A/E split called DDI_F,
	 *             which makes DDI_E useless. However, for this case
	 *             let's trust the VBT info.
	 */
	if (IS_CANNONLAKE(dev_priv) &&
	    !intel_bios_is_port_present(dev_priv, PORT_E))
		return true;

	return false;
}

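/*
 * Determine the maximum number of lanes this DDI can drive: always 4 on
 * gen11+, while on older platforms ports A and E share lanes depending on
 * whether DDI_A_4_LANES is set.
 */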
static int
intel_ddi_max_lanes(struct intel_digital_port *dig_port)
{
	struct drm_i915_private *dev_priv = to_i915(dig_port->base.base.dev);
	enum port port = dig_port->base.port;
	int max_lanes = 4;

	if (INTEL_GEN(dev_priv) >= 11)
		return max_lanes;

	if (port == PORT_A || port == PORT_E) {
		if (intel_de_read(dev_priv, DDI_BUF_CTL(PORT_A)) & DDI_A_4_LANES)
			max_lanes = port == PORT_A ? 4 : 0;
		else
			/* Both A and E share 2 lanes */
			max_lanes = 2;
	}

	/*
	 * Some BIOS might fail to set this bit on port A if eDP
	 * wasn't lit up at boot.  Force this bit set when needed
	 * so we use the proper lane count for our calculations.
	 */
	if (intel_ddi_a_force_4_lanes(dig_port)) {
		drm_dbg_kms(&dev_priv->drm,
			    "Forcing DDI_A_4_LANES for port A\n");
		dig_port->saved_port_bits |= DDI_A_4_LANES;
		max_lanes = 4;
	}

	return max_lanes;
}

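/*
 * Check whether HTI (aka HDPORT), if enabled at boot, has claimed this PHY
 * for DP or HDMI use.
 */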
static bool hti_uses_phy(struct drm_i915_private *i915, enum phy phy)
{
	return i915->hti_state & HDPORT_ENABLED &&
		(i915->hti_state & HDPORT_PHY_USED_DP(phy) ||
		 i915->hti_state & HDPORT_PHY_USED_HDMI(phy));
}

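/*
 * Create the DDI encoder for @port together with its DP and/or HDMI
 * connectors, based on which output types the VBT says the port supports.
 */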
void intel_ddi_init(struct drm_i915_private *dev_priv, enum port port)
{
	struct intel_digital_port *dig_port;
	struct intel_encoder *encoder;
	bool init_hdmi, init_dp, init_lspcon = false;
	enum phy phy = intel_port_to_phy(dev_priv, port);

	/*
	 * On platforms with HTI (aka HDPORT), if it's enabled at boot it may
	 * have taken over some of the PHYs and made them unavailable to the
	 * driver.  In that case we should skip initializing the corresponding
	 * outputs.
	 */
	if (hti_uses_phy(dev_priv, phy)) {
		drm_dbg_kms(&dev_priv->drm, "PORT %c / PHY %c reserved by HTI\n",
			    port_name(port), phy_name(phy));
		return;
	}

	init_hdmi = intel_bios_port_supports_dvi(dev_priv, port) ||
		intel_bios_port_supports_hdmi(dev_priv, port);
	init_dp = intel_bios_port_supports_dp(dev_priv, port);

	if (intel_bios_is_lspcon_present(dev_priv, port)) {
		/*
		 * Lspcon device needs to be driven with DP connector
		 * with special detection sequence. So make sure DP
		 * is initialized before lspcon.
		 */
		init_dp = true;
		init_lspcon = true;
		init_hdmi = false;
		drm_dbg_kms(&dev_priv->drm, "VBT says port %c has lspcon\n",
			    port_name(port));
	}

	if (!init_dp && !init_hdmi) {
		drm_dbg_kms(&dev_priv->drm,
			    "VBT says port %c is not DVI/HDMI/DP compatible, respect it\n",
			    port_name(port));
		return;
	}

	dig_port = kzalloc(sizeof(*dig_port), GFP_KERNEL);
	if (!dig_port)
		return;

	encoder = &dig_port->base;

	drm_encoder_init(&dev_priv->drm, &encoder->base, &intel_ddi_funcs,
			 DRM_MODE_ENCODER_TMDS, "DDI %c", port_name(port));

	encoder->hotplug = intel_ddi_hotplug;
	encoder->compute_output_type = intel_ddi_compute_output_type;
	encoder->compute_config = intel_ddi_compute_config;
	encoder->compute_config_late = intel_ddi_compute_config_late;
	encoder->enable = intel_enable_ddi;
	encoder->pre_pll_enable = intel_ddi_pre_pll_enable;
	encoder->pre_enable = intel_ddi_pre_enable;
	encoder->disable = intel_disable_ddi;
	encoder->post_disable = intel_ddi_post_disable;
	encoder->update_pipe = intel_ddi_update_pipe;
	encoder->get_hw_state = intel_ddi_get_hw_state;
	encoder->get_config = intel_ddi_get_config;
	encoder->suspend = intel_dp_encoder_suspend;
	encoder->get_power_domains = intel_ddi_get_power_domains;

	encoder->type = INTEL_OUTPUT_DDI;
	encoder->power_domain = intel_port_to_power_domain(port);
	encoder->port = port;
	encoder->cloneable = 0;
	encoder->pipe_mask = ~0;

	if (INTEL_GEN(dev_priv) >= 11)
		dig_port->saved_port_bits =
			intel_de_read(dev_priv, DDI_BUF_CTL(port))
			& DDI_BUF_PORT_REVERSAL;
	else
		dig_port->saved_port_bits =
			intel_de_read(dev_priv, DDI_BUF_CTL(port))
			& (DDI_BUF_PORT_REVERSAL | DDI_A_4_LANES);

	dig_port->dp.output_reg = INVALID_MMIO_REG;
	dig_port->max_lanes = intel_ddi_max_lanes(dig_port);
	dig_port->aux_ch = intel_bios_port_aux_ch(dev_priv, port);

	if (intel_phy_is_tc(dev_priv, phy)) {
		bool is_legacy =
			!intel_bios_port_supports_typec_usb(dev_priv, port) &&
			!intel_bios_port_supports_tbt(dev_priv, port);

		intel_tc_port_init(dig_port, is_legacy);

		encoder->update_prepare = intel_ddi_update_prepare;
		encoder->update_complete = intel_ddi_update_complete;
	}

	drm_WARN_ON(&dev_priv->drm, port > PORT_I);
	dig_port->ddi_io_power_domain = POWER_DOMAIN_PORT_DDI_A_IO +
					      port - PORT_A;

	if (init_dp) {
		if (!intel_ddi_init_dp_connector(dig_port))
			goto err;

		dig_port->hpd_pulse = intel_dp_hpd_pulse;
	}

	/* In theory we don't need the encoder->type check, but leave it just in
	 * case we have some really bad VBTs... */
	if (encoder->type != INTEL_OUTPUT_EDP && init_hdmi) {
		if (!intel_ddi_init_hdmi_connector(dig_port))
			goto err;
	}

	if (init_lspcon) {
		if (lspcon_init(dig_port))
			/* TODO: handle hdmi info frame part */
			drm_dbg_kms(&dev_priv->drm,
				    "LSPCON init success on port %c\n",
				    port_name(port));
		else
			/*
			 * LSPCON init failed, but DP init was successful, so
			 * let's try to drive it as a DP++ port.
			 */
			drm_err(&dev_priv->drm,
				"LSPCON init failed on port %c\n",
				port_name(port));
	}

	if (INTEL_GEN(dev_priv) >= 11) {
		if (intel_phy_is_tc(dev_priv, phy))
			dig_port->connected = intel_tc_port_connected;
		else
			dig_port->connected = lpt_digital_port_connected;
	} else if (INTEL_GEN(dev_priv) >= 8) {
		if (port == PORT_A || IS_GEN9_LP(dev_priv))
			dig_port->connected = bdw_digital_port_connected;
		else
			dig_port->connected = lpt_digital_port_connected;
	} else {
		if (port == PORT_A)
			dig_port->connected = hsw_digital_port_connected;
		else
			dig_port->connected = lpt_digital_port_connected;
	}

	intel_infoframe_init(dig_port);

	return;

err:
	drm_encoder_cleanup(&encoder->base);
	kfree(dig_port);
}