Commit 387df878 authored by Dave Airlie

Merge tag 'amd-drm-fixes-6.0-2022-09-21' of https://gitlab.freedesktop.org/agd5f/linux into drm-fixes

amd-drm-fixes-6.0-2022-09-21:

amdgpu:
- SDMA 6.x fix
- GPUVM TF fix
- DCN 3.2.x fixes
- DCN 3.1.x fixes
- SMU 13.x fixes
- Clang stack size fixes for recently enabled DML code
- Fix drm dirty callback change on non-atomic cases
- USB4 display fix
Signed-off-by: Dave Airlie <airlied@redhat.com>
From: Alex Deucher <alexander.deucher@amd.com>
Link: https://patchwork.freedesktop.org/patch/msgid/20220921220605.6136-1-alexander.deucher@amd.com
@@ -39,6 +39,7 @@
 #include <linux/pm_runtime.h>
 #include <drm/drm_crtc_helper.h>
 #include <drm/drm_damage_helper.h>
+#include <drm/drm_drv.h>
 #include <drm/drm_edid.h>
 #include <drm/drm_gem_framebuffer_helper.h>
 #include <drm/drm_fb_helper.h>
@@ -497,6 +498,11 @@ bool amdgpu_display_ddc_probe(struct amdgpu_connector *amdgpu_connector,
 static const struct drm_framebuffer_funcs amdgpu_fb_funcs = {
     .destroy = drm_gem_fb_destroy,
     .create_handle = drm_gem_fb_create_handle,
+};
+
+static const struct drm_framebuffer_funcs amdgpu_fb_funcs_atomic = {
+    .destroy = drm_gem_fb_destroy,
+    .create_handle = drm_gem_fb_create_handle,
     .dirty = drm_atomic_helper_dirtyfb,
 };
@@ -1102,7 +1108,10 @@ static int amdgpu_display_gem_fb_verify_and_init(struct drm_device *dev,
     if (ret)
         goto err;
-    ret = drm_framebuffer_init(dev, &rfb->base, &amdgpu_fb_funcs);
+    if (drm_drv_uses_atomic_modeset(dev))
+        ret = drm_framebuffer_init(dev, &rfb->base, &amdgpu_fb_funcs_atomic);
+    else
+        ret = drm_framebuffer_init(dev, &rfb->base, &amdgpu_fb_funcs);
     if (ret)
         goto err;
......
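Note on the hunk above, as a hedged aside: drm_atomic_helper_dirtyfb() implements the .dirty hook on top of the atomic commit machinery, so the patch registers it only when the device is actually running with atomic modesetting and keeps a funcs table without .dirty otherwise. A minimal sketch of that runtime selection, where example_fb_funcs and example_fb_funcs_atomic stand in for the two tables defined above:

#include <drm/drm_drv.h>
#include <drm/drm_framebuffer.h>

/* Pick the framebuffer funcs at init time: only an atomic driver may
 * install drm_atomic_helper_dirtyfb() as the .dirty callback. */
static int example_fb_init(struct drm_device *dev, struct drm_framebuffer *fb)
{
    const struct drm_framebuffer_funcs *funcs =
        drm_drv_uses_atomic_modeset(dev) ? &example_fb_funcs_atomic :
                                           &example_fb_funcs;

    return drm_framebuffer_init(dev, fb, funcs);
}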
@@ -181,6 +181,9 @@ int amdgpu_mes_init(struct amdgpu_device *adev)
     for (i = 0; i < AMDGPU_MES_MAX_SDMA_PIPES; i++) {
         if (adev->ip_versions[SDMA0_HWIP][0] < IP_VERSION(6, 0, 0))
             adev->mes.sdma_hqd_mask[i] = i ? 0 : 0x3fc;
+        /* zero sdma_hqd_mask for non-existent engine */
+        else if (adev->sdma.num_instances == 1)
+            adev->mes.sdma_hqd_mask[i] = i ? 0 : 0xfc;
         else
             adev->mes.sdma_hqd_mask[i] = 0xfc;
     }
......
@@ -2484,8 +2484,7 @@ bool amdgpu_vm_handle_fault(struct amdgpu_device *adev, u32 pasid,
         /* Intentionally setting invalid PTE flag
          * combination to force a no-retry-fault
          */
-        flags = AMDGPU_PTE_EXECUTABLE | AMDGPU_PDE_PTE |
-            AMDGPU_PTE_TF;
+        flags = AMDGPU_PTE_SNOOPED | AMDGPU_PTE_PRT;
         value = 0;
     } else if (amdgpu_vm_fault_stop == AMDGPU_VM_FAULT_STOP_NEVER) {
         /* Redirect the access to the dummy page */
......
@@ -1103,10 +1103,13 @@ static void gmc_v9_0_get_vm_pde(struct amdgpu_device *adev, int level,
         *flags |= AMDGPU_PDE_BFS(0x9);
     } else if (level == AMDGPU_VM_PDB0) {
-        if (*flags & AMDGPU_PDE_PTE)
+        if (*flags & AMDGPU_PDE_PTE) {
             *flags &= ~AMDGPU_PDE_PTE;
-        else
+            if (!(*flags & AMDGPU_PTE_VALID))
+                *addr |= 1 << PAGE_SHIFT;
+        } else {
             *flags |= AMDGPU_PTE_TF;
+        }
     }
 }
......
@@ -4759,7 +4759,7 @@ fill_dc_plane_info_and_addr(struct amdgpu_device *adev,
     plane_info->visible = true;
     plane_info->stereo_format = PLANE_STEREO_FORMAT_NONE;
-    plane_info->layer_index = 0;
+    plane_info->layer_index = plane_state->normalized_zpos;
     ret = fill_plane_color_attributes(plane_state, plane_info->format,
                       &plane_info->color_space);
@@ -4827,7 +4827,7 @@ static int fill_dc_plane_attributes(struct amdgpu_device *adev,
     dc_plane_state->global_alpha = plane_info.global_alpha;
     dc_plane_state->global_alpha_value = plane_info.global_alpha_value;
     dc_plane_state->dcc = plane_info.dcc;
-    dc_plane_state->layer_index = plane_info.layer_index; // Always returns 0
+    dc_plane_state->layer_index = plane_info.layer_index;
     dc_plane_state->flip_int_enabled = true;
     /*
@@ -9485,6 +9485,14 @@ static int amdgpu_dm_atomic_check(struct drm_device *dev,
         }
     }
+    /*
+     * DC consults the zpos (layer_index in DC terminology) to determine the
+     * hw plane on which to enable the hw cursor (see
+     * `dcn10_can_pipe_disable_cursor`). By now, all modified planes are in
+     * atomic state, so call drm helper to normalize zpos.
+     */
+    drm_atomic_normalize_zpos(dev, state);
     /* Remove exiting planes if they are modified */
     for_each_oldnew_plane_in_state_reverse(state, plane, old_plane_state, new_plane_state, i) {
         ret = dm_update_plane_state(dc, state, plane,
......
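For background on the hunk above (a hedged sketch, not amdgpu's exact flow): drm_atomic_normalize_zpos() walks the planes attached to the CRTCs in the atomic state and fills plane_state->normalized_zpos, which the change above then copies into DC's layer_index. A minimal atomic_check using the same helper could look like this, with example_atomic_check being a hypothetical name:

#include <drm/drm_atomic_helper.h>
#include <drm/drm_blend.h>

static int example_atomic_check(struct drm_device *dev,
                struct drm_atomic_state *state)
{
    int ret;

    /* Populate plane_state->normalized_zpos for every plane in @state
     * so later validation can rely on a dense per-CRTC ordering. */
    ret = drm_atomic_normalize_zpos(dev, state);
    if (ret)
        return ret;

    /* Driver-specific plane/stream checks would follow here. */
    return drm_atomic_helper_check(dev, state);
}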
@@ -99,7 +99,7 @@ static int dcn31_get_active_display_cnt_wa(
     return display_count;
 }
-static void dcn31_disable_otg_wa(struct clk_mgr *clk_mgr_base, bool disable)
+static void dcn31_disable_otg_wa(struct clk_mgr *clk_mgr_base, struct dc_state *context, bool disable)
 {
     struct dc *dc = clk_mgr_base->ctx->dc;
     int i;
@@ -110,9 +110,10 @@ static void dcn31_disable_otg_wa(struct clk_mgr *clk_mgr_base, bool disable)
         if (pipe->top_pipe || pipe->prev_odm_pipe)
             continue;
         if (pipe->stream && (pipe->stream->dpms_off || dc_is_virtual_signal(pipe->stream->signal))) {
-            if (disable)
+            if (disable) {
                 pipe->stream_res.tg->funcs->immediate_disable_crtc(pipe->stream_res.tg);
-            else
+                reset_sync_context_for_pipe(dc, context, i);
+            } else
                 pipe->stream_res.tg->funcs->enable_crtc(pipe->stream_res.tg);
         }
     }
@@ -211,11 +212,11 @@ void dcn31_update_clocks(struct clk_mgr *clk_mgr_base,
     }
     if (should_set_clock(safe_to_lower, new_clocks->dispclk_khz, clk_mgr_base->clks.dispclk_khz)) {
-        dcn31_disable_otg_wa(clk_mgr_base, true);
+        dcn31_disable_otg_wa(clk_mgr_base, context, true);
         clk_mgr_base->clks.dispclk_khz = new_clocks->dispclk_khz;
         dcn31_smu_set_dispclk(clk_mgr, clk_mgr_base->clks.dispclk_khz);
-        dcn31_disable_otg_wa(clk_mgr_base, false);
+        dcn31_disable_otg_wa(clk_mgr_base, context, false);
         update_dispclk = true;
     }
......
@@ -119,7 +119,7 @@ static int dcn314_get_active_display_cnt_wa(
     return display_count;
 }
-static void dcn314_disable_otg_wa(struct clk_mgr *clk_mgr_base, bool disable)
+static void dcn314_disable_otg_wa(struct clk_mgr *clk_mgr_base, struct dc_state *context, bool disable)
 {
     struct dc *dc = clk_mgr_base->ctx->dc;
     int i;
@@ -129,11 +129,11 @@ static void dcn314_disable_otg_wa(struct clk_mgr *clk_mgr_base, bool disable)
         if (pipe->top_pipe || pipe->prev_odm_pipe)
             continue;
-        if (pipe->stream && (pipe->stream->dpms_off || pipe->plane_state == NULL ||
-                dc_is_virtual_signal(pipe->stream->signal))) {
-            if (disable)
+        if (pipe->stream && (pipe->stream->dpms_off || dc_is_virtual_signal(pipe->stream->signal))) {
+            if (disable) {
                 pipe->stream_res.tg->funcs->immediate_disable_crtc(pipe->stream_res.tg);
-            else
+                reset_sync_context_for_pipe(dc, context, i);
+            } else
                 pipe->stream_res.tg->funcs->enable_crtc(pipe->stream_res.tg);
         }
     }
@@ -233,11 +233,11 @@ void dcn314_update_clocks(struct clk_mgr *clk_mgr_base,
     }
     if (should_set_clock(safe_to_lower, new_clocks->dispclk_khz, clk_mgr_base->clks.dispclk_khz)) {
-        dcn314_disable_otg_wa(clk_mgr_base, true);
+        dcn314_disable_otg_wa(clk_mgr_base, context, true);
         clk_mgr_base->clks.dispclk_khz = new_clocks->dispclk_khz;
         dcn314_smu_set_dispclk(clk_mgr, clk_mgr_base->clks.dispclk_khz);
-        dcn314_disable_otg_wa(clk_mgr_base, false);
+        dcn314_disable_otg_wa(clk_mgr_base, context, false);
         update_dispclk = true;
     }
......
@@ -46,6 +46,9 @@
 #define TO_CLK_MGR_DCN315(clk_mgr)\
     container_of(clk_mgr, struct clk_mgr_dcn315, base)
+#define UNSUPPORTED_DCFCLK 10000000
+#define MIN_DPP_DISP_CLK 100000
+
 static int dcn315_get_active_display_cnt_wa(
         struct dc *dc,
         struct dc_state *context)
@@ -79,7 +82,7 @@ static int dcn315_get_active_display_cnt_wa(
     return display_count;
 }
-static void dcn315_disable_otg_wa(struct clk_mgr *clk_mgr_base, bool disable)
+static void dcn315_disable_otg_wa(struct clk_mgr *clk_mgr_base, struct dc_state *context, bool disable)
 {
     struct dc *dc = clk_mgr_base->ctx->dc;
     int i;
@@ -91,9 +94,10 @@ static void dcn315_disable_otg_wa(struct clk_mgr *clk_mgr_base, bool disable)
             continue;
         if (pipe->stream && (pipe->stream->dpms_off || pipe->plane_state == NULL ||
                 dc_is_virtual_signal(pipe->stream->signal))) {
-            if (disable)
+            if (disable) {
                 pipe->stream_res.tg->funcs->immediate_disable_crtc(pipe->stream_res.tg);
-            else
+                reset_sync_context_for_pipe(dc, context, i);
+            } else
                 pipe->stream_res.tg->funcs->enable_crtc(pipe->stream_res.tg);
         }
     }
@@ -146,6 +150,9 @@ static void dcn315_update_clocks(struct clk_mgr *clk_mgr_base,
         }
     }
+    /* Lock pstate by requesting unsupported dcfclk if change is unsupported */
+    if (!new_clocks->p_state_change_support)
+        new_clocks->dcfclk_khz = UNSUPPORTED_DCFCLK;
     if (should_set_clock(safe_to_lower, new_clocks->dcfclk_khz, clk_mgr_base->clks.dcfclk_khz)) {
         clk_mgr_base->clks.dcfclk_khz = new_clocks->dcfclk_khz;
         dcn315_smu_set_hard_min_dcfclk(clk_mgr, clk_mgr_base->clks.dcfclk_khz);
@@ -159,10 +166,10 @@ static void dcn315_update_clocks(struct clk_mgr *clk_mgr_base,
     // workaround: Limit dppclk to 100Mhz to avoid lower eDP panel switch to plus 4K monitor underflow.
     if (!IS_DIAG_DC(dc->ctx->dce_environment)) {
-        if (new_clocks->dppclk_khz < 100000)
-            new_clocks->dppclk_khz = 100000;
-        if (new_clocks->dispclk_khz < 100000)
-            new_clocks->dispclk_khz = 100000;
+        if (new_clocks->dppclk_khz < MIN_DPP_DISP_CLK)
+            new_clocks->dppclk_khz = MIN_DPP_DISP_CLK;
+        if (new_clocks->dispclk_khz < MIN_DPP_DISP_CLK)
+            new_clocks->dispclk_khz = MIN_DPP_DISP_CLK;
     }
     if (should_set_clock(safe_to_lower, new_clocks->dppclk_khz, clk_mgr->base.clks.dppclk_khz)) {
@@ -175,12 +182,12 @@ static void dcn315_update_clocks(struct clk_mgr *clk_mgr_base,
     if (should_set_clock(safe_to_lower, new_clocks->dispclk_khz, clk_mgr_base->clks.dispclk_khz)) {
         /* No need to apply the w/a if we haven't taken over from bios yet */
         if (clk_mgr_base->clks.dispclk_khz)
-            dcn315_disable_otg_wa(clk_mgr_base, true);
+            dcn315_disable_otg_wa(clk_mgr_base, context, true);
         clk_mgr_base->clks.dispclk_khz = new_clocks->dispclk_khz;
         dcn315_smu_set_dispclk(clk_mgr, clk_mgr_base->clks.dispclk_khz);
         if (clk_mgr_base->clks.dispclk_khz)
-            dcn315_disable_otg_wa(clk_mgr_base, false);
+            dcn315_disable_otg_wa(clk_mgr_base, context, false);
         update_dispclk = true;
     }
@@ -275,7 +282,7 @@ static struct wm_table ddr5_wm_table = {
     {
         .wm_inst = WM_A,
         .wm_type = WM_TYPE_PSTATE_CHG,
-        .pstate_latency_us = 64.0,
+        .pstate_latency_us = 129.0,
         .sr_exit_time_us = 11.5,
         .sr_enter_plus_exit_time_us = 14.5,
         .valid = true,
@@ -283,7 +290,7 @@ static struct wm_table ddr5_wm_table = {
     {
         .wm_inst = WM_B,
         .wm_type = WM_TYPE_PSTATE_CHG,
-        .pstate_latency_us = 64.0,
+        .pstate_latency_us = 129.0,
         .sr_exit_time_us = 11.5,
         .sr_enter_plus_exit_time_us = 14.5,
         .valid = true,
@@ -291,7 +298,7 @@ static struct wm_table ddr5_wm_table = {
     {
         .wm_inst = WM_C,
         .wm_type = WM_TYPE_PSTATE_CHG,
-        .pstate_latency_us = 64.0,
+        .pstate_latency_us = 129.0,
         .sr_exit_time_us = 11.5,
         .sr_enter_plus_exit_time_us = 14.5,
         .valid = true,
@@ -299,7 +306,7 @@ static struct wm_table ddr5_wm_table = {
     {
         .wm_inst = WM_D,
         .wm_type = WM_TYPE_PSTATE_CHG,
-        .pstate_latency_us = 64.0,
+        .pstate_latency_us = 129.0,
         .sr_exit_time_us = 11.5,
         .sr_enter_plus_exit_time_us = 14.5,
         .valid = true,
@@ -556,8 +563,7 @@ static void dcn315_clk_mgr_helper_populate_bw_params(
     ASSERT(bw_params->clk_table.entries[i-1].dcfclk_mhz);
     bw_params->vram_type = bios_info->memory_type;
     bw_params->num_channels = bios_info->ma_channel_number;
-    if (!bw_params->num_channels)
-        bw_params->num_channels = 2;
+    bw_params->dram_channel_width_bytes = bios_info->memory_type == 0x22 ? 8 : 4;
     for (i = 0; i < WM_SET_COUNT; i++) {
         bw_params->wm_table.entries[i].wm_inst = i;
......
@@ -112,7 +112,7 @@ static int dcn316_get_active_display_cnt_wa(
     return display_count;
 }
-static void dcn316_disable_otg_wa(struct clk_mgr *clk_mgr_base, bool disable)
+static void dcn316_disable_otg_wa(struct clk_mgr *clk_mgr_base, struct dc_state *context, bool disable)
 {
     struct dc *dc = clk_mgr_base->ctx->dc;
     int i;
@@ -124,9 +124,10 @@ static void dcn316_disable_otg_wa(struct clk_mgr *clk_mgr_base, bool disable)
             continue;
         if (pipe->stream && (pipe->stream->dpms_off || pipe->plane_state == NULL ||
                 dc_is_virtual_signal(pipe->stream->signal))) {
-            if (disable)
+            if (disable) {
                 pipe->stream_res.tg->funcs->immediate_disable_crtc(pipe->stream_res.tg);
-            else
+                reset_sync_context_for_pipe(dc, context, i);
+            } else
                 pipe->stream_res.tg->funcs->enable_crtc(pipe->stream_res.tg);
         }
     }
@@ -221,11 +222,11 @@ static void dcn316_update_clocks(struct clk_mgr *clk_mgr_base,
     }
     if (should_set_clock(safe_to_lower, new_clocks->dispclk_khz, clk_mgr_base->clks.dispclk_khz)) {
-        dcn316_disable_otg_wa(clk_mgr_base, true);
+        dcn316_disable_otg_wa(clk_mgr_base, context, true);
         clk_mgr_base->clks.dispclk_khz = new_clocks->dispclk_khz;
         dcn316_smu_set_dispclk(clk_mgr, clk_mgr_base->clks.dispclk_khz);
-        dcn316_disable_otg_wa(clk_mgr_base, false);
+        dcn316_disable_otg_wa(clk_mgr_base, context, false);
         update_dispclk = true;
     }
......
@@ -2758,8 +2758,14 @@ bool perform_link_training_with_retries(
                 skip_video_pattern);
         /* Transmit idle pattern once training successful. */
-        if (status == LINK_TRAINING_SUCCESS && !is_link_bw_low)
+        if (status == LINK_TRAINING_SUCCESS && !is_link_bw_low) {
             dp_set_hw_test_pattern(link, &pipe_ctx->link_res, DP_TEST_PATTERN_VIDEO_MODE, NULL, 0);
+            /* Update verified link settings to current one
+             * Because DPIA LT might fallback to lower link setting.
+             */
+            link->verified_link_cap.link_rate = link->cur_link_settings.link_rate;
+            link->verified_link_cap.lane_count = link->cur_link_settings.lane_count;
+        }
     } else {
         status = dc_link_dp_perform_link_training(link,
                 &pipe_ctx->link_res,
@@ -5121,6 +5127,14 @@ bool dp_retrieve_lttpr_cap(struct dc_link *link)
             lttpr_dpcd_data[DP_PHY_REPEATER_128B132B_RATES -
                     DP_LT_TUNABLE_PHY_REPEATER_FIELD_DATA_STRUCTURE_REV];
+    /* If this chip cap is set, at least one retimer must exist in the chain
+     * Override count to 1 if we receive a known bad count (0 or an invalid value) */
+    if (link->chip_caps & EXT_DISPLAY_PATH_CAPS__DP_FIXED_VS_EN &&
+            (dp_convert_to_count(link->dpcd_caps.lttpr_caps.phy_repeater_cnt) == 0)) {
+        ASSERT(0);
+        link->dpcd_caps.lttpr_caps.phy_repeater_cnt = 0x80;
+    }
+
     /* Attempt to train in LTTPR transparent mode if repeater count exceeds 8. */
     is_lttpr_present = (link->dpcd_caps.lttpr_caps.max_lane_count > 0 &&
             link->dpcd_caps.lttpr_caps.max_lane_count <= 4 &&
......
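Background for the LTTPR hunk above: DP_PHY_REPEATER_CNT in the DPCD is one-hot encoded, so 0x80 decodes to one repeater, 0x40 to two, down to 0x01 for eight, and anything else decodes to zero. That is why a known-bad count is overridden to 0x80 (exactly one retimer) when the chip cap guarantees a retimer is present. dp_convert_to_count() in DC performs that decode; an equivalent stand-alone sketch (hypothetical name, same table):

#include <stdint.h>

static unsigned int example_phy_repeater_cnt_to_count(uint8_t cnt)
{
    /* DPCD PHY_REPEATER_CNT one-hot encoding: 0x80 = 1 repeater ... 0x01 = 8. */
    switch (cnt) {
    case 0x80: return 1;
    case 0x40: return 2;
    case 0x20: return 3;
    case 0x10: return 4;
    case 0x08: return 5;
    case 0x04: return 6;
    case 0x02: return 7;
    case 0x01: return 8;
    default:   return 0; /* 0x00 or an invalid value: treat as no repeaters */
    }
}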
@@ -3584,6 +3584,23 @@ void check_syncd_pipes_for_disabled_master_pipe(struct dc *dc,
     }
 }
+void reset_sync_context_for_pipe(const struct dc *dc,
+        struct dc_state *context,
+        uint8_t pipe_idx)
+{
+    int i;
+    struct pipe_ctx *pipe_ctx_reset;
+
+    /* reset the otg sync context for the pipe and its slave pipes if any */
+    for (i = 0; i < dc->res_pool->pipe_count; i++) {
+        pipe_ctx_reset = &context->res_ctx.pipe_ctx[i];
+
+        if (((GET_PIPE_SYNCD_FROM_PIPE(pipe_ctx_reset) == pipe_idx) &&
+                IS_PIPE_SYNCD_VALID(pipe_ctx_reset)) || (i == pipe_idx))
+            SET_PIPE_SYNCD_TO_PIPE(pipe_ctx_reset, i);
+    }
+}
+
 uint8_t resource_transmitter_to_phy_idx(const struct dc *dc, enum transmitter transmitter)
 {
     /* TODO - get transmitter to phy idx mapping from DMUB */
......
@@ -2164,7 +2164,8 @@ static void dce110_setup_audio_dto(
             continue;
         if (pipe_ctx->stream->signal != SIGNAL_TYPE_HDMI_TYPE_A)
             continue;
-        if (pipe_ctx->stream_res.audio != NULL) {
+        if (pipe_ctx->stream_res.audio != NULL &&
+            pipe_ctx->stream_res.audio->enabled == false) {
             struct audio_output audio_output;
             build_audio_output(context, pipe_ctx, &audio_output);
@@ -2204,7 +2205,8 @@ static void dce110_setup_audio_dto(
         if (!dc_is_dp_signal(pipe_ctx->stream->signal))
             continue;
-        if (pipe_ctx->stream_res.audio != NULL) {
+        if (pipe_ctx->stream_res.audio != NULL &&
+            pipe_ctx->stream_res.audio->enabled == false) {
             struct audio_output audio_output;
             build_audio_output(context, pipe_ctx, &audio_output);
......
@@ -445,226 +445,6 @@
     type DSCRM_DSC_FORWARD_EN; \
     type DSCRM_DSC_OPP_PIPE_SOURCE
#define DSC_REG_LIST_DCN314(id) \
SRI(DSC_TOP_CONTROL, DSC_TOP, id),\
SRI(DSC_DEBUG_CONTROL, DSC_TOP, id),\
SRI(DSCC_CONFIG0, DSCC, id),\
SRI(DSCC_CONFIG1, DSCC, id),\
SRI(DSCC_STATUS, DSCC, id),\
SRI(DSCC_INTERRUPT_CONTROL_STATUS, DSCC, id),\
SRI(DSCC_PPS_CONFIG0, DSCC, id),\
SRI(DSCC_PPS_CONFIG1, DSCC, id),\
SRI(DSCC_PPS_CONFIG2, DSCC, id),\
SRI(DSCC_PPS_CONFIG3, DSCC, id),\
SRI(DSCC_PPS_CONFIG4, DSCC, id),\
SRI(DSCC_PPS_CONFIG5, DSCC, id),\
SRI(DSCC_PPS_CONFIG6, DSCC, id),\
SRI(DSCC_PPS_CONFIG7, DSCC, id),\
SRI(DSCC_PPS_CONFIG8, DSCC, id),\
SRI(DSCC_PPS_CONFIG9, DSCC, id),\
SRI(DSCC_PPS_CONFIG10, DSCC, id),\
SRI(DSCC_PPS_CONFIG11, DSCC, id),\
SRI(DSCC_PPS_CONFIG12, DSCC, id),\
SRI(DSCC_PPS_CONFIG13, DSCC, id),\
SRI(DSCC_PPS_CONFIG14, DSCC, id),\
SRI(DSCC_PPS_CONFIG15, DSCC, id),\
SRI(DSCC_PPS_CONFIG16, DSCC, id),\
SRI(DSCC_PPS_CONFIG17, DSCC, id),\
SRI(DSCC_PPS_CONFIG18, DSCC, id),\
SRI(DSCC_PPS_CONFIG19, DSCC, id),\
SRI(DSCC_PPS_CONFIG20, DSCC, id),\
SRI(DSCC_PPS_CONFIG21, DSCC, id),\
SRI(DSCC_PPS_CONFIG22, DSCC, id),\
SRI(DSCC_MEM_POWER_CONTROL, DSCC, id),\
SRI(DSCC_R_Y_SQUARED_ERROR_LOWER, DSCC, id),\
SRI(DSCC_R_Y_SQUARED_ERROR_UPPER, DSCC, id),\
SRI(DSCC_G_CB_SQUARED_ERROR_LOWER, DSCC, id),\
SRI(DSCC_G_CB_SQUARED_ERROR_UPPER, DSCC, id),\
SRI(DSCC_B_CR_SQUARED_ERROR_LOWER, DSCC, id),\
SRI(DSCC_B_CR_SQUARED_ERROR_UPPER, DSCC, id),\
SRI(DSCC_MAX_ABS_ERROR0, DSCC, id),\
SRI(DSCC_MAX_ABS_ERROR1, DSCC, id),\
SRI(DSCC_RATE_BUFFER0_MAX_FULLNESS_LEVEL, DSCC, id),\
SRI(DSCC_RATE_BUFFER1_MAX_FULLNESS_LEVEL, DSCC, id),\
SRI(DSCC_RATE_BUFFER2_MAX_FULLNESS_LEVEL, DSCC, id),\
SRI(DSCC_RATE_BUFFER3_MAX_FULLNESS_LEVEL, DSCC, id),\
SRI(DSCC_RATE_CONTROL_BUFFER0_MAX_FULLNESS_LEVEL, DSCC, id),\
SRI(DSCC_RATE_CONTROL_BUFFER1_MAX_FULLNESS_LEVEL, DSCC, id),\
SRI(DSCC_RATE_CONTROL_BUFFER2_MAX_FULLNESS_LEVEL, DSCC, id),\
SRI(DSCC_RATE_CONTROL_BUFFER3_MAX_FULLNESS_LEVEL, DSCC, id),\
SRI(DSCCIF_CONFIG0, DSCCIF, id),\
SRI(DSCCIF_CONFIG1, DSCCIF, id),\
SRI(DSCRM_DSC_FORWARD_CONFIG, DSCRM, id)
#define DSC_REG_LIST_SH_MASK_DCN314(mask_sh)\
DSC_SF(DSC_TOP0_DSC_TOP_CONTROL, DSC_CLOCK_EN, mask_sh), \
DSC_SF(DSC_TOP0_DSC_TOP_CONTROL, DSC_DISPCLK_R_GATE_DIS, mask_sh), \
DSC_SF(DSC_TOP0_DSC_TOP_CONTROL, DSC_DSCCLK_R_GATE_DIS, mask_sh), \
DSC_SF(DSC_TOP0_DSC_DEBUG_CONTROL, DSC_DBG_EN, mask_sh), \
DSC_SF(DSC_TOP0_DSC_DEBUG_CONTROL, DSC_TEST_CLOCK_MUX_SEL, mask_sh), \
DSC_SF(DSCC0_DSCC_CONFIG0, NUMBER_OF_SLICES_PER_LINE, mask_sh), \
DSC_SF(DSCC0_DSCC_CONFIG0, ALTERNATE_ICH_ENCODING_EN, mask_sh), \
DSC_SF(DSCC0_DSCC_CONFIG0, NUMBER_OF_SLICES_IN_VERTICAL_DIRECTION, mask_sh), \
DSC_SF(DSCC0_DSCC_CONFIG1, DSCC_RATE_CONTROL_BUFFER_MODEL_SIZE, mask_sh), \
/*DSC_SF(DSCC0_DSCC_CONFIG1, DSCC_DISABLE_ICH, mask_sh),*/ \
DSC_SF(DSCC0_DSCC_STATUS, DSCC_DOUBLE_BUFFER_REG_UPDATE_PENDING, mask_sh), \
DSC_SF(DSCC0_DSCC_INTERRUPT_CONTROL_STATUS, DSCC_RATE_BUFFER0_OVERFLOW_OCCURRED, mask_sh), \
DSC_SF(DSCC0_DSCC_INTERRUPT_CONTROL_STATUS, DSCC_RATE_BUFFER1_OVERFLOW_OCCURRED, mask_sh), \
DSC_SF(DSCC0_DSCC_INTERRUPT_CONTROL_STATUS, DSCC_RATE_BUFFER2_OVERFLOW_OCCURRED, mask_sh), \
DSC_SF(DSCC0_DSCC_INTERRUPT_CONTROL_STATUS, DSCC_RATE_BUFFER3_OVERFLOW_OCCURRED, mask_sh), \
DSC_SF(DSCC0_DSCC_INTERRUPT_CONTROL_STATUS, DSCC_RATE_BUFFER0_UNDERFLOW_OCCURRED, mask_sh), \
DSC_SF(DSCC0_DSCC_INTERRUPT_CONTROL_STATUS, DSCC_RATE_BUFFER1_UNDERFLOW_OCCURRED, mask_sh), \
DSC_SF(DSCC0_DSCC_INTERRUPT_CONTROL_STATUS, DSCC_RATE_BUFFER2_UNDERFLOW_OCCURRED, mask_sh), \
DSC_SF(DSCC0_DSCC_INTERRUPT_CONTROL_STATUS, DSCC_RATE_BUFFER3_UNDERFLOW_OCCURRED, mask_sh), \
DSC_SF(DSCC0_DSCC_INTERRUPT_CONTROL_STATUS, DSCC_RATE_CONTROL_BUFFER_MODEL0_OVERFLOW_OCCURRED, mask_sh), \
DSC_SF(DSCC0_DSCC_INTERRUPT_CONTROL_STATUS, DSCC_RATE_CONTROL_BUFFER_MODEL1_OVERFLOW_OCCURRED, mask_sh), \
DSC_SF(DSCC0_DSCC_INTERRUPT_CONTROL_STATUS, DSCC_RATE_CONTROL_BUFFER_MODEL2_OVERFLOW_OCCURRED, mask_sh), \
DSC_SF(DSCC0_DSCC_INTERRUPT_CONTROL_STATUS, DSCC_RATE_CONTROL_BUFFER_MODEL3_OVERFLOW_OCCURRED, mask_sh), \
DSC_SF(DSCC0_DSCC_INTERRUPT_CONTROL_STATUS, DSCC_RATE_BUFFER0_OVERFLOW_OCCURRED_INT_EN, mask_sh), \
DSC_SF(DSCC0_DSCC_INTERRUPT_CONTROL_STATUS, DSCC_RATE_BUFFER1_OVERFLOW_OCCURRED_INT_EN, mask_sh), \
DSC_SF(DSCC0_DSCC_INTERRUPT_CONTROL_STATUS, DSCC_RATE_BUFFER2_OVERFLOW_OCCURRED_INT_EN, mask_sh), \
DSC_SF(DSCC0_DSCC_INTERRUPT_CONTROL_STATUS, DSCC_RATE_BUFFER3_OVERFLOW_OCCURRED_INT_EN, mask_sh), \
DSC_SF(DSCC0_DSCC_INTERRUPT_CONTROL_STATUS, DSCC_RATE_BUFFER0_UNDERFLOW_OCCURRED_INT_EN, mask_sh), \
DSC_SF(DSCC0_DSCC_INTERRUPT_CONTROL_STATUS, DSCC_RATE_BUFFER1_UNDERFLOW_OCCURRED_INT_EN, mask_sh), \
DSC_SF(DSCC0_DSCC_INTERRUPT_CONTROL_STATUS, DSCC_RATE_BUFFER2_UNDERFLOW_OCCURRED_INT_EN, mask_sh), \
DSC_SF(DSCC0_DSCC_INTERRUPT_CONTROL_STATUS, DSCC_RATE_BUFFER3_UNDERFLOW_OCCURRED_INT_EN, mask_sh), \
DSC_SF(DSCC0_DSCC_INTERRUPT_CONTROL_STATUS, DSCC_RATE_CONTROL_BUFFER_MODEL0_OVERFLOW_OCCURRED_INT_EN, mask_sh), \
DSC_SF(DSCC0_DSCC_INTERRUPT_CONTROL_STATUS, DSCC_RATE_CONTROL_BUFFER_MODEL1_OVERFLOW_OCCURRED_INT_EN, mask_sh), \
DSC_SF(DSCC0_DSCC_INTERRUPT_CONTROL_STATUS, DSCC_RATE_CONTROL_BUFFER_MODEL2_OVERFLOW_OCCURRED_INT_EN, mask_sh), \
DSC_SF(DSCC0_DSCC_INTERRUPT_CONTROL_STATUS, DSCC_RATE_CONTROL_BUFFER_MODEL3_OVERFLOW_OCCURRED_INT_EN, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG0, DSC_VERSION_MINOR, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG0, DSC_VERSION_MAJOR, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG0, PPS_IDENTIFIER, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG0, LINEBUF_DEPTH, mask_sh), \
DSC2_SF(DSCC0, DSCC_PPS_CONFIG0__BITS_PER_COMPONENT, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG1, BITS_PER_PIXEL, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG1, VBR_ENABLE, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG1, SIMPLE_422, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG1, CONVERT_RGB, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG1, BLOCK_PRED_ENABLE, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG1, NATIVE_422, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG1, NATIVE_420, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG1, CHUNK_SIZE, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG2, PIC_WIDTH, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG2, PIC_HEIGHT, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG3, SLICE_WIDTH, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG3, SLICE_HEIGHT, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG4, INITIAL_XMIT_DELAY, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG4, INITIAL_DEC_DELAY, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG5, INITIAL_SCALE_VALUE, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG5, SCALE_INCREMENT_INTERVAL, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG6, SCALE_DECREMENT_INTERVAL, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG6, FIRST_LINE_BPG_OFFSET, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG6, SECOND_LINE_BPG_OFFSET, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG7, NFL_BPG_OFFSET, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG7, SLICE_BPG_OFFSET, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG8, NSL_BPG_OFFSET, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG8, SECOND_LINE_OFFSET_ADJ, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG9, INITIAL_OFFSET, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG9, FINAL_OFFSET, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG10, FLATNESS_MIN_QP, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG10, FLATNESS_MAX_QP, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG10, RC_MODEL_SIZE, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG11, RC_EDGE_FACTOR, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG11, RC_QUANT_INCR_LIMIT0, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG11, RC_QUANT_INCR_LIMIT1, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG11, RC_TGT_OFFSET_LO, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG11, RC_TGT_OFFSET_HI, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG12, RC_BUF_THRESH0, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG12, RC_BUF_THRESH1, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG12, RC_BUF_THRESH2, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG12, RC_BUF_THRESH3, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG13, RC_BUF_THRESH4, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG13, RC_BUF_THRESH5, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG13, RC_BUF_THRESH6, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG13, RC_BUF_THRESH7, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG14, RC_BUF_THRESH8, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG14, RC_BUF_THRESH9, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG14, RC_BUF_THRESH10, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG14, RC_BUF_THRESH11, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG15, RC_BUF_THRESH12, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG15, RC_BUF_THRESH13, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG15, RANGE_MIN_QP0, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG15, RANGE_MAX_QP0, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG15, RANGE_BPG_OFFSET0, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG16, RANGE_MIN_QP1, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG16, RANGE_MAX_QP1, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG16, RANGE_BPG_OFFSET1, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG16, RANGE_MIN_QP2, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG16, RANGE_MAX_QP2, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG16, RANGE_BPG_OFFSET2, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG17, RANGE_MIN_QP3, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG17, RANGE_MAX_QP3, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG17, RANGE_BPG_OFFSET3, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG17, RANGE_MIN_QP4, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG17, RANGE_MAX_QP4, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG17, RANGE_BPG_OFFSET4, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG18, RANGE_MIN_QP5, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG18, RANGE_MAX_QP5, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG18, RANGE_BPG_OFFSET5, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG18, RANGE_MIN_QP6, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG18, RANGE_MAX_QP6, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG18, RANGE_BPG_OFFSET6, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG19, RANGE_MIN_QP7, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG19, RANGE_MAX_QP7, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG19, RANGE_BPG_OFFSET7, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG19, RANGE_MIN_QP8, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG19, RANGE_MAX_QP8, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG19, RANGE_BPG_OFFSET8, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG20, RANGE_MIN_QP9, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG20, RANGE_MAX_QP9, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG20, RANGE_BPG_OFFSET9, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG20, RANGE_MIN_QP10, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG20, RANGE_MAX_QP10, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG20, RANGE_BPG_OFFSET10, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG21, RANGE_MIN_QP11, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG21, RANGE_MAX_QP11, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG21, RANGE_BPG_OFFSET11, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG21, RANGE_MIN_QP12, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG21, RANGE_MAX_QP12, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG21, RANGE_BPG_OFFSET12, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG22, RANGE_MIN_QP13, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG22, RANGE_MAX_QP13, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG22, RANGE_BPG_OFFSET13, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG22, RANGE_MIN_QP14, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG22, RANGE_MAX_QP14, mask_sh), \
DSC_SF(DSCC0_DSCC_PPS_CONFIG22, RANGE_BPG_OFFSET14, mask_sh), \
DSC_SF(DSCC0_DSCC_MEM_POWER_CONTROL, DSCC_DEFAULT_MEM_LOW_POWER_STATE, mask_sh), \
DSC_SF(DSCC0_DSCC_MEM_POWER_CONTROL, DSCC_MEM_PWR_FORCE, mask_sh), \
DSC_SF(DSCC0_DSCC_MEM_POWER_CONTROL, DSCC_MEM_PWR_DIS, mask_sh), \
DSC_SF(DSCC0_DSCC_MEM_POWER_CONTROL, DSCC_MEM_PWR_STATE, mask_sh), \
DSC_SF(DSCC0_DSCC_MEM_POWER_CONTROL, DSCC_NATIVE_422_MEM_PWR_FORCE, mask_sh), \
DSC_SF(DSCC0_DSCC_MEM_POWER_CONTROL, DSCC_NATIVE_422_MEM_PWR_DIS, mask_sh), \
DSC_SF(DSCC0_DSCC_MEM_POWER_CONTROL, DSCC_NATIVE_422_MEM_PWR_STATE, mask_sh), \
DSC_SF(DSCC0_DSCC_R_Y_SQUARED_ERROR_LOWER, DSCC_R_Y_SQUARED_ERROR_LOWER, mask_sh), \
DSC_SF(DSCC0_DSCC_R_Y_SQUARED_ERROR_UPPER, DSCC_R_Y_SQUARED_ERROR_UPPER, mask_sh), \
DSC_SF(DSCC0_DSCC_G_CB_SQUARED_ERROR_LOWER, DSCC_G_CB_SQUARED_ERROR_LOWER, mask_sh), \
DSC_SF(DSCC0_DSCC_G_CB_SQUARED_ERROR_UPPER, DSCC_G_CB_SQUARED_ERROR_UPPER, mask_sh), \
DSC_SF(DSCC0_DSCC_B_CR_SQUARED_ERROR_LOWER, DSCC_B_CR_SQUARED_ERROR_LOWER, mask_sh), \
DSC_SF(DSCC0_DSCC_B_CR_SQUARED_ERROR_UPPER, DSCC_B_CR_SQUARED_ERROR_UPPER, mask_sh), \
DSC_SF(DSCC0_DSCC_MAX_ABS_ERROR0, DSCC_R_Y_MAX_ABS_ERROR, mask_sh), \
DSC_SF(DSCC0_DSCC_MAX_ABS_ERROR0, DSCC_G_CB_MAX_ABS_ERROR, mask_sh), \
DSC_SF(DSCC0_DSCC_MAX_ABS_ERROR1, DSCC_B_CR_MAX_ABS_ERROR, mask_sh), \
DSC_SF(DSCC0_DSCC_RATE_BUFFER0_MAX_FULLNESS_LEVEL, DSCC_RATE_BUFFER0_MAX_FULLNESS_LEVEL, mask_sh), \
DSC_SF(DSCC0_DSCC_RATE_BUFFER1_MAX_FULLNESS_LEVEL, DSCC_RATE_BUFFER1_MAX_FULLNESS_LEVEL, mask_sh), \
DSC_SF(DSCC0_DSCC_RATE_BUFFER2_MAX_FULLNESS_LEVEL, DSCC_RATE_BUFFER2_MAX_FULLNESS_LEVEL, mask_sh), \
DSC_SF(DSCC0_DSCC_RATE_BUFFER3_MAX_FULLNESS_LEVEL, DSCC_RATE_BUFFER3_MAX_FULLNESS_LEVEL, mask_sh), \
DSC_SF(DSCC0_DSCC_RATE_CONTROL_BUFFER0_MAX_FULLNESS_LEVEL, DSCC_RATE_CONTROL_BUFFER0_MAX_FULLNESS_LEVEL, mask_sh), \
DSC_SF(DSCC0_DSCC_RATE_CONTROL_BUFFER1_MAX_FULLNESS_LEVEL, DSCC_RATE_CONTROL_BUFFER1_MAX_FULLNESS_LEVEL, mask_sh), \
DSC_SF(DSCC0_DSCC_RATE_CONTROL_BUFFER2_MAX_FULLNESS_LEVEL, DSCC_RATE_CONTROL_BUFFER2_MAX_FULLNESS_LEVEL, mask_sh), \
DSC_SF(DSCC0_DSCC_RATE_CONTROL_BUFFER3_MAX_FULLNESS_LEVEL, DSCC_RATE_CONTROL_BUFFER3_MAX_FULLNESS_LEVEL, mask_sh), \
DSC_SF(DSCCIF0_DSCCIF_CONFIG0, INPUT_INTERFACE_UNDERFLOW_RECOVERY_EN, mask_sh), \
DSC_SF(DSCCIF0_DSCCIF_CONFIG0, INPUT_INTERFACE_UNDERFLOW_OCCURRED_INT_EN, mask_sh), \
DSC_SF(DSCCIF0_DSCCIF_CONFIG0, INPUT_INTERFACE_UNDERFLOW_OCCURRED_STATUS, mask_sh), \
DSC_SF(DSCCIF0_DSCCIF_CONFIG0, INPUT_PIXEL_FORMAT, mask_sh), \
DSC2_SF(DSCCIF0, DSCCIF_CONFIG0__BITS_PER_COMPONENT, mask_sh), \
DSC_SF(DSCCIF0_DSCCIF_CONFIG0, DOUBLE_BUFFER_REG_UPDATE_PENDING, mask_sh), \
DSC_SF(DSCCIF0_DSCCIF_CONFIG1, PIC_WIDTH, mask_sh), \
DSC_SF(DSCCIF0_DSCCIF_CONFIG1, PIC_HEIGHT, mask_sh), \
DSC_SF(DSCRM0_DSCRM_DSC_FORWARD_CONFIG, DSCRM_DSC_FORWARD_EN, mask_sh), \
DSC_SF(DSCRM0_DSCRM_DSC_FORWARD_CONFIG, DSCRM_DSC_OPP_PIPE_SOURCE, mask_sh)
 struct dcn20_dsc_registers {
     uint32_t DSC_TOP_CONTROL;
     uint32_t DSC_DEBUG_CONTROL;
......
@@ -1565,6 +1565,7 @@ static void dcn20_update_dchubp_dpp(
     /* Any updates are handled in dc interface, just need
      * to apply existing for plane enable / opp change */
     if (pipe_ctx->update_flags.bits.enable || pipe_ctx->update_flags.bits.opp_changed
+            || pipe_ctx->update_flags.bits.plane_changed
             || pipe_ctx->stream->update_flags.bits.gamut_remap
             || pipe_ctx->stream->update_flags.bits.out_csc) {
         /* dpp/cm gamut remap*/
......
@@ -343,7 +343,6 @@ unsigned int dcn314_calculate_dccg_k1_k2_values(struct pipe_ctx *pipe_ctx, unsig
 {
     struct dc_stream_state *stream = pipe_ctx->stream;
     unsigned int odm_combine_factor = 0;
-    struct dc *dc = pipe_ctx->stream->ctx->dc;
     bool two_pix_per_container = false;
     two_pix_per_container = optc2_is_two_pixels_per_containter(&stream->timing);
@@ -364,7 +363,7 @@ unsigned int dcn314_calculate_dccg_k1_k2_values(struct pipe_ctx *pipe_ctx, unsig
     } else {
         *k1_div = PIXEL_RATE_DIV_BY_1;
         *k2_div = PIXEL_RATE_DIV_BY_4;
-        if ((odm_combine_factor == 2) || dc->debug.enable_dp_dig_pixel_rate_div_policy)
+        if (odm_combine_factor == 2)
             *k2_div = PIXEL_RATE_DIV_BY_2;
     }
 }
@@ -384,21 +383,10 @@ void dcn314_set_pixels_per_cycle(struct pipe_ctx *pipe_ctx)
         return;
     odm_combine_factor = get_odm_config(pipe_ctx, NULL);
-    if (optc2_is_two_pixels_per_containter(&pipe_ctx->stream->timing) || odm_combine_factor > 1
-            || dcn314_is_dp_dig_pixel_rate_div_policy(pipe_ctx))
+    if (optc2_is_two_pixels_per_containter(&pipe_ctx->stream->timing) || odm_combine_factor > 1)
         pix_per_cycle = 2;
     if (pipe_ctx->stream_res.stream_enc->funcs->set_input_mode)
         pipe_ctx->stream_res.stream_enc->funcs->set_input_mode(pipe_ctx->stream_res.stream_enc,
             pix_per_cycle);
 }
-
-bool dcn314_is_dp_dig_pixel_rate_div_policy(struct pipe_ctx *pipe_ctx)
-{
-    struct dc *dc = pipe_ctx->stream->ctx->dc;
-
-    if (dc_is_dp_signal(pipe_ctx->stream->signal) && !is_dp_128b_132b_signal(pipe_ctx) &&
-            dc->debug.enable_dp_dig_pixel_rate_div_policy)
-        return true;
-    return false;
-}
@@ -41,6 +41,4 @@ unsigned int dcn314_calculate_dccg_k1_k2_values(struct pipe_ctx *pipe_ctx, unsig
 void dcn314_set_pixels_per_cycle(struct pipe_ctx *pipe_ctx);
-bool dcn314_is_dp_dig_pixel_rate_div_policy(struct pipe_ctx *pipe_ctx);
 #endif /* __DC_HWSS_DCN314_H__ */
@@ -146,7 +146,6 @@ static const struct hwseq_private_funcs dcn314_private_funcs = {
     .setup_hpo_hw_control = dcn31_setup_hpo_hw_control,
     .calculate_dccg_k1_k2_values = dcn314_calculate_dccg_k1_k2_values,
     .set_pixels_per_cycle = dcn314_set_pixels_per_cycle,
-    .is_dp_dig_pixel_rate_div_policy = dcn314_is_dp_dig_pixel_rate_div_policy,
 };
 void dcn314_hw_sequencer_construct(struct dc *dc)
......
@@ -87,6 +87,9 @@
 #define DCHUBBUB_DEBUG_CTRL_0__DET_DEPTH__SHIFT 0x10
 #define DCHUBBUB_DEBUG_CTRL_0__DET_DEPTH_MASK 0x01FF0000L
+#define DSCC0_DSCC_CONFIG0__ICH_RESET_AT_END_OF_LINE__SHIFT 0x0
+#define DSCC0_DSCC_CONFIG0__ICH_RESET_AT_END_OF_LINE_MASK 0x0000000FL
+
 #include "reg_helper.h"
 #include "dce/dmub_abm.h"
 #include "dce/dmub_psr.h"
@@ -579,7 +582,7 @@ static const struct dcn30_mmhubbub_mask mcif_wb30_mask = {
 #define dsc_regsDCN314(id)\
 [id] = {\
-    DSC_REG_LIST_DCN314(id)\
+    DSC_REG_LIST_DCN20(id)\
 }
 static const struct dcn20_dsc_registers dsc_regs[] = {
@@ -590,11 +593,11 @@ static const struct dcn20_dsc_registers dsc_regs[] = {
 };
 static const struct dcn20_dsc_shift dsc_shift = {
-    DSC_REG_LIST_SH_MASK_DCN314(__SHIFT)
+    DSC_REG_LIST_SH_MASK_DCN20(__SHIFT)
 };
 static const struct dcn20_dsc_mask dsc_mask = {
-    DSC_REG_LIST_SH_MASK_DCN314(_MASK)
+    DSC_REG_LIST_SH_MASK_DCN20(_MASK)
 };
 static const struct dcn30_mpc_registers mpc_regs = {
@@ -844,7 +847,7 @@ static const struct resource_caps res_cap_dcn314 = {
     .num_ddc = 5,
     .num_vmid = 16,
     .num_mpc_3dlut = 2,
-    .num_dsc = 4,
+    .num_dsc = 3,
 };
 static const struct dc_plane_cap plane_cap = {
......
@@ -291,6 +291,7 @@ static struct _vcs_dpi_soc_bounding_box_st dcn3_15_soc = {
     .do_urgent_latency_adjustment = false,
     .urgent_latency_adjustment_fabric_clock_component_us = 0,
     .urgent_latency_adjustment_fabric_clock_reference_mhz = 0,
+    .num_chans = 4,
 };
 struct _vcs_dpi_ip_params_st dcn3_16_ip = {
@@ -680,7 +681,11 @@ void dcn315_update_bw_bounding_box(struct dc *dc, struct clk_bw_params *bw_param
     dcn3_15_ip.max_num_otg = dc->res_pool->res_cap->num_timing_generator;
     dcn3_15_ip.max_num_dpp = dc->res_pool->pipe_count;
-    dcn3_15_soc.num_chans = bw_params->num_channels;
+
+    if (bw_params->num_channels > 0)
+        dcn3_15_soc.num_chans = bw_params->num_channels;
+    if (bw_params->dram_channel_width_bytes > 0)
+        dcn3_15_soc.dram_channel_width_bytes = bw_params->dram_channel_width_bytes;
     ASSERT(clk_table->num_entries);
......
@@ -243,6 +243,50 @@ void dcn32_build_wm_range_table_fpu(struct clk_mgr_internal *clk_mgr)
     clk_mgr->base.bw_params->wm_table.nv_entries[WM_D].pmfw_breakdown.max_uclk = 0xFFFF;
 }
/**
* Finds dummy_latency_index when MCLK switching using firmware based
* vblank stretch is enabled. This function will iterate through the
* table of dummy pstate latencies until the lowest value that allows
* dm_allow_self_refresh_and_mclk_switch to happen is found
*/
int dcn32_find_dummy_latency_index_for_fw_based_mclk_switch(struct dc *dc,
struct dc_state *context,
display_e2e_pipe_params_st *pipes,
int pipe_cnt,
int vlevel)
{
const int max_latency_table_entries = 4;
const struct vba_vars_st *vba = &context->bw_ctx.dml.vba;
int dummy_latency_index = 0;
dc_assert_fp_enabled();
while (dummy_latency_index < max_latency_table_entries) {
context->bw_ctx.dml.soc.dram_clock_change_latency_us =
dc->clk_mgr->bw_params->dummy_pstate_table[dummy_latency_index].dummy_pstate_latency_us;
dcn32_internal_validate_bw(dc, context, pipes, &pipe_cnt, &vlevel, false);
if (vlevel < context->bw_ctx.dml.vba.soc.num_states &&
vba->DRAMClockChangeSupport[vlevel][vba->maxMpcComb] != dm_dram_clock_change_unsupported)
break;
dummy_latency_index++;
}
if (dummy_latency_index == max_latency_table_entries) {
ASSERT(dummy_latency_index != max_latency_table_entries);
/* If the execution gets here, it means dummy p_states are
* not possible. This should never happen and would mean
* something is severely wrong.
* Here we reset dummy_latency_index to 3, because it is
* better to have underflows than system crashes.
*/
dummy_latency_index = max_latency_table_entries - 1;
}
return dummy_latency_index;
}
 /**
  * dcn32_helper_populate_phantom_dlg_params - Get DLG params for phantom pipes
  * and populate pipe_ctx with those params.
@@ -1646,7 +1690,7 @@ void dcn32_calculate_wm_and_dlg_fpu(struct dc *dc, struct dc_state *context,
         dcn30_can_support_mclk_switch_using_fw_based_vblank_stretch(dc, context);
     if (context->bw_ctx.bw.dcn.clk.fw_based_mclk_switching) {
-        dummy_latency_index = dcn30_find_dummy_latency_index_for_fw_based_mclk_switch(dc,
+        dummy_latency_index = dcn32_find_dummy_latency_index_for_fw_based_mclk_switch(dc,
             context, pipes, pipe_cnt, vlevel);
         /* After calling dcn30_find_dummy_latency_index_for_fw_based_mclk_switch
......
@@ -71,4 +71,10 @@ void dcn32_calculate_wm_and_dlg_fpu(struct dc *dc, struct dc_state *context,
 void dcn32_update_bw_bounding_box_fpu(struct dc *dc, struct clk_bw_params *bw_params);
+int dcn32_find_dummy_latency_index_for_fw_based_mclk_switch(struct dc *dc,
+        struct dc_state *context,
+        display_e2e_pipe_params_st *pipes,
+        int pipe_cnt,
+        int vlevel);
+
 #endif
@@ -1992,6 +1992,7 @@ void dml32_ModeSupportAndSystemConfigurationFull(struct display_mode_lib *mode_l
             dml32_CalculateODMMode(
                     mode_lib->vba.MaximumPixelsPerLinePerDSCUnit,
                     mode_lib->vba.HActive[k],
+                    mode_lib->vba.OutputFormat[k],
                     mode_lib->vba.Output[k],
                     mode_lib->vba.ODMUse[k],
                     mode_lib->vba.MaxDispclk[i],
@@ -2013,6 +2014,7 @@ void dml32_ModeSupportAndSystemConfigurationFull(struct display_mode_lib *mode_l
             dml32_CalculateODMMode(
                     mode_lib->vba.MaximumPixelsPerLinePerDSCUnit,
                     mode_lib->vba.HActive[k],
+                    mode_lib->vba.OutputFormat[k],
                     mode_lib->vba.Output[k],
                     mode_lib->vba.ODMUse[k],
                     mode_lib->vba.MaxDispclk[i],
......
@@ -27,6 +27,8 @@
 #include "display_mode_vba_32.h"
 #include "../display_mode_lib.h"
+#define DCN32_MAX_FMT_420_BUFFER_WIDTH 4096
+
 unsigned int dml32_dscceComputeDelay(
         unsigned int bpc,
         double BPP,
@@ -1182,6 +1184,7 @@ void dml32_CalculateDETBufferSize(
 void dml32_CalculateODMMode(
         unsigned int MaximumPixelsPerLinePerDSCUnit,
         unsigned int HActive,
+        enum output_format_class OutFormat,
         enum output_encoder_class Output,
         enum odm_combine_policy ODMUse,
         double StateDispclk,
@@ -1253,6 +1256,29 @@ void dml32_CalculateODMMode(
         else
             *TotalAvailablePipesSupport = false;
     }
+    if (OutFormat == dm_420 && HActive > DCN32_MAX_FMT_420_BUFFER_WIDTH &&
+            ODMUse != dm_odm_combine_policy_4to1) {
+        if (HActive > DCN32_MAX_FMT_420_BUFFER_WIDTH * 4) {
+            *ODMMode = dm_odm_combine_mode_disabled;
+            *NumberOfDPP = 0;
+            *TotalAvailablePipesSupport = false;
+        } else if (HActive > DCN32_MAX_FMT_420_BUFFER_WIDTH * 2 ||
+                *ODMMode == dm_odm_combine_mode_4to1) {
+            *ODMMode = dm_odm_combine_mode_4to1;
+            *RequiredDISPCLKPerSurface = SurfaceRequiredDISPCLKWithODMCombineFourToOne;
+            *NumberOfDPP = 4;
+        } else {
+            *ODMMode = dm_odm_combine_mode_2to1;
+            *RequiredDISPCLKPerSurface = SurfaceRequiredDISPCLKWithODMCombineTwoToOne;
+            *NumberOfDPP = 2;
+        }
+    }
+    if (Output == dm_hdmi && OutFormat == dm_420 &&
+            HActive > DCN32_MAX_FMT_420_BUFFER_WIDTH) {
+        *ODMMode = dm_odm_combine_mode_disabled;
+        *NumberOfDPP = 0;
+        *TotalAvailablePipesSupport = false;
+    }
 }
 double dml32_CalculateRequiredDispclk(
......
@@ -216,6 +216,7 @@ void dml32_CalculateDETBufferSize(
 void dml32_CalculateODMMode(
         unsigned int MaximumPixelsPerLinePerDSCUnit,
         unsigned int HActive,
+        enum output_format_class OutFormat,
         enum output_encoder_class Output,
         enum odm_combine_policy ODMUse,
         double StateDispclk,
......
@@ -219,6 +219,10 @@ void check_syncd_pipes_for_disabled_master_pipe(struct dc *dc,
         struct dc_state *context,
         uint8_t disabled_master_pipe_idx);
+void reset_sync_context_for_pipe(const struct dc *dc,
+        struct dc_state *context,
+        uint8_t pipe_idx);
+
 uint8_t resource_transmitter_to_phy_idx(const struct dc *dc, enum transmitter transmitter);
 const struct link_hwss *get_link_hwss(const struct dc_link *link,
......
@@ -209,7 +209,8 @@ int smu_v13_0_init_pptable_microcode(struct smu_context *smu)
     if (!adev->scpm_enabled)
         return 0;
-    if (adev->ip_versions[MP1_HWIP][0] == IP_VERSION(13, 0, 7))
+    if ((adev->ip_versions[MP1_HWIP][0] == IP_VERSION(13, 0, 7)) ||
+        (adev->ip_versions[MP1_HWIP][0] == IP_VERSION(13, 0, 0)))
         return 0;
     /* override pptable_id from driver parameter */
@@ -218,27 +219,6 @@ int smu_v13_0_init_pptable_microcode(struct smu_context *smu)
         dev_info(adev->dev, "override pptable id %d\n", pptable_id);
     } else {
         pptable_id = smu->smu_table.boot_values.pp_table_id;
/*
* Temporary solution for SMU V13.0.0 with SCPM enabled:
* - use vbios carried pptable when pptable_id is 3664, 3715 or 3795
* - use 36831 soft pptable when pptable_id is 3683
*/
if (adev->ip_versions[MP1_HWIP][0] == IP_VERSION(13, 0, 0)) {
switch (pptable_id) {
case 3664:
case 3715:
case 3795:
pptable_id = 0;
break;
case 3683:
pptable_id = 36831;
break;
default:
dev_err(adev->dev, "Unsupported pptable id %d\n", pptable_id);
return -EINVAL;
}
}
     }
     /* "pptable_id == 0" means vbios carries the pptable. */
@@ -471,26 +451,6 @@ int smu_v13_0_setup_pptable(struct smu_context *smu)
     } else {
         pptable_id = smu->smu_table.boot_values.pp_table_id;
/*
* Temporary solution for SMU V13.0.0 with SCPM disabled:
* - use 3664, 3683 or 3715 on request
* - use 3664 when pptable_id is 0
* TODO: drop these when the pptable carried in vbios is ready.
*/
if (adev->ip_versions[MP1_HWIP][0] == IP_VERSION(13, 0, 0)) {
switch (pptable_id) {
case 0:
pptable_id = 3664;
break;
case 3664:
case 3683:
case 3715:
break;
default:
dev_err(adev->dev, "Unsupported pptable id %d\n", pptable_id);
return -EINVAL;
}
}
     }
     /* force using vbios pptable in sriov mode */
......
@@ -410,58 +410,11 @@ static int smu_v13_0_0_setup_pptable(struct smu_context *smu)
 {
     struct smu_table_context *smu_table = &smu->smu_table;
     struct amdgpu_device *adev = smu->adev;
-    uint32_t pptable_id;
     int ret = 0;
-    /*
-     * With SCPM enabled, the pptable used will be signed. It cannot
-     * be used directly by driver. To get the raw pptable, we need to
+    ret = smu_v13_0_0_get_pptable_from_pmfw(smu,
+                        &smu_table->power_play_table,
+                        &smu_table->power_play_table_size);
* rely on the combo pptable(and its revelant SMU message).
*/
if (adev->scpm_enabled) {
ret = smu_v13_0_0_get_pptable_from_pmfw(smu,
&smu_table->power_play_table,
&smu_table->power_play_table_size);
} else {
/* override pptable_id from driver parameter */
if (amdgpu_smu_pptable_id >= 0) {
pptable_id = amdgpu_smu_pptable_id;
dev_info(adev->dev, "override pptable id %d\n", pptable_id);
} else {
pptable_id = smu_table->boot_values.pp_table_id;
}
/*
* Temporary solution for SMU V13.0.0 with SCPM disabled:
* - use vbios carried pptable when pptable_id is 3664, 3715 or 3795
* - use soft pptable when pptable_id is 3683
*/
if (adev->ip_versions[MP1_HWIP][0] == IP_VERSION(13, 0, 0)) {
switch (pptable_id) {
case 3664:
case 3715:
case 3795:
pptable_id = 0;
break;
case 3683:
break;
default:
dev_err(adev->dev, "Unsupported pptable id %d\n", pptable_id);
return -EINVAL;
}
}
/* force using vbios pptable in sriov mode */
if ((amdgpu_sriov_vf(adev) || !pptable_id) && (amdgpu_emu_mode != 1))
ret = smu_v13_0_0_get_pptable_from_pmfw(smu,
&smu_table->power_play_table,
&smu_table->power_play_table_size);
else
ret = smu_v13_0_get_pptable_from_firmware(smu,
&smu_table->power_play_table,
&smu_table->power_play_table_size,
pptable_id);
}
     if (ret)
         return ret;
......