From: Eric Huang
Date: Mon, 23 Nov 2015 16:20:36 +0000 (-0500)
Subject: drm/amd/amdgpu: add uvd6.0 clock gating support. (v2)
X-Git-Url: https://git.stricted.de/?a=commitdiff_plain;h=9b08a306476d25c9f5721eccbc43e90bb23c5f58;p=GitHub%2FLineageOS%2Fandroid_kernel_motorola_exynos9610.git

drm/amd/amdgpu: add uvd6.0 clock gating support. (v2)

v2: fix bug in register mask setting.

Reviewed-by: Alex Deucher
Reviewed-by: Christian König
Acked-by: Jammy Zhou
Signed-off-by: Eric Huang
---

diff --git a/drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c b/drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
index 121915bbc3b6..3d5913926436 100644
--- a/drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
+++ b/drivers/gpu/drm/amd/amdgpu/uvd_v6_0.c
@@ -279,6 +279,234 @@ static void uvd_v6_0_mc_resume(struct amdgpu_device *adev)
 	WREG32(mmUVD_VCPU_CACHE_SIZE2, size);
 }
 
+static void cz_set_uvd_clock_gating_branches(struct amdgpu_device *adev,
+		bool enable)
+{
+	u32 data, data1;
+
+	data = RREG32(mmUVD_CGC_GATE);
+	data1 = RREG32(mmUVD_SUVD_CGC_GATE);
+	if (enable) {
+		data |= UVD_CGC_GATE__SYS_MASK |
+				UVD_CGC_GATE__UDEC_MASK |
+				UVD_CGC_GATE__MPEG2_MASK |
+				UVD_CGC_GATE__RBC_MASK |
+				UVD_CGC_GATE__LMI_MC_MASK |
+				UVD_CGC_GATE__IDCT_MASK |
+				UVD_CGC_GATE__MPRD_MASK |
+				UVD_CGC_GATE__MPC_MASK |
+				UVD_CGC_GATE__LBSI_MASK |
+				UVD_CGC_GATE__LRBBM_MASK |
+				UVD_CGC_GATE__UDEC_RE_MASK |
+				UVD_CGC_GATE__UDEC_CM_MASK |
+				UVD_CGC_GATE__UDEC_IT_MASK |
+				UVD_CGC_GATE__UDEC_DB_MASK |
+				UVD_CGC_GATE__UDEC_MP_MASK |
+				UVD_CGC_GATE__WCB_MASK |
+				UVD_CGC_GATE__VCPU_MASK |
+				UVD_CGC_GATE__SCPU_MASK;
+		data1 |= UVD_SUVD_CGC_GATE__SRE_MASK |
+				UVD_SUVD_CGC_GATE__SIT_MASK |
+				UVD_SUVD_CGC_GATE__SMP_MASK |
+				UVD_SUVD_CGC_GATE__SCM_MASK |
+				UVD_SUVD_CGC_GATE__SDB_MASK |
+				UVD_SUVD_CGC_GATE__SRE_H264_MASK |
+				UVD_SUVD_CGC_GATE__SRE_HEVC_MASK |
+				UVD_SUVD_CGC_GATE__SIT_H264_MASK |
+				UVD_SUVD_CGC_GATE__SIT_HEVC_MASK |
+				UVD_SUVD_CGC_GATE__SCM_H264_MASK |
+				UVD_SUVD_CGC_GATE__SCM_HEVC_MASK |
+				UVD_SUVD_CGC_GATE__SDB_H264_MASK |
+				UVD_SUVD_CGC_GATE__SDB_HEVC_MASK;
+	} else {
+		data &= ~(UVD_CGC_GATE__SYS_MASK |
+				UVD_CGC_GATE__UDEC_MASK |
+				UVD_CGC_GATE__MPEG2_MASK |
+				UVD_CGC_GATE__RBC_MASK |
+				UVD_CGC_GATE__LMI_MC_MASK |
+				UVD_CGC_GATE__LMI_UMC_MASK |
+				UVD_CGC_GATE__IDCT_MASK |
+				UVD_CGC_GATE__MPRD_MASK |
+				UVD_CGC_GATE__MPC_MASK |
+				UVD_CGC_GATE__LBSI_MASK |
+				UVD_CGC_GATE__LRBBM_MASK |
+				UVD_CGC_GATE__UDEC_RE_MASK |
+				UVD_CGC_GATE__UDEC_CM_MASK |
+				UVD_CGC_GATE__UDEC_IT_MASK |
+				UVD_CGC_GATE__UDEC_DB_MASK |
+				UVD_CGC_GATE__UDEC_MP_MASK |
+				UVD_CGC_GATE__WCB_MASK |
+				UVD_CGC_GATE__VCPU_MASK |
+				UVD_CGC_GATE__SCPU_MASK);
+		data1 &= ~(UVD_SUVD_CGC_GATE__SRE_MASK |
+				UVD_SUVD_CGC_GATE__SIT_MASK |
+				UVD_SUVD_CGC_GATE__SMP_MASK |
+				UVD_SUVD_CGC_GATE__SCM_MASK |
+				UVD_SUVD_CGC_GATE__SDB_MASK |
+				UVD_SUVD_CGC_GATE__SRE_H264_MASK |
+				UVD_SUVD_CGC_GATE__SRE_HEVC_MASK |
+				UVD_SUVD_CGC_GATE__SIT_H264_MASK |
+				UVD_SUVD_CGC_GATE__SIT_HEVC_MASK |
+				UVD_SUVD_CGC_GATE__SCM_H264_MASK |
+				UVD_SUVD_CGC_GATE__SCM_HEVC_MASK |
+				UVD_SUVD_CGC_GATE__SDB_H264_MASK |
+				UVD_SUVD_CGC_GATE__SDB_HEVC_MASK);
+	}
+	WREG32(mmUVD_CGC_GATE, data);
+	WREG32(mmUVD_SUVD_CGC_GATE, data1);
+}
+
+static void tonga_set_uvd_clock_gating_branches(struct amdgpu_device *adev,
+		bool enable)
+{
+	u32 data, data1;
+
+	data = RREG32(mmUVD_CGC_GATE);
+	data1 = RREG32(mmUVD_SUVD_CGC_GATE);
+	if (enable) {
+		data |= UVD_CGC_GATE__SYS_MASK |
+				UVD_CGC_GATE__UDEC_MASK |
+				UVD_CGC_GATE__MPEG2_MASK |
+				UVD_CGC_GATE__RBC_MASK |
+				UVD_CGC_GATE__LMI_MC_MASK |
+				UVD_CGC_GATE__IDCT_MASK |
+				UVD_CGC_GATE__MPRD_MASK |
+				UVD_CGC_GATE__MPC_MASK |
+				UVD_CGC_GATE__LBSI_MASK |
+				UVD_CGC_GATE__LRBBM_MASK |
+				UVD_CGC_GATE__UDEC_RE_MASK |
+				UVD_CGC_GATE__UDEC_CM_MASK |
+				UVD_CGC_GATE__UDEC_IT_MASK |
+				UVD_CGC_GATE__UDEC_DB_MASK |
+				UVD_CGC_GATE__UDEC_MP_MASK |
+				UVD_CGC_GATE__WCB_MASK |
+				UVD_CGC_GATE__VCPU_MASK |
+				UVD_CGC_GATE__SCPU_MASK;
+		data1 |= UVD_SUVD_CGC_GATE__SRE_MASK |
+				UVD_SUVD_CGC_GATE__SIT_MASK |
+				UVD_SUVD_CGC_GATE__SMP_MASK |
+				UVD_SUVD_CGC_GATE__SCM_MASK |
+				UVD_SUVD_CGC_GATE__SDB_MASK;
+	} else {
+		data &= ~(UVD_CGC_GATE__SYS_MASK |
+				UVD_CGC_GATE__UDEC_MASK |
+				UVD_CGC_GATE__MPEG2_MASK |
+				UVD_CGC_GATE__RBC_MASK |
+				UVD_CGC_GATE__LMI_MC_MASK |
+				UVD_CGC_GATE__LMI_UMC_MASK |
+				UVD_CGC_GATE__IDCT_MASK |
+				UVD_CGC_GATE__MPRD_MASK |
+				UVD_CGC_GATE__MPC_MASK |
+				UVD_CGC_GATE__LBSI_MASK |
+				UVD_CGC_GATE__LRBBM_MASK |
+				UVD_CGC_GATE__UDEC_RE_MASK |
+				UVD_CGC_GATE__UDEC_CM_MASK |
+				UVD_CGC_GATE__UDEC_IT_MASK |
+				UVD_CGC_GATE__UDEC_DB_MASK |
+				UVD_CGC_GATE__UDEC_MP_MASK |
+				UVD_CGC_GATE__WCB_MASK |
+				UVD_CGC_GATE__VCPU_MASK |
+				UVD_CGC_GATE__SCPU_MASK);
+		data1 &= ~(UVD_SUVD_CGC_GATE__SRE_MASK |
+				UVD_SUVD_CGC_GATE__SIT_MASK |
+				UVD_SUVD_CGC_GATE__SMP_MASK |
+				UVD_SUVD_CGC_GATE__SCM_MASK |
+				UVD_SUVD_CGC_GATE__SDB_MASK);
+	}
+	WREG32(mmUVD_CGC_GATE, data);
+	WREG32(mmUVD_SUVD_CGC_GATE, data1);
+}
+
+static void uvd_v6_0_set_uvd_dynamic_clock_mode(struct amdgpu_device *adev,
+		bool swmode)
+{
+	u32 data, data1 = 0, data2;
+
+	/* Always un-gate UVD REGS bit */
+	data = RREG32(mmUVD_CGC_GATE);
+	data &= ~(UVD_CGC_GATE__REGS_MASK);
+	WREG32(mmUVD_CGC_GATE, data);
+
+	data = RREG32(mmUVD_CGC_CTRL);
+	data &= ~(UVD_CGC_CTRL__CLK_OFF_DELAY_MASK |
+			UVD_CGC_CTRL__CLK_GATE_DLY_TIMER_MASK);
+	data |= UVD_CGC_CTRL__DYN_CLOCK_MODE_MASK |
+			1 << REG_FIELD_SHIFT(UVD_CGC_CTRL, CLK_GATE_DLY_TIMER) |
+			4 << REG_FIELD_SHIFT(UVD_CGC_CTRL, CLK_OFF_DELAY);
+
+	data2 = RREG32(mmUVD_SUVD_CGC_CTRL);
+	if (swmode) {
+		data &= ~(UVD_CGC_CTRL__UDEC_RE_MODE_MASK |
+				UVD_CGC_CTRL__UDEC_CM_MODE_MASK |
+				UVD_CGC_CTRL__UDEC_IT_MODE_MASK |
+				UVD_CGC_CTRL__UDEC_DB_MODE_MASK |
+				UVD_CGC_CTRL__UDEC_MP_MODE_MASK |
+				UVD_CGC_CTRL__SYS_MODE_MASK |
+				UVD_CGC_CTRL__UDEC_MODE_MASK |
+				UVD_CGC_CTRL__MPEG2_MODE_MASK |
+				UVD_CGC_CTRL__REGS_MODE_MASK |
+				UVD_CGC_CTRL__RBC_MODE_MASK |
+				UVD_CGC_CTRL__LMI_MC_MODE_MASK |
+				UVD_CGC_CTRL__LMI_UMC_MODE_MASK |
+				UVD_CGC_CTRL__IDCT_MODE_MASK |
+				UVD_CGC_CTRL__MPRD_MODE_MASK |
+				UVD_CGC_CTRL__MPC_MODE_MASK |
+				UVD_CGC_CTRL__LBSI_MODE_MASK |
+				UVD_CGC_CTRL__LRBBM_MODE_MASK |
+				UVD_CGC_CTRL__WCB_MODE_MASK |
+				UVD_CGC_CTRL__VCPU_MODE_MASK |
+				UVD_CGC_CTRL__JPEG_MODE_MASK |
+				UVD_CGC_CTRL__SCPU_MODE_MASK);
+		data1 |= UVD_CGC_CTRL2__DYN_OCLK_RAMP_EN_MASK |
+				UVD_CGC_CTRL2__DYN_RCLK_RAMP_EN_MASK;
+		data1 &= ~UVD_CGC_CTRL2__GATER_DIV_ID_MASK;
+		data1 |= 7 << REG_FIELD_SHIFT(UVD_CGC_CTRL2, GATER_DIV_ID);
+		data2 &= ~(UVD_SUVD_CGC_CTRL__SRE_MODE_MASK |
+				UVD_SUVD_CGC_CTRL__SIT_MODE_MASK |
+				UVD_SUVD_CGC_CTRL__SMP_MODE_MASK |
+				UVD_SUVD_CGC_CTRL__SCM_MODE_MASK |
+				UVD_SUVD_CGC_CTRL__SDB_MODE_MASK);
+	} else {
+		data |= UVD_CGC_CTRL__UDEC_RE_MODE_MASK |
+				UVD_CGC_CTRL__UDEC_CM_MODE_MASK |
+				UVD_CGC_CTRL__UDEC_IT_MODE_MASK |
+				UVD_CGC_CTRL__UDEC_DB_MODE_MASK |
+				UVD_CGC_CTRL__UDEC_MP_MODE_MASK |
+				UVD_CGC_CTRL__SYS_MODE_MASK |
+				UVD_CGC_CTRL__UDEC_MODE_MASK |
+				UVD_CGC_CTRL__MPEG2_MODE_MASK |
+				UVD_CGC_CTRL__REGS_MODE_MASK |
+				UVD_CGC_CTRL__RBC_MODE_MASK |
+				UVD_CGC_CTRL__LMI_MC_MODE_MASK |
+				UVD_CGC_CTRL__LMI_UMC_MODE_MASK |
+				UVD_CGC_CTRL__IDCT_MODE_MASK |
+				UVD_CGC_CTRL__MPRD_MODE_MASK |
+				UVD_CGC_CTRL__MPC_MODE_MASK |
+				UVD_CGC_CTRL__LBSI_MODE_MASK |
+				UVD_CGC_CTRL__LRBBM_MODE_MASK |
+				UVD_CGC_CTRL__WCB_MODE_MASK |
+				UVD_CGC_CTRL__VCPU_MODE_MASK |
+				UVD_CGC_CTRL__SCPU_MODE_MASK;
+		data2 |= UVD_SUVD_CGC_CTRL__SRE_MODE_MASK |
+				UVD_SUVD_CGC_CTRL__SIT_MODE_MASK |
+				UVD_SUVD_CGC_CTRL__SMP_MODE_MASK |
+				UVD_SUVD_CGC_CTRL__SCM_MODE_MASK |
+				UVD_SUVD_CGC_CTRL__SDB_MODE_MASK;
+	}
+	WREG32(mmUVD_CGC_CTRL, data);
+	WREG32(mmUVD_SUVD_CGC_CTRL, data2);
+
+	data = RREG32_UVD_CTX(ixUVD_CGC_CTRL2);
+	data &= ~(REG_FIELD_MASK(UVD_CGC_CTRL2, DYN_OCLK_RAMP_EN) |
+			REG_FIELD_MASK(UVD_CGC_CTRL2, DYN_RCLK_RAMP_EN) |
+			REG_FIELD_MASK(UVD_CGC_CTRL2, GATER_DIV_ID));
+	data1 &= (REG_FIELD_MASK(UVD_CGC_CTRL2, DYN_OCLK_RAMP_EN) |
+			REG_FIELD_MASK(UVD_CGC_CTRL2, DYN_RCLK_RAMP_EN) |
+			REG_FIELD_MASK(UVD_CGC_CTRL2, GATER_DIV_ID));
+	data |= data1;
+	WREG32_UVD_CTX(ixUVD_CGC_CTRL2, data);
+}
+
 /**
  * uvd_v6_0_start - start UVD block
  *
@@ -303,8 +531,19 @@ static int uvd_v6_0_start(struct amdgpu_device *adev)
 
 	uvd_v6_0_mc_resume(adev);
 
-	/* disable clock gating */
-	WREG32(mmUVD_CGC_GATE, 0);
+	/* Set dynamic clock gating in S/W control mode */
+	if (adev->cg_flags & AMDGPU_CG_SUPPORT_UVD_MGCG) {
+		if (adev->flags & AMD_IS_APU)
+			cz_set_uvd_clock_gating_branches(adev, false);
+		else
+			tonga_set_uvd_clock_gating_branches(adev, false);
+		uvd_v6_0_set_uvd_dynamic_clock_mode(adev, true);
+	} else {
+		/* disable clock gating */
+		uint32_t data = RREG32(mmUVD_CGC_CTRL);
+		data &= ~UVD_CGC_CTRL__DYN_CLOCK_MODE_MASK;
+		WREG32(mmUVD_CGC_CTRL, data);
+	}
 
 	/* disable interupt */
 	WREG32_P(mmUVD_MASTINT_EN, 0, ~(1 << 1));
@@ -758,6 +997,24 @@ static int uvd_v6_0_process_interrupt(struct amdgpu_device *adev,
 static int uvd_v6_0_set_clockgating_state(void *handle,
 					  enum amd_clockgating_state state)
 {
+	struct amdgpu_device *adev = (struct amdgpu_device *)handle;
+	bool enable = (state == AMD_CG_STATE_GATE) ? true : false;
+
+	if (!(adev->cg_flags & AMDGPU_CG_SUPPORT_UVD_MGCG))
+		return 0;
+
+	if (enable) {
+		if (adev->flags & AMD_IS_APU)
+			cz_set_uvd_clock_gating_branches(adev, enable);
+		else
+			tonga_set_uvd_clock_gating_branches(adev, enable);
+		uvd_v6_0_set_uvd_dynamic_clock_mode(adev, true);
+	} else {
+		uint32_t data = RREG32(mmUVD_CGC_CTRL);
+		data &= ~UVD_CGC_CTRL__DYN_CLOCK_MODE_MASK;
+		WREG32(mmUVD_CGC_CTRL, data);
+	}
+
 	return 0;
 }