}
void vlv_wait_port_ready(struct drm_i915_private *dev_priv,
- struct intel_digital_port *dport)
+ struct intel_digital_port *dport,
+ unsigned int expected_mask)
{
u32 port_mask;
int dpll_reg;
switch (dport->port) {
case PORT_B:
port_mask = DPLL_PORTB_READY_MASK;
dpll_reg = DPLL(0);
break;
case PORT_C:
port_mask = DPLL_PORTC_READY_MASK;
dpll_reg = DPLL(0);
+ expected_mask <<= 4;
break;
case PORT_D:
port_mask = DPLL_PORTD_READY_MASK;
dpll_reg = DPIO_PHY_STATUS;
break;
default:
BUG();
}
- if (wait_for((I915_READ(dpll_reg) & port_mask) == 0, 1000))
- WARN(1, "timed out waiting for port %c ready: 0x%08x\n",
- port_name(dport->port), I915_READ(dpll_reg));
+ if (wait_for((I915_READ(dpll_reg) & port_mask) == expected_mask, 1000))
+ WARN(1, "timed out waiting for port %c ready: got 0x%x, expected 0x%x\n",
+ port_name(dport->port), I915_READ(dpll_reg) & port_mask, expected_mask);
}
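/*
 * Semantics sketch (inferred from the code above, not taken from the commit
 * message): the port ready bits are now compared against expected_mask
 * instead of 0, so a caller can declare some PHY lanes as expected to stay
 * not-ready. Passing 0x0 keeps the old "wait for every lane" behaviour.
 * PORT_C's ready bits sit 4 bits higher in DPLL(0) than PORT_B's, hence the
 * expected_mask <<= 4 in that case.
 */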
static void intel_prepare_shared_dpll(struct intel_crtc *crtc)
struct drm_i915_private *dev_priv = dev->dev_private;
struct intel_crtc *crtc = to_intel_crtc(encoder->base.crtc);
uint32_t dp_reg = I915_READ(intel_dp->output_reg);
+ unsigned int lane_mask = 0x0;
if (WARN_ON(dp_reg & DP_PORT_EN))
return;
pps_unlock(intel_dp);
if (IS_VALLEYVIEW(dev))
- vlv_wait_port_ready(dev_priv, dp_to_dig_port(intel_dp));
+ vlv_wait_port_ready(dev_priv, dp_to_dig_port(intel_dp),
+ lane_mask);
intel_dp_sink_dpms(intel_dp, DRM_MODE_DPMS_ON);
intel_dp_start_link_train(intel_dp);
}
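/*
 * A minimal sketch of how a CHV caller that power-gates its unused PHY lanes
 * could fill lane_mask instead of leaving it at 0x0; the helper name below is
 * hypothetical and not part of this diff:
 */
static unsigned int example_unused_lane_mask(int lane_count)
{
/* one bit per PHY lane (4 lanes total); set the bits for the unused lanes */
return ~((1u << lane_count) - 1) & 0xf;
}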
int ironlake_get_lanes_required(int target_clock, int link_bw, int bpp);
void vlv_wait_port_ready(struct drm_i915_private *dev_priv,
- struct intel_digital_port *dport);
+ struct intel_digital_port *dport,
+ unsigned int expected_mask);
bool intel_get_load_detect_pipe(struct drm_connector *connector,
struct drm_display_mode *mode,
struct intel_load_detect_pipe *old,
intel_enable_hdmi(encoder);
- vlv_wait_port_ready(dev_priv, dport);
+ vlv_wait_port_ready(dev_priv, dport, 0x0);
}
static void vlv_hdmi_pre_pll_enable(struct intel_encoder *encoder)
intel_enable_hdmi(encoder);
- vlv_wait_port_ready(dev_priv, dport);
+ vlv_wait_port_ready(dev_priv, dport, 0x0);
}
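/*
 * Both HDMI call sites pass 0x0, i.e. every PHY lane is still expected to
 * come up, which matches the behaviour before this change.
 */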
static void intel_hdmi_destroy(struct drm_connector *connector)