#define IS_SKL_ULX(dev) (INTEL_DEVID(dev) == 0x190E || \
INTEL_DEVID(dev) == 0x1915 || \
INTEL_DEVID(dev) == 0x191E)
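+/* Kaby Lake ULT (U-series) PCI device IDs */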
+#define IS_KBL_ULT(dev) (INTEL_DEVID(dev) == 0x5906 || \
+ INTEL_DEVID(dev) == 0x5913 || \
+ INTEL_DEVID(dev) == 0x5916 || \
+ INTEL_DEVID(dev) == 0x5921 || \
+ INTEL_DEVID(dev) == 0x5926)
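+/* Kaby Lake ULX (Y-series) PCI device IDs */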
+#define IS_KBL_ULX(dev) (INTEL_DEVID(dev) == 0x590E || \
+ INTEL_DEVID(dev) == 0x5915 || \
+ INTEL_DEVID(dev) == 0x591E)
#define IS_SKL_GT3(dev) (IS_SKYLAKE(dev) && \
(INTEL_DEVID(dev) & 0x00F0) == 0x0020)
#define IS_SKL_GT4(dev) (IS_SKYLAKE(dev) && \
(INTEL_DEVID(dev) & 0x00F0) == 0x0030)
{
const struct ddi_buf_trans *ddi_translations;
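+ /* Kaby Lake reuses the Skylake DP buffer translation tables: ULX -> "Y", ULT -> "U" */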
- if (IS_SKL_ULX(dev)) {
+ if (IS_SKL_ULX(dev) || IS_KBL_ULX(dev)) {
ddi_translations = skl_y_ddi_translations_dp;
*n_entries = ARRAY_SIZE(skl_y_ddi_translations_dp);
- } else if (IS_SKL_ULT(dev)) {
+ } else if (IS_SKL_ULT(dev) || IS_KBL_ULT(dev)) {
ddi_translations = skl_u_ddi_translations_dp;
*n_entries = ARRAY_SIZE(skl_u_ddi_translations_dp);
} else {
ddi_translations = skl_ddi_translations_dp;
*n_entries = ARRAY_SIZE(skl_ddi_translations_dp);
}
struct drm_i915_private *dev_priv = dev->dev_private;
const struct ddi_buf_trans *ddi_translations;
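+ /* eDP: low-vswing panels get the dedicated eDP tables, otherwise fall back to the DP tables; KBL follows the same SKL "Y"/"U" mapping */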
- if (IS_SKL_ULX(dev)) {
+ if (IS_SKL_ULX(dev) || IS_KBL_ULX(dev)) {
if (dev_priv->edp_low_vswing) {
ddi_translations = skl_y_ddi_translations_edp;
*n_entries = ARRAY_SIZE(skl_y_ddi_translations_edp);
} else {
ddi_translations = skl_y_ddi_translations_dp;
*n_entries = ARRAY_SIZE(skl_y_ddi_translations_dp);
}
- } else if (IS_SKL_ULT(dev)) {
+ } else if (IS_SKL_ULT(dev) || IS_KBL_ULT(dev)) {
if (dev_priv->edp_low_vswing) {
ddi_translations = skl_u_ddi_translations_edp;
*n_entries = ARRAY_SIZE(skl_u_ddi_translations_edp);
} else {
ddi_translations = skl_u_ddi_translations_dp;
*n_entries = ARRAY_SIZE(skl_u_ddi_translations_dp);
}
{
const struct ddi_buf_trans *ddi_translations;
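+ /* HDMI: only the ULX ("Y") parts need the dedicated table; KBL ULX follows SKL ULX */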
- if (IS_SKL_ULX(dev)) {
+ if (IS_SKL_ULX(dev) || IS_KBL_ULX(dev)) {
ddi_translations = skl_y_ddi_translations_hdmi;
*n_entries = ARRAY_SIZE(skl_y_ddi_translations_hdmi);
} else {
ddi_translations = skl_ddi_translations_hdmi;
*n_entries = ARRAY_SIZE(skl_ddi_translations_hdmi);
}