int model; /* V4L2_IDENT_MT9M001* codes from v4l2-chip-ident.h */
unsigned int gain;
unsigned int exposure;
+ unsigned short y_skip_top; /* Lines to skip at the top */
unsigned char autoexposure;
};
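(For readers of this hunk: the sensor drivers resolve their private state from the I2C client via a container_of()-style helper such as to_mt9m001()/to_mt9v022(), which is why the icd->y_skip_top accesses below can simply become mt9m001->y_skip_top and so on. A minimal sketch of that helper, assuming the v4l2_subdev is embedded in the state struct and registered as the client's driver data, not part of this patch:

/* Illustrative accessor: recover the driver state from the i2c_client. */
static struct mt9m001 *to_mt9m001(const struct i2c_client *client)
{
	return container_of(i2c_get_clientdata(client), struct mt9m001, subdev);
}
)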
soc_camera_limit_side(&rect.top, &rect.height,
MT9M001_ROW_SKIP, MT9M001_MIN_HEIGHT, MT9M001_MAX_HEIGHT);
- total_h = rect.height + icd->y_skip_top + vblank;
+ total_h = rect.height + mt9m001->y_skip_top + vblank;
/* Blanking and start values - default... */
ret = reg_write(client, MT9M001_HORIZONTAL_BLANKING, hblank);
ret = reg_write(client, MT9M001_WINDOW_WIDTH, rect.width - 1);
if (!ret)
ret = reg_write(client, MT9M001_WINDOW_HEIGHT,
- rect.height + icd->y_skip_top - 1);
+ rect.height + mt9m001->y_skip_top - 1);
if (!ret && mt9m001->autoexposure) {
ret = reg_write(client, MT9M001_SHUTTER_WIDTH, total_h);
if (!ret) {
static int mt9m001_try_fmt(struct v4l2_subdev *sd, struct v4l2_format *f)
{
struct i2c_client *client = sd->priv;
- struct soc_camera_device *icd = client->dev.platform_data;
+ struct mt9m001 *mt9m001 = to_mt9m001(client);
struct v4l2_pix_format *pix = &f->fmt.pix;
v4l_bound_align_image(&pix->width, MT9M001_MIN_WIDTH,
MT9M001_MAX_WIDTH, 1,
- &pix->height, MT9M001_MIN_HEIGHT + icd->y_skip_top,
- MT9M001_MAX_HEIGHT + icd->y_skip_top, 0, 0);
+ &pix->height, MT9M001_MIN_HEIGHT + mt9m001->y_skip_top,
+ MT9M001_MAX_HEIGHT + mt9m001->y_skip_top, 0, 0);
if (pix->pixelformat == V4L2_PIX_FMT_SBGGR8 ||
pix->pixelformat == V4L2_PIX_FMT_SBGGR16)
if (ctrl->value) {
const u16 vblank = 25;
unsigned int total_h = mt9m001->rect.height +
- icd->y_skip_top + vblank;
+ mt9m001->y_skip_top + vblank;
if (reg_write(client, MT9M001_SHUTTER_WIDTH,
total_h) < 0)
return -EIO;
icl->free_bus(icl);
}
+static int mt9m001_g_skip_top_lines(struct v4l2_subdev *sd, u32 *lines)
+{
+ struct i2c_client *client = sd->priv;
+ struct mt9m001 *mt9m001 = to_mt9m001(client);
+
+ *lines = mt9m001->y_skip_top;
+
+ return 0;
+}
+
static struct v4l2_subdev_core_ops mt9m001_subdev_core_ops = {
.g_ctrl = mt9m001_g_ctrl,
.s_ctrl = mt9m001_s_ctrl,
.cropcap = mt9m001_cropcap,
};
+static struct v4l2_subdev_sensor_ops mt9m001_subdev_sensor_ops = {
+ .g_skip_top_lines = mt9m001_g_skip_top_lines,
+};
+
static struct v4l2_subdev_ops mt9m001_subdev_ops = {
.core = &mt9m001_subdev_core_ops,
.video = &mt9m001_subdev_video_ops,
+ .sensor = &mt9m001_subdev_sensor_ops,
};
static int mt9m001_probe(struct i2c_client *client,
/* Second stage probe - when a capture adapter is there */
icd->ops = &mt9m001_ops;
- icd->y_skip_top = 0;
+ mt9m001->y_skip_top = 0;
mt9m001->rect.left = MT9M001_COLUMN_SKIP;
mt9m001->rect.top = MT9M001_ROW_SKIP;
mt9m001->rect.width = MT9M001_MAX_WIDTH;
/* Second stage probe - when a capture adapter is there */
icd->ops = &mt9m111_ops;
- icd->y_skip_top = 0;
mt9m111->rect.left = MT9M111_MIN_DARK_COLS;
mt9m111->rect.top = MT9M111_MIN_DARK_ROWS;
u16 xskip;
u16 yskip;
unsigned int gain;
+ unsigned short y_skip_top; /* Lines to skip at the top */
unsigned int exposure;
unsigned char autoexposure;
};
ret = reg_write(client, MT9T031_WINDOW_WIDTH, rect->width - 1);
if (ret >= 0)
ret = reg_write(client, MT9T031_WINDOW_HEIGHT,
- rect->height + icd->y_skip_top - 1);
+ rect->height + mt9t031->y_skip_top - 1);
if (ret >= 0 && mt9t031->autoexposure) {
- unsigned int total_h = rect->height + icd->y_skip_top + vblank;
+ unsigned int total_h = rect->height + mt9t031->y_skip_top + vblank;
ret = set_shutter(client, total_h);
if (ret >= 0) {
const u32 shutter_max = MT9T031_MAX_HEIGHT + vblank;
const u16 vblank = MT9T031_VERTICAL_BLANK;
const u32 shutter_max = MT9T031_MAX_HEIGHT + vblank;
unsigned int total_h = mt9t031->rect.height +
- icd->y_skip_top + vblank;
+ mt9t031->y_skip_top + vblank;
if (set_shutter(client, total_h) < 0)
return -EIO;
return ret;
}
+static int mt9t031_g_skip_top_lines(struct v4l2_subdev *sd, u32 *lines)
+{
+ struct i2c_client *client = sd->priv;
+ struct mt9t031 *mt9t031 = to_mt9t031(client);
+
+ *lines = mt9t031->y_skip_top;
+
+ return 0;
+}
+
static struct v4l2_subdev_core_ops mt9t031_subdev_core_ops = {
.g_ctrl = mt9t031_g_ctrl,
.s_ctrl = mt9t031_s_ctrl,
.cropcap = mt9t031_cropcap,
};
+static struct v4l2_subdev_sensor_ops mt9t031_subdev_sensor_ops = {
+ .g_skip_top_lines = mt9t031_g_skip_top_lines,
+};
+
static struct v4l2_subdev_ops mt9t031_subdev_ops = {
.core = &mt9t031_subdev_core_ops,
.video = &mt9t031_subdev_video_ops,
+ .sensor = &mt9t031_subdev_sensor_ops,
};
static int mt9t031_probe(struct i2c_client *client,
/* Second stage probe - when a capture adapter is there */
icd->ops = &mt9t031_ops;
- icd->y_skip_top = 0;
+ mt9t031->y_skip_top = 0;
mt9t031->rect.left = MT9T031_COLUMN_SKIP;
mt9t031->rect.top = MT9T031_ROW_SKIP;
mt9t031->rect.width = MT9T031_MAX_WIDTH;
__u32 fourcc;
int model; /* V4L2_IDENT_MT9V022* codes from v4l2-chip-ident.h */
u16 chip_control;
+ unsigned short y_skip_top; /* Lines to skip at the top */
};
static struct mt9v022 *to_mt9v022(const struct i2c_client *client)
struct i2c_client *client = sd->priv;
struct mt9v022 *mt9v022 = to_mt9v022(client);
struct v4l2_rect rect = a->c;
- struct soc_camera_device *icd = client->dev.platform_data;
int ret;
/* Bayer format - even size lengths */
if (ret >= 0) {
if (ret & 1) /* Autoexposure */
ret = reg_write(client, MT9V022_MAX_TOTAL_SHUTTER_WIDTH,
- rect.height + icd->y_skip_top + 43);
+ rect.height + mt9v022->y_skip_top + 43);
else
ret = reg_write(client, MT9V022_TOTAL_SHUTTER_WIDTH,
- rect.height + icd->y_skip_top + 43);
+ rect.height + mt9v022->y_skip_top + 43);
}
/* Setup frame format: defaults apart from width and height */
if (!ret)
ret = reg_write(client, MT9V022_WINDOW_WIDTH, rect.width);
if (!ret)
ret = reg_write(client, MT9V022_WINDOW_HEIGHT,
- rect.height + icd->y_skip_top);
+ rect.height + mt9v022->y_skip_top);
if (ret < 0)
return ret;
static int mt9v022_try_fmt(struct v4l2_subdev *sd, struct v4l2_format *f)
{
struct i2c_client *client = sd->priv;
- struct soc_camera_device *icd = client->dev.platform_data;
+ struct mt9v022 *mt9v022 = to_mt9v022(client);
struct v4l2_pix_format *pix = &f->fmt.pix;
int align = pix->pixelformat == V4L2_PIX_FMT_SBGGR8 ||
pix->pixelformat == V4L2_PIX_FMT_SBGGR16;
v4l_bound_align_image(&pix->width, MT9V022_MIN_WIDTH,
MT9V022_MAX_WIDTH, align,
- &pix->height, MT9V022_MIN_HEIGHT + icd->y_skip_top,
- MT9V022_MAX_HEIGHT + icd->y_skip_top, align, 0);
+ &pix->height, MT9V022_MIN_HEIGHT + mt9v022->y_skip_top,
+ MT9V022_MAX_HEIGHT + mt9v022->y_skip_top, align, 0);
return 0;
}
icl->free_bus(icl);
}
+static int mt9v022_g_skip_top_lines(struct v4l2_subdev *sd, u32 *lines)
+{
+ struct i2c_client *client = sd->priv;
+ struct mt9v022 *mt9v022 = to_mt9v022(client);
+
+ *lines = mt9v022->y_skip_top;
+
+ return 0;
+}
+
static struct v4l2_subdev_core_ops mt9v022_subdev_core_ops = {
.g_ctrl = mt9v022_g_ctrl,
.s_ctrl = mt9v022_s_ctrl,
.cropcap = mt9v022_cropcap,
};
+static struct v4l2_subdev_sensor_ops mt9v022_subdev_sensor_ops = {
+ .g_skip_top_lines = mt9v022_g_skip_top_lines,
+};
+
static struct v4l2_subdev_ops mt9v022_subdev_ops = {
.core = &mt9v022_subdev_core_ops,
.video = &mt9v022_subdev_video_ops,
+ .sensor = &mt9v022_subdev_sensor_ops,
};
static int mt9v022_probe(struct i2c_client *client,
* MT9V022 _really_ corrupts the first read out line.
* TODO: verify on i.MX31
*/
- icd->y_skip_top = 1;
-
+ mt9v022->y_skip_top = 1;
mt9v022->rect.left = MT9V022_COLUMN_SKIP;
mt9v022->rect.top = MT9V022_ROW_SKIP;
mt9v022->rect.width = MT9V022_MAX_WIDTH;
{
struct soc_camera_host *ici = to_soc_camera_host(icd->dev.parent);
struct pxa_camera_dev *pcdev = ici->priv;
+ struct v4l2_subdev *sd = soc_camera_to_subdev(icd);
unsigned long dw, bpp;
- u32 cicr0, cicr1, cicr2, cicr3, cicr4 = 0;
+ u32 cicr0, cicr1, cicr2, cicr3, cicr4 = 0, y_skip_top;
+ int ret = v4l2_subdev_call(sd, sensor, g_skip_top_lines, &y_skip_top);
+
+ if (ret < 0)
+ y_skip_top = 0;
/* Datawidth is now guaranteed to be equal to one of the three values.
* We fix bit-per-pixel equal to data-width... */
cicr2 = 0;
cicr3 = CICR3_LPF_VAL(icd->user_height - 1) |
- CICR3_BFW_VAL(min((unsigned short)255, icd->y_skip_top));
+ CICR3_BFW_VAL(min((u32)255, y_skip_top));
cicr4 |= pcdev->mclk_divisor;
__raw_writel(cicr1, pcdev->base + CICR1);
/* Set the control device reference */
dev_set_drvdata(&icd->dev, &pdev->dev);
- icd->y_skip_top = 0;
icd->ops = &soc_camera_platform_ops;
ici = to_soc_camera_host(icd->dev.parent);
struct device *pdev; /* Platform device */
s32 user_width;
s32 user_height;
- unsigned short y_skip_top; /* Lines to skip at the top */
unsigned char iface; /* Host number */
unsigned char devnum; /* Device number per host */
unsigned char buswidth; /* See comment in .c */
struct v4l2_dv_timings *timings);
};
+/**
+ * struct v4l2_subdev_sensor_ops - v4l2-subdev sensor operations
+ * @g_skip_top_lines: number of lines at the top of the image to be skipped.
+ * This is needed for some sensors, which always corrupt
+ * several top lines of the output image, or which send their
+ * metadata in them.
+ */
+struct v4l2_subdev_sensor_ops {
+ int (*g_skip_top_lines)(struct v4l2_subdev *sd, u32 *lines);
+};
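(Usage note: host/bridge drivers query this op through v4l2_subdev_call() and fall back to skipping no lines when the sensor does not implement it, exactly as the pxa_camera hunk above does. A minimal sketch of that call pattern:

/* Ask the sensor how many top lines to drop; a missing op (typically
 * -ENOIOCTLCMD) means "skip none". */
u32 y_skip_top;
int ret = v4l2_subdev_call(sd, sensor, g_skip_top_lines, &y_skip_top);

if (ret < 0)
	y_skip_top = 0;
)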
+
/*
interrupt_service_routine: Called by the bridge chip's interrupt service
handler, when an IR interrupt status has been raised due to this subdev,
};
struct v4l2_subdev_ops {
- const struct v4l2_subdev_core_ops *core;
- const struct v4l2_subdev_tuner_ops *tuner;
- const struct v4l2_subdev_audio_ops *audio;
- const struct v4l2_subdev_video_ops *video;
- const struct v4l2_subdev_ir_ops *ir;
+ const struct v4l2_subdev_core_ops *core;
+ const struct v4l2_subdev_tuner_ops *tuner;
+ const struct v4l2_subdev_audio_ops *audio;
+ const struct v4l2_subdev_video_ops *video;
+ const struct v4l2_subdev_ir_ops *ir;
+ const struct v4l2_subdev_sensor_ops *sensor;
};
#define V4L2_SUBDEV_NAME_SIZE 32