	/* RPFs have no source entities, just connect their source pad
	 * to their video device.
	 */
-	return media_create_pad_link(&rpf->entity.video->video.entity,
-				     0, &rpf->entity.subdev.entity,
+	return media_create_pad_link(&rpf->video->video.entity, 0,
+				     &rpf->entity.subdev.entity,
				     RWPF_PAD_SINK,
				     MEDIA_LNK_FL_ENABLED |
				     MEDIA_LNK_FL_IMMUTABLE);
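	/* Connect the WPF source pad to its video device. */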
	return media_create_pad_link(&wpf->entity.subdev.entity,
				     RWPF_PAD_SOURCE,
-				     &wpf->entity.video->video.entity,
+				     &wpf->video->video.entity,
				     0, flags);
}
#include <media/v4l2-subdev.h>
struct vsp1_device;
-struct vsp1_video;
enum vsp1_entity_type {
	VSP1_ENTITY_BRU,
	struct v4l2_subdev subdev;
	struct v4l2_mbus_framefmt *formats;
-	struct vsp1_video *video;
-
	spinlock_t lock; /* Protects the streaming field */
	bool streaming;
};
#define RWPF_PAD_SOURCE 1
struct vsp1_rwpf;
+struct vsp1_video;
struct vsp1_rwpf_memory {
	unsigned int num_planes;
	struct vsp1_entity entity;
	struct v4l2_ctrl_handler ctrls;
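+	/* Video device node attached to this [RW]PF */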
+	struct vsp1_video *video;
+
	const struct vsp1_rwpf_operations *ops;
	unsigned int max_width;
	if (e->type == VSP1_ENTITY_RPF) {
		rwpf = to_rwpf(subdev);
		pipe->inputs[pipe->num_inputs++] = rwpf;
-		rwpf->entity.video->pipe_index = pipe->num_inputs;
+		rwpf->video->pipe_index = pipe->num_inputs;
	} else if (e->type == VSP1_ENTITY_WPF) {
		rwpf = to_rwpf(subdev);
		pipe->output = to_rwpf(subdev);
-		rwpf->entity.video->pipe_index = 0;
+		rwpf->video->pipe_index = 0;
	} else if (e->type == VSP1_ENTITY_LIF) {
		pipe->lif = e;
	} else if (e->type == VSP1_ENTITY_BRU) {
	/* Complete buffers on all video nodes. */
	for (i = 0; i < pipe->num_inputs; ++i)
-		vsp1_video_frame_end(pipe, pipe->inputs[i]->entity.video);
+		vsp1_video_frame_end(pipe, pipe->inputs[i]->video);
	if (!pipe->lif)
-		vsp1_video_frame_end(pipe, pipe->output->entity.video);
+		vsp1_video_frame_end(pipe, pipe->output->video);
	spin_lock_irqsave(&pipe->irqlock, flags);
	if (!video)
		return ERR_PTR(-ENOMEM);
-	rwpf->entity.video = video;
+	rwpf->video = video;
	video->vsp1 = vsp1;
	video->rwpf = rwpf;