#define ON_RES_CHANGE(ctx) (((ctx)->state >= MFCINST_RES_CHANGE_INIT) && \
((ctx)->state <= MFCINST_RES_CHANGE_END))
-#define IS_BUFFER_BATCH_MODE(ctx) ((ctx)->num_bufs_in_vb > 0)
+#define IS_BUFFER_BATCH_MODE(ctx) ((ctx)->batch_mode == 1)
/* UHD resolution */
#define MFC_UHD_RES (3840 * 2160)
int next_index;
int done_index;
int used;
+ int num_bufs_in_vb;
unsigned char *vir_addr;
};
unsigned long stream_protect_flag;
struct _otf_handle *otf_handle;
- int num_bufs_in_vb;
+ int batch_mode;
};
#endif /* __S5P_MFC_DATA_STRUCT_H */
if (!dmabuf)
return -ENOMEM;
- ctx->num_bufs_in_vb = s5p_mfc_bufcon_get_buf_count(dmabuf);
- mfc_debug(3, "bufcon count:%d\n", ctx->num_bufs_in_vb);
+ buf->num_bufs_in_vb = s5p_mfc_bufcon_get_buf_count(dmabuf);
+ mfc_debug(3, "bufcon count:%d\n", buf->num_bufs_in_vb);
+ if (!ctx->batch_mode && buf->num_bufs_in_vb > 0) {
+ ctx->batch_mode = 1;
+ mfc_debug(3, "buffer batch mode enabled\n");
+ }
- if (IS_BUFFER_BATCH_MODE(ctx)) {
+ if (buf->num_bufs_in_vb > 0) {
int count = 0;
- ctx->framerate = ctx->num_bufs_in_vb * ENC_DEFAULT_CAM_CAPTURE_FPS;
+ ctx->framerate = buf->num_bufs_in_vb * ENC_DEFAULT_CAM_CAPTURE_FPS;
mfc_debug(3, "framerate: %ld\n", ctx->framerate);
count = s5p_mfc_bufcon_get_daddr(ctx, buf, dmabuf, i);
- if (count != ctx->num_bufs_in_vb) {
+ if (count != buf->num_bufs_in_vb) {
s5p_mfc_mem_put_dmabuf(dmabuf);
mfc_err_ctx("invalid buffer count %d != num_bufs_in_vb %d\n",
- count, ctx->num_bufs_in_vb);
+ count, buf->num_bufs_in_vb);
return -EFAULT;
}
buf->addr[0][i] = s5p_mfc_mem_get_daddr_vb(vb, i);
}
- if (call_cop(ctx, init_buf_ctrls, ctx, MFC_CTRL_TYPE_SRC,
- vb->index) < 0)
- mfc_err_ctx("failed in init_buf_ctrls\n");
}
}
+ if (call_cop(ctx, init_buf_ctrls, ctx, MFC_CTRL_TYPE_SRC,
+ vb->index) < 0)
+ mfc_err_ctx("failed in init_buf_ctrls\n");
} else {
mfc_err_ctx("inavlid queue type: %d\n", vq->type);
return -EINVAL;
return;
}
- ref_mb = s5p_mfc_find_buf(&ctx->buf_queue_lock, &ctx->ref_buf_queue, dec_y_addr, 0);
+ ref_mb = s5p_mfc_find_buf(&ctx->buf_queue_lock, &ctx->ref_buf_queue, dec_y_addr);
if (ref_mb)
ref_mb->vb.vb2_buf.timestamp = src_mb->vb.vb2_buf.timestamp;
}
}
ref_mb = s5p_mfc_find_del_buf(&ctx->buf_queue_lock,
- &ctx->ref_buf_queue, dspl_y_addr, 0);
+ &ctx->ref_buf_queue, dspl_y_addr);
if (ref_mb) {
mfc_debug(2, "Listing: %d\n", ref_mb->vb.vb2_buf.index);
/* Check if this is the buffer we're looking for */
if (IS_BUFFER_BATCH_MODE(ctx)) {
src_mb = s5p_mfc_find_first_buf(&ctx->buf_queue_lock,
- &ctx->src_buf_queue, enc_addr[0], ctx->num_bufs_in_vb);
+ &ctx->src_buf_queue, enc_addr[0]);
if (src_mb) {
src_mb->done_index++;
mfc_debug(4, "batch buf done_index: %d\n", src_mb->done_index);
mfc_handle_stream_copy_timestamp(ctx, src_mb);
- /* last image in a buffer container */
- if (src_mb->done_index == ctx->num_bufs_in_vb) {
+ /* single buffer || last image in a buffer container */
+ if (!src_mb->num_bufs_in_vb || src_mb->done_index == src_mb->num_bufs_in_vb) {
src_mb = s5p_mfc_find_del_buf(&ctx->buf_queue_lock,
- &ctx->src_buf_queue, enc_addr[0],
- ctx->num_bufs_in_vb);
+ &ctx->src_buf_queue, enc_addr[0]);
if (src_mb)
vb2_buffer_done(&src_mb->vb.vb2_buf, VB2_BUF_STATE_DONE);
}
} else {
/* normal single buffer */
src_mb = s5p_mfc_find_del_buf(&ctx->buf_queue_lock,
- &ctx->src_buf_queue, enc_addr[0], 0);
+ &ctx->src_buf_queue, enc_addr[0]);
if (src_mb) {
index = src_mb->vb.vb2_buf.index;
if (call_cop(ctx, recover_buf_ctrls_val, ctx,
}
ref_mb = s5p_mfc_find_del_buf(&ctx->buf_queue_lock,
- &ctx->ref_buf_queue, enc_addr[0], 0);
+ &ctx->ref_buf_queue, enc_addr[0]);
if (ref_mb) {
vb2_buffer_done(&ref_mb->vb.vb2_buf, VB2_BUF_STATE_DONE);
{
int i;
- for (i = 0; i < ctx->num_bufs_in_vb; i++) {
+ for (i = 0; i < mfc_buf->num_bufs_in_vb; i++) {
if (mfc_buf->addr[i][plane]) {
mfc_debug(4, "put batch buf addr[%d][%d]: 0x%08llx\n",
i, plane, mfc_buf->addr[i][plane]);
struct s5p_mfc_raw_info *raw = &ctx->raw_buf;
int i;
- for (i = 0; i < ctx->num_bufs_in_vb; i++) {
+ for (i = 0; i < mfc_buf->num_bufs_in_vb; i++) {
mfc_buf->dmabufs[i][plane] = dmabuf_container_get_buffer(bufcon_dmabuf, i);
if (IS_ERR(mfc_buf->dmabufs[i][plane])) {
mfc_err_ctx("Failed to get dma_buf (err %ld)",
/* last image in a buffer container */
/* move src_queue -> src_queue_nal_q */
- if (src_mb->next_index == (ctx->num_bufs_in_vb - 1)) {
+ if (src_mb->next_index == (src_mb->num_bufs_in_vb - 1)) {
src_mb = s5p_mfc_get_move_buf(&ctx->buf_queue_lock,
&ctx->src_buf_nal_queue, &ctx->src_buf_queue,
MFC_BUF_SET_USED, MFC_QUEUE_ADD_BOTTOM);
if (IS_BUFFER_BATCH_MODE(ctx)) {
src_mb = s5p_mfc_find_first_buf(&ctx->buf_queue_lock,
- &ctx->src_buf_queue, enc_addr[0], ctx->num_bufs_in_vb);
+ &ctx->src_buf_queue, enc_addr[0]);
if (src_mb) {
src_mb->done_index++;
mfc_debug(4, "batch buf done_index: %d\n", src_mb->done_index);
mfc_nal_q_handle_stream_copy_timestamp(ctx, src_mb);
} else {
src_mb = s5p_mfc_find_first_buf(&ctx->buf_queue_lock,
- &ctx->src_buf_nal_queue, enc_addr[0], ctx->num_bufs_in_vb);
+ &ctx->src_buf_nal_queue, enc_addr[0]);
if (src_mb) {
src_mb->done_index++;
mfc_debug(4, "batch buf done_index: %d\n", src_mb->done_index);
mfc_nal_q_handle_stream_copy_timestamp(ctx, src_mb);
/* last image in a buffer container */
- if (src_mb->done_index == ctx->num_bufs_in_vb) {
+ if (src_mb->done_index == src_mb->num_bufs_in_vb) {
src_mb = s5p_mfc_find_del_buf(&ctx->buf_queue_lock,
- &ctx->src_buf_nal_queue, enc_addr[0],
- ctx->num_bufs_in_vb);
+ &ctx->src_buf_nal_queue, enc_addr[0]);
if (src_mb)
vb2_buffer_done(&src_mb->vb.vb2_buf, VB2_BUF_STATE_DONE);
}
}
} else {
src_mb = s5p_mfc_find_del_buf(&ctx->buf_queue_lock,
- &ctx->src_buf_nal_queue, enc_addr[0], 0);
+ &ctx->src_buf_nal_queue, enc_addr[0]);
if (!src_mb) {
mfc_err_dev("NAL Q: no src buffers\n");
return;
vb2_buffer_done(&src_mb->vb.vb2_buf, VB2_BUF_STATE_DONE);
ref_mb = s5p_mfc_find_del_buf(&ctx->buf_queue_lock,
- &ctx->ref_buf_queue, enc_addr[0], 0);
+ &ctx->ref_buf_queue, enc_addr[0]);
if (ref_mb)
vb2_buffer_done(&ref_mb->vb.vb2_buf, VB2_BUF_STATE_DONE);
}
return;
}
- ref_mb = s5p_mfc_find_buf(&ctx->buf_queue_lock, &ctx->ref_buf_queue, dec_y_addr, 0);
+ ref_mb = s5p_mfc_find_buf(&ctx->buf_queue_lock, &ctx->ref_buf_queue, dec_y_addr);
if (ref_mb)
ref_mb->vb.vb2_buf.timestamp = src_mb->vb.vb2_buf.timestamp;
frame_type = pOutStr->DisplayFrameType & S5P_FIMV_DISPLAY_FRAME_MASK;
}
- ref_mb = s5p_mfc_find_del_buf(&ctx->buf_queue_lock,
- &ctx->ref_buf_queue, dspl_y_addr, 0);
+ ref_mb = s5p_mfc_find_del_buf(&ctx->buf_queue_lock, &ctx->ref_buf_queue, dspl_y_addr);
if (ref_mb) {
mfc_debug(2, "NAL Q: find display buf, index: %d\n", ref_mb->vb.vb2_buf.index);
/* Check if this is the buffer we're looking for */
return -EAGAIN;
}
- if (IS_BUFFER_BATCH_MODE(ctx)) {
+ if (src_mb->num_bufs_in_vb > 0) {
/* last image in a buffer container */
- if (src_mb->next_index == (ctx->num_bufs_in_vb - 1))
+ if (src_mb->next_index == (src_mb->num_bufs_in_vb - 1))
last_frame = mfc_check_last_frame(ctx, src_mb);
} else {
last_frame = mfc_check_last_frame(ctx, src_mb);
}
struct s5p_mfc_buf *s5p_mfc_find_first_buf(spinlock_t *plock, struct s5p_mfc_buf_queue *queue,
- dma_addr_t addr, int search_queue)
+ dma_addr_t addr)
{
unsigned long flags;
struct s5p_mfc_buf *mfc_buf = NULL;
mfc_debug(2, "Looking for this address: 0x%08llx\n", addr);
mfc_buf = list_entry(queue->head.next, struct s5p_mfc_buf, list);
- if (search_queue > 0) {
- for (i = 0; i < search_queue; i++) {
+ if (mfc_buf->num_bufs_in_vb > 0) {
+ for (i = 0; i < mfc_buf->num_bufs_in_vb; i++) {
mb_addr = mfc_buf->addr[i][0];
mfc_debug(2, "batch buf[%d] plane[0] addr: 0x%08llx\n", i, mb_addr);
if (addr == mb_addr) {
}
struct s5p_mfc_buf *s5p_mfc_find_buf(spinlock_t *plock, struct s5p_mfc_buf_queue *queue,
- dma_addr_t addr, int search_queue)
+ dma_addr_t addr)
{
unsigned long flags;
struct s5p_mfc_buf *mfc_buf = NULL;
mfc_debug(2, "Looking for this address: 0x%08llx\n", addr);
list_for_each_entry(mfc_buf, &queue->head, list) {
- if (search_queue > 0) {
- for (i = 0; i < search_queue; i++) {
+ if (mfc_buf->num_bufs_in_vb > 0) {
+ for (i = 0; i < mfc_buf->num_bufs_in_vb; i++) {
mb_addr = mfc_buf->addr[i][0];
mfc_debug(2, "batch buf[%d] plane[0] addr: 0x%08llx\n", i, mb_addr);
if (addr == mb_addr) {
}
struct s5p_mfc_buf *s5p_mfc_find_del_buf(spinlock_t *plock, struct s5p_mfc_buf_queue *queue,
- dma_addr_t addr, int search_queue)
+ dma_addr_t addr)
{
unsigned long flags;
struct s5p_mfc_buf *mfc_buf = NULL;
mfc_debug(2, "Looking for this address: 0x%08llx\n", addr);
list_for_each_entry(mfc_buf, &queue->head, list) {
- if (search_queue > 0) {
- for (i = 0; i < search_queue; i++) {
+ if (mfc_buf->num_bufs_in_vb > 0) {
+ for (i = 0; i < mfc_buf->num_bufs_in_vb; i++) {
mb_addr = mfc_buf->addr[i][0];
mfc_debug(2, "batch buf[%d] plane[0] addr: 0x%08llx\n", i, mb_addr);
src_mb->used = 0;
+ /* If it is not buffer batch mode, index is always zero */
+ if (src_mb->next_index > src_mb->done_index) {
+ mfc_debug(2, "NAL Q: batch buf next index[%d] recover to %d\n",
+ src_mb->next_index, src_mb->done_index);
+ src_mb->next_index = src_mb->done_index;
+ }
+
list_del(&src_mb->list);
ctx->src_buf_nal_queue.count--;
dma_addr_t addr);
struct s5p_mfc_buf *s5p_mfc_find_first_buf(spinlock_t *plock, struct s5p_mfc_buf_queue *queue,
- dma_addr_t addr, int search_queue);
+ dma_addr_t addr);
struct s5p_mfc_buf *s5p_mfc_find_buf(spinlock_t *plock, struct s5p_mfc_buf_queue *queue,
- dma_addr_t addr, int search_queue);
+ dma_addr_t addr);
struct s5p_mfc_buf *s5p_mfc_find_del_buf(spinlock_t *plock, struct s5p_mfc_buf_queue *queue,
- dma_addr_t addr, int search_queue);
+ dma_addr_t addr);
struct s5p_mfc_buf *s5p_mfc_find_move_buf(spinlock_t *plock,
struct s5p_mfc_buf_queue *to_queue, struct s5p_mfc_buf_queue *from_queue,
dma_addr_t addr, unsigned int released_flag);
goto buffer_set;
}
- if (IS_BUFFER_BATCH_MODE(ctx)) {
+ if (mfc_buf->num_bufs_in_vb > 0) {
for (i = 0; i < num_planes; i++) {
addr[i] = mfc_buf->addr[mfc_buf->next_index][i];
mfc_debug(2, "enc batch buf[%d] src[%d] addr: 0x%08llx\n",