Adjust to new v4l2 APIs and fix some debug logs.

Adapt the driver to the videobuf2 split of buffer state into a core
struct vb2_buffer and a struct vb2_v4l2_buffer carrying the
V4L2-specific fields (flags, timestamp, config_store): embed
vb2_v4l2_buffer in struct rk3288_vpu_buf, switch .queue_setup() to the
"const void *parg" prototype, use vb->index instead of
vb->v4l2_buf.index, rename timestamp_type to timestamp_flags, report
device_caps in querycap, and fix debug logs that printed dma_addr_t
values with a mismatched format specifier.
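For reference, a rough sketch of how driver code addresses the two
buffer layers after this change (illustrative only, not part of the
patch; example_finish() is a made-up helper):

    static void example_finish(struct rk3288_vpu_buf *buf, bool keyframe)
    {
            /* V4L2-specific state lives in the embedded vb2_v4l2_buffer. */
            if (keyframe)
                    buf->vb.flags |= V4L2_BUF_FLAG_KEYFRAME;

            /* Core vb2 helpers take the inner struct vb2_buffer. */
            vb2_set_plane_payload(&buf->vb.vb2_buf, 0,
                                  vb2_plane_size(&buf->vb.vb2_buf, 0));
            vb2_buffer_done(&buf->vb.vb2_buf, VB2_BUF_STATE_DONE);
    }
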
Change-Id: Iafba102fa326c669efcbb0baeb8897fe660dcdd4
Signed-off-by: Jeffy Chen <jeffy.chen@rock-chips.com>
Signed-off-by: Yakir Yang <ykk@rock-chips.com>
ctx->run_ops->run_done(ctx, result);
if (!rk3288_vpu_ctx_is_dummy_encode(ctx)) {
- struct vb2_buffer *src = &ctx->run.src->b;
- struct vb2_buffer *dst = &ctx->run.dst->b;
-
- dst->v4l2_buf.timestamp = src->v4l2_buf.timestamp;
- vb2_buffer_done(&ctx->run.src->b, result);
- vb2_buffer_done(&ctx->run.dst->b, result);
+ struct vb2_v4l2_buffer *src = &ctx->run.src->vb;
+ struct vb2_v4l2_buffer *dst = &ctx->run.dst->vb;
+
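+ /* Queues use V4L2_BUF_FLAG_TIMESTAMP_COPY, so carry the OUTPUT buffer
+  * timestamp over to the CAPTURE buffer. */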
+ dst->timestamp = src->timestamp;
+ vb2_buffer_done(&ctx->run.src->vb.vb2_buf, result);
+ vb2_buffer_done(&ctx->run.dst->vb.vb2_buf, result);
}
dev->current_ctx = NULL;
}
q->mem_ops = &vb2_dma_contig_memops;
- q->timestamp_type = V4L2_BUF_FLAG_TIMESTAMP_COPY;
+ q->timestamp_flags = V4L2_BUF_FLAG_TIMESTAMP_COPY;
ret = vb2_queue_init(q);
if (ret) {
q->ops = get_dec_queue_ops();
q->mem_ops = &vb2_dma_contig_memops;
- q->timestamp_type = V4L2_BUF_FLAG_TIMESTAMP_COPY;
+ q->timestamp_flags = V4L2_BUF_FLAG_TIMESTAMP_COPY;
ret = vb2_queue_init(q);
if (ret) {
/**
* struct rk3288_vpu_buf - Private data related to each VB2 buffer.
+ * @vb: Embedded struct vb2_v4l2_buffer.
* @list: List head for queuing in buffer queue.
- * @b: Pointer to related VB2 buffer.
* @flags: Buffer state. See enum rk3288_vpu_buf_flags.
*/
struct rk3288_vpu_buf {
- struct vb2_buffer b;
+ struct vb2_v4l2_buffer vb;
struct list_head list;
/* Mode-specific data. */
static inline struct rk3288_vpu_buf *vb_to_buf(struct vb2_buffer *vb)
{
- return container_of(vb, struct rk3288_vpu_buf, b);
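+ /* vb2 callbacks pass the embedded struct vb2_buffer; step up to the
+  * wrapping vb2_v4l2_buffer, then to our private buffer. */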
+ return container_of(to_vb2_v4l2_buffer(vb), struct rk3288_vpu_buf, vb);
}
static inline bool rk3288_vpu_ctx_is_encoder(struct rk3288_vpu_ctx *ctx)
* device capability flags are left only for backward compatibility
* and are scheduled for removal.
*/
- cap->capabilities = V4L2_CAP_VIDEO_M2M_MPLANE | V4L2_CAP_STREAMING |
+ cap->device_caps = V4L2_CAP_VIDEO_M2M_MPLANE | V4L2_CAP_STREAMING |
V4L2_CAP_VIDEO_CAPTURE_MPLANE | V4L2_CAP_VIDEO_OUTPUT_MPLANE;
+ cap->capabilities = cap->device_caps | V4L2_CAP_DEVICE_CAPS;
vpu_debug_leave();
};
static int rk3288_vpu_queue_setup(struct vb2_queue *vq,
- const struct v4l2_format *fmt,
+ const void *parg,
unsigned int *buf_count,
unsigned int *plane_count,
unsigned int psize[], void *allocators[])
vpu_debug_enter();
if (vq->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE)
- ctx->dst_bufs[vb->v4l2_buf.index] = vb;
+ ctx->dst_bufs[vb->index] = vb;
vpu_debug_leave();
vpu_debug_enter();
if (vq->type == V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE)
- ctx->dst_bufs[vb->v4l2_buf.index] = NULL;
+ ctx->dst_bufs[vb->index] = NULL;
vpu_debug_leave();
}
while (!list_empty(&queue)) {
b = list_first_entry(&queue, struct rk3288_vpu_buf, list);
- for (i = 0; i < b->b.num_planes; i++)
- vb2_set_plane_payload(&b->b, i, 0);
- vb2_buffer_done(&b->b, VB2_BUF_STATE_ERROR);
+ for (i = 0; i < b->vb.vb2_buf.num_planes; i++)
+ vb2_set_plane_payload(&b->vb.vb2_buf, i, 0);
+ vb2_buffer_done(&b->vb.vb2_buf, VB2_BUF_STATE_ERROR);
list_del(&b->list);
}
static void rk3288_vpu_dec_prepare_run(struct rk3288_vpu_ctx *ctx)
{
- struct v4l2_buffer *src = &ctx->run.src->b.v4l2_buf;
+ struct vb2_v4l2_buffer *src = &ctx->run.src->vb;
v4l2_ctrl_apply_store(&ctx->ctrl_handler, src->config_store);
enum vb2_buffer_state result)
{
struct v4l2_plane_pix_format *plane_fmts = ctx->dst_fmt.plane_fmt;
- struct vb2_buffer *dst = &ctx->run.dst->b;
+ struct vb2_buffer *dst = &ctx->run.dst->vb.vb2_buf;
int i;
if (result != VB2_BUF_STATE_DONE) {
* device capability flags are left only for backward compatibility
* and are scheduled for removal.
*/
- cap->capabilities = V4L2_CAP_VIDEO_M2M_MPLANE | V4L2_CAP_STREAMING |
+ cap->device_caps = V4L2_CAP_VIDEO_M2M_MPLANE | V4L2_CAP_STREAMING |
V4L2_CAP_VIDEO_CAPTURE_MPLANE | V4L2_CAP_VIDEO_OUTPUT_MPLANE;
+ cap->capabilities = cap->device_caps | V4L2_CAP_DEVICE_CAPS;
vpu_debug_leave();
};
static int rk3288_vpu_queue_setup(struct vb2_queue *vq,
- const struct v4l2_format *fmt,
+ const void *parg,
unsigned int *buf_count,
unsigned int *plane_count,
unsigned int psize[], void *allocators[])
while (!list_empty(&queue)) {
b = list_first_entry(&queue, struct rk3288_vpu_buf, list);
- for (i = 0; i < b->b.num_planes; i++)
- vb2_set_plane_payload(&b->b, i, 0);
- vb2_buffer_done(&b->b, VB2_BUF_STATE_ERROR);
+ for (i = 0; i < b->vb.vb2_buf.num_planes; i++)
+ vb2_set_plane_payload(&b->vb.vb2_buf, i, 0);
+ vb2_buffer_done(&b->vb.vb2_buf, VB2_BUF_STATE_ERROR);
list_del(&b->list);
}
static void rk3288_vpu_enc_prepare_run(struct rk3288_vpu_ctx *ctx)
{
- struct vb2_buffer *vb2_src = &ctx->run.src->b;
- unsigned config_store = vb2_src->v4l2_buf.config_store;
+ struct vb2_v4l2_buffer *vb2_src = &ctx->run.src->vb;
+ unsigned int config_store = vb2_src->config_store;
v4l2_ctrl_apply_store(&ctx->ctrl_handler, config_store);
reg = VDPU_REG_DEC_CTRL3_START_CODE_E
| VDPU_REG_DEC_CTRL3_INIT_QP(pps->pic_init_qp_minus26 + 26)
| VDPU_REG_DEC_CTRL3_STREAM_LEN(
- vb2_get_plane_payload(&ctx->run.src->b, 0));
+ vb2_get_plane_payload(&ctx->run.src->vb.vb2_buf, 0));
vdpu_write_relaxed(vpu, reg, VDPU_REG_DEC_CTRL3);
/* Decoder control register 4. */
&& dpb[i].buf_index < ctx->vq_dst.num_buffers)
buf = ctx->dst_bufs[dpb[i].buf_index];
else
- buf = &ctx->run.dst->b;
+ buf = &ctx->run.dst->vb.vb2_buf;
vdpu_write_relaxed(vpu, vb2_dma_contig_plane_dma_addr(buf, 0),
VDPU_REG_ADDR_REF(i));
dma_addr_t src_dma, dst_dma;
/* Source (stream) buffer. */
- src_dma = vb2_dma_contig_plane_dma_addr(&ctx->run.src->b, 0);
+ src_dma = vb2_dma_contig_plane_dma_addr(&ctx->run.src->vb.vb2_buf, 0);
vdpu_write_relaxed(vpu, src_dma, VDPU_REG_ADDR_STR);
/* Destination (decoded frame) buffer. */
- dst_dma = vb2_dma_contig_plane_dma_addr(&ctx->run.dst->b, 0);
+ dst_dma = vb2_dma_contig_plane_dma_addr(&ctx->run.dst->vb.vb2_buf, 0);
vdpu_write_relaxed(vpu, dst_dma, VDPU_REG_ADDR_DST);
/* Higher profiles require DMV buffer appended to reference frames. */
schedule_delayed_work(&vpu->watchdog_work, msecs_to_jiffies(2000));
/* Start decoding! */
- vdpu_write_relaxed(vpu, VDPU_REG_CONFIG_DEC_AXI_RD_ID(0xff)
+ vdpu_write_relaxed(vpu, VDPU_REG_CONFIG_DEC_AXI_RD_ID(0xffu)
| VDPU_REG_CONFIG_DEC_TIMEOUT_E
| VDPU_REG_CONFIG_DEC_OUT_ENDIAN
| VDPU_REG_CONFIG_DEC_STRENDIAN_E
/* stream addresses */
vpu_debug(4, "Addresses: segmap=0x%x, probs=0x%x\n",
- ctx->hw.vp8d.segment_map.dma,
- ctx->hw.vp8d.prob_tbl.dma);
+ (int)ctx->hw.vp8d.segment_map.dma,
+ (int)ctx->hw.vp8d.prob_tbl.dma);
/* reference frame info */
vpu_debug(4, "Ref frame: last=%d, golden=%d, alt=%d\n",
u32 count = 0;
u32 i;
- src_dma = vb2_dma_contig_plane_dma_addr(&ctx->run.src->b, 0);
+ src_dma = vb2_dma_contig_plane_dma_addr(&ctx->run.src->vb.vb2_buf, 0);
/*
* Calculate control partition mb data info
/* set last frame address */
if (hdr->last_frame >= ctx->vq_dst.num_buffers)
- buf = &ctx->run.dst->b;
+ buf = &ctx->run.dst->vb.vb2_buf;
else
buf = ctx->dst_bufs[hdr->last_frame];
if (!hdr->key_frame)
vdpu_write_relaxed(vpu,
- vb2_dma_contig_plane_dma_addr(&ctx->run.dst->b, 0),
+ vb2_dma_contig_plane_dma_addr(&ctx->run.dst->vb.vb2_buf, 0),
VDPU_REG_ADDR_REF(0));
else
vdpu_write_relaxed(vpu, vb2_dma_contig_plane_dma_addr(buf, 0),
/* set golden reference frame buffer address */
if (hdr->golden_frame >= ctx->vq_dst.num_buffers)
- buf = &ctx->run.dst->b;
+ buf = &ctx->run.dst->vb.vb2_buf;
else
buf = ctx->dst_bufs[hdr->golden_frame];
/* set alternate reference frame buffer address */
if (hdr->alt_frame >= ctx->vq_dst.num_buffers)
- buf = &ctx->run.dst->b;
+ buf = &ctx->run.dst->vb.vb2_buf;
else
buf = ctx->dst_bufs[hdr->alt_frame];
/* set output frame buffer address */
vdpu_write_relaxed(vpu,
- vb2_dma_contig_plane_dma_addr(&ctx->run.dst->b, 0),
+ vb2_dma_contig_plane_dma_addr(&ctx->run.dst->vb.vb2_buf, 0),
VDPU_REG_ADDR_DST);
}
void rk3288_vpu_vp8e_assemble_bitstream(struct rk3288_vpu_ctx *ctx,
struct rk3288_vpu_buf *dst_buf)
{
+ struct vb2_v4l2_buffer *vb2_dst = &dst_buf->vb;
size_t ext_hdr_size = dst_buf->vp8e.ext_hdr_size;
size_t dct_size = dst_buf->vp8e.dct_size;
size_t hdr_size = dst_buf->vp8e.hdr_size;
void *dst;
u32 *tag;
- dst_size = vb2_plane_size(&dst_buf->b, 0);
- dst = vb2_plane_vaddr(&dst_buf->b, 0);
+ dst_size = vb2_plane_size(&dst_buf->vb.vb2_buf, 0);
+ dst = vb2_plane_vaddr(&dst_buf->vb.vb2_buf, 0);
tag = dst; /* To access frame tag words. */
if (WARN_ON(hdr_size + ext_hdr_size + dct_size > dst_size))
if (WARN_ON(dst_buf->vp8e.dct_offset + dct_size > dst_size))
return;
- vpu_debug(1, "%s: hdr_size = %u, ext_hdr_size = %u, dct_size = %u\n",
- __func__, hdr_size, ext_hdr_size, dct_size);
-
memmove(dst + hdr_size + ext_hdr_size,
dst + dst_buf->vp8e.dct_offset, dct_size);
memcpy(dst, dst_buf->vp8e.header, hdr_size);
/* Patch frame tag at first 32-bit word of the frame. */
- if (dst_buf->b.v4l2_buf.flags & V4L2_BUF_FLAG_KEYFRAME) {
+ if (vb2_dst->flags & V4L2_BUF_FLAG_KEYFRAME) {
tag_size = VP8_KEY_FRAME_HDR_SIZE;
tag[0] &= ~VP8_FRAME_TAG_KEY_FRAME_BIT;
} else {
tag[0] |= (hdr_size + ext_hdr_size - tag_size)
<< VP8_FRAME_TAG_LENGTH_SHIFT;
- vb2_set_plane_payload(&dst_buf->b, 0,
+ vb2_set_plane_payload(&dst_buf->vb.vb2_buf, 0,
hdr_size + ext_hdr_size + dct_size);
}
static void rk3288_vpu_vp8e_set_buffers(struct rk3288_vpu_dev *vpu,
struct rk3288_vpu_ctx *ctx)
{
+ struct vb2_v4l2_buffer *vb2_dst = &ctx->run.dst->vb;
const struct rk3288_vp8e_reg_params *params = ctx->run.vp8e.reg_params;
dma_addr_t ref_buf_dma, rec_buf_dma;
dma_addr_t stream_dma;
dst_dma = vpu->dummy_encode_dst.dma;
dst_size = vpu->dummy_encode_dst.size;
} else {
- dst_dma = vb2_dma_contig_plane_dma_addr(&ctx->run.dst->b, 0);
- dst_size = vb2_plane_size(&ctx->run.dst->b, 0);
+ dst_dma = vb2_dma_contig_plane_dma_addr(&ctx->run.dst->vb.vb2_buf, 0);
+ dst_size = vb2_plane_size(&ctx->run.dst->vb.vb2_buf, 0);
}
/*
ctx->run.dst->vp8e.hdr_size = params->hdr_len + (start_offset >> 3);
if (params->enc_ctrl & VEPU_REG_ENC_CTRL_KEYFRAME_BIT)
- ctx->run.dst->b.v4l2_buf.flags |= V4L2_BUF_FLAG_KEYFRAME;
+ vb2_dst->flags |= V4L2_BUF_FLAG_KEYFRAME;
else
- ctx->run.dst->b.v4l2_buf.flags &= ~V4L2_BUF_FLAG_KEYFRAME;
+ vb2_dst->flags &= ~V4L2_BUF_FLAG_KEYFRAME;
/*
* We assume here that 1/10 of the buffer is enough for headers.
VEPU_REG_ADDR_IN_CR);
} else {
vepu_write_relaxed(vpu, vb2_dma_contig_plane_dma_addr(
- &ctx->run.src->b, PLANE_Y),
+ &ctx->run.src->vb.vb2_buf, PLANE_Y),
VEPU_REG_ADDR_IN_LUMA);
vepu_write_relaxed(vpu, vb2_dma_contig_plane_dma_addr(
- &ctx->run.src->b, PLANE_CB),
+ &ctx->run.src->vb.vb2_buf, PLANE_CB),
VEPU_REG_ADDR_IN_CB);
vepu_write_relaxed(vpu, vb2_dma_contig_plane_dma_addr(
- &ctx->run.src->b, PLANE_CR),
+ &ctx->run.src->vb.vb2_buf, PLANE_CR),
VEPU_REG_ADDR_IN_CR);
}
void rk3288_vpu_vp8e_run(struct rk3288_vpu_ctx *ctx)
{
+ struct vb2_v4l2_buffer *vb2_dst = &ctx->run.dst->vb;
struct rk3288_vpu_dev *vpu = ctx->dev;
u32 reg;
| VEPU_REG_ENC_CTRL_ENC_MODE_VP8
| VEPU_REG_ENC_CTRL_EN_BIT;
- if (ctx->run.dst->b.v4l2_buf.flags & V4L2_BUF_FLAG_KEYFRAME)
+ if (vb2_dst->flags & V4L2_BUF_FLAG_KEYFRAME)
reg |= VEPU_REG_ENC_CTRL_KEYFRAME_BIT;
vepu_write(vpu, reg, VEPU_REG_ENC_CTRL);