2 * drivers/amlogic/amports/vvp9.c
4 * Copyright (C) 2015 Amlogic, Inc. All rights reserved.
6 * This program is free software; you can redistribute it and/or modify
7 * it under the terms of the GNU General Public License as published by
8 * the Free Software Foundation; either version 2 of the License, or
9 * (at your option) any later version.
11 * This program is distributed in the hope that it will be useful, but WITHOUT
12 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
 * more details.
18 #include <linux/kernel.h>
19 #include <linux/module.h>
20 #include <linux/types.h>
21 #include <linux/errno.h>
22 #include <linux/interrupt.h>
23 #include <linux/semaphore.h>
24 #include <linux/delay.h>
25 #include <linux/timer.h>
26 #include <linux/kfifo.h>
27 #include <linux/kthread.h>
28 #include <linux/spinlock.h>
29 #include <linux/platform_device.h>
30 #include <linux/amlogic/media/vfm/vframe.h>
31 #include <linux/amlogic/media/utils/amstream.h>
32 #include <linux/amlogic/media/utils/vformat.h>
33 #include <linux/amlogic/media/frame_sync/ptsserv.h>
34 #include <linux/amlogic/media/canvas/canvas.h>
35 #include <linux/amlogic/media/vfm/vframe_provider.h>
36 #include <linux/amlogic/media/vfm/vframe_receiver.h>
37 #include <linux/dma-mapping.h>
38 #include <linux/dma-contiguous.h>
39 #include <linux/slab.h>
40 #include <linux/amlogic/tee.h>
41 #include "../../../stream_input/amports/amports_priv.h"
42 #include <linux/amlogic/media/codec_mm/codec_mm.h>
43 #include "../utils/decoder_mmu_box.h"
44 #include "../utils/decoder_bmmu_box.h"
46 #define MEM_NAME "codec_vp9"
47 /* #include <mach/am_regs.h> */
48 #include <linux/amlogic/media/utils/vdec_reg.h>
49 #include "../utils/vdec.h"
50 #include "../utils/amvdec.h"
51 #ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
52 #include "../utils/vdec_profile.h"
55 #include <linux/amlogic/media/video_sink/video.h>
56 #include <linux/amlogic/media/codec_mm/configs.h>
57 #include "../utils/config_parser.h"
58 #include "../utils/firmware.h"
59 #include "../../../common/chips/decoder_cpu_ver_info.h"
60 #include "../utils/vdec_v4l2_buffer_ops.h"
61 #include <media/v4l2-mem2mem.h>
63 #define MIX_STREAM_SUPPORT
68 /*#define SUPPORT_FB_DECODING*/
69 /*#define FB_DECODING_TEST_SCHEDULE*/
72 #define HW_MASK_FRONT 0x1
73 #define HW_MASK_BACK 0x2
75 #define VP9D_MPP_REFINFO_TBL_ACCCONFIG 0x3442
76 #define VP9D_MPP_REFINFO_DATA 0x3443
77 #define VP9D_MPP_REF_SCALE_ENBL 0x3441
78 #define HEVC_MPRED_CTRL4 0x324c
79 #define HEVC_CM_HEADER_START_ADDR 0x3628
80 #define HEVC_DBLK_CFGB 0x350b
81 #define HEVCD_MPP_ANC2AXI_TBL_DATA 0x3464
82 #define HEVC_SAO_MMU_VH1_ADDR 0x363b
83 #define HEVC_SAO_MMU_VH0_ADDR 0x363a
85 #define HEVC_MV_INFO 0x310d
86 #define HEVC_QP_INFO 0x3137
87 #define HEVC_SKIP_INFO 0x3136
89 #define VP9_10B_DEC_IDLE 0
90 #define VP9_10B_DEC_FRAME_HEADER 1
91 #define VP9_10B_DEC_SLICE_SEGMENT 2
92 #define VP9_10B_DECODE_SLICE 5
93 #define VP9_10B_DISCARD_NAL 6
94 #define VP9_DUMP_LMEM 7
95 #define HEVC_DECPIC_DATA_DONE 0xa
96 #define HEVC_DECPIC_DATA_ERROR 0xb
97 #define HEVC_NAL_DECODE_DONE 0xe
98 #define HEVC_DECODE_BUFEMPTY 0x20
99 #define HEVC_DECODE_TIMEOUT 0x21
100 #define HEVC_SEARCH_BUFEMPTY 0x22
101 #define HEVC_DECODE_OVER_SIZE 0x23
102 #define HEVC_S2_DECODING_DONE 0x50
103 #define VP9_HEAD_PARSER_DONE 0xf0
104 #define VP9_HEAD_SEARCH_DONE 0xf1
106 #define HEVC_ACTION_DONE 0xff
108 #define VF_POOL_SIZE 32
111 #define pr_info printk
113 #define DECODE_MODE_SINGLE ((0x80 << 24) | 0)
114 #define DECODE_MODE_MULTI_STREAMBASE ((0x80 << 24) | 1)
115 #define DECODE_MODE_MULTI_FRAMEBASE ((0x80 << 24) | 2)
116 #define DECODE_MODE_SINGLE_LOW_LATENCY ((0x80 << 24) | 3)
117 #define DECODE_MODE_MULTI_FRAMEBASE_NOHEAD ((0x80 << 24) | 4)
119 #define VP9_TRIGGER_FRAME_DONE 0x100
120 #define VP9_TRIGGER_FRAME_ENABLE 0x200
122 #define MV_MEM_UNIT 0x240
123 /*---------------------------------------------------
124 * Include "parser_cmd.h"
125 *---------------------------------------------------
127 #define PARSER_CMD_SKIP_CFG_0 0x0000090b
129 #define PARSER_CMD_SKIP_CFG_1 0x1b14140f
131 #define PARSER_CMD_SKIP_CFG_2 0x001b1910
133 #define PARSER_CMD_NUMBER 37
135 /*#define HEVC_PIC_STRUCT_SUPPORT*/
136 /* to remove, fix build error */
138 /*#define CODEC_MM_FLAGS_FOR_VDECODER 0*/
140 #define MULTI_INSTANCE_SUPPORT
141 #define SUPPORT_10BIT
142 /* #define ERROR_HANDLE_DEBUG */
145 #define STAT_KTHREAD 0x40
148 #ifdef MULTI_INSTANCE_SUPPORT
149 #define MAX_DECODE_INSTANCE_NUM 9
150 #define MULTI_DRIVER_NAME "ammvdec_vp9"
151 static unsigned int max_decode_instance_num
152 = MAX_DECODE_INSTANCE_NUM
;
153 static unsigned int decode_frame_count
[MAX_DECODE_INSTANCE_NUM
];
154 static unsigned int display_frame_count
[MAX_DECODE_INSTANCE_NUM
];
155 static unsigned int max_process_time
[MAX_DECODE_INSTANCE_NUM
];
156 static unsigned int run_count
[MAX_DECODE_INSTANCE_NUM
];
157 static unsigned int input_empty
[MAX_DECODE_INSTANCE_NUM
];
158 static unsigned int not_run_ready
[MAX_DECODE_INSTANCE_NUM
];
160 static u32 decode_timeout_val
= 200;
161 static int start_decode_buf_level
= 0x8000;
162 static u32 work_buf_size
;
164 static u32 force_pts_unstable
;
166 static u32 mv_buf_margin
;
168 /* DOUBLE_WRITE_MODE is enabled only when NV21 8 bit output is needed */
169 /* double_write_mode:
170 * 0, no double write;
172 * 2, (1/4):(1/4) ratio;
173 * 3, (1/4):(1/4) ratio, with both compressed frame included
174 * 4, (1/2):(1/2) ratio;
175 * 0x10, double write only
176 * 0x100, if > 1080p,use mode 4,else use mode 1;
177 * 0x200, if > 1080p,use mode 2,else use mode 1;
178 * 0x300, if > 720p, use mode 4, else use mode 1;
180 static u32 double_write_mode
;
182 #define DRIVER_NAME "amvdec_vp9"
183 #define MODULE_NAME "amvdec_vp9"
184 #define DRIVER_HEADER_NAME "amvdec_vp9_header"
187 #define PUT_INTERVAL (HZ/100)
188 #define ERROR_SYSTEM_RESET_COUNT 200
191 #define PTS_NONE_REF_USE_DURATION 1
193 #define PTS_MODE_SWITCHING_THRESHOLD 3
194 #define PTS_MODE_SWITCHING_RECOVERY_THREASHOLD 3
196 #define DUR2PTS(x) ((x)*90/96)
199 static int vvp9_vf_states(struct vframe_states
*states
, void *);
200 static struct vframe_s
*vvp9_vf_peek(void *);
201 static struct vframe_s
*vvp9_vf_get(void *);
202 static void vvp9_vf_put(struct vframe_s
*, void *);
203 static int vvp9_event_cb(int type
, void *data
, void *private_data
);
205 static int vvp9_stop(struct VP9Decoder_s
*pbi
);
206 #ifdef MULTI_INSTANCE_SUPPORT
207 static s32
vvp9_init(struct vdec_s
*vdec
);
209 static s32
vvp9_init(struct VP9Decoder_s
*pbi
);
211 static void vvp9_prot_init(struct VP9Decoder_s
*pbi
, u32 mask
);
212 static int vvp9_local_init(struct VP9Decoder_s
*pbi
);
213 static void vvp9_put_timer_func(unsigned long arg
);
214 static void dump_data(struct VP9Decoder_s
*pbi
, int size
);
215 static unsigned char get_data_check_sum
216 (struct VP9Decoder_s
*pbi
, int size
);
217 static void dump_pic_list(struct VP9Decoder_s
*pbi
);
218 static int vp9_alloc_mmu(
219 struct VP9Decoder_s
*pbi
,
223 unsigned short bit_depth
,
224 unsigned int *mmu_index_adr
);
227 static const char vvp9_dec_id
[] = "vvp9-dev";
229 #define PROVIDER_NAME "decoder.vp9"
230 #define MULTI_INSTANCE_PROVIDER_NAME "vdec.vp9"
232 static const struct vframe_operations_s vvp9_vf_provider
= {
233 .peek
= vvp9_vf_peek
,
236 .event_cb
= vvp9_event_cb
,
237 .vf_states
= vvp9_vf_states
,
240 static struct vframe_provider_s vvp9_vf_prov
;
242 static u32 bit_depth_luma
;
243 static u32 bit_depth_chroma
;
244 static u32 frame_width
;
245 static u32 frame_height
;
246 static u32 video_signal_type
;
248 static u32 on_no_keyframe_skiped
;
250 #define PROB_SIZE (496 * 2 * 4)
251 #define PROB_BUF_SIZE (0x5000)
252 #define COUNT_BUF_SIZE (0x300 * 4 * 4)
253 /*compute_losless_comp_body_size(4096, 2304, 1) = 18874368(0x1200000)*/
254 #define MAX_FRAME_4K_NUM 0x1200
255 #define MAX_FRAME_8K_NUM 0x4800
257 #define HEVC_ASSIST_MMU_MAP_ADDR 0x3009
259 #ifdef SUPPORT_FB_DECODING
260 /* register define */
261 #define HEVC_ASSIST_HED_FB_W_CTL 0x3006
262 #define HEVC_ASSIST_HED_FB_R_CTL 0x3007
263 #define HEVC_ASSIST_HED_FB_ADDR 0x3008
264 #define HEVC_ASSIST_FB_MMU_MAP_ADDR 0x300a
265 #define HEVC_ASSIST_FBD_MMU_MAP_ADDR 0x300b
268 #define MAX_STAGE_PAGE_NUM 0x1200
269 #define STAGE_MMU_MAP_SIZE (MAX_STAGE_PAGE_NUM * 4)
271 static inline int div_r32(int64_t m
, int n
)
288 unsigned int alloc_flag
;
290 unsigned int cma_page_count
;
291 unsigned long alloc_addr
;
292 unsigned long start_adr
;
295 unsigned int free_start_adr
;
296 ulong v4l_ref_buf_addr
;
305 unsigned long start_adr
;
310 /* #undef BUFMGR_ONLY to enable hardware configuration */
312 /*#define TEST_WR_PTR_INC*/
313 /*#define WR_PTR_INC_NUM 128*/
314 #define WR_PTR_INC_NUM 1
318 #undef MEMORY_MAP_IN_REAL_CHIP
320 /*#undef DOS_PROJECT*/
321 /*#define MEMORY_MAP_IN_REAL_CHIP*/
323 /*#define BUFFER_MGR_ONLY*/
324 /*#define CONFIG_HEVC_CLK_FORCED_ON*/
325 /*#define ENABLE_SWAP_TEST*/
328 #define VP9_LPF_LVL_UPDATE
329 /*#define DBG_LF_PRINT*/
333 #define LOSLESS_COMPRESS_MODE
336 #define DOUBLE_WRITE_YSTART_TEMP 0x02000000
337 #define DOUBLE_WRITE_CSTART_TEMP 0x02900000
341 typedef unsigned int u32
;
342 typedef unsigned short u16
;
344 #define VP9_DEBUG_BUFMGR 0x01
345 #define VP9_DEBUG_BUFMGR_MORE 0x02
346 #define VP9_DEBUG_BUFMGR_DETAIL 0x04
347 #define VP9_DEBUG_OUT_PTS 0x10
348 #define VP9_DEBUG_SEND_PARAM_WITH_REG 0x100
349 #define VP9_DEBUG_MERGE 0x200
350 #define VP9_DEBUG_DBG_LF_PRINT 0x400
351 #define VP9_DEBUG_REG 0x800
352 #define VP9_DEBUG_2_STAGE 0x1000
353 #define VP9_DEBUG_2_STAGE_MORE 0x2000
354 #define VP9_DEBUG_QOS_INFO 0x4000
355 #define VP9_DEBUG_DIS_LOC_ERROR_PROC 0x10000
356 #define VP9_DEBUG_DIS_SYS_ERROR_PROC 0x20000
357 #define VP9_DEBUG_DUMP_PIC_LIST 0x40000
358 #define VP9_DEBUG_TRIG_SLICE_SEGMENT_PROC 0x80000
359 #define VP9_DEBUG_NO_TRIGGER_FRAME 0x100000
360 #define VP9_DEBUG_LOAD_UCODE_FROM_FILE 0x200000
361 #define VP9_DEBUG_FORCE_SEND_AGAIN 0x400000
362 #define VP9_DEBUG_DUMP_DATA 0x800000
363 #define VP9_DEBUG_CACHE 0x1000000
364 #define VP9_DEBUG_CACHE_HIT_RATE 0x2000000
365 #define IGNORE_PARAM_FROM_CONFIG 0x8000000
366 #ifdef MULTI_INSTANCE_SUPPORT
367 #define PRINT_FLAG_ERROR 0x0
368 #define PRINT_FLAG_V4L_DETAIL 0x10000000
369 #define PRINT_FLAG_VDEC_STATUS 0x20000000
370 #define PRINT_FLAG_VDEC_DETAIL 0x40000000
371 #define PRINT_FLAG_VDEC_DATA 0x80000000
375 static bool is_reset
;
/*
 * udebug_flag:
 * bit 0, enable ucode print
 * bit 1, enable ucode detail print
 * bit [31:16] not 0, pos to dump lmem
 * bit 2, pop bits to lmem
 * bit [11:8], pre-pop bits for alignment (when bit 2 is 1)
 */
385 static u32 udebug_flag
;
387 when udebug_flag[1:0] is not 0
388 udebug_pause_pos not 0,
391 static u32 udebug_pause_pos
;
393 when udebug_flag[1:0] is not 0
394 and udebug_pause_pos is not 0,
395 pause only when DEBUG_REG2 is equal to this val
397 static u32 udebug_pause_val
;
399 static u32 udebug_pause_decode_idx
;
401 static u32 without_display_mode
;
404 *[3:0] 0: default use config from omx.
405 * 1: force enable fence.
407 *[7:4] 0: fence use for driver.
408 * 1: fence fd use for app.
410 static u32 force_config_fence
;
/*
 * Debug wrapper around the hardware register write macro.
 * When the VP9_DEBUG_REG bit is set in the module-level 'debug' flags,
 * log the target register address and value before performing the write.
 * Installed over the plain macro via "#define WRITE_VREG WRITE_VREG_DBG2".
 *
 * NOTE(review): the function-body braces appear to have been lost in this
 * extract; code kept byte-identical apart from comments.
 */
void WRITE_VREG_DBG2(unsigned int adr, unsigned int val)
	/* trace the write when register-level debugging is enabled */
	if (debug & VP9_DEBUG_REG)
		pr_info("%s(%x, %x)\n", __func__, adr, val);
	/* perform the actual register write */
	WRITE_VREG(adr, val);
423 #define WRITE_VREG WRITE_VREG_DBG2
426 #define FRAME_CNT_WINDOW_SIZE 59
427 #define RATE_CORRECTION_THRESHOLD 5
428 /**************************************************
430 VP9 buffer management start
432 ***************************************************/
434 #define MMU_COMPRESS_HEADER_SIZE 0x48000
435 #define MMU_COMPRESS_8K_HEADER_SIZE (0x48000*4)
436 #define MAX_SIZE_8K (8192 * 4608)
437 #define MAX_SIZE_4K (4096 * 2304)
438 #define IS_8K_SIZE(w, h) (((w) * (h)) > MAX_SIZE_4K)
440 #define INVALID_IDX -1 /* Invalid buffer index.*/
442 #define RPM_BEGIN 0x200
443 #define RPM_END 0x280
447 unsigned short data
[RPM_END
- RPM_BEGIN
];
450 /* from ucode lmem, do not change this struct */
451 unsigned short profile
;
452 unsigned short show_existing_frame
;
453 unsigned short frame_to_show_idx
;
454 unsigned short frame_type
; /*1 bit*/
455 unsigned short show_frame
; /*1 bit*/
456 unsigned short error_resilient_mode
; /*1 bit*/
457 unsigned short intra_only
; /*1 bit*/
458 unsigned short display_size_present
; /*1 bit*/
459 unsigned short reset_frame_context
;
460 unsigned short refresh_frame_flags
;
461 unsigned short width
;
462 unsigned short height
;
463 unsigned short display_width
;
464 unsigned short display_height
;
466 *bit[11:8] - ref_frame_info_0 (ref(3-bits), ref_frame_sign_bias(1-bit))
467 *bit[7:4] - ref_frame_info_1 (ref(3-bits), ref_frame_sign_bias(1-bit))
468 *bit[3:0] - ref_frame_info_2 (ref(3-bits), ref_frame_sign_bias(1-bit))
470 unsigned short ref_info
;
472 *bit[2]: same_frame_size0
473 *bit[1]: same_frame_size1
474 *bit[0]: same_frame_size2
476 unsigned short same_frame_size
;
478 unsigned short mode_ref_delta_enabled
;
479 unsigned short ref_deltas
[4];
480 unsigned short mode_deltas
[2];
481 unsigned short filter_level
;
482 unsigned short sharpness_level
;
483 unsigned short bit_depth
;
484 unsigned short seg_quant_info
[8];
485 unsigned short seg_enabled
;
486 unsigned short seg_abs_delta
;
487 /* bit 15: feature enabled; bit 8, sign; bit[5:0], data */
488 unsigned short seg_lf_info
[8];
493 struct vpx_codec_frame_buffer_s
{
494 uint8_t *data
; /**< Pointer to the data buffer */
495 size_t size
; /**< Size of data in bytes */
496 void *priv
; /**< Frame's private data */
499 enum vpx_color_space_t
{
500 VPX_CS_UNKNOWN
= 0, /**< Unknown */
501 VPX_CS_BT_601
= 1, /**< BT.601 */
502 VPX_CS_BT_709
= 2, /**< BT.709 */
503 VPX_CS_SMPTE_170
= 3, /**< SMPTE.170 */
504 VPX_CS_SMPTE_240
= 4, /**< SMPTE.240 */
505 VPX_CS_BT_2020
= 5, /**< BT.2020 */
506 VPX_CS_RESERVED
= 6, /**< Reserved */
507 VPX_CS_SRGB
= 7 /**< sRGB */
508 }; /**< alias for enum vpx_color_space */
510 enum vpx_bit_depth_t
{
511 VPX_BITS_8
= 8, /**< 8 bits */
512 VPX_BITS_10
= 10, /**< 10 bits */
513 VPX_BITS_12
= 12, /**< 12 bits */
516 #define MAX_SLICE_NUM 1024
517 struct PIC_BUFFER_CONFIG_s
{
526 #ifdef MULTI_INSTANCE_SUPPORT
527 struct canvas_config_s canvas_config
[2];
539 unsigned long header_adr
;
540 unsigned long mpred_mv_wr_start_addr
;
541 /*unsigned long mc_y_adr;
542 *unsigned long mc_u_v_adr;
544 unsigned int dw_y_adr
;
545 unsigned int dw_u_v_adr
;
570 uint8_t *alpha_buffer
;
572 uint8_t *buffer_alloc
;
578 unsigned int bit_depth
;
579 enum vpx_color_space_t color_space
;
583 unsigned long cma_alloc_addr
;
585 int double_write_mode
;
587 /* picture qos infomation*/
599 u32 frame_size2
; // For frame base mode
606 enum BITSTREAM_PROFILE
{
620 enum REFERENCE_MODE
{
621 SINGLE_REFERENCE
= 0,
622 COMPOUND_REFERENCE
= 1,
623 REFERENCE_MODE_SELECT
= 2,
628 #define INTRA_FRAME 0
630 #define GOLDEN_FRAME 2
631 #define ALTREF_FRAME 3
632 #define MAX_REF_FRAMES 4
634 #define REFS_PER_FRAME 3
636 #define REF_FRAMES_LOG2 3
637 #define REF_FRAMES (1 << REF_FRAMES_LOG2)
638 #define REF_FRAMES_4K (6)
640 /*4 scratch frames for the new frames to support a maximum of 4 cores decoding
641 *in parallel, 3 for scaled references on the encoder.
642 *TODO(hkuang): Add ondemand frame buffers instead of hardcoding the number
643 * // of framebuffers.
644 *TODO(jkoleszar): These 3 extra references could probably come from the
645 *normal reference pool.
647 #define FRAME_BUFFERS (REF_FRAMES + 16)
648 #define HEADER_FRAME_BUFFERS (FRAME_BUFFERS)
649 #define MAX_BUF_NUM (FRAME_BUFFERS)
650 #define MV_BUFFER_NUM FRAME_BUFFERS
651 #ifdef SUPPORT_FB_DECODING
652 #define STAGE_MAX_BUFFERS 16
654 #define STAGE_MAX_BUFFERS 0
657 #define FRAME_CONTEXTS_LOG2 2
658 #define FRAME_CONTEXTS (1 << FRAME_CONTEXTS_LOG2)
659 /*buffer + header buffer + workspace*/
660 #ifdef MV_USE_FIXED_BUF
661 #define MAX_BMMU_BUFFER_NUM (FRAME_BUFFERS + HEADER_FRAME_BUFFERS + 1)
662 #define VF_BUFFER_IDX(n) (n)
663 #define HEADER_BUFFER_IDX(n) (FRAME_BUFFERS + n)
664 #define WORK_SPACE_BUF_ID (FRAME_BUFFERS + HEADER_FRAME_BUFFERS)
666 #define MAX_BMMU_BUFFER_NUM \
667 (FRAME_BUFFERS + HEADER_FRAME_BUFFERS + MV_BUFFER_NUM + 1)
668 #define VF_BUFFER_IDX(n) (n)
669 #define HEADER_BUFFER_IDX(n) (FRAME_BUFFERS + n)
670 #define MV_BUFFER_IDX(n) (FRAME_BUFFERS + HEADER_FRAME_BUFFERS + n)
671 #define WORK_SPACE_BUF_ID \
672 (FRAME_BUFFERS + HEADER_FRAME_BUFFERS + MV_BUFFER_NUM)
675 struct RefCntBuffer_s
{
680 struct vpx_codec_frame_buffer_s raw_frame_buffer
;
681 struct PIC_BUFFER_CONFIG_s buf
;
683 /*The Following variables will only be used in frame parallel decode.
685 *frame_worker_owner indicates which FrameWorker owns this buffer. NULL means
686 *that no FrameWorker owns, or is decoding, this buffer.
687 *VP9Worker *frame_worker_owner;
689 *row and col indicate which position frame has been decoded to in real
690 *pixel unit. They are reset to -1 when decoding begins and set to INT_MAX
691 *when the frame is fully decoded.
698 /*TODO(dkovalev): idx is not really required and should be removed, now it
699 *is used in vp9_onyxd_if.c
702 struct PIC_BUFFER_CONFIG_s
*buf
;
703 /*struct scale_factors sf;*/
706 struct InternalFrameBuffer_s
{
710 } InternalFrameBuffer
;
712 struct InternalFrameBufferList_s
{
713 int num_internal_frame_buffers
;
714 struct InternalFrameBuffer_s
*int_fb
;
715 } InternalFrameBufferList
;
717 struct BufferPool_s
{
718 /*Protect BufferPool from being accessed by several FrameWorkers at
719 *the same time during frame parallel decode.
720 *TODO(hkuang): Try to use atomic variable instead of locking the whole pool.
722 *Private data associated with the frame buffer callbacks.
725 *vpx_get_frame_buffer_cb_fn_t get_fb_cb;
726 *vpx_release_frame_buffer_cb_fn_t release_fb_cb;
729 struct RefCntBuffer_s frame_bufs
[FRAME_BUFFERS
];
731 /*Frame buffers allocated internally by the codec.*/
732 struct InternalFrameBufferList_s int_frame_buffers
;
738 #define lock_buffer_pool(pool, flags) \
739 spin_lock_irqsave(&pool->lock, flags)
741 #define unlock_buffer_pool(pool, flags) \
742 spin_unlock_irqrestore(&pool->lock, flags)
744 struct VP9_Common_s
{
745 enum vpx_color_space_t color_space
;
756 int use_highbitdepth
;/*Marks if we need to use 16bit frame buffers.*/
758 struct PIC_BUFFER_CONFIG_s
*frame_to_show
;
759 struct RefCntBuffer_s
*prev_frame
;
761 /*TODO(hkuang): Combine this with cur_buf in macroblockd.*/
762 struct RefCntBuffer_s
*cur_frame
;
764 int ref_frame_map
[REF_FRAMES
]; /* maps fb_idx to reference slot */
766 /*Prepare ref_frame_map for the next frame.
767 *Only used in frame parallel decode.
769 int next_ref_frame_map
[REF_FRAMES
];
771 /* TODO(jkoleszar): could expand active_ref_idx to 4,
772 *with 0 as intra, and roll new_fb_idx into it.
775 /*Each frame can reference REFS_PER_FRAME buffers*/
776 struct RefBuffer_s frame_refs
[REFS_PER_FRAME
];
781 /*last frame's frame type for motion search*/
782 enum FRAME_TYPE last_frame_type
;
783 enum FRAME_TYPE frame_type
;
787 int show_existing_frame
;
789 /*Flag signaling that the frame is encoded using only INTRA modes.*/
791 uint8_t last_intra_only
;
793 int allow_high_precision_mv
;
795 /*Flag signaling that the frame context should be reset to default
796 *values. 0 or 1 implies don't reset, 2 reset just the context
797 *specified in the frame header, 3 reset all contexts.
799 int reset_frame_context
;
801 /*MBs, mb_rows/cols is in 16-pixel units; mi_rows/cols is in
802 * MODE_INFO (8-pixel) units.
805 int mb_rows
, mi_rows
;
806 int mb_cols
, mi_cols
;
809 /*Whether to use previous frame's motion vectors for prediction.*/
810 int use_prev_frame_mvs
;
812 int refresh_frame_context
; /* Two state 0 = NO, 1 = YES */
814 int ref_frame_sign_bias
[MAX_REF_FRAMES
]; /* Two state 0, 1 */
816 /*struct loopfilter lf;*/
817 /*struct segmentation seg;*/
819 /*TODO(hkuang):Remove this as it is the same as frame_parallel_decode*/
821 int frame_parallel_decode
; /* frame-based threading.*/
823 /*Context probabilities for reference frame prediction*/
824 /*MV_REFERENCE_FRAME comp_fixed_ref;*/
825 /*MV_REFERENCE_FRAME comp_var_ref[2];*/
826 enum REFERENCE_MODE reference_mode
;
828 /*FRAME_CONTEXT *fc; */ /* this frame entropy */
829 /*FRAME_CONTEXT *frame_contexts; */ /*FRAME_CONTEXTS*/
830 /*unsigned int frame_context_idx; *//* Context to use/update */
831 /*FRAME_COUNTS counts;*/
833 unsigned int current_video_frame
;
834 enum BITSTREAM_PROFILE profile
;
836 enum vpx_bit_depth_t bit_depth
;
838 int error_resilient_mode
;
839 int frame_parallel_decoding_mode
;
842 int skip_loop_filter
;
844 /*External BufferPool passed from outside.*/
845 struct BufferPool_s
*buffer_pool
;
847 int above_context_alloc_cols
;
851 static void set_canvas(struct VP9Decoder_s
*pbi
,
852 struct PIC_BUFFER_CONFIG_s
*pic_config
);
853 static int prepare_display_buf(struct VP9Decoder_s
*pbi
,
854 struct PIC_BUFFER_CONFIG_s
*pic_config
);
856 static void fill_frame_info(struct VP9Decoder_s
*pbi
,
857 struct PIC_BUFFER_CONFIG_s
*frame
,
858 unsigned int framesize
,
/*
 * Return the PIC_BUFFER_CONFIG of the frame currently being decoded,
 * i.e. the buffer-pool entry selected by cm->new_fb_idx.
 *
 * NOTE(review): the function-body braces are missing from this extract;
 * code kept byte-identical apart from comments.
 */
static struct PIC_BUFFER_CONFIG_s *get_frame_new_buffer(struct VP9_Common_s *cm)
	return &cm->buffer_pool->frame_bufs[cm->new_fb_idx].buf;
/*
 * Retarget a reference slot: drop one reference on the buffer currently
 * pointed at by *idx (if it holds a valid, referenced buffer) and take
 * one reference on the buffer at new_idx.
 *
 * NOTE(review): the statement that actually updates *idx to new_idx, and
 * the closing braces, appear to have been dropped from this extract —
 * confirm against the full source; code kept byte-identical apart from
 * comments.
 */
static void ref_cnt_fb(struct RefCntBuffer_s *bufs, int *idx, int new_idx)
	const int ref_index = *idx;

	/* release the old reference, guarding against invalid/unreferenced slots */
	if (ref_index >= 0 && bufs[ref_index].ref_count > 0) {
		bufs[ref_index].ref_count--;
		/*pr_info("[MMU DEBUG 2] dec ref_count[%d] : %d\r\n",
		 * ref_index, bufs[ref_index].ref_count);
		 */

	/* take a reference on the newly selected buffer */
	bufs[new_idx].ref_count++;
	/*pr_info("[MMU DEBUG 3] inc ref_count[%d] : %d\r\n",
	 * new_idx, bufs[new_idx].ref_count);
	 */
/*
 * Release callback handed to the frame-buffer pool: recovers the
 * InternalFrameBuffer_s that was stashed in fb->priv when the buffer
 * was handed out.
 *
 * NOTE(review): the remainder of the body (presumably clearing the
 * buffer's in-use state and returning a status) is not visible in this
 * extract; code kept byte-identical apart from comments.
 */
int vp9_release_frame_buffer(struct vpx_codec_frame_buffer_s *fb)
	struct InternalFrameBuffer_s *const int_fb =
		(struct InternalFrameBuffer_s *)fb->priv;
894 static int compute_losless_comp_body_size(int width
, int height
,
895 uint8_t is_bit_depth_10
);
/*
 * Establish the display (cropped) size on the common context.
 * Defaults to the coded width/height; when the header signals
 * display_size_present, the explicit display_width/display_height from
 * the parsed params override it.
 *
 * NOTE(review): the "} else {" delimiters and closing braces are missing
 * from this extract — the final print belongs to the else branch of the
 * original; code kept byte-identical apart from comments.
 */
static void setup_display_size(struct VP9_Common_s *cm, union param_u *params,
	int print_header_info)
	/* default: display size equals coded size */
	cm->display_width = cm->width;
	cm->display_height = cm->height;
	if (params->p.display_size_present) {
		if (print_header_info)
			pr_info(" * 1-bit display_size_present read : 1\n");
		/* header carries an explicit display size: use it */
		cm->display_width = params->p.display_width;
		cm->display_height = params->p.display_height;
		/*vp9_read_frame_size(rb, &cm->display_width,
		 * &cm->display_height);
		 */
		if (print_header_info)
			pr_info(" * 1-bit display_size_present read : 0\n");
917 uint8_t print_header_info
= 0;
931 struct buff_s sao_abv
;
932 struct buff_s sao_vb
;
933 struct buff_s short_term_rps
;
937 struct buff_s sao_up
;
938 struct buff_s swap_buf
;
939 struct buff_s swap_buf2
;
940 struct buff_s scalelut
;
941 struct buff_s dblk_para
;
942 struct buff_s dblk_data
;
943 struct buff_s seg_map
;
944 struct buff_s mmu_vbh
;
945 struct buff_s cm_header
;
946 struct buff_s mpred_above
;
947 #ifdef MV_USE_FIXED_BUF
948 struct buff_s mpred_mv
;
953 #ifdef MULTI_INSTANCE_SUPPORT
954 #define DEC_RESULT_NONE 0
955 #define DEC_RESULT_DONE 1
956 #define DEC_RESULT_AGAIN 2
957 #define DEC_RESULT_CONFIG_PARAM 3
958 #define DEC_RESULT_ERROR 4
959 #define DEC_INIT_PICLIST 5
960 #define DEC_UNINIT_PICLIST 6
961 #define DEC_RESULT_GET_DATA 7
962 #define DEC_RESULT_GET_DATA_RETRY 8
963 #define DEC_RESULT_EOS 9
964 #define DEC_RESULT_FORCE_EXIT 10
965 #define DEC_RESULT_NEED_MORE_BUFFER 11
966 #define DEC_V4L2_CONTINUE_DECODING 18
968 #define DEC_S1_RESULT_NONE 0
969 #define DEC_S1_RESULT_DONE 1
970 #define DEC_S1_RESULT_FORCE_EXIT 2
971 #define DEC_S1_RESULT_TEST_TRIGGER_DONE 0xf0
973 #ifdef FB_DECODING_TEST_SCHEDULE
974 #define TEST_SET_NONE 0
975 #define TEST_SET_PIC_DONE 1
976 #define TEST_SET_S2_DONE 2
979 static void vp9_work(struct work_struct
*work
);
981 struct loop_filter_info_n
;
985 #ifdef SUPPORT_FB_DECODING
986 static void mpred_process(struct VP9Decoder_s
*pbi
);
987 static void vp9_s1_work(struct work_struct
*work
);
991 unsigned short rpm
[RPM_END
- RPM_BEGIN
];
994 static unsigned int not_run2_ready
[MAX_DECODE_INSTANCE_NUM
];
996 static unsigned int run2_count
[MAX_DECODE_INSTANCE_NUM
];
998 #ifdef FB_DECODING_TEST_SCHEDULE
999 u32 stage_buf_num
; /* = 16;*/
1005 struct VP9Decoder_s
{
1006 #ifdef MULTI_INSTANCE_SUPPORT
1007 unsigned char index
;
1009 struct device
*cma_dev
;
1010 struct platform_device
*platform_dev
;
1011 void (*vdec_cb
)(struct vdec_s
*, void *);
1013 struct vframe_chunk_s
*chunk
;
1015 struct work_struct work
;
1016 struct work_struct recycle_mmu_work
;
1017 struct work_struct set_clk_work
;
1018 u32 start_shift_bytes
;
1020 struct BuffInfo_s work_space_buf_store
;
1021 unsigned long buf_start
;
1023 u32 cma_alloc_count
;
1024 unsigned long cma_alloc_addr
;
1026 unsigned long int start_process_time
;
1027 unsigned last_lcu_idx
;
1028 int decode_timeout_count
;
1029 unsigned timeout_num
;
1030 int save_buffer_mode
;
1032 int double_write_mode
;
1036 unsigned char m_ins_flag
;
1037 char *provider_name
;
1038 union param_u param
;
1042 struct timer_list timer
;
1047 uint8_t first_sc_checked
;
1048 uint8_t process_busy
;
1049 #define PROC_STATE_INIT 0
1050 #define PROC_STATE_DECODESLICE 1
1051 #define PROC_STATE_SENDAGAIN 2
1052 uint8_t process_state
;
1053 u32 ucode_pause_pos
;
1056 struct buff_s mc_buf_spec
;
1057 struct dec_sysinfo vvp9_amstream_dec_info
;
1060 dma_addr_t rpm_phy_addr
;
1061 dma_addr_t lmem_phy_addr
;
1062 unsigned short *lmem_ptr
;
1063 unsigned short *debug_ptr
;
1065 void *prob_buffer_addr
;
1066 void *count_buffer_addr
;
1067 dma_addr_t prob_buffer_phy_addr
;
1068 dma_addr_t count_buffer_phy_addr
;
1070 void *frame_mmu_map_addr
;
1071 dma_addr_t frame_mmu_map_phy_addr
;
1073 unsigned int use_cma_flag
;
1075 struct BUF_s m_BUF
[MAX_BUF_NUM
];
1076 struct MVBUF_s m_mv_BUF
[MV_BUFFER_NUM
];
1078 DECLARE_KFIFO(newframe_q
, struct vframe_s
*, VF_POOL_SIZE
);
1079 DECLARE_KFIFO(display_q
, struct vframe_s
*, VF_POOL_SIZE
);
1080 DECLARE_KFIFO(pending_q
, struct vframe_s
*, VF_POOL_SIZE
);
1081 struct vframe_s vfpool
[VF_POOL_SIZE
];
1088 unsigned int losless_comp_body_size
;
1090 u32 video_signal_type
;
1093 int last_lookup_pts
;
1095 u64 last_lookup_pts_us64
;
1097 u64 shift_byte_count
;
1100 u32 frame_cnt_window
;
1103 u32 duration_from_pts_done
;
1104 bool vp9_first_pts_ready
;
1106 u32 shift_byte_count_lo
;
1107 u32 shift_byte_count_hi
;
1108 int pts_mode_switching_count
;
1109 int pts_mode_recovery_count
;
1112 u32 saved_resolution
;
1115 struct VP9_Common_s common
;
1116 struct RefCntBuffer_s
*cur_buf
;
1117 int refresh_frame_flags
;
1118 uint8_t need_resync
;
1119 uint8_t hold_ref_buf
;
1120 uint8_t ready_for_new_data
;
1121 struct BufferPool_s vp9_buffer_pool
;
1123 struct BuffInfo_s
*work_space_buf
;
1125 struct buff_s
*mc_buf
;
1127 unsigned int frame_width
;
1128 unsigned int frame_height
;
1130 unsigned short *rpm_ptr
;
1141 uint8_t has_keyframe
;
1145 /* bit 0, for decoding; bit 1, for displaying */
1146 uint8_t ignore_bufmgr_error
;
1148 int PB_skip_count_after_decoding
;
1152 int default_filt_lvl
;
1153 struct loop_filter_info_n
*lfi
;
1154 struct loopfilter
*lf
;
1155 struct segmentation
*seg_4lf
;
1157 struct vdec_info
*gvs
;
1159 u32 pre_stream_offset
;
1161 unsigned int dec_status
;
1163 int new_frame_displayed
;
1167 struct vframe_master_display_colour_s vf_dp
;
1168 struct firmware_s
*fw
;
1171 #ifdef SUPPORT_FB_DECODING
1174 struct work_struct s1_work
;
1175 int used_stage_buf_num
;
1178 void *stage_mmu_map_addr
;
1179 dma_addr_t stage_mmu_map_phy_addr
;
1180 struct stage_buf_s
*s1_buf
;
1181 struct stage_buf_s
*s2_buf
;
1182 struct stage_buf_s
*stage_bufs
1183 [STAGE_MAX_BUFFERS
];
1184 unsigned char run2_busy
;
1186 int s1_mv_buf_index
;
1187 int s1_mv_buf_index_pre
;
1188 int s1_mv_buf_index_pre_pre
;
1189 unsigned long s1_mpred_mv_wr_start_addr
;
1190 unsigned long s1_mpred_mv_wr_start_addr_pre
;
1191 unsigned short s1_intra_only
;
1192 unsigned short s1_frame_type
;
1193 unsigned short s1_width
;
1194 unsigned short s1_height
;
1195 unsigned short s1_last_show_frame
;
1196 union param_u s1_param
;
1197 u8 back_not_run_ready
;
1199 int need_cache_size
;
1202 int low_latency_flag
;
1204 bool pic_list_init_done
;
1205 bool pic_list_init_done2
;
1208 bool v4l_params_parsed
;
1209 int frameinfo_enable
;
1210 struct vframe_qos_s vframe_qos
;
1212 u32 dynamic_buf_num_margin
;
1213 struct vframe_s vframe_dummy
;
1214 unsigned int res_ch_flag
;
1215 /*struct VP9Decoder_s vp9_decoder;*/
1216 union param_u vp9_param
;
1218 int sidebind_channel_id
;
1221 u32 frame_mode_pts_save
[FRAME_BUFFERS
];
1222 u64 frame_mode_pts64_save
[FRAME_BUFFERS
];
1223 int run_ready_min_buf_num
;
1224 int one_package_frame_cnt
;
/*
 * printk-style trace helper for one decoder instance: formats the
 * message into a local buffer prefixed with the instance index ("[n]")
 * and emits it via pr_debug().
 *
 * NOTE(review): lines missing from this extract include the va_list/len
 * declarations, the test of 'flag' against the debug mask, va_end(), and
 * the return statement; code kept byte-identical apart from comments.
 */
static int vp9_print(struct VP9Decoder_s *pbi,
	int flag, const char *fmt, ...)
#define HEVC_PRINT_BUF 256
	unsigned char buf[HEVC_PRINT_BUF];

	va_start(args, fmt);
	/* "[index]" prefix identifies the instance in multi-instance mode */
	len = sprintf(buf, "[%d]", pbi->index);
	/* bounded format of the caller's message after the prefix */
	vsnprintf(buf + len, HEVC_PRINT_BUF - len, fmt, args);
	pr_debug("%s", buf);
/*
 * Check whether a coded frame size is invalid or too large for this SoC:
 * chips >= SM1 allow up to 8K (MAX_SIZE_8K = 8192*4608 pixels), older
 * chips up to 4K (MAX_SIZE_4K = 4096*2304).
 *
 * NOTE(review): the return statements of both checks and the final
 * "return false" are missing from this extract; code kept byte-identical
 * apart from comments.
 */
static int is_oversize(int w, int h)
	int max = (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1)?
		MAX_SIZE_8K : MAX_SIZE_4K;

	/* non-positive dimensions are always invalid */
	if (w <= 0 || h <= 0)

	/* pixel-count check written as a division to avoid w*h overflow */
	if (h != 0 && (w > max / h))
1263 static int v4l_alloc_and_config_pic(struct VP9Decoder_s
*pbi
,
1264 struct PIC_BUFFER_CONFIG_s
*pic
);
/*
 * React to a header-signalled resolution change: when the coded size in
 * the common context differs from the new width/height, reset the
 * decoder's PTS/duration tracking state (it is resolution-dependent) and
 * record the new size on the context.
 *
 * NOTE(review): this extract appears to be missing the assignment of
 * cm->width (only cm->height is set below) and the closing braces —
 * confirm against the full source; code kept byte-identical apart from
 * comments.
 */
static void resize_context_buffers(struct VP9Decoder_s *pbi,
	struct VP9_Common_s *cm, int width, int height)
	if (cm->width != width || cm->height != height) {
		/* resolution changed: PTS statistics are no longer valid */
		pbi->vp9_first_pts_ready = 0;
		pbi->duration_from_pts_done = 0;
		pr_info("%s (%d,%d)=>(%d,%d)\r\n", __func__, cm->width,
			cm->height, width, height);
		cm->height = height;
	/*
	 *if (cm->cur_frame->mvs == NULL ||
	 * cm->mi_rows > cm->cur_frame->mi_rows ||
	 * cm->mi_cols > cm->cur_frame->mi_cols) {
	 * resize_mv_buffer(cm);
	 */
/*
 * Check whether a reference frame's dimensions may legally be used for
 * the current frame per the VP9 scaling constraints: in each dimension
 * the reference may be at most 2x larger and at most 16x smaller than
 * the current frame.  Returns non-zero when the reference is usable.
 */
static int valid_ref_frame_size(int ref_width, int ref_height,
	int this_width, int this_height) {
	/* reference no larger than 2x the current frame in either axis */
	int ref_not_too_large = (2 * this_width >= ref_width) &&
		(2 * this_height >= ref_height);
	/* reference no smaller than 1/16 the current frame in either axis */
	int ref_not_too_small = (this_width <= 16 * ref_width) &&
		(this_height <= 16 * ref_height);

	return ref_not_too_large && ref_not_too_small;
}
1298 *static int valid_ref_frame_img_fmt(enum vpx_bit_depth_t ref_bit_depth,
1299 * int ref_xss, int ref_yss,
1300 * enum vpx_bit_depth_t this_bit_depth,
1301 * int this_xss, int this_yss) {
1302 * return ref_bit_depth == this_bit_depth && ref_xss == this_xss &&
1303 * ref_yss == this_yss;
/*
 * Parse the coded frame size from the header params, program it into the
 * HEVC parser hardware, allocate the compressed-frame MMU mapping when
 * needed, (re)allocate the frame buffer for the new frame, and propagate
 * width/height/bit-depth/subsampling into the buffer-pool entry.
 *
 * NOTE(review): this extract is missing several original lines — the
 * local declarations of 'width'/'height'/'ret', the error-return paths
 * after is_oversize()/vp9_alloc_mmu()/vpx_internal_error(), the #endif
 * matching VP9_10B_HED_FB, and some closing braces; code kept
 * byte-identical apart from comments.
 */
static int setup_frame_size(
	struct VP9Decoder_s *pbi,
	struct VP9_Common_s *cm, union param_u *params,
	unsigned int *mmu_index_adr,
	int print_header_info) {
	struct BufferPool_s * const pool = cm->buffer_pool;
	struct PIC_BUFFER_CONFIG_s *ybf;

	/* coded size comes straight from the ucode-parsed header params */
	width = params->p.width;
	height = params->p.height;
	if (is_oversize(width, height)) {
		vp9_print(pbi, 0, "%s, Error: Invalid frame size\n", __func__);
	/*vp9_read_frame_size(rb, &width, &height);*/
	if (print_header_info)
		pr_info(" * 16-bits w read : %d (width : %d)\n", width, height);
	if (print_header_info)
		/* NOTE(review): the pr_info call for the height trace appears
		 * truncated in this extract */
		(" * 16-bits h read : %d (height : %d)\n", width, height);

	/* program parser picture size: height in [31:16], width in [15:0] */
	WRITE_VREG(HEVC_PARSER_PICTURE_SIZE, (height << 16) | width);
#ifdef VP9_10B_HED_FB
	WRITE_VREG(HEVC_ASSIST_PIC_SIZE_FB_READ, (height << 16) | width);
	/* with the MMU enabled and not in double-write-only mode (0x10),
	 * allocate the compressed-frame MMU map for the new frame buffer */
	if (pbi->mmu_enable && ((pbi->double_write_mode & 0x10) == 0)) {
		ret = vp9_alloc_mmu(pbi,
			params->p.bit_depth,
		pr_err("can't alloc need mmu1,idx %d ret =%d\n",
	cm->cur_fb_idx_mmu = cm->new_fb_idx;

	/* apply any resolution change, then derive the display size */
	resize_context_buffers(pbi, cm, width, height);
	setup_display_size(cm, params, print_header_info);

	/* buffer pool is shared between workers; guard the (re)allocation */
	lock_buffer_pool(pool);
	if (vp9_realloc_frame_buffer(
		get_frame_new_buffer(cm), cm->width, cm->height,
		cm->subsampling_x, cm->subsampling_y,
#if CONFIG_VP9_HIGHBITDEPTH
		cm->use_highbitdepth,
		VP9_DEC_BORDER_IN_PIXELS,
		&pool->frame_bufs[cm->new_fb_idx].raw_frame_buffer,
		pool->get_fb_cb, pool->cb_priv)) {
		unlock_buffer_pool(pool);
		vpx_internal_error(&cm->error, VPX_CODEC_MEM_ERROR,
			"Failed to allocate frame buffer");
	unlock_buffer_pool(pool);

	/* record cropped size and bit depth on the newly selected buffer */
	ybf = get_frame_new_buffer(cm);
	ybf->y_crop_width = width;
	ybf->y_crop_height = height;
	ybf->bit_depth = params->p.bit_depth;

	/* mirror the context's chroma subsampling / depth / colorspace
	 * into the pool entry so consumers see consistent metadata */
	pool->frame_bufs[cm->new_fb_idx].buf.subsampling_x = cm->subsampling_x;
	pool->frame_bufs[cm->new_fb_idx].buf.subsampling_y = cm->subsampling_y;
	pool->frame_bufs[cm->new_fb_idx].buf.bit_depth =
		(unsigned int)cm->bit_depth;
	pool->frame_bufs[cm->new_fb_idx].buf.color_space = cm->color_space;
1389 static int setup_frame_size_with_refs(
1390 struct VP9Decoder_s
*pbi
,
1391 struct VP9_Common_s
*cm
,
1392 union param_u
*params
,
1393 unsigned int *mmu_index_adr
,
1394 int print_header_info
) {
1398 int has_valid_ref_frame
= 0;
1399 struct PIC_BUFFER_CONFIG_s
*ybf
;
1400 struct BufferPool_s
* const pool
= cm
->buffer_pool
;
1403 for (i
= 0; i
< REFS_PER_FRAME
; ++i
) {
1404 if ((params
->p
.same_frame_size
>>
1405 (REFS_PER_FRAME
- i
- 1)) & 0x1) {
1406 struct PIC_BUFFER_CONFIG_s
*const buf
=
1407 cm
->frame_refs
[i
].buf
;
1408 /*if (print_header_info)
1410 * ("1-bit same_frame_size[%d] read : 1\n", i);
1412 width
= buf
->y_crop_width
;
1413 height
= buf
->y_crop_height
;
1414 /*if (print_header_info)
1416 * (" - same_frame_size width : %d\n", width);
1418 /*if (print_header_info)
1420 * (" - same_frame_size height : %d\n", height);
1425 /*if (print_header_info)
1427 * ("1-bit same_frame_size[%d] read : 0\n", i);
1433 /*vp9_read_frame_size(rb, &width, &height);*/
1434 width
= params
->p
.width
;
1435 height
= params
->p
.height
;
1436 /*if (print_header_info)
1438 * (" * 16-bits w read : %d (width : %d)\n",
1440 *if (print_header_info)
1442 * (" * 16-bits h read : %d (height : %d)\n",
1447 if (is_oversize(width
, height
)) {
1448 vp9_print(pbi
, 0, "%s, Error: Invalid frame size\n", __func__
);
1452 params
->p
.width
= width
;
1453 params
->p
.height
= height
;
1455 WRITE_VREG(HEVC_PARSER_PICTURE_SIZE
, (height
<< 16) | width
);
1456 if (pbi
->mmu_enable
&& ((pbi
->double_write_mode
& 0x10) == 0)) {
1457 /*if(cm->prev_fb_idx >= 0) release_unused_4k(cm->prev_fb_idx);
1458 *cm->prev_fb_idx = cm->new_fb_idx;
1461 * ("[DEBUG DEBUG]Before alloc_mmu,
1462 * prev_fb_idx : %d, new_fb_idx : %d\r\n",
1463 * cm->prev_fb_idx, cm->new_fb_idx);
1465 ret
= vp9_alloc_mmu(pbi
, cm
->new_fb_idx
,
1466 params
->p
.width
, params
->p
.height
,
1467 params
->p
.bit_depth
, mmu_index_adr
);
1469 pr_err("can't alloc need mmu,idx %d\r\n",
1473 cm
->cur_fb_idx_mmu
= cm
->new_fb_idx
;
1476 /*Check to make sure at least one of frames that this frame references
1477 *has valid dimensions.
1479 for (i
= 0; i
< REFS_PER_FRAME
; ++i
) {
1480 struct RefBuffer_s
* const ref_frame
= &cm
->frame_refs
[i
];
1482 has_valid_ref_frame
|=
1483 valid_ref_frame_size(ref_frame
->buf
->y_crop_width
,
1484 ref_frame
->buf
->y_crop_height
,
1487 if (!has_valid_ref_frame
) {
1488 pr_err("Error: Referenced frame has invalid size\r\n");
1492 for (i
= 0; i
< REFS_PER_FRAME
; ++i
) {
1493 struct RefBuffer_s
* const ref_frame
=
1495 if (!valid_ref_frame_img_fmt(
1496 ref_frame
->buf
->bit_depth
,
1497 ref_frame
->buf
->subsampling_x
,
1498 ref_frame
->buf
->subsampling_y
,
1503 ("Referenced frame incompatible color fmt\r\n");
1507 resize_context_buffers(pbi
, cm
, width
, height
);
1508 setup_display_size(cm
, params
, print_header_info
);
1511 lock_buffer_pool(pool
);
1512 if (vp9_realloc_frame_buffer(
1513 get_frame_new_buffer(cm
), cm
->width
, cm
->height
,
1514 cm
->subsampling_x
, cm
->subsampling_y
,
1515 #if CONFIG_VP9_HIGHBITDEPTH
1516 cm
->use_highbitdepth
,
1518 VP9_DEC_BORDER_IN_PIXELS
,
1520 &pool
->frame_bufs
[cm
->new_fb_idx
].raw_frame_buffer
,
1523 unlock_buffer_pool(pool
);
1524 vpx_internal_error(&cm
->error
, VPX_CODEC_MEM_ERROR
,
1525 "Failed to allocate frame buffer");
1527 unlock_buffer_pool(pool
);
1530 ybf
= get_frame_new_buffer(cm
);
1534 ybf
->y_crop_width
= width
;
1535 ybf
->y_crop_height
= height
;
1536 ybf
->bit_depth
= params
->p
.bit_depth
;
1538 pool
->frame_bufs
[cm
->new_fb_idx
].buf
.subsampling_x
= cm
->subsampling_x
;
1539 pool
->frame_bufs
[cm
->new_fb_idx
].buf
.subsampling_y
= cm
->subsampling_y
;
1540 pool
->frame_bufs
[cm
->new_fb_idx
].buf
.bit_depth
=
1541 (unsigned int)cm
->bit_depth
;
1542 pool
->frame_bufs
[cm
->new_fb_idx
].buf
.color_space
= cm
->color_space
;
/*
 * close_to() - test whether two values are within a margin of each other.
 * @a: first value
 * @b: second value
 * @m: exclusive margin
 *
 * Return: true when |a - b| < m, false otherwise.
 */
static inline bool close_to(int a, int b, int m)
{
	/* The boolean expression already yields true/false; the original
	 * "? true : false" ternary was redundant.
	 */
	return abs(a - b) < m;
}
1551 #ifdef MULTI_INSTANCE_SUPPORT
1552 static int vp9_print_cont(struct VP9Decoder_s
*pbi
,
1553 int flag
, const char *fmt
, ...)
1555 unsigned char buf
[HEVC_PRINT_BUF
];
1563 va_start(args
, fmt
);
1564 vsnprintf(buf
+ len
, HEVC_PRINT_BUF
- len
, fmt
, args
);
1565 pr_debug("%s", buf
);
1571 static void trigger_schedule(struct VP9Decoder_s
*pbi
)
1573 if (pbi
->is_used_v4l
) {
1574 struct aml_vcodec_ctx
*ctx
=
1575 (struct aml_vcodec_ctx
*)(pbi
->v4l2_ctx
);
1577 if (ctx
->param_sets_from_ucode
&&
1578 !pbi
->v4l_params_parsed
)
1579 vdec_v4l_write_frame_sync(ctx
);
1583 pbi
->vdec_cb(hw_to_vdec(pbi
), pbi
->vdec_cb_arg
);
1586 static void reset_process_time(struct VP9Decoder_s
*pbi
)
1588 if (pbi
->start_process_time
) {
1589 unsigned process_time
=
1590 1000 * (jiffies
- pbi
->start_process_time
) / HZ
;
1591 pbi
->start_process_time
= 0;
1592 if (process_time
> max_process_time
[pbi
->index
])
1593 max_process_time
[pbi
->index
] = process_time
;
1597 static void start_process_time(struct VP9Decoder_s
*pbi
)
1599 pbi
->start_process_time
= jiffies
;
1600 pbi
->decode_timeout_count
= 0;
1601 pbi
->last_lcu_idx
= 0;
1604 static void timeout_process(struct VP9Decoder_s
*pbi
)
1609 0, "%s decoder timeout\n", __func__
);
1611 pbi
->dec_result
= DEC_RESULT_DONE
;
1612 reset_process_time(pbi
);
1613 vdec_schedule_work(&pbi
->work
);
1616 static u32
get_valid_double_write_mode(struct VP9Decoder_s
*pbi
)
1618 return ((double_write_mode
& 0x80000000) == 0) ?
1619 pbi
->double_write_mode
:
1620 (double_write_mode
& 0x7fffffff);
1623 static int v4l_parser_get_double_write_mode(struct VP9Decoder_s
*pbi
)
1625 u32 valid_dw_mode
= get_valid_double_write_mode(pbi
);
1629 /* mask for supporting double write value bigger than 0x100 */
1630 if (valid_dw_mode
& 0xffffff00) {
1631 w
= pbi
->frame_width
;
1632 h
= pbi
->frame_height
;
1635 switch (valid_dw_mode
) {
1637 if (w
> 1920 && h
> 1088)
1641 if (w
> 1920 && h
> 1088)
1645 if (w
> 1280 && h
> 720)
1654 return valid_dw_mode
;
1658 static int get_double_write_mode(struct VP9Decoder_s
*pbi
)
1660 u32 valid_dw_mode
= get_valid_double_write_mode(pbi
);
1663 struct VP9_Common_s
*cm
= &pbi
->common
;
1664 struct PIC_BUFFER_CONFIG_s
*cur_pic_config
;
1666 /* mask for supporting double write value bigger than 0x100 */
1667 if (valid_dw_mode
& 0xffffff00) {
1669 return 1;/*no valid frame,*/
1670 cur_pic_config
= &cm
->cur_frame
->buf
;
1671 w
= cur_pic_config
->y_crop_width
;
1672 h
= cur_pic_config
->y_crop_height
;
1675 switch (valid_dw_mode
) {
1677 if (w
> 1920 && h
> 1088)
1681 if (w
> 1920 && h
> 1088)
1685 if (w
> 1280 && h
> 720)
1694 return valid_dw_mode
;
1697 /* for double write buf alloc */
1698 static int get_double_write_mode_init(struct VP9Decoder_s
*pbi
)
1700 u32 valid_dw_mode
= get_valid_double_write_mode(pbi
);
1702 int w
= pbi
->init_pic_w
;
1703 int h
= pbi
->init_pic_h
;
1706 switch (valid_dw_mode
) {
1708 if (w
> 1920 && h
> 1088)
1712 if (w
> 1920 && h
> 1088)
1716 if (w
> 1280 && h
> 720)
1727 static int get_double_write_ratio(struct VP9Decoder_s
*pbi
,
1731 if ((dw_mode
== 2) ||
1734 else if (dw_mode
== 4)
1739 //#define MAX_4K_NUM 0x1200
1742 struct VP9Decoder_s
*pbi
,
1746 unsigned short bit_depth
,
1747 unsigned int *mmu_index_adr
)
1749 int bit_depth_10
= (bit_depth
== VPX_BITS_10
);
1751 int cur_mmu_4k_number
, max_frame_num
;
1752 if (!pbi
->mmu_box
) {
1753 pr_err("error no mmu box!\n");
1756 if (get_double_write_mode(pbi
) == 0x10)
1758 if (bit_depth
>= VPX_BITS_12
) {
1759 pbi
->fatal_error
= DECODER_FATAL_ERROR_SIZE_OVERFLOW
;
1760 pr_err("fatal_error, un support bit depth 12!\n\n");
1763 picture_size
= compute_losless_comp_body_size(pic_width
, pic_height
,
1765 cur_mmu_4k_number
= ((picture_size
+ (1 << 12) - 1) >> 12);
1767 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1
)
1768 max_frame_num
= MAX_FRAME_8K_NUM
;
1770 max_frame_num
= MAX_FRAME_4K_NUM
;
1772 if (cur_mmu_4k_number
> max_frame_num
) {
1773 pr_err("over max !! cur_mmu_4k_number 0x%x width %d height %d\n",
1774 cur_mmu_4k_number
, pic_width
, pic_height
);
1778 return decoder_mmu_box_alloc_idx(
1786 #ifndef MV_USE_FIXED_BUF
1787 static void dealloc_mv_bufs(struct VP9Decoder_s
*pbi
)
1790 for (i
= 0; i
< MV_BUFFER_NUM
; i
++) {
1791 if (pbi
->m_mv_BUF
[i
].start_adr
) {
1794 "dealloc mv buf(%d) adr %ld size 0x%x used_flag %d\n",
1795 i
, pbi
->m_mv_BUF
[i
].start_adr
,
1796 pbi
->m_mv_BUF
[i
].size
,
1797 pbi
->m_mv_BUF
[i
].used_flag
);
1798 decoder_bmmu_box_free_idx(
1801 pbi
->m_mv_BUF
[i
].start_adr
= 0;
1802 pbi
->m_mv_BUF
[i
].size
= 0;
1803 pbi
->m_mv_BUF
[i
].used_flag
= 0;
1808 static int alloc_mv_buf(struct VP9Decoder_s
*pbi
,
1813 if (pbi
->m_mv_BUF
[i
].start_adr
&&
1814 size
> pbi
->m_mv_BUF
[i
].size
) {
1815 dealloc_mv_bufs(pbi
);
1816 } else if (pbi
->m_mv_BUF
[i
].start_adr
)
1819 if (decoder_bmmu_box_alloc_buf_phy
1821 MV_BUFFER_IDX(i
), size
,
1823 &pbi
->m_mv_BUF
[i
].start_adr
) < 0) {
1824 pbi
->m_mv_BUF
[i
].start_adr
= 0;
1827 pbi
->m_mv_BUF
[i
].size
= size
;
1828 pbi
->m_mv_BUF
[i
].used_flag
= 0;
1832 "MV Buffer %d: start_adr %p size %x\n",
1834 (void *)pbi
->m_mv_BUF
[i
].start_adr
,
1835 pbi
->m_mv_BUF
[i
].size
);
1841 static int init_mv_buf_list(struct VP9Decoder_s
*pbi
)
1845 int count
= MV_BUFFER_NUM
;
1846 int pic_width
= pbi
->init_pic_w
;
1847 int pic_height
= pbi
->init_pic_h
;
1848 int lcu_size
= 64; /*fixed 64*/
1849 int pic_width_64
= (pic_width
+ 63) & (~0x3f);
1850 int pic_height_32
= (pic_height
+ 31) & (~0x1f);
1851 int pic_width_lcu
= (pic_width_64
% lcu_size
) ?
1852 pic_width_64
/ lcu_size
+ 1
1853 : pic_width_64
/ lcu_size
;
1854 int pic_height_lcu
= (pic_height_32
% lcu_size
) ?
1855 pic_height_32
/ lcu_size
+ 1
1856 : pic_height_32
/ lcu_size
;
1857 int lcu_total
= pic_width_lcu
* pic_height_lcu
;
1858 int size
= ((lcu_total
* MV_MEM_UNIT
) + 0xffff) &
1860 if (mv_buf_margin
> 0)
1861 count
= REF_FRAMES
+ mv_buf_margin
;
1863 if (pbi
->init_pic_w
> 2048 && pbi
->init_pic_h
> 1088)
1864 count
= REF_FRAMES_4K
+ mv_buf_margin
;
1867 pr_info("%s w:%d, h:%d, count: %d\n",
1868 __func__
, pbi
->init_pic_w
, pbi
->init_pic_h
, count
);
1872 i
< count
&& i
< MV_BUFFER_NUM
; i
++) {
1873 if (alloc_mv_buf(pbi
, i
, size
) < 0) {
1881 static int get_mv_buf(struct VP9Decoder_s
*pbi
,
1883 unsigned long *mpred_mv_wr_start_addr
)
1887 for (i
= 0; i
< MV_BUFFER_NUM
; i
++) {
1888 if (pbi
->m_mv_BUF
[i
].start_adr
&&
1889 pbi
->m_mv_BUF
[i
].used_flag
== 0) {
1890 pbi
->m_mv_BUF
[i
].used_flag
= 1;
1897 *mv_buf_index
= ret
;
1898 *mpred_mv_wr_start_addr
=
1899 (pbi
->m_mv_BUF
[ret
].start_adr
+ 0xffff) &
1901 if (debug
& VP9_DEBUG_BUFMGR_MORE
)
1903 "%s => %d (%ld) size 0x%x\n",
1905 *mpred_mv_wr_start_addr
,
1906 pbi
->m_mv_BUF
[ret
].size
);
1909 "%s: Error, mv buf is not enough\n",
1915 static void put_mv_buf(struct VP9Decoder_s
*pbi
,
1918 int i
= *mv_buf_index
;
1919 if (i
>= MV_BUFFER_NUM
) {
1920 if (debug
& VP9_DEBUG_BUFMGR_MORE
)
1922 "%s: index %d beyond range\n",
1926 if (debug
& VP9_DEBUG_BUFMGR_MORE
)
1928 "%s(%d): used_flag(%d)\n",
1930 pbi
->m_mv_BUF
[i
].used_flag
);
1933 if (pbi
->m_mv_BUF
[i
].start_adr
&&
1934 pbi
->m_mv_BUF
[i
].used_flag
)
1935 pbi
->m_mv_BUF
[i
].used_flag
= 0;
1938 static void put_un_used_mv_bufs(struct VP9Decoder_s
*pbi
)
1940 struct VP9_Common_s
*const cm
= &pbi
->common
;
1941 struct RefCntBuffer_s
*const frame_bufs
= cm
->buffer_pool
->frame_bufs
;
1943 for (i
= 0; i
< pbi
->used_buf_num
; ++i
) {
1944 if ((frame_bufs
[i
].ref_count
== 0) &&
1945 (frame_bufs
[i
].buf
.index
!= -1) &&
1946 (frame_bufs
[i
].buf
.mv_buf_index
>= 0)
1948 put_mv_buf(pbi
, &frame_bufs
[i
].buf
.mv_buf_index
);
1952 #ifdef SUPPORT_FB_DECODING
1953 static bool mv_buf_available(struct VP9Decoder_s
*pbi
)
1957 for (i
= 0; i
< MV_BUFFER_NUM
; i
++) {
1958 if (pbi
->m_mv_BUF
[i
].start_adr
&&
1959 pbi
->m_mv_BUF
[i
].used_flag
== 0) {
1969 #ifdef SUPPORT_FB_DECODING
1970 static void init_stage_buf(struct VP9Decoder_s
*pbi
)
1973 for (i
= 0; i
< STAGE_MAX_BUFFERS
1974 && i
< stage_buf_num
; i
++) {
1975 pbi
->stage_bufs
[i
] =
1976 vmalloc(sizeof(struct stage_buf_s
));
1977 if (pbi
->stage_bufs
[i
] == NULL
) {
1979 0, "%s vmalloc fail\n", __func__
);
1982 pbi
->stage_bufs
[i
]->index
= i
;
1984 pbi
->used_stage_buf_num
= i
;
1989 pbi
->s1_mv_buf_index
= FRAME_BUFFERS
;
1990 pbi
->s1_mv_buf_index_pre
= FRAME_BUFFERS
;
1991 pbi
->s1_mv_buf_index_pre_pre
= FRAME_BUFFERS
;
1993 if (pbi
->used_stage_buf_num
> 0)
1995 0, "%s 2 stage decoding buf %d\n",
1997 pbi
->used_stage_buf_num
);
2000 static void uninit_stage_buf(struct VP9Decoder_s
*pbi
)
2003 for (i
= 0; i
< pbi
->used_stage_buf_num
; i
++) {
2004 if (pbi
->stage_bufs
[i
])
2005 vfree(pbi
->stage_bufs
[i
]);
2006 pbi
->stage_bufs
[i
] = NULL
;
2008 pbi
->used_stage_buf_num
= 0;
2015 static int get_s1_buf(
2016 struct VP9Decoder_s
*pbi
)
2018 struct stage_buf_s
*buf
= NULL
;
2020 int buf_page_num
= MAX_STAGE_PAGE_NUM
;
2021 int next_s1_pos
= pbi
->s1_pos
+ 1;
2023 if (next_s1_pos
>= pbi
->used_stage_buf_num
)
2025 if (next_s1_pos
== pbi
->s2_pos
) {
2030 buf
= pbi
->stage_bufs
[pbi
->s1_pos
];
2031 ret
= decoder_mmu_box_alloc_idx(
2035 pbi
->stage_mmu_map_addr
);
2038 "%s decoder_mmu_box_alloc fail for index %d (s1_pos %d s2_pos %d)\n",
2039 __func__
, buf
->index
,
2040 pbi
->s1_pos
, pbi
->s2_pos
);
2043 vp9_print(pbi
, VP9_DEBUG_2_STAGE
,
2044 "%s decoder_mmu_box_alloc %d page for index %d (s1_pos %d s2_pos %d)\n",
2045 __func__
, buf_page_num
, buf
->index
,
2046 pbi
->s1_pos
, pbi
->s2_pos
);
2052 static void inc_s1_pos(struct VP9Decoder_s
*pbi
)
2054 struct stage_buf_s
*buf
=
2055 pbi
->stage_bufs
[pbi
->s1_pos
];
2058 #ifdef FB_DECODING_TEST_SCHEDULE
2059 MAX_STAGE_PAGE_NUM
/2;
2061 (READ_VREG(HEVC_ASSIST_HED_FB_W_CTL
) >> 16);
2063 decoder_mmu_box_free_idx_tail(pbi
->mmu_box
,
2064 FRAME_BUFFERS
+ buf
->index
, used_page_num
);
2067 if (pbi
->s1_pos
>= pbi
->used_stage_buf_num
)
2070 vp9_print(pbi
, VP9_DEBUG_2_STAGE
,
2071 "%s (used_page_num %d) for index %d (s1_pos %d s2_pos %d)\n",
2072 __func__
, used_page_num
, buf
->index
,
2073 pbi
->s1_pos
, pbi
->s2_pos
);
2076 #define s2_buf_available(pbi) (pbi->s1_pos != pbi->s2_pos)
2078 static int get_s2_buf(
2079 struct VP9Decoder_s
*pbi
)
2082 struct stage_buf_s
*buf
= NULL
;
2083 if (s2_buf_available(pbi
)) {
2084 buf
= pbi
->stage_bufs
[pbi
->s2_pos
];
2085 vp9_print(pbi
, VP9_DEBUG_2_STAGE
,
2086 "%s for index %d (s1_pos %d s2_pos %d)\n",
2087 __func__
, buf
->index
,
2088 pbi
->s1_pos
, pbi
->s2_pos
);
2095 static void inc_s2_pos(struct VP9Decoder_s
*pbi
)
2097 struct stage_buf_s
*buf
=
2098 pbi
->stage_bufs
[pbi
->s2_pos
];
2099 decoder_mmu_box_free_idx(pbi
->mmu_box
,
2100 FRAME_BUFFERS
+ buf
->index
);
2102 if (pbi
->s2_pos
>= pbi
->used_stage_buf_num
)
2104 vp9_print(pbi
, VP9_DEBUG_2_STAGE
,
2105 "%s for index %d (s1_pos %d s2_pos %d)\n",
2106 __func__
, buf
->index
,
2107 pbi
->s1_pos
, pbi
->s2_pos
);
2110 static int get_free_stage_buf_num(struct VP9Decoder_s
*pbi
)
2113 if (pbi
->s1_pos
>= pbi
->s2_pos
)
2114 num
= pbi
->used_stage_buf_num
-
2115 (pbi
->s1_pos
- pbi
->s2_pos
) - 1;
2117 num
= (pbi
->s2_pos
- pbi
->s1_pos
) - 1;
2121 #ifndef FB_DECODING_TEST_SCHEDULE
2122 static DEFINE_SPINLOCK(fb_core_spin_lock
);
2124 static u8
is_s2_decoding_finished(struct VP9Decoder_s
*pbi
)
2126 /* to do: VLSI review
2127 completion of last LCU decoding in BACK
2132 static void start_s1_decoding(struct VP9Decoder_s
*pbi
)
2134 /* to do: VLSI review
2135 after parser, how to start LCU decoding in BACK
2139 static void fb_reset_core(struct vdec_s
*vdec
, u32 mask
)
2141 /* to do: VLSI review
2142 1. how to disconnect DMC for FRONT and BACK
2143 2. reset bit 13, 24, FRONT or BACK ??
2146 unsigned long flags
;
2148 if (mask
& HW_MASK_FRONT
)
2149 WRITE_VREG(HEVC_STREAM_CONTROL
, 0);
2150 spin_lock_irqsave(&fb_core_spin_lock
, flags
);
2151 codec_dmcbus_write(DMC_REQ_CTRL
,
2152 codec_dmcbus_read(DMC_REQ_CTRL
) & (~(1 << 4)));
2153 spin_unlock_irqrestore(&fb_core_spin_lock
, flags
);
2155 while (!(codec_dmcbus_read(DMC_CHAN_STS
)
2159 if ((mask
& HW_MASK_FRONT
) &&
2160 input_frame_based(vdec
))
2161 WRITE_VREG(HEVC_STREAM_CONTROL
, 0);
2178 if (mask
& HW_MASK_FRONT
) {
2180 (1<<3)|(1<<4)|(1<<11)|
2183 if (mask
& HW_MASK_BACK
) {
2185 (1<<8)|(1<<13)|(1<<14)|(1<<15)|
2186 (1<<17)|(1<<19)|(1<<24);
2188 WRITE_VREG(DOS_SW_RESET3
, reset_bits
);
2190 (1<<3)|(1<<4)|(1<<8)|(1<<11)|
2191 (1<<12)|(1<<13)|(1<<14)|(1<<15)|
2192 (1<<17)|(1<<18)|(1<<19)|(1<<24);
2194 WRITE_VREG(DOS_SW_RESET3
, 0);
2197 spin_lock_irqsave(&fb_core_spin_lock
, flags
);
2198 codec_dmcbus_write(DMC_REQ_CTRL
,
2199 codec_dmcbus_read(DMC_REQ_CTRL
) | (1 << 4));
2200 spin_unlock_irqrestore(&fb_core_spin_lock
, flags
);
2207 static void init_pic_list_hw(struct VP9Decoder_s
*pbi
);
2209 static int get_free_fb(struct VP9Decoder_s
*pbi
)
2211 struct VP9_Common_s
*const cm
= &pbi
->common
;
2212 struct RefCntBuffer_s
*const frame_bufs
= cm
->buffer_pool
->frame_bufs
;
2214 unsigned long flags
;
2216 lock_buffer_pool(cm
->buffer_pool
, flags
);
2217 if (debug
& VP9_DEBUG_BUFMGR_MORE
) {
2218 for (i
= 0; i
< pbi
->used_buf_num
; ++i
) {
2219 pr_info("%s:%d, ref_count %d vf_ref %d index %d\r\n",
2220 __func__
, i
, frame_bufs
[i
].ref_count
,
2221 frame_bufs
[i
].buf
.vf_ref
,
2222 frame_bufs
[i
].buf
.index
);
2225 for (i
= 0; i
< pbi
->used_buf_num
; ++i
) {
2226 if ((frame_bufs
[i
].ref_count
== 0) &&
2227 (frame_bufs
[i
].buf
.vf_ref
== 0) &&
2228 (frame_bufs
[i
].buf
.index
!= -1)
2232 if (i
!= pbi
->used_buf_num
) {
2233 frame_bufs
[i
].ref_count
= 1;
2234 /*pr_info("[MMU DEBUG 1] set ref_count[%d] : %d\r\n",
2235 i, frame_bufs[i].ref_count);*/
2237 /* Reset i to be INVALID_IDX to indicate
2238 no free buffer found*/
2242 unlock_buffer_pool(cm
->buffer_pool
, flags
);
2246 static int v4l_get_free_fb(struct VP9Decoder_s
*pbi
)
2248 struct VP9_Common_s
*const cm
= &pbi
->common
;
2249 struct RefCntBuffer_s
*const frame_bufs
= cm
->buffer_pool
->frame_bufs
;
2250 struct aml_vcodec_ctx
* v4l
= pbi
->v4l2_ctx
;
2251 struct v4l_buff_pool
*pool
= &v4l
->cap_pool
;
2252 struct PIC_BUFFER_CONFIG_s
*pic
= NULL
;
2253 int i
, idx
= INVALID_IDX
;
2256 lock_buffer_pool(cm
->buffer_pool
, flags
);
2258 for (i
= 0; i
< pool
->in
; ++i
) {
2259 u32 state
= (pool
->seq
[i
] >> 16);
2260 u32 index
= (pool
->seq
[i
] & 0xffff);
2263 case V4L_CAP_BUFF_IN_DEC
:
2264 pic
= &frame_bufs
[i
].buf
;
2265 if ((frame_bufs
[i
].ref_count
== 0) &&
2266 (pic
->vf_ref
== 0) &&
2267 (pic
->index
!= -1) &&
2268 pic
->cma_alloc_addr
) {
2272 case V4L_CAP_BUFF_IN_M2M
:
2273 pic
= &frame_bufs
[index
].buf
;
2274 pic
->y_crop_width
= pbi
->frame_width
;
2275 pic
->y_crop_height
= pbi
->frame_height
;
2276 if (!v4l_alloc_and_config_pic(pbi
, pic
)) {
2277 set_canvas(pbi
, pic
);
2278 init_pic_list_hw(pbi
);
2283 pr_err("v4l buffer state err %d.\n", state
);
2287 if (idx
!= INVALID_IDX
) {
2288 frame_bufs
[idx
].ref_count
= 1;
2293 unlock_buffer_pool(cm
->buffer_pool
, flags
);
2298 static int get_free_buf_count(struct VP9Decoder_s
*pbi
)
2300 struct VP9_Common_s
*const cm
= &pbi
->common
;
2301 struct RefCntBuffer_s
*const frame_bufs
= cm
->buffer_pool
->frame_bufs
;
2303 int free_buf_count
= 0;
2304 for (i
= 0; i
< pbi
->used_buf_num
; ++i
)
2305 if ((frame_bufs
[i
].ref_count
== 0) &&
2306 (frame_bufs
[i
].buf
.vf_ref
== 0) &&
2307 (frame_bufs
[i
].buf
.index
!= -1)
2310 return free_buf_count
;
2313 static void decrease_ref_count(int idx
, struct RefCntBuffer_s
*const frame_bufs
,
2314 struct BufferPool_s
*const pool
)
2317 --frame_bufs
[idx
].ref_count
;
2318 /*pr_info("[MMU DEBUG 7] dec ref_count[%d] : %d\r\n", idx,
2319 * frame_bufs[idx].ref_count);
2321 /*A worker may only get a free framebuffer index when
2322 *calling get_free_fb. But the private buffer is not set up
2323 *until finish decoding header. So any error happens during
2324 *decoding header, the frame_bufs will not have valid priv
2328 if (frame_bufs
[idx
].ref_count
== 0 &&
2329 frame_bufs
[idx
].raw_frame_buffer
.priv
)
2330 vp9_release_frame_buffer
2331 (&frame_bufs
[idx
].raw_frame_buffer
);
2335 static void generate_next_ref_frames(struct VP9Decoder_s
*pbi
)
2337 struct VP9_Common_s
*const cm
= &pbi
->common
;
2338 struct RefCntBuffer_s
*frame_bufs
= cm
->buffer_pool
->frame_bufs
;
2339 struct BufferPool_s
*const pool
= cm
->buffer_pool
;
2340 int mask
, ref_index
= 0;
2341 unsigned long flags
;
2343 /* Generate next_ref_frame_map.*/
2344 lock_buffer_pool(pool
, flags
);
2345 for (mask
= pbi
->refresh_frame_flags
; mask
; mask
>>= 1) {
2347 cm
->next_ref_frame_map
[ref_index
] = cm
->new_fb_idx
;
2348 ++frame_bufs
[cm
->new_fb_idx
].ref_count
;
2349 /*pr_info("[MMU DEBUG 4] inc ref_count[%d] : %d\r\n",
2350 *cm->new_fb_idx, frame_bufs[cm->new_fb_idx].ref_count);
2353 cm
->next_ref_frame_map
[ref_index
] =
2354 cm
->ref_frame_map
[ref_index
];
2355 /* Current thread holds the reference frame.*/
2356 if (cm
->ref_frame_map
[ref_index
] >= 0) {
2357 ++frame_bufs
[cm
->ref_frame_map
[ref_index
]].ref_count
;
2359 *("[MMU DEBUG 5] inc ref_count[%d] : %d\r\n",
2360 *cm->ref_frame_map[ref_index],
2361 *frame_bufs[cm->ref_frame_map[ref_index]].ref_count);
2367 for (; ref_index
< REF_FRAMES
; ++ref_index
) {
2368 cm
->next_ref_frame_map
[ref_index
] =
2369 cm
->ref_frame_map
[ref_index
];
2370 /* Current thread holds the reference frame.*/
2371 if (cm
->ref_frame_map
[ref_index
] >= 0) {
2372 ++frame_bufs
[cm
->ref_frame_map
[ref_index
]].ref_count
;
2373 /*pr_info("[MMU DEBUG 6] inc ref_count[%d] : %d\r\n",
2374 *cm->ref_frame_map[ref_index],
2375 *frame_bufs[cm->ref_frame_map[ref_index]].ref_count);
2379 unlock_buffer_pool(pool
, flags
);
2383 static void refresh_ref_frames(struct VP9Decoder_s
*pbi
)
2386 struct VP9_Common_s
*const cm
= &pbi
->common
;
2387 struct BufferPool_s
*pool
= cm
->buffer_pool
;
2388 struct RefCntBuffer_s
*frame_bufs
= cm
->buffer_pool
->frame_bufs
;
2389 int mask
, ref_index
= 0;
2390 unsigned long flags
;
2392 lock_buffer_pool(pool
, flags
);
2393 for (mask
= pbi
->refresh_frame_flags
; mask
; mask
>>= 1) {
2394 const int old_idx
= cm
->ref_frame_map
[ref_index
];
2395 /*Current thread releases the holding of reference frame.*/
2396 decrease_ref_count(old_idx
, frame_bufs
, pool
);
2398 /*Release the reference frame in reference map.*/
2399 if ((mask
& 1) && old_idx
>= 0)
2400 decrease_ref_count(old_idx
, frame_bufs
, pool
);
2401 cm
->ref_frame_map
[ref_index
] =
2402 cm
->next_ref_frame_map
[ref_index
];
2406 /*Current thread releases the holding of reference frame.*/
2407 for (; ref_index
< REF_FRAMES
&& !cm
->show_existing_frame
;
2409 const int old_idx
= cm
->ref_frame_map
[ref_index
];
2411 decrease_ref_count(old_idx
, frame_bufs
, pool
);
2412 cm
->ref_frame_map
[ref_index
] =
2413 cm
->next_ref_frame_map
[ref_index
];
2415 unlock_buffer_pool(pool
, flags
);
2419 int vp9_bufmgr_process(struct VP9Decoder_s
*pbi
, union param_u
*params
)
2421 struct VP9_Common_s
*const cm
= &pbi
->common
;
2422 struct BufferPool_s
*pool
= cm
->buffer_pool
;
2423 struct RefCntBuffer_s
*frame_bufs
= cm
->buffer_pool
->frame_bufs
;
2424 struct PIC_BUFFER_CONFIG_s
*pic
= NULL
;
2428 pbi
->ready_for_new_data
= 0;
2430 if (pbi
->has_keyframe
== 0 &&
2431 params
->p
.frame_type
!= KEY_FRAME
){
2432 on_no_keyframe_skiped
++;
2435 pbi
->has_keyframe
= 1;
2436 on_no_keyframe_skiped
= 0;
2438 if (pbi
->mmu_enable
) {
2439 if (!pbi
->m_ins_flag
)
2440 pbi
->used_4k_num
= (READ_VREG(HEVC_SAO_MMU_STATUS
) >> 16);
2441 if (cm
->prev_fb_idx
>= 0) {
2442 decoder_mmu_box_free_idx_tail(pbi
->mmu_box
,
2443 cm
->prev_fb_idx
, pbi
->used_4k_num
);
2447 if (cm
->new_fb_idx
>= 0
2448 && frame_bufs
[cm
->new_fb_idx
].ref_count
== 0){
2449 vp9_release_frame_buffer
2450 (&frame_bufs
[cm
->new_fb_idx
].raw_frame_buffer
);
2452 /*pr_info("Before get_free_fb, prev_fb_idx : %d, new_fb_idx : %d\r\n",
2453 cm->prev_fb_idx, cm->new_fb_idx);*/
2454 #ifndef MV_USE_FIXED_BUF
2455 put_un_used_mv_bufs(pbi
);
2456 if (debug
& VP9_DEBUG_BUFMGR_DETAIL
)
2459 cm
->new_fb_idx
= pbi
->is_used_v4l
?
2460 v4l_get_free_fb(pbi
) :
2462 if (cm
->new_fb_idx
== INVALID_IDX
) {
2463 pr_info("get_free_fb error\r\n");
2467 #ifndef MV_USE_FIXED_BUF
2468 #ifdef SUPPORT_FB_DECODING
2469 if (pbi
->used_stage_buf_num
== 0) {
2472 &pool
->frame_bufs
[cm
->new_fb_idx
].
2474 &pool
->frame_bufs
[cm
->new_fb_idx
].
2475 buf
.mpred_mv_wr_start_addr
2477 pr_info("get_mv_buf fail\r\n");
2480 if (debug
& VP9_DEBUG_BUFMGR_DETAIL
)
2482 #ifdef SUPPORT_FB_DECODING
2486 cm
->cur_frame
= &pool
->frame_bufs
[cm
->new_fb_idx
];
2487 /*if (debug & VP9_DEBUG_BUFMGR)
2488 pr_info("[VP9 DEBUG]%s(get_free_fb): %d\r\n", __func__,
2491 pbi
->cur_buf
= &frame_bufs
[cm
->new_fb_idx
];
2492 if (pbi
->mmu_enable
) {
2493 /* moved to after picture size ready
2494 *alloc_mmu(cm, params->p.width, params->p.height,
2495 *params->p.bit_depth, pbi->frame_mmu_map_addr);
2497 cm
->prev_fb_idx
= cm
->new_fb_idx
;
2499 /*read_uncompressed_header()*/
2500 cm
->last_frame_type
= cm
->frame_type
;
2501 cm
->last_intra_only
= cm
->intra_only
;
2502 cm
->profile
= params
->p
.profile
;
2503 if (cm
->profile
>= MAX_PROFILES
) {
2504 pr_err("Error: Unsupported profile %d\r\n", cm
->profile
);
2507 cm
->show_existing_frame
= params
->p
.show_existing_frame
;
2508 if (cm
->show_existing_frame
) {
2509 /* Show an existing frame directly.*/
2510 int frame_to_show_idx
= params
->p
.frame_to_show_idx
;
2512 unsigned long flags
;
2513 if (frame_to_show_idx
>= REF_FRAMES
) {
2514 pr_info("frame_to_show_idx %d exceed max index\r\n",
2519 frame_to_show
= cm
->ref_frame_map
[frame_to_show_idx
];
2520 /*pr_info("frame_to_show %d\r\n", frame_to_show);*/
2521 lock_buffer_pool(pool
, flags
);
2522 if (frame_to_show
< 0 ||
2523 frame_bufs
[frame_to_show
].ref_count
< 1) {
2524 unlock_buffer_pool(pool
, flags
);
2526 ("Error:Buffer %d does not contain a decoded frame",
2531 ref_cnt_fb(frame_bufs
, &cm
->new_fb_idx
, frame_to_show
);
2532 unlock_buffer_pool(pool
, flags
);
2533 pbi
->refresh_frame_flags
= 0;
2534 /*cm->lf.filter_level = 0;*/
2538 *if (pbi->frame_parallel_decode) {
2539 * for (i = 0; i < REF_FRAMES; ++i)
2540 * cm->next_ref_frame_map[i] =
2541 * cm->ref_frame_map[i];
2544 /* do not decode, search next start code */
2547 cm
->frame_type
= params
->p
.frame_type
;
2548 cm
->show_frame
= params
->p
.show_frame
;
2549 cm
->bit_depth
= params
->p
.bit_depth
;
2550 cm
->error_resilient_mode
= params
->p
.error_resilient_mode
;
2553 if (cm
->frame_type
== KEY_FRAME
) {
2554 pbi
->refresh_frame_flags
= (1 << REF_FRAMES
) - 1;
2556 for (i
= 0; i
< REFS_PER_FRAME
; ++i
) {
2557 cm
->frame_refs
[i
].idx
= INVALID_IDX
;
2558 cm
->frame_refs
[i
].buf
= NULL
;
2561 ret
= setup_frame_size(pbi
,
2562 cm
, params
, pbi
->frame_mmu_map_addr
,
2566 if (pbi
->need_resync
) {
2567 memset(&cm
->ref_frame_map
, -1,
2568 sizeof(cm
->ref_frame_map
));
2569 pbi
->need_resync
= 0;
2572 cm
->intra_only
= cm
->show_frame
? 0 : params
->p
.intra_only
;
2573 /*if (print_header_info) {
2574 * if (cm->show_frame)
2576 * ("intra_only set to 0 because of show_frame\n");
2579 * ("1-bit intra_only read: %d\n", cm->intra_only);
2584 cm
->reset_frame_context
= cm
->error_resilient_mode
?
2585 0 : params
->p
.reset_frame_context
;
2586 if (print_header_info
) {
2587 if (cm
->error_resilient_mode
)
2589 ("reset to 0 error_resilient_mode\n");
2592 (" * 2-bits reset_frame_context read : %d\n",
2593 cm
->reset_frame_context
);
2596 if (cm
->intra_only
) {
2597 if (cm
->profile
> PROFILE_0
) {
2598 /*read_bitdepth_colorspace_sampling(cm,
2599 * rb, print_header_info);
2602 /*NOTE: The intra-only frame header
2603 *does not include the specification
2604 *of either the color format or
2606 *in profile 0. VP9 specifies that the default
2607 *color format should be YUV 4:2:0 in this
2610 cm
->color_space
= VPX_CS_BT_601
;
2611 cm
->subsampling_y
= cm
->subsampling_x
= 1;
2612 cm
->bit_depth
= VPX_BITS_8
;
2613 cm
->use_highbitdepth
= 0;
2616 pbi
->refresh_frame_flags
=
2617 params
->p
.refresh_frame_flags
;
2618 /*if (print_header_info)
2619 * pr_info("*%d-bits refresh_frame read:0x%x\n",
2620 * REF_FRAMES, pbi->refresh_frame_flags);
2622 ret
= setup_frame_size(pbi
,
2625 pbi
->frame_mmu_map_addr
,
2629 if (pbi
->need_resync
) {
2630 memset(&cm
->ref_frame_map
, -1,
2631 sizeof(cm
->ref_frame_map
));
2632 pbi
->need_resync
= 0;
2634 } else if (pbi
->need_resync
!= 1) { /* Skip if need resync */
2635 pbi
->refresh_frame_flags
=
2636 params
->p
.refresh_frame_flags
;
2637 if (print_header_info
)
2639 ("*%d-bits refresh_frame read:0x%x\n",
2640 REF_FRAMES
, pbi
->refresh_frame_flags
);
2641 for (i
= 0; i
< REFS_PER_FRAME
; ++i
) {
2643 (params
->p
.ref_info
>>
2644 (((REFS_PER_FRAME
-i
-1)*4)+1))
2647 cm
->ref_frame_map
[ref
];
2648 struct RefBuffer_s
* const ref_frame
=
2650 if (print_header_info
)
2651 pr_info("*%d-bits ref[%d]read:%d\n",
2652 REF_FRAMES_LOG2
, i
, ref
);
2653 ref_frame
->idx
= idx
;
2654 ref_frame
->buf
= &frame_bufs
[idx
].buf
;
2655 cm
->ref_frame_sign_bias
[LAST_FRAME
+ i
]
2656 = (params
->p
.ref_info
>>
2657 ((REFS_PER_FRAME
-i
-1)*4)) & 0x1;
2658 if (print_header_info
)
2659 pr_info("1bit ref_frame_sign_bias");
2663 *cm->ref_frame_sign_bias
2667 *("[VP9 DEBUG]%s(get ref):%d\r\n",
2668 *__func__, ref_frame->idx);
2673 ret
= setup_frame_size_with_refs(
2677 pbi
->frame_mmu_map_addr
,
2681 for (i
= 0; i
< REFS_PER_FRAME
; ++i
) {
2682 /*struct RefBuffer_s *const ref_buf =
2683 *&cm->frame_refs[i];
2686 *vp9_setup_scale_factors_for_frame
2692 pic
= get_frame_new_buffer(cm
);
2696 pic
->bit_depth
= cm
->bit_depth
;
2697 pic
->color_space
= cm
->color_space
;
2698 pic
->slice_type
= cm
->frame_type
;
2700 if (pbi
->need_resync
) {
2702 ("Error: Keyframe/intra-only frame required to reset\r\n");
2705 generate_next_ref_frames(pbi
);
2706 pbi
->hold_ref_buf
= 1;
2709 if (frame_is_intra_only(cm
) || cm
->error_resilient_mode
)
2710 vp9_setup_past_independence(cm
);
2711 setup_loopfilter(&cm
->lf
, rb
, print_header_info
);
2712 setup_quantization(cm
, &pbi
->mb
, rb
, print_header_info
);
2713 setup_segmentation(&cm
->seg
, rb
, print_header_info
);
2714 setup_segmentation_dequant(cm
, print_header_info
);
2716 setup_tile_info(cm
, rb
, print_header_info
);
2717 sz
= vp9_rb_read_literal(rb
, 16);
2718 if (print_header_info
)
2719 pr_info(" * 16-bits size read : %d (0x%x)\n", sz
, sz
);
2722 vpx_internal_error(&cm
->error
, VPX_CODEC_CORRUPT_FRAME
,
2723 "Invalid header size");
2725 /*end read_uncompressed_header()*/
2726 cm
->use_prev_frame_mvs
= !cm
->error_resilient_mode
&&
2727 cm
->width
== cm
->last_width
&&
2728 cm
->height
== cm
->last_height
&&
2729 !cm
->last_intra_only
&&
2730 cm
->last_show_frame
&&
2731 (cm
->last_frame_type
!= KEY_FRAME
);
2734 *("set use_prev_frame_mvs to %d (last_width %d last_height %d",
2735 *cm->use_prev_frame_mvs, cm->last_width, cm->last_height);
2737 *(" last_intra_only %d last_show_frame %d last_frame_type %d)\n",
2738 *cm->last_intra_only, cm->last_show_frame, cm->last_frame_type);
2741 if (pbi
->enable_fence
&& cm
->show_frame
) {
2742 struct PIC_BUFFER_CONFIG_s
*pic
= &cm
->cur_frame
->buf
;
2743 struct vdec_s
*vdec
= hw_to_vdec(pbi
);
2745 /* create fence for each buffers. */
2746 ret
= vdec_timeline_create_fence(&vdec
->sync
);
2750 pic
->fence
= vdec
->sync
.fence
;
2751 pic
->bit_depth
= cm
->bit_depth
;
2752 pic
->slice_type
= cm
->frame_type
;
2753 pic
->stream_offset
= pbi
->pre_stream_offset
;
2756 pic
->pts
= pbi
->chunk
->pts
;
2757 pic
->pts64
= pbi
->chunk
->pts64
;
2758 pic
->timestamp
= pbi
->chunk
->timestamp
;
2761 /* post video vframe. */
2762 prepare_display_buf(pbi
, pic
);
2769 void swap_frame_buffers(struct VP9Decoder_s
*pbi
)
2772 struct VP9_Common_s
*const cm
= &pbi
->common
;
2773 struct BufferPool_s
*const pool
= cm
->buffer_pool
;
2774 struct RefCntBuffer_s
*const frame_bufs
= cm
->buffer_pool
->frame_bufs
;
2775 unsigned long flags
;
2776 refresh_ref_frames(pbi
);
2777 pbi
->hold_ref_buf
= 0;
2778 cm
->frame_to_show
= get_frame_new_buffer(cm
);
2780 if (cm
->frame_to_show
) {
2781 /*if (!pbi->frame_parallel_decode || !cm->show_frame) {*/
2782 lock_buffer_pool(pool
, flags
);
2783 --frame_bufs
[cm
->new_fb_idx
].ref_count
;
2784 /*pr_info("[MMU DEBUG 8] dec ref_count[%d] : %d\r\n", cm->new_fb_idx,
2785 * frame_bufs[cm->new_fb_idx].ref_count);
2787 unlock_buffer_pool(pool
, flags
);
2791 /*Invalidate these references until the next frame starts.*/
2792 for (ref_index
= 0; ref_index
< 3; ref_index
++)
2793 cm
->frame_refs
[ref_index
].idx
= -1;
2797 static void check_resync(vpx_codec_alg_priv_t
*const ctx
,
2798 const struct VP9Decoder_s
*const pbi
)
2800 /* Clear resync flag if worker got a key frame or intra only frame.*/
2801 if (ctx
->need_resync
== 1 && pbi
->need_resync
== 0 &&
2802 (pbi
->common
.intra_only
|| pbi
->common
.frame_type
== KEY_FRAME
))
2803 ctx
->need_resync
= 0;
2807 int vp9_get_raw_frame(struct VP9Decoder_s
*pbi
, struct PIC_BUFFER_CONFIG_s
*sd
)
2809 struct VP9_Common_s
*const cm
= &pbi
->common
;
2812 if (pbi
->ready_for_new_data
== 1)
2815 pbi
->ready_for_new_data
= 1;
2817 /* no raw frame to show!!! */
2818 if (!cm
->show_frame
)
2821 /* may not be get buff in v4l2 */
2822 if (!cm
->frame_to_show
)
2825 pbi
->ready_for_new_data
= 1;
2827 *sd
= *cm
->frame_to_show
;
2833 int vp9_bufmgr_init(struct VP9Decoder_s
*pbi
, struct BuffInfo_s
*buf_spec_i
,
2834 struct buff_s
*mc_buf_i
) {
2835 struct VP9_Common_s
*cm
= &pbi
->common
;
2837 /*memset(pbi, 0, sizeof(struct VP9Decoder_s));*/
2838 pbi
->frame_count
= 0;
2840 pbi
->pre_stream_offset
= 0;
2841 cm
->buffer_pool
= &pbi
->vp9_buffer_pool
;
2842 spin_lock_init(&cm
->buffer_pool
->lock
);
2843 cm
->prev_fb_idx
= INVALID_IDX
;
2844 cm
->new_fb_idx
= INVALID_IDX
;
2845 pbi
->used_4k_num
= -1;
2846 cm
->cur_fb_idx_mmu
= INVALID_IDX
;
2848 ("After vp9_bufmgr_init, prev_fb_idx : %d, new_fb_idx : %d\r\n",
2849 cm
->prev_fb_idx
, cm
->new_fb_idx
);
2850 pbi
->need_resync
= 1;
2851 /* Initialize the references to not point to any frame buffers.*/
2852 memset(&cm
->ref_frame_map
, -1, sizeof(cm
->ref_frame_map
));
2853 memset(&cm
->next_ref_frame_map
, -1, sizeof(cm
->next_ref_frame_map
));
2854 cm
->current_video_frame
= 0;
2855 pbi
->ready_for_new_data
= 1;
2858 pbi
->work_space_buf
= buf_spec_i
;
2859 if (!pbi
->mmu_enable
)
2860 pbi
->mc_buf
= mc_buf_i
;
2862 pbi
->rpm_addr
= NULL
;
2863 pbi
->lmem_addr
= NULL
;
2865 pbi
->use_cma_flag
= 0;
2866 pbi
->decode_idx
= 0;
2868 /*int m_uiMaxCUWidth = 1<<7;*/
2869 /*int m_uiMaxCUHeight = 1<<7;*/
2870 pbi
->has_keyframe
= 0;
2873 pbi
->error_flag
= 0;
2875 pbi
->pts_mode
= PTS_NORMAL
;
2877 pbi
->last_lookup_pts
= 0;
2878 pbi
->last_pts_us64
= 0;
2879 pbi
->last_lookup_pts_us64
= 0;
2880 pbi
->shift_byte_count
= 0;
2881 pbi
->shift_byte_count_lo
= 0;
2882 pbi
->shift_byte_count_hi
= 0;
2883 pbi
->pts_mode_switching_count
= 0;
2884 pbi
->pts_mode_recovery_count
= 0;
2892 int vp9_bufmgr_postproc(struct VP9Decoder_s
*pbi
)
2894 struct vdec_s
*vdec
= hw_to_vdec(pbi
);
2895 struct VP9_Common_s
*cm
= &pbi
->common
;
2896 struct PIC_BUFFER_CONFIG_s sd
;
2898 if (pbi
->postproc_done
)
2900 pbi
->postproc_done
= 1;
2901 swap_frame_buffers(pbi
);
2902 if (!cm
->show_existing_frame
) {
2903 cm
->last_show_frame
= cm
->show_frame
;
2904 cm
->prev_frame
= cm
->cur_frame
;
2906 if (cm
->seg
.enabled
&& !pbi
->frame_parallel_decode
)
2907 vp9_swap_current_and_last_seg_map(cm
);
2910 cm
->last_width
= cm
->width
;
2911 cm
->last_height
= cm
->height
;
2913 cm
->current_video_frame
++;
2915 if (vp9_get_raw_frame(pbi
, &sd
) == 0) {
2916 /*pr_info("Display frame index %d\r\n", sd.index);*/
2917 sd
.stream_offset
= pbi
->pre_stream_offset
;
2919 if (pbi
->enable_fence
) {
2920 /* notify signal to wake up wq of fence. */
2921 vdec_timeline_increase(&vdec
->sync
, 1);
2923 prepare_display_buf(pbi
, &sd
);
2926 pbi
->pre_stream_offset
= READ_VREG(HEVC_SHIFT_BYTE_COUNT
);
2931 * ("Not display this frame,ready_for_new_data%d show_frame%d\r\n",
2932 * pbi->ready_for_new_data, cm->show_frame);
2937 /**************************************************
2939 *VP9 buffer management end
2941 ***************************************************
2945 #define HEVC_CM_BODY_START_ADDR 0x3626
2946 #define HEVC_CM_BODY_LENGTH 0x3627
2947 #define HEVC_CM_HEADER_LENGTH 0x3629
2948 #define HEVC_CM_HEADER_OFFSET 0x362b
2950 #define LOSLESS_COMPRESS_MODE
2952 /*#define DECOMP_HEADR_SURGENT*/
2954 static u32 mem_map_mode
= 2 /* 0:linear 1:32x32 2:64x32*/
2956 static u32 mem_map_mode
; /* 0:linear 1:32x32 2:64x32 ; m8baby test1902 */
2958 static u32 enable_mem_saving
= 1;
2959 static u32 force_w_h
;
2961 static u32 force_fps
;
2964 const u32 vp9_version
= 201602101;
2968 static u32 pop_shorts
;
2970 static u32 dbg_skip_decode_index
;
2971 static u32 endian
= 0xff0;
2972 #ifdef ERROR_HANDLE_DEBUG
2973 static u32 dbg_nal_skip_flag
;
2974 /* bit[0], skip vps; bit[1], skip sps; bit[2], skip pps */
2975 static u32 dbg_nal_skip_count
;
2978 static u32 decode_pic_begin
;
2979 static uint slice_parse_begin
;
2981 #ifdef MIX_STREAM_SUPPORT
2982 static u32 buf_alloc_width
= 4096;
2983 static u32 buf_alloc_height
= 2304;
2984 static u32 vp9_max_pic_w
= 4096;
2985 static u32 vp9_max_pic_h
= 2304;
2987 static u32 dynamic_buf_num_margin
;
2989 static u32 buf_alloc_width
;
2990 static u32 buf_alloc_height
;
2991 static u32 dynamic_buf_num_margin
= 7;
2993 static u32 buf_alloc_depth
= 10;
2994 static u32 buf_alloc_size
;
2997 * bit[1]: 0, always release cma buffer when stop
2998 * bit[1]: 1, never release cma buffer when stop
2999 *bit[0]: 1, when stop, release cma buffer if blackout is 1;
3000 *do not release cma buffer is blackout is not 1
3002 *bit[2]: 0, when start decoding, check current displayed buffer
3003 * (only for buffer decoded by vp9) if blackout is 0
3004 * 1, do not check current displayed buffer
3006 *bit[3]: 1, if blackout is not 1, do not release current
3007 * displayed cma buffer always.
3009 /* set to 1 for fast play;
3010 * set to 8 for other case of "keep last frame"
3012 static u32 buffer_mode
= 1;
3013 /* buffer_mode_dbg: debug only*/
3014 static u32 buffer_mode_dbg
= 0xffff0000;
3018 *bit 0, 1: only display I picture;
3019 *bit 1, 1: only decode I picture;
3021 static u32 i_only_flag
;
3023 static u32 low_latency_flag
;
3027 static u32 max_decoding_time
;
3031 /*error_handle_policy:
3032 *bit 0: 0, auto skip error_skip_nal_count nals before error recovery;
3033 *1, skip error_skip_nal_count nals before error recovery;
3034 *bit 1 (valid only when bit0 == 1):
3035 *1, wait vps/sps/pps after error recovery;
3036 *bit 2 (valid only when bit0 == 0):
3037 *0, auto search after error recovery (vp9_recover() called);
3038 *1, manual search after error recovery
3039 *(change to auto search after get IDR: WRITE_VREG(NAL_SEARCH_CTL, 0x2))
3041 *bit 4: 0, set error_mark after reset/recover
3042 * 1, do not set error_mark after reset/recover
3043 *bit 5: 0, check total lcu for every picture
3044 * 1, do not check total lcu
3048 static u32 error_handle_policy
;
3049 /*static u32 parser_sei_enable = 1;*/
3050 #define MAX_BUF_NUM_NORMAL 12
3051 #define MAX_BUF_NUM_LESS 10
3052 static u32 max_buf_num
= MAX_BUF_NUM_NORMAL
;
3053 #define MAX_BUF_NUM_SAVE_BUF 8
3055 static u32 run_ready_min_buf_num
= 2;
3058 static DEFINE_MUTEX(vvp9_mutex
);
3059 #ifndef MULTI_INSTANCE_SUPPORT
3060 static struct device
*cma_dev
;
3063 #define HEVC_DEC_STATUS_REG HEVC_ASSIST_SCRATCH_0
3064 #define HEVC_RPM_BUFFER HEVC_ASSIST_SCRATCH_1
3065 #define HEVC_SHORT_TERM_RPS HEVC_ASSIST_SCRATCH_2
3066 #define VP9_ADAPT_PROB_REG HEVC_ASSIST_SCRATCH_3
3067 #define VP9_MMU_MAP_BUFFER HEVC_ASSIST_SCRATCH_4
3068 #define HEVC_PPS_BUFFER HEVC_ASSIST_SCRATCH_5
3069 #define HEVC_SAO_UP HEVC_ASSIST_SCRATCH_6
3070 #define HEVC_STREAM_SWAP_BUFFER HEVC_ASSIST_SCRATCH_7
3071 #define HEVC_STREAM_SWAP_BUFFER2 HEVC_ASSIST_SCRATCH_8
3072 #define VP9_PROB_SWAP_BUFFER HEVC_ASSIST_SCRATCH_9
3073 #define VP9_COUNT_SWAP_BUFFER HEVC_ASSIST_SCRATCH_A
3074 #define VP9_SEG_MAP_BUFFER HEVC_ASSIST_SCRATCH_B
3075 #define HEVC_SCALELUT HEVC_ASSIST_SCRATCH_D
3076 #define HEVC_WAIT_FLAG HEVC_ASSIST_SCRATCH_E
3077 #define RPM_CMD_REG HEVC_ASSIST_SCRATCH_F
3078 #define LMEM_DUMP_ADR HEVC_ASSIST_SCRATCH_F
3079 #define HEVC_STREAM_SWAP_TEST HEVC_ASSIST_SCRATCH_L
3080 #ifdef MULTI_INSTANCE_SUPPORT
3081 #define HEVC_DECODE_COUNT HEVC_ASSIST_SCRATCH_M
3082 #define HEVC_DECODE_SIZE HEVC_ASSIST_SCRATCH_N
3084 #define HEVC_DECODE_PIC_BEGIN_REG HEVC_ASSIST_SCRATCH_M
3085 #define HEVC_DECODE_PIC_NUM_REG HEVC_ASSIST_SCRATCH_N
3087 #define DEBUG_REG1 HEVC_ASSIST_SCRATCH_G
3088 #define DEBUG_REG2 HEVC_ASSIST_SCRATCH_H
3092 *ucode parser/search control
3093 *bit 0: 0, header auto parse; 1, header manual parse
3094 *bit 1: 0, auto skip for noneseamless stream; 1, no skip
3095 *bit [3:2]: valid when bit1==0;
3096 *0, auto skip nal before first vps/sps/pps/idr;
3097 *1, auto skip nal before first vps/sps/pps
3098 *2, auto skip nal before first vps/sps/pps,
3099 * and not decode until the first I slice (with slice address of 0)
3101 *3, auto skip before first I slice (nal_type >=16 && nal_type<=21)
3102 *bit [15:4] nal skip count (valid when bit0 == 1 (manual mode) )
3103 *bit [16]: for NAL_UNIT_EOS when bit0 is 0:
3104 * 0, send SEARCH_DONE to arm ; 1, do not send SEARCH_DONE to arm
3105 *bit [17]: for NAL_SEI when bit0 is 0:
3106 * 0, do not parse SEI in ucode; 1, parse SEI in ucode
3107 *bit [31:20]: used by ucode for debug purpose
3109 #define NAL_SEARCH_CTL HEVC_ASSIST_SCRATCH_I
3110 /*[31:24] chip feature
3111 31: 0, use MBOX1; 1, use MBOX0
3113 #define DECODE_MODE HEVC_ASSIST_SCRATCH_J
3114 #define DECODE_STOP_POS HEVC_ASSIST_SCRATCH_K
3116 #ifdef MULTI_INSTANCE_SUPPORT
3117 #define RPM_BUF_SIZE (0x400 * 2)
3119 #define RPM_BUF_SIZE (0x80*2)
3121 #define LMEM_BUF_SIZE (0x400 * 2)
3123 #define WORK_BUF_SPEC_NUM 3
3124 static struct BuffInfo_s amvvp9_workbuff_spec
[WORK_BUF_SPEC_NUM
] = {
3130 /* IPP work space calculation :
3131 * 4096 * (Y+CbCr+Flags) = 12k, round to 16k
3136 .buf_size
= 0x30000,
3139 .buf_size
= 0x30000,
3142 /* SHORT_TERM_RPS - Max 64 set, 16 entry every set,
3143 * total 64x16x2 = 2048 bytes (0x800)
3148 /* VPS STORE AREA - Max 16 VPS, each has 0x80 bytes,
3149 * total 0x0800 bytes
3154 /* SPS STORE AREA - Max 16 SPS, each has 0x80 bytes,
3155 * total 0x0800 bytes
3160 /* PPS STORE AREA - Max 64 PPS, each has 0x80 bytes,
3161 * total 0x2000 bytes
3166 /* SAO UP STORE AREA - Max 640(10240/16) LCU,
3167 * each has 16 bytes total 0x2800 bytes
3172 /* 256cyclex64bit = 2K bytes 0x800
3173 * (only 144 cycles valid)
3181 /* support up to 32 SCALELUT 1024x32 =
3187 /* DBLK -> Max 256(4096/16) LCU,
3188 *each para 1024bytes(total:0x40000),
3189 *data 1024bytes(total:0x40000)
3191 .buf_size
= 0x80000,
3194 .buf_size
= 0x80000,
3197 /*4096x2304/64/64 *24 = 0xd800 Bytes*/
3201 .buf_size
= 0x5000, /*2*16*(more than 2304)/4, 4K*/
3205 /*add one for keeper.*/
3206 .buf_size
= MMU_COMPRESS_HEADER_SIZE
*
3207 (FRAME_BUFFERS
+ 1),
3208 /* 0x44000 = ((1088*2*1024*4)/32/4)*(32/8) */
3212 .buf_size
= 0x10000, /* 2 * size of hevc*/
3214 #ifdef MV_USE_FIXED_BUF
3215 .mpred_mv
= {/* 1080p, 0x40000 per buffer */
3216 .buf_size
= 0x40000 * FRAME_BUFFERS
,
3220 .buf_size
= RPM_BUF_SIZE
,
3223 .buf_size
= 0x400 * 2,
3230 /* IPP work space calculation :
3231 * 4096 * (Y+CbCr+Flags) = 12k, round to 16k
3236 .buf_size
= 0x30000,
3239 .buf_size
= 0x30000,
3242 /* SHORT_TERM_RPS - Max 64 set, 16 entry every set,
3243 * total 64x16x2 = 2048 bytes (0x800)
3248 /* VPS STORE AREA - Max 16 VPS, each has 0x80 bytes,
3249 * total 0x0800 bytes
3254 /* SPS STORE AREA - Max 16 SPS, each has 0x80 bytes,
3255 * total 0x0800 bytes
3260 /* PPS STORE AREA - Max 64 PPS, each has 0x80 bytes,
3261 * total 0x2000 bytes
3266 /* SAO UP STORE AREA - Max 640(10240/16) LCU,
3267 * each has 16 bytes total 0x2800 bytes
3272 /* 256cyclex64bit = 2K bytes 0x800
3273 * (only 144 cycles valid)
3281 /* support up to 32 SCALELUT 1024x32 = 32Kbytes
3287 /* DBLK -> Max 256(4096/16) LCU,
3288 *each para 1024bytes(total:0x40000),
3289 *data 1024bytes(total:0x40000)
3291 .buf_size
= 0x80000,
3294 .buf_size
= 0x80000,
3297 /*4096x2304/64/64 *24 = 0xd800 Bytes*/
3301 .buf_size
= 0x5000,/*2*16*(more than 2304)/4, 4K*/
3305 /*add one for keeper.*/
3306 .buf_size
= MMU_COMPRESS_HEADER_SIZE
*
3307 (FRAME_BUFFERS
+ 1),
3308 /* 0x44000 = ((1088*2*1024*4)/32/4)*(32/8) */
3312 .buf_size
= 0x10000, /* 2 * size of hevc*/
3314 #ifdef MV_USE_FIXED_BUF
3316 /* .buf_size = 0x100000*16,
3317 * //4k2k , 0x100000 per buffer
3319 /* 4096x2304 , 0x120000 per buffer */
3320 .buf_size
= 0x120000 * FRAME_BUFFERS
,
3324 .buf_size
= RPM_BUF_SIZE
,
3327 .buf_size
= 0x400 * 2,
3331 .max_width
= 4096*2,
3332 .max_height
= 2304*2,
3334 // IPP work space calculation : 4096 * (Y+CbCr+Flags) = 12k, round to 16k
3335 .buf_size
= 0x4000*2,
3338 .buf_size
= 0x30000*2,
3341 .buf_size
= 0x30000*2,
3344 // SHORT_TERM_RPS - Max 64 set, 16 entry every set, total 64x16x2 = 2048 bytes (0x800)
3348 // VPS STORE AREA - Max 16 VPS, each has 0x80 bytes, total 0x0800 bytes
3352 // SPS STORE AREA - Max 16 SPS, each has 0x80 bytes, total 0x0800 bytes
3356 // PPS STORE AREA - Max 64 PPS, each has 0x80 bytes, total 0x2000 bytes
3360 // SAO UP STORE AREA - Max 640(10240/16) LCU, each has 16 bytes total 0x2800 bytes
3361 .buf_size
= 0x2800*2,
3364 // 256cyclex64bit = 2K bytes 0x800 (only 144 cycles valid)
3371 // support up to 32 SCALELUT 1024x32 = 32Kbytes (0x8000)
3372 .buf_size
= 0x8000*2,
3375 // DBLK -> Max 256(4096/16) LCU, each para 1024bytes(total:0x40000), data 1024bytes(total:0x40000)
3376 .buf_size
= 0x80000*2,
3379 .buf_size
= 0x80000*2,
3382 /*4096x2304/64/64 *24 = 0xd800 Bytes*/
3383 .buf_size
= 0xd800*4,
3386 .buf_size
= 0x5000*2, //2*16*(more than 2304)/4, 4K
3390 //.buf_size = MMU_COMPRESS_HEADER_SIZE*8, // 0x44000 = ((1088*2*1024*4)/32/4)*(32/8)
3391 .buf_size
= MMU_COMPRESS_HEADER_SIZE
*16, // 0x44000 = ((1088*2*1024*4)/32/4)*(32/8)
3395 .buf_size
= 0x10000*2, /* 2 * size of hevc*/
3397 #ifdef MV_USE_FIXED_BUF
3399 //4k2k , 0x100000 per buffer */
3400 /* 4096x2304 , 0x120000 per buffer */
3401 .buf_size
= 0x120000 * FRAME_BUFFERS
* 4,
3405 .buf_size
= RPM_BUF_SIZE
,
3408 .buf_size
= 0x400 * 2,
3414 /*Losless compression body buffer size 4K per 64x32 (jt)*/
3415 int compute_losless_comp_body_size(int width
, int height
,
3416 uint8_t is_bit_depth_10
)
3422 width_x64
= width
+ 63;
3424 height_x32
= height
+ 31;
3426 bsize
= (is_bit_depth_10
?4096:3200)*width_x64
*height_x32
;
3427 if (debug
& VP9_DEBUG_BUFMGR_MORE
)
3428 pr_info("%s(%d,%d,%d)=>%d\n",
3429 __func__
, width
, height
,
3430 is_bit_depth_10
, bsize
);
3435 /* Losless compression header buffer size 32bytes per 128x64 (jt)*/
3436 static int compute_losless_comp_header_size(int width
, int height
)
3442 width_x128
= width
+ 127;
3444 height_x64
= height
+ 63;
3447 hsize
= 32 * width_x128
* height_x64
;
3448 if (debug
& VP9_DEBUG_BUFMGR_MORE
)
3449 pr_info("%s(%d,%d)=>%d\n",
3450 __func__
, width
, height
,
3456 static void init_buff_spec(struct VP9Decoder_s
*pbi
,
3457 struct BuffInfo_s
*buf_spec
)
3459 void *mem_start_virt
;
3461 buf_spec
->ipp
.buf_start
= buf_spec
->start_adr
;
3462 buf_spec
->sao_abv
.buf_start
=
3463 buf_spec
->ipp
.buf_start
+ buf_spec
->ipp
.buf_size
;
3465 buf_spec
->sao_vb
.buf_start
=
3466 buf_spec
->sao_abv
.buf_start
+ buf_spec
->sao_abv
.buf_size
;
3467 buf_spec
->short_term_rps
.buf_start
=
3468 buf_spec
->sao_vb
.buf_start
+ buf_spec
->sao_vb
.buf_size
;
3469 buf_spec
->vps
.buf_start
=
3470 buf_spec
->short_term_rps
.buf_start
+
3471 buf_spec
->short_term_rps
.buf_size
;
3472 buf_spec
->sps
.buf_start
=
3473 buf_spec
->vps
.buf_start
+ buf_spec
->vps
.buf_size
;
3474 buf_spec
->pps
.buf_start
=
3475 buf_spec
->sps
.buf_start
+ buf_spec
->sps
.buf_size
;
3476 buf_spec
->sao_up
.buf_start
=
3477 buf_spec
->pps
.buf_start
+ buf_spec
->pps
.buf_size
;
3478 buf_spec
->swap_buf
.buf_start
=
3479 buf_spec
->sao_up
.buf_start
+ buf_spec
->sao_up
.buf_size
;
3480 buf_spec
->swap_buf2
.buf_start
=
3481 buf_spec
->swap_buf
.buf_start
+ buf_spec
->swap_buf
.buf_size
;
3482 buf_spec
->scalelut
.buf_start
=
3483 buf_spec
->swap_buf2
.buf_start
+ buf_spec
->swap_buf2
.buf_size
;
3484 buf_spec
->dblk_para
.buf_start
=
3485 buf_spec
->scalelut
.buf_start
+ buf_spec
->scalelut
.buf_size
;
3486 buf_spec
->dblk_data
.buf_start
=
3487 buf_spec
->dblk_para
.buf_start
+ buf_spec
->dblk_para
.buf_size
;
3488 buf_spec
->seg_map
.buf_start
=
3489 buf_spec
->dblk_data
.buf_start
+ buf_spec
->dblk_data
.buf_size
;
3490 if (pbi
== NULL
|| pbi
->mmu_enable
) {
3491 buf_spec
->mmu_vbh
.buf_start
=
3492 buf_spec
->seg_map
.buf_start
+
3493 buf_spec
->seg_map
.buf_size
;
3494 buf_spec
->mpred_above
.buf_start
=
3495 buf_spec
->mmu_vbh
.buf_start
+
3496 buf_spec
->mmu_vbh
.buf_size
;
3498 buf_spec
->mpred_above
.buf_start
=
3499 buf_spec
->seg_map
.buf_start
+ buf_spec
->seg_map
.buf_size
;
3501 #ifdef MV_USE_FIXED_BUF
3502 buf_spec
->mpred_mv
.buf_start
=
3503 buf_spec
->mpred_above
.buf_start
+
3504 buf_spec
->mpred_above
.buf_size
;
3506 buf_spec
->rpm
.buf_start
=
3507 buf_spec
->mpred_mv
.buf_start
+
3508 buf_spec
->mpred_mv
.buf_size
;
3510 buf_spec
->rpm
.buf_start
=
3511 buf_spec
->mpred_above
.buf_start
+
3512 buf_spec
->mpred_above
.buf_size
;
3515 buf_spec
->lmem
.buf_start
=
3516 buf_spec
->rpm
.buf_start
+
3517 buf_spec
->rpm
.buf_size
;
3519 buf_spec
->lmem
.buf_start
+
3520 buf_spec
->lmem
.buf_size
;
3525 if (!vdec_secure(hw_to_vdec(pbi
))) {
3527 codec_mm_phys_to_virt(buf_spec
->dblk_para
.buf_start
);
3528 if (mem_start_virt
) {
3529 memset(mem_start_virt
, 0,
3530 buf_spec
->dblk_para
.buf_size
);
3531 codec_mm_dma_flush(mem_start_virt
,
3532 buf_spec
->dblk_para
.buf_size
,
3535 mem_start_virt
= codec_mm_vmap(
3536 buf_spec
->dblk_para
.buf_start
,
3537 buf_spec
->dblk_para
.buf_size
);
3538 if (mem_start_virt
) {
3539 memset(mem_start_virt
, 0,
3540 buf_spec
->dblk_para
.buf_size
);
3541 codec_mm_dma_flush(mem_start_virt
,
3542 buf_spec
->dblk_para
.buf_size
,
3544 codec_mm_unmap_phyaddr(mem_start_virt
);
3546 /*not virt for tvp playing,
3547 may need clear on ucode.*/
3548 pr_err("mem_start_virt failed\n");
3554 pr_info("%s workspace (%x %x) size = %x\n", __func__
,
3555 buf_spec
->start_adr
, buf_spec
->end_adr
,
3556 buf_spec
->end_adr
- buf_spec
->start_adr
);
3560 pr_info("ipp.buf_start :%x\n",
3561 buf_spec
->ipp
.buf_start
);
3562 pr_info("sao_abv.buf_start :%x\n",
3563 buf_spec
->sao_abv
.buf_start
);
3564 pr_info("sao_vb.buf_start :%x\n",
3565 buf_spec
->sao_vb
.buf_start
);
3566 pr_info("short_term_rps.buf_start :%x\n",
3567 buf_spec
->short_term_rps
.buf_start
);
3568 pr_info("vps.buf_start :%x\n",
3569 buf_spec
->vps
.buf_start
);
3570 pr_info("sps.buf_start :%x\n",
3571 buf_spec
->sps
.buf_start
);
3572 pr_info("pps.buf_start :%x\n",
3573 buf_spec
->pps
.buf_start
);
3574 pr_info("sao_up.buf_start :%x\n",
3575 buf_spec
->sao_up
.buf_start
);
3576 pr_info("swap_buf.buf_start :%x\n",
3577 buf_spec
->swap_buf
.buf_start
);
3578 pr_info("swap_buf2.buf_start :%x\n",
3579 buf_spec
->swap_buf2
.buf_start
);
3580 pr_info("scalelut.buf_start :%x\n",
3581 buf_spec
->scalelut
.buf_start
);
3582 pr_info("dblk_para.buf_start :%x\n",
3583 buf_spec
->dblk_para
.buf_start
);
3584 pr_info("dblk_data.buf_start :%x\n",
3585 buf_spec
->dblk_data
.buf_start
);
3586 pr_info("seg_map.buf_start :%x\n",
3587 buf_spec
->seg_map
.buf_start
);
3588 if (pbi
->mmu_enable
) {
3589 pr_info("mmu_vbh.buf_start :%x\n",
3590 buf_spec
->mmu_vbh
.buf_start
);
3592 pr_info("mpred_above.buf_start :%x\n",
3593 buf_spec
->mpred_above
.buf_start
);
3594 #ifdef MV_USE_FIXED_BUF
3595 pr_info("mpred_mv.buf_start :%x\n",
3596 buf_spec
->mpred_mv
.buf_start
);
3598 if ((debug
& VP9_DEBUG_SEND_PARAM_WITH_REG
) == 0) {
3599 pr_info("rpm.buf_start :%x\n",
3600 buf_spec
->rpm
.buf_start
);
3606 #define THODIYIL_MCRCC_CANVAS_ALGX 4
3608 static u32 mcrcc_cache_alg_flag
= THODIYIL_MCRCC_CANVAS_ALGX
;
3610 static void mcrcc_perfcount_reset(void)
3612 if (debug
& VP9_DEBUG_CACHE
)
3613 pr_info("[cache_util.c] Entered mcrcc_perfcount_reset...\n");
3614 WRITE_VREG(HEVCD_MCRCC_PERFMON_CTL
, (unsigned int)0x1);
3615 WRITE_VREG(HEVCD_MCRCC_PERFMON_CTL
, (unsigned int)0x0);
3619 static unsigned raw_mcr_cnt_total_prev
;
3620 static unsigned hit_mcr_0_cnt_total_prev
;
3621 static unsigned hit_mcr_1_cnt_total_prev
;
3622 static unsigned byp_mcr_cnt_nchcanv_total_prev
;
3623 static unsigned byp_mcr_cnt_nchoutwin_total_prev
;
3625 static void mcrcc_get_hitrate(unsigned reset_pre
)
3627 unsigned delta_hit_mcr_0_cnt
;
3628 unsigned delta_hit_mcr_1_cnt
;
3629 unsigned delta_raw_mcr_cnt
;
3630 unsigned delta_mcr_cnt_nchcanv
;
3631 unsigned delta_mcr_cnt_nchoutwin
;
3634 unsigned raw_mcr_cnt
;
3635 unsigned hit_mcr_cnt
;
3636 unsigned byp_mcr_cnt_nchoutwin
;
3637 unsigned byp_mcr_cnt_nchcanv
;
3640 raw_mcr_cnt_total_prev
= 0;
3641 hit_mcr_0_cnt_total_prev
= 0;
3642 hit_mcr_1_cnt_total_prev
= 0;
3643 byp_mcr_cnt_nchcanv_total_prev
= 0;
3644 byp_mcr_cnt_nchoutwin_total_prev
= 0;
3646 if (debug
& VP9_DEBUG_CACHE
)
3647 pr_info("[cache_util.c] Entered mcrcc_get_hitrate...\n");
3648 WRITE_VREG(HEVCD_MCRCC_PERFMON_CTL
, (unsigned int)(0x0<<1));
3649 raw_mcr_cnt
= READ_VREG(HEVCD_MCRCC_PERFMON_DATA
);
3650 WRITE_VREG(HEVCD_MCRCC_PERFMON_CTL
, (unsigned int)(0x1<<1));
3651 hit_mcr_cnt
= READ_VREG(HEVCD_MCRCC_PERFMON_DATA
);
3652 WRITE_VREG(HEVCD_MCRCC_PERFMON_CTL
, (unsigned int)(0x2<<1));
3653 byp_mcr_cnt_nchoutwin
= READ_VREG(HEVCD_MCRCC_PERFMON_DATA
);
3654 WRITE_VREG(HEVCD_MCRCC_PERFMON_CTL
, (unsigned int)(0x3<<1));
3655 byp_mcr_cnt_nchcanv
= READ_VREG(HEVCD_MCRCC_PERFMON_DATA
);
3657 if (debug
& VP9_DEBUG_CACHE
)
3658 pr_info("raw_mcr_cnt_total: %d\n",
3660 if (debug
& VP9_DEBUG_CACHE
)
3661 pr_info("hit_mcr_cnt_total: %d\n",
3663 if (debug
& VP9_DEBUG_CACHE
)
3664 pr_info("byp_mcr_cnt_nchoutwin_total: %d\n",
3665 byp_mcr_cnt_nchoutwin
);
3666 if (debug
& VP9_DEBUG_CACHE
)
3667 pr_info("byp_mcr_cnt_nchcanv_total: %d\n",
3668 byp_mcr_cnt_nchcanv
);
3670 delta_raw_mcr_cnt
= raw_mcr_cnt
-
3671 raw_mcr_cnt_total_prev
;
3672 delta_mcr_cnt_nchcanv
= byp_mcr_cnt_nchcanv
-
3673 byp_mcr_cnt_nchcanv_total_prev
;
3674 delta_mcr_cnt_nchoutwin
= byp_mcr_cnt_nchoutwin
-
3675 byp_mcr_cnt_nchoutwin_total_prev
;
3676 raw_mcr_cnt_total_prev
= raw_mcr_cnt
;
3677 byp_mcr_cnt_nchcanv_total_prev
= byp_mcr_cnt_nchcanv
;
3678 byp_mcr_cnt_nchoutwin_total_prev
= byp_mcr_cnt_nchoutwin
;
3680 WRITE_VREG(HEVCD_MCRCC_PERFMON_CTL
, (unsigned int)(0x4<<1));
3681 tmp
= READ_VREG(HEVCD_MCRCC_PERFMON_DATA
);
3682 if (debug
& VP9_DEBUG_CACHE
)
3683 pr_info("miss_mcr_0_cnt_total: %d\n", tmp
);
3684 WRITE_VREG(HEVCD_MCRCC_PERFMON_CTL
, (unsigned int)(0x5<<1));
3685 tmp
= READ_VREG(HEVCD_MCRCC_PERFMON_DATA
);
3686 if (debug
& VP9_DEBUG_CACHE
)
3687 pr_info("miss_mcr_1_cnt_total: %d\n", tmp
);
3688 WRITE_VREG(HEVCD_MCRCC_PERFMON_CTL
, (unsigned int)(0x6<<1));
3689 tmp
= READ_VREG(HEVCD_MCRCC_PERFMON_DATA
);
3690 if (debug
& VP9_DEBUG_CACHE
)
3691 pr_info("hit_mcr_0_cnt_total: %d\n", tmp
);
3692 delta_hit_mcr_0_cnt
= tmp
- hit_mcr_0_cnt_total_prev
;
3693 hit_mcr_0_cnt_total_prev
= tmp
;
3694 WRITE_VREG(HEVCD_MCRCC_PERFMON_CTL
, (unsigned int)(0x7<<1));
3695 tmp
= READ_VREG(HEVCD_MCRCC_PERFMON_DATA
);
3696 if (debug
& VP9_DEBUG_CACHE
)
3697 pr_info("hit_mcr_1_cnt_total: %d\n", tmp
);
3698 delta_hit_mcr_1_cnt
= tmp
- hit_mcr_1_cnt_total_prev
;
3699 hit_mcr_1_cnt_total_prev
= tmp
;
3701 if (delta_raw_mcr_cnt
!= 0) {
3702 hitrate
= 100 * delta_hit_mcr_0_cnt
3703 / delta_raw_mcr_cnt
;
3704 if (debug
& VP9_DEBUG_CACHE
)
3705 pr_info("CANV0_HIT_RATE : %d\n", hitrate
);
3706 hitrate
= 100 * delta_hit_mcr_1_cnt
3707 / delta_raw_mcr_cnt
;
3708 if (debug
& VP9_DEBUG_CACHE
)
3709 pr_info("CANV1_HIT_RATE : %d\n", hitrate
);
3710 hitrate
= 100 * delta_mcr_cnt_nchcanv
3711 / delta_raw_mcr_cnt
;
3712 if (debug
& VP9_DEBUG_CACHE
)
3713 pr_info("NONCACH_CANV_BYP_RATE : %d\n", hitrate
);
3714 hitrate
= 100 * delta_mcr_cnt_nchoutwin
3715 / delta_raw_mcr_cnt
;
3716 if (debug
& VP9_DEBUG_CACHE
)
3717 pr_info("CACHE_OUTWIN_BYP_RATE : %d\n", hitrate
);
3721 if (raw_mcr_cnt
!= 0) {
3722 hitrate
= 100 * hit_mcr_cnt
/ raw_mcr_cnt
;
3723 if (debug
& VP9_DEBUG_CACHE
)
3724 pr_info("MCRCC_HIT_RATE : %d\n", hitrate
);
3725 hitrate
= 100 * (byp_mcr_cnt_nchoutwin
+ byp_mcr_cnt_nchcanv
)
3727 if (debug
& VP9_DEBUG_CACHE
)
3728 pr_info("MCRCC_BYP_RATE : %d\n", hitrate
);
3730 if (debug
& VP9_DEBUG_CACHE
)
3731 pr_info("MCRCC_HIT_RATE : na\n");
3732 if (debug
& VP9_DEBUG_CACHE
)
3733 pr_info("MCRCC_BYP_RATE : na\n");
3739 static void decomp_perfcount_reset(void)
3741 if (debug
& VP9_DEBUG_CACHE
)
3742 pr_info("[cache_util.c] Entered decomp_perfcount_reset...\n");
3743 WRITE_VREG(HEVCD_MPP_DECOMP_PERFMON_CTL
, (unsigned int)0x1);
3744 WRITE_VREG(HEVCD_MPP_DECOMP_PERFMON_CTL
, (unsigned int)0x0);
3748 static void decomp_get_hitrate(void)
3750 unsigned raw_mcr_cnt
;
3751 unsigned hit_mcr_cnt
;
3753 if (debug
& VP9_DEBUG_CACHE
)
3754 pr_info("[cache_util.c] Entered decomp_get_hitrate...\n");
3755 WRITE_VREG(HEVCD_MPP_DECOMP_PERFMON_CTL
, (unsigned int)(0x0<<1));
3756 raw_mcr_cnt
= READ_VREG(HEVCD_MPP_DECOMP_PERFMON_DATA
);
3757 WRITE_VREG(HEVCD_MPP_DECOMP_PERFMON_CTL
, (unsigned int)(0x1<<1));
3758 hit_mcr_cnt
= READ_VREG(HEVCD_MPP_DECOMP_PERFMON_DATA
);
3760 if (debug
& VP9_DEBUG_CACHE
)
3761 pr_info("hcache_raw_cnt_total: %d\n", raw_mcr_cnt
);
3762 if (debug
& VP9_DEBUG_CACHE
)
3763 pr_info("hcache_hit_cnt_total: %d\n", hit_mcr_cnt
);
3765 if (raw_mcr_cnt
!= 0) {
3766 hitrate
= hit_mcr_cnt
* 100 / raw_mcr_cnt
;
3767 if (debug
& VP9_DEBUG_CACHE
)
3768 pr_info("DECOMP_HCACHE_HIT_RATE : %d\n", hitrate
);
3770 if (debug
& VP9_DEBUG_CACHE
)
3771 pr_info("DECOMP_HCACHE_HIT_RATE : na\n");
3773 WRITE_VREG(HEVCD_MPP_DECOMP_PERFMON_CTL
, (unsigned int)(0x2<<1));
3774 raw_mcr_cnt
= READ_VREG(HEVCD_MPP_DECOMP_PERFMON_DATA
);
3775 WRITE_VREG(HEVCD_MPP_DECOMP_PERFMON_CTL
, (unsigned int)(0x3<<1));
3776 hit_mcr_cnt
= READ_VREG(HEVCD_MPP_DECOMP_PERFMON_DATA
);
3778 if (debug
& VP9_DEBUG_CACHE
)
3779 pr_info("dcache_raw_cnt_total: %d\n", raw_mcr_cnt
);
3780 if (debug
& VP9_DEBUG_CACHE
)
3781 pr_info("dcache_hit_cnt_total: %d\n", hit_mcr_cnt
);
3783 if (raw_mcr_cnt
!= 0) {
3784 hitrate
= hit_mcr_cnt
* 100 / raw_mcr_cnt
;
3785 if (debug
& VP9_DEBUG_CACHE
)
3786 pr_info("DECOMP_DCACHE_HIT_RATE : %d\n", hitrate
);
3788 if (debug
& VP9_DEBUG_CACHE
)
3789 pr_info("DECOMP_DCACHE_HIT_RATE : na\n");
3794 static void decomp_get_comprate(void)
3796 unsigned raw_ucomp_cnt
;
3797 unsigned fast_comp_cnt
;
3798 unsigned slow_comp_cnt
;
3801 if (debug
& VP9_DEBUG_CACHE
)
3802 pr_info("[cache_util.c] Entered decomp_get_comprate...\n");
3803 WRITE_VREG(HEVCD_MPP_DECOMP_PERFMON_CTL
, (unsigned int)(0x4<<1));
3804 fast_comp_cnt
= READ_VREG(HEVCD_MPP_DECOMP_PERFMON_DATA
);
3805 WRITE_VREG(HEVCD_MPP_DECOMP_PERFMON_CTL
, (unsigned int)(0x5<<1));
3806 slow_comp_cnt
= READ_VREG(HEVCD_MPP_DECOMP_PERFMON_DATA
);
3807 WRITE_VREG(HEVCD_MPP_DECOMP_PERFMON_CTL
, (unsigned int)(0x6<<1));
3808 raw_ucomp_cnt
= READ_VREG(HEVCD_MPP_DECOMP_PERFMON_DATA
);
3810 if (debug
& VP9_DEBUG_CACHE
)
3811 pr_info("decomp_fast_comp_total: %d\n", fast_comp_cnt
);
3812 if (debug
& VP9_DEBUG_CACHE
)
3813 pr_info("decomp_slow_comp_total: %d\n", slow_comp_cnt
);
3814 if (debug
& VP9_DEBUG_CACHE
)
3815 pr_info("decomp_raw_uncomp_total: %d\n", raw_ucomp_cnt
);
3817 if (raw_ucomp_cnt
!= 0) {
3818 comprate
= (fast_comp_cnt
+ slow_comp_cnt
)
3819 * 100 / raw_ucomp_cnt
;
3820 if (debug
& VP9_DEBUG_CACHE
)
3821 pr_info("DECOMP_COMP_RATIO : %d\n", comprate
);
3823 if (debug
& VP9_DEBUG_CACHE
)
3824 pr_info("DECOMP_COMP_RATIO : na\n");
3828 /* cache_util.c end */
3830 /*====================================================
3831 *========================================================================
3833 *========================================================================
3835 #define VP9_PARTITION_START 0
3836 #define VP9_PARTITION_SIZE_STEP (3 * 4)
3837 #define VP9_PARTITION_ONE_SIZE (4 * VP9_PARTITION_SIZE_STEP)
3838 #define VP9_PARTITION_KEY_START 0
3839 #define VP9_PARTITION_P_START VP9_PARTITION_ONE_SIZE
3840 #define VP9_PARTITION_SIZE (2 * VP9_PARTITION_ONE_SIZE)
3841 #define VP9_SKIP_START (VP9_PARTITION_START + VP9_PARTITION_SIZE)
3842 #define VP9_SKIP_SIZE 4 /* only use 3*/
3843 #define VP9_TX_MODE_START (VP9_SKIP_START+VP9_SKIP_SIZE)
3844 #define VP9_TX_MODE_8_0_OFFSET 0
3845 #define VP9_TX_MODE_8_1_OFFSET 1
3846 #define VP9_TX_MODE_16_0_OFFSET 2
3847 #define VP9_TX_MODE_16_1_OFFSET 4
3848 #define VP9_TX_MODE_32_0_OFFSET 6
3849 #define VP9_TX_MODE_32_1_OFFSET 9
3850 #define VP9_TX_MODE_SIZE 12
3851 #define VP9_COEF_START (VP9_TX_MODE_START+VP9_TX_MODE_SIZE)
3852 #define VP9_COEF_BAND_0_OFFSET 0
3853 #define VP9_COEF_BAND_1_OFFSET (VP9_COEF_BAND_0_OFFSET + 3 * 3 + 1)
3854 #define VP9_COEF_BAND_2_OFFSET (VP9_COEF_BAND_1_OFFSET + 6 * 3)
3855 #define VP9_COEF_BAND_3_OFFSET (VP9_COEF_BAND_2_OFFSET + 6 * 3)
3856 #define VP9_COEF_BAND_4_OFFSET (VP9_COEF_BAND_3_OFFSET + 6 * 3)
3857 #define VP9_COEF_BAND_5_OFFSET (VP9_COEF_BAND_4_OFFSET + 6 * 3)
3858 #define VP9_COEF_SIZE_ONE_SET 100 /* ((3 +5*6)*3 + 1 padding)*/
3859 #define VP9_COEF_4X4_START (VP9_COEF_START + 0 * VP9_COEF_SIZE_ONE_SET)
3860 #define VP9_COEF_8X8_START (VP9_COEF_START + 4 * VP9_COEF_SIZE_ONE_SET)
3861 #define VP9_COEF_16X16_START (VP9_COEF_START + 8 * VP9_COEF_SIZE_ONE_SET)
3862 #define VP9_COEF_32X32_START (VP9_COEF_START + 12 * VP9_COEF_SIZE_ONE_SET)
3863 #define VP9_COEF_SIZE_PLANE (2 * VP9_COEF_SIZE_ONE_SET)
3864 #define VP9_COEF_SIZE (4 * 2 * 2 * VP9_COEF_SIZE_ONE_SET)
3865 #define VP9_INTER_MODE_START (VP9_COEF_START+VP9_COEF_SIZE)
3866 #define VP9_INTER_MODE_SIZE 24 /* only use 21 ( #*7)*/
3867 #define VP9_INTERP_START (VP9_INTER_MODE_START+VP9_INTER_MODE_SIZE)
3868 #define VP9_INTERP_SIZE 8
3869 #define VP9_INTRA_INTER_START (VP9_INTERP_START+VP9_INTERP_SIZE)
3870 #define VP9_INTRA_INTER_SIZE 4
3871 #define VP9_INTERP_INTRA_INTER_START VP9_INTERP_START
3872 #define VP9_INTERP_INTRA_INTER_SIZE (VP9_INTERP_SIZE + VP9_INTRA_INTER_SIZE)
3873 #define VP9_COMP_INTER_START \
3874 (VP9_INTERP_INTRA_INTER_START+VP9_INTERP_INTRA_INTER_SIZE)
3875 #define VP9_COMP_INTER_SIZE 5
3876 #define VP9_COMP_REF_START (VP9_COMP_INTER_START+VP9_COMP_INTER_SIZE)
3877 #define VP9_COMP_REF_SIZE 5
3878 #define VP9_SINGLE_REF_START (VP9_COMP_REF_START+VP9_COMP_REF_SIZE)
3879 #define VP9_SINGLE_REF_SIZE 10
3880 #define VP9_REF_MODE_START VP9_COMP_INTER_START
3881 #define VP9_REF_MODE_SIZE \
3882 (VP9_COMP_INTER_SIZE+VP9_COMP_REF_SIZE+VP9_SINGLE_REF_SIZE)
3883 #define VP9_IF_Y_MODE_START (VP9_REF_MODE_START+VP9_REF_MODE_SIZE)
3884 #define VP9_IF_Y_MODE_SIZE 36
3885 #define VP9_IF_UV_MODE_START (VP9_IF_Y_MODE_START+VP9_IF_Y_MODE_SIZE)
3886 #define VP9_IF_UV_MODE_SIZE 92 /* only use 90*/
3887 #define VP9_MV_JOINTS_START (VP9_IF_UV_MODE_START+VP9_IF_UV_MODE_SIZE)
3888 #define VP9_MV_JOINTS_SIZE 3
3889 #define VP9_MV_SIGN_0_START (VP9_MV_JOINTS_START+VP9_MV_JOINTS_SIZE)
3890 #define VP9_MV_SIGN_0_SIZE 1
3891 #define VP9_MV_CLASSES_0_START (VP9_MV_SIGN_0_START+VP9_MV_SIGN_0_SIZE)
3892 #define VP9_MV_CLASSES_0_SIZE 10
3893 #define VP9_MV_CLASS0_0_START (VP9_MV_CLASSES_0_START+VP9_MV_CLASSES_0_SIZE)
3894 #define VP9_MV_CLASS0_0_SIZE 1
3895 #define VP9_MV_BITS_0_START (VP9_MV_CLASS0_0_START+VP9_MV_CLASS0_0_SIZE)
3896 #define VP9_MV_BITS_0_SIZE 10
3897 #define VP9_MV_SIGN_1_START (VP9_MV_BITS_0_START+VP9_MV_BITS_0_SIZE)
3898 #define VP9_MV_SIGN_1_SIZE 1
3899 #define VP9_MV_CLASSES_1_START \
3900 (VP9_MV_SIGN_1_START+VP9_MV_SIGN_1_SIZE)
3901 #define VP9_MV_CLASSES_1_SIZE 10
3902 #define VP9_MV_CLASS0_1_START \
3903 (VP9_MV_CLASSES_1_START+VP9_MV_CLASSES_1_SIZE)
3904 #define VP9_MV_CLASS0_1_SIZE 1
3905 #define VP9_MV_BITS_1_START \
3906 (VP9_MV_CLASS0_1_START+VP9_MV_CLASS0_1_SIZE)
3907 #define VP9_MV_BITS_1_SIZE 10
3908 #define VP9_MV_CLASS0_FP_0_START \
3909 (VP9_MV_BITS_1_START+VP9_MV_BITS_1_SIZE)
3910 #define VP9_MV_CLASS0_FP_0_SIZE 9
3911 #define VP9_MV_CLASS0_FP_1_START \
3912 (VP9_MV_CLASS0_FP_0_START+VP9_MV_CLASS0_FP_0_SIZE)
3913 #define VP9_MV_CLASS0_FP_1_SIZE 9
3914 #define VP9_MV_CLASS0_HP_0_START \
3915 (VP9_MV_CLASS0_FP_1_START+VP9_MV_CLASS0_FP_1_SIZE)
3916 #define VP9_MV_CLASS0_HP_0_SIZE 2
3917 #define VP9_MV_CLASS0_HP_1_START \
3918 (VP9_MV_CLASS0_HP_0_START+VP9_MV_CLASS0_HP_0_SIZE)
3919 #define VP9_MV_CLASS0_HP_1_SIZE 2
3920 #define VP9_MV_START VP9_MV_JOINTS_START
3921 #define VP9_MV_SIZE 72 /*only use 69*/
3923 #define VP9_TOTAL_SIZE (VP9_MV_START + VP9_MV_SIZE)
/*========================================================================
 * vp9_count_mem define
 *
 * Layout of the symbol-count buffer (per-branch hit counters read back
 * from the decoder hardware).  Offsets start at 0; each region begins
 * where the previous one ends.
 *========================================================================
 */
#define VP9_COEF_COUNT_START 0
/*
 * Per-band offsets inside one coefficient count set: band 0 has 3
 * contexts, bands 1..5 have 6 contexts, with 5 counters per context.
 */
#define VP9_COEF_COUNT_BAND_0_OFFSET 0
#define VP9_COEF_COUNT_BAND_1_OFFSET \
	(VP9_COEF_COUNT_BAND_0_OFFSET + 3 * 5)
#define VP9_COEF_COUNT_BAND_2_OFFSET \
	(VP9_COEF_COUNT_BAND_1_OFFSET + 6 * 5)
#define VP9_COEF_COUNT_BAND_3_OFFSET \
	(VP9_COEF_COUNT_BAND_2_OFFSET + 6 * 5)
#define VP9_COEF_COUNT_BAND_4_OFFSET \
	(VP9_COEF_COUNT_BAND_3_OFFSET + 6 * 5)
#define VP9_COEF_COUNT_BAND_5_OFFSET \
	(VP9_COEF_COUNT_BAND_4_OFFSET + 6 * 5)
#define VP9_COEF_COUNT_SIZE_ONE_SET 165 /* (3 + 5 * 6) * 5 */
#define VP9_COEF_COUNT_4X4_START \
	(VP9_COEF_COUNT_START + 0 * VP9_COEF_COUNT_SIZE_ONE_SET)
#define VP9_COEF_COUNT_8X8_START \
	(VP9_COEF_COUNT_START + 4 * VP9_COEF_COUNT_SIZE_ONE_SET)
#define VP9_COEF_COUNT_16X16_START \
	(VP9_COEF_COUNT_START + 8 * VP9_COEF_COUNT_SIZE_ONE_SET)
#define VP9_COEF_COUNT_32X32_START \
	(VP9_COEF_COUNT_START + 12 * VP9_COEF_COUNT_SIZE_ONE_SET)
#define VP9_COEF_COUNT_SIZE_PLANE (2 * VP9_COEF_COUNT_SIZE_ONE_SET)
#define VP9_COEF_COUNT_SIZE (4 * 2 * 2 * VP9_COEF_COUNT_SIZE_ONE_SET)

/* binary (two-counter) regions follow the coefficient counts */
#define VP9_INTRA_INTER_COUNT_START \
	(VP9_COEF_COUNT_START + VP9_COEF_COUNT_SIZE)
#define VP9_INTRA_INTER_COUNT_SIZE (4 * 2)
#define VP9_COMP_INTER_COUNT_START \
	(VP9_INTRA_INTER_COUNT_START + VP9_INTRA_INTER_COUNT_SIZE)
#define VP9_COMP_INTER_COUNT_SIZE (5 * 2)
#define VP9_COMP_REF_COUNT_START \
	(VP9_COMP_INTER_COUNT_START + VP9_COMP_INTER_COUNT_SIZE)
#define VP9_COMP_REF_COUNT_SIZE (5 * 2)
#define VP9_SINGLE_REF_COUNT_START \
	(VP9_COMP_REF_COUNT_START + VP9_COMP_REF_COUNT_SIZE)
#define VP9_SINGLE_REF_COUNT_SIZE (10 * 2)
#define VP9_TX_MODE_COUNT_START \
	(VP9_SINGLE_REF_COUNT_START + VP9_SINGLE_REF_COUNT_SIZE)
#define VP9_TX_MODE_COUNT_SIZE (12 * 2)
#define VP9_SKIP_COUNT_START \
	(VP9_TX_MODE_COUNT_START + VP9_TX_MODE_COUNT_SIZE)
#define VP9_SKIP_COUNT_SIZE (3 * 2)
#define VP9_MV_SIGN_0_COUNT_START \
	(VP9_SKIP_COUNT_START + VP9_SKIP_COUNT_SIZE)
#define VP9_MV_SIGN_0_COUNT_SIZE (1 * 2)
#define VP9_MV_SIGN_1_COUNT_START \
	(VP9_MV_SIGN_0_COUNT_START + VP9_MV_SIGN_0_COUNT_SIZE)
#define VP9_MV_SIGN_1_COUNT_SIZE (1 * 2)
#define VP9_MV_BITS_0_COUNT_START \
	(VP9_MV_SIGN_1_COUNT_START + VP9_MV_SIGN_1_COUNT_SIZE)
#define VP9_MV_BITS_0_COUNT_SIZE (10 * 2)
#define VP9_MV_BITS_1_COUNT_START \
	(VP9_MV_BITS_0_COUNT_START + VP9_MV_BITS_0_COUNT_SIZE)
#define VP9_MV_BITS_1_COUNT_SIZE (10 * 2)
#define VP9_MV_CLASS0_HP_0_COUNT_START \
	(VP9_MV_BITS_1_COUNT_START + VP9_MV_BITS_1_COUNT_SIZE)
#define VP9_MV_CLASS0_HP_0_COUNT_SIZE (2 * 2)
#define VP9_MV_CLASS0_HP_1_COUNT_START \
	(VP9_MV_CLASS0_HP_0_COUNT_START + VP9_MV_CLASS0_HP_0_COUNT_SIZE)
#define VP9_MV_CLASS0_HP_1_COUNT_SIZE (2 * 2)
/* Start merge_tree: multi-symbol (tree) count regions */
#define VP9_INTER_MODE_COUNT_START \
	(VP9_MV_CLASS0_HP_1_COUNT_START + VP9_MV_CLASS0_HP_1_COUNT_SIZE)
#define VP9_INTER_MODE_COUNT_SIZE (7 * 4)
#define VP9_IF_Y_MODE_COUNT_START \
	(VP9_INTER_MODE_COUNT_START + VP9_INTER_MODE_COUNT_SIZE)
#define VP9_IF_Y_MODE_COUNT_SIZE (10 * 4)
#define VP9_IF_UV_MODE_COUNT_START \
	(VP9_IF_Y_MODE_COUNT_START + VP9_IF_Y_MODE_COUNT_SIZE)
#define VP9_IF_UV_MODE_COUNT_SIZE (10 * 10)
#define VP9_PARTITION_P_COUNT_START \
	(VP9_IF_UV_MODE_COUNT_START + VP9_IF_UV_MODE_COUNT_SIZE)
#define VP9_PARTITION_P_COUNT_SIZE (4 * 4 * 4)
#define VP9_INTERP_COUNT_START \
	(VP9_PARTITION_P_COUNT_START + VP9_PARTITION_P_COUNT_SIZE)
#define VP9_INTERP_COUNT_SIZE (4 * 3)
#define VP9_MV_JOINTS_COUNT_START \
	(VP9_INTERP_COUNT_START + VP9_INTERP_COUNT_SIZE)
#define VP9_MV_JOINTS_COUNT_SIZE (1 * 4)
#define VP9_MV_CLASSES_0_COUNT_START \
	(VP9_MV_JOINTS_COUNT_START + VP9_MV_JOINTS_COUNT_SIZE)
#define VP9_MV_CLASSES_0_COUNT_SIZE (1 * 11)
#define VP9_MV_CLASS0_0_COUNT_START \
	(VP9_MV_CLASSES_0_COUNT_START + VP9_MV_CLASSES_0_COUNT_SIZE)
#define VP9_MV_CLASS0_0_COUNT_SIZE (1 * 2)
#define VP9_MV_CLASSES_1_COUNT_START \
	(VP9_MV_CLASS0_0_COUNT_START + VP9_MV_CLASS0_0_COUNT_SIZE)
#define VP9_MV_CLASSES_1_COUNT_SIZE (1 * 11)
#define VP9_MV_CLASS0_1_COUNT_START \
	(VP9_MV_CLASSES_1_COUNT_START + VP9_MV_CLASSES_1_COUNT_SIZE)
#define VP9_MV_CLASS0_1_COUNT_SIZE (1 * 2)
#define VP9_MV_CLASS0_FP_0_COUNT_START \
	(VP9_MV_CLASS0_1_COUNT_START + VP9_MV_CLASS0_1_COUNT_SIZE)
#define VP9_MV_CLASS0_FP_0_COUNT_SIZE (3 * 4)
#define VP9_MV_CLASS0_FP_1_COUNT_START \
	(VP9_MV_CLASS0_FP_0_COUNT_START + VP9_MV_CLASS0_FP_0_COUNT_SIZE)
#define VP9_MV_CLASS0_FP_1_COUNT_SIZE (3 * 4)
/* VP9 intra prediction modes, in bitstream (spec) order */
#define DC_PRED		0 /* average of above and left pixels */
#define V_PRED		1 /* vertical */
#define H_PRED		2 /* horizontal */
#define D45_PRED	3 /* directional 45 deg = round(arctan(1/1) * 180/pi) */
#define D135_PRED	4 /* directional 135 deg = 180 - 45 */
#define D117_PRED	5 /* directional 117 deg = 180 - 63 */
#define D153_PRED	6 /* directional 153 deg = 180 - 27 */
#define D207_PRED	7 /* directional 207 deg = 180 + 27 */
#define D63_PRED	8 /* directional 63 deg = round(arctan(2/1) * 180/pi) */
#define TM_PRED		9 /* true-motion */
/* Clamp a probability value to the valid VP9 range [1, 255]. */
int clip_prob(int p)
{
	if (p > 255)
		return 255;
	if (p < 1)
		return 1;
	return p;
}
/* Round (value / 2^n) to the nearest integer, halves rounding up. */
#define ROUND_POWER_OF_TWO(value, n) \
	(((value) + (1 << ((n) - 1))) >> (n))

/* Branch counts saturate at this value during probability adaptation. */
#define MODE_MV_COUNT_SAT 20
/*
 * Blend weight (out of 256) given to the newly measured probability,
 * indexed by the saturated branch count: more observations -> heavier
 * weight on the measured value, up to 128 (i.e. a 50/50 blend).
 */
static const int count_to_update_factor[MODE_MV_COUNT_SAT + 1] = {
	0, 6, 12, 19, 25, 32, 38, 44, 51, 57, 64,
	70, 76, 83, 89, 96, 102, 108, 115, 121, 128
};
4054 void vp9_tree_merge_probs(unsigned int *prev_prob
, unsigned int *cur_prob
,
4055 int coef_node_start
, int tree_left
, int tree_right
, int tree_i
,
4058 int prob_32
, prob_res
, prob_shift
;
4059 int pre_prob
, new_prob
;
4060 int den
, m_count
, get_prob
, factor
;
4062 prob_32
= prev_prob
[coef_node_start
/ 4 * 2];
4063 prob_res
= coef_node_start
& 3;
4064 prob_shift
= prob_res
* 8;
4065 pre_prob
= (prob_32
>> prob_shift
) & 0xff;
4067 den
= tree_left
+ tree_right
;
4070 new_prob
= pre_prob
;
4072 m_count
= (den
< MODE_MV_COUNT_SAT
) ?
4073 den
: MODE_MV_COUNT_SAT
;
4074 get_prob
= clip_prob(
4075 div_r32(((int64_t)tree_left
* 256 + (den
>> 1)),
4078 factor
= count_to_update_factor
[m_count
];
4079 new_prob
= ROUND_POWER_OF_TWO(pre_prob
* (256 - factor
)
4080 + get_prob
* factor
, 8);
4082 cur_prob
[coef_node_start
/ 4 * 2] = (cur_prob
[coef_node_start
/ 4 * 2]
4083 & (~(0xff << prob_shift
))) | (new_prob
<< prob_shift
);
4085 /*pr_info(" - [%d][%d] 0x%02X --> 0x%02X (0x%X 0x%X) (%X)\n",
4086 *tree_i, node, pre_prob, new_prob, tree_left, tree_right,
4087 *cur_prob[coef_node_start/4*2]);
4092 /*void adapt_coef_probs(void)*/
4093 void adapt_coef_probs(int pic_count
, int prev_kf
, int cur_kf
, int pre_fc
,
4094 unsigned int *prev_prob
, unsigned int *cur_prob
, unsigned int *count
)
4096 /* 80 * 64bits = 0xF00 ( use 0x1000 4K bytes)
4097 *unsigned int prev_prob[496*2];
4098 *unsigned int cur_prob[496*2];
4099 *0x300 * 128bits = 0x3000 (32K Bytes)
4100 *unsigned int count[0x300*4];
4103 int tx_size
, coef_tx_size_start
, coef_count_tx_size_start
;
4104 int plane
, coef_plane_start
, coef_count_plane_start
;
4105 int type
, coef_type_start
, coef_count_type_start
;
4106 int band
, coef_band_start
, coef_count_band_start
;
4108 int cxt
, coef_cxt_start
, coef_count_cxt_start
;
4109 int node
, coef_node_start
, coef_count_node_start
;
4111 int tree_i
, tree_left
, tree_right
;
4115 /*int update_factor = 112;*/ /*If COEF_MAX_UPDATE_FACTOR_AFTER_KEY,
4118 /* If COEF_MAX_UPDATE_FACTOR_AFTER_KEY, use 128*/
4119 /*int update_factor = (pic_count == 1) ? 128 : 112;*/
4120 int update_factor
= cur_kf
? 112 :
4121 prev_kf
? 128 : 112;
4135 if (debug
& VP9_DEBUG_MERGE
)
4137 ("\n ##adapt_coef_probs (pre_fc : %d ,prev_kf : %d,cur_kf : %d)##\n\n",
4138 pre_fc
, prev_kf
, cur_kf
);
4140 /*adapt_coef_probs*/
4141 for (tx_size
= 0; tx_size
< 4; tx_size
++) {
4142 coef_tx_size_start
= VP9_COEF_START
4143 + tx_size
* 4 * VP9_COEF_SIZE_ONE_SET
;
4144 coef_count_tx_size_start
= VP9_COEF_COUNT_START
4145 + tx_size
* 4 * VP9_COEF_COUNT_SIZE_ONE_SET
;
4146 coef_plane_start
= coef_tx_size_start
;
4147 coef_count_plane_start
= coef_count_tx_size_start
;
4148 for (plane
= 0; plane
< 2; plane
++) {
4149 coef_type_start
= coef_plane_start
;
4150 coef_count_type_start
= coef_count_plane_start
;
4151 for (type
= 0; type
< 2; type
++) {
4152 coef_band_start
= coef_type_start
;
4153 coef_count_band_start
= coef_count_type_start
;
4154 for (band
= 0; band
< 6; band
++) {
4159 coef_cxt_start
= coef_band_start
;
4160 coef_count_cxt_start
=
4161 coef_count_band_start
;
4162 for (cxt
= 0; cxt
< cxt_num
; cxt
++) {
4164 count
[coef_count_cxt_start
];
4166 count
[coef_count_cxt_start
+ 1];
4168 count
[coef_count_cxt_start
+ 2];
4170 count
[coef_count_cxt_start
+ 3];
4172 count
[coef_count_cxt_start
+ 4];
4182 (node
= 0; node
< 3; node
++) {
4188 coef_node_start
& 3;
4192 (prob_32
>> prob_shift
)
4199 branch_ct
[node
][0] +
4214 update_factor
* m_count
4220 get_prob
* factor
, 8);
4222 cur_prob
[coef_node_start
4226 / 4 * 2] & (~(0xff <<
4231 coef_node_start
+= 1;
4236 coef_count_cxt_start
=
4237 coef_count_cxt_start
4241 coef_band_start
+= 10;
4242 coef_count_band_start
+= 15;
4244 coef_band_start
+= 18;
4245 coef_count_band_start
+= 30;
4248 coef_type_start
+= VP9_COEF_SIZE_ONE_SET
;
4249 coef_count_type_start
+=
4250 VP9_COEF_COUNT_SIZE_ONE_SET
;
4252 coef_plane_start
+= 2 * VP9_COEF_SIZE_ONE_SET
;
4253 coef_count_plane_start
+=
4254 2 * VP9_COEF_COUNT_SIZE_ONE_SET
;
4259 /*mode_mv_merge_probs - merge_intra_inter_prob*/
4260 for (coef_count_node_start
= VP9_INTRA_INTER_COUNT_START
;
4261 coef_count_node_start
< (VP9_MV_CLASS0_HP_1_COUNT_START
+
4262 VP9_MV_CLASS0_HP_1_COUNT_SIZE
); coef_count_node_start
+= 2) {
4264 if (coef_count_node_start
==
4265 VP9_INTRA_INTER_COUNT_START
) {
4266 if (debug
& VP9_DEBUG_MERGE
)
4267 pr_info(" # merge_intra_inter_prob\n");
4268 coef_node_start
= VP9_INTRA_INTER_START
;
4269 } else if (coef_count_node_start
==
4270 VP9_COMP_INTER_COUNT_START
) {
4271 if (debug
& VP9_DEBUG_MERGE
)
4272 pr_info(" # merge_comp_inter_prob\n");
4273 coef_node_start
= VP9_COMP_INTER_START
;
4276 *else if (coef_count_node_start ==
4277 * VP9_COMP_REF_COUNT_START) {
4278 * pr_info(" # merge_comp_inter_prob\n");
4279 * coef_node_start = VP9_COMP_REF_START;
4281 *else if (coef_count_node_start ==
4282 * VP9_SINGLE_REF_COUNT_START) {
4283 * pr_info(" # merge_comp_inter_prob\n");
4284 * coef_node_start = VP9_SINGLE_REF_START;
4287 else if (coef_count_node_start
==
4288 VP9_TX_MODE_COUNT_START
) {
4289 if (debug
& VP9_DEBUG_MERGE
)
4290 pr_info(" # merge_tx_mode_probs\n");
4291 coef_node_start
= VP9_TX_MODE_START
;
4292 } else if (coef_count_node_start
==
4293 VP9_SKIP_COUNT_START
) {
4294 if (debug
& VP9_DEBUG_MERGE
)
4295 pr_info(" # merge_skip_probs\n");
4296 coef_node_start
= VP9_SKIP_START
;
4297 } else if (coef_count_node_start
==
4298 VP9_MV_SIGN_0_COUNT_START
) {
4299 if (debug
& VP9_DEBUG_MERGE
)
4300 pr_info(" # merge_sign_0\n");
4301 coef_node_start
= VP9_MV_SIGN_0_START
;
4302 } else if (coef_count_node_start
==
4303 VP9_MV_SIGN_1_COUNT_START
) {
4304 if (debug
& VP9_DEBUG_MERGE
)
4305 pr_info(" # merge_sign_1\n");
4306 coef_node_start
= VP9_MV_SIGN_1_START
;
4307 } else if (coef_count_node_start
==
4308 VP9_MV_BITS_0_COUNT_START
) {
4309 if (debug
& VP9_DEBUG_MERGE
)
4310 pr_info(" # merge_bits_0\n");
4311 coef_node_start
= VP9_MV_BITS_0_START
;
4312 } else if (coef_count_node_start
==
4313 VP9_MV_BITS_1_COUNT_START
) {
4314 if (debug
& VP9_DEBUG_MERGE
)
4315 pr_info(" # merge_bits_1\n");
4316 coef_node_start
= VP9_MV_BITS_1_START
;
4317 } else if (coef_count_node_start
==
4318 VP9_MV_CLASS0_HP_0_COUNT_START
) {
4319 if (debug
& VP9_DEBUG_MERGE
)
4320 pr_info(" # merge_class0_hp\n");
4321 coef_node_start
= VP9_MV_CLASS0_HP_0_START
;
4325 den
= count
[coef_count_node_start
] +
4326 count
[coef_count_node_start
+ 1];
4328 prob_32
= prev_prob
[coef_node_start
/ 4 * 2];
4329 prob_res
= coef_node_start
& 3;
4330 prob_shift
= prob_res
* 8;
4331 pre_prob
= (prob_32
>> prob_shift
) & 0xff;
4334 new_prob
= pre_prob
;
4336 m_count
= (den
< MODE_MV_COUNT_SAT
) ?
4337 den
: MODE_MV_COUNT_SAT
;
4340 div_r32(((int64_t)count
[coef_count_node_start
]
4341 * 256 + (den
>> 1)),
4344 factor
= count_to_update_factor
[m_count
];
4346 ROUND_POWER_OF_TWO(pre_prob
* (256 - factor
)
4347 + get_prob
* factor
, 8);
4349 cur_prob
[coef_node_start
/ 4 * 2] =
4350 (cur_prob
[coef_node_start
/ 4 * 2] &
4351 (~(0xff << prob_shift
)))
4352 | (new_prob
<< prob_shift
);
4354 coef_node_start
= coef_node_start
+ 1;
4356 if (debug
& VP9_DEBUG_MERGE
)
4357 pr_info(" # merge_vp9_inter_mode_tree\n");
4358 coef_node_start
= VP9_INTER_MODE_START
;
4359 coef_count_node_start
= VP9_INTER_MODE_COUNT_START
;
4360 for (tree_i
= 0; tree_i
< 7; tree_i
++) {
4361 for (node
= 0; node
< 3; node
++) {
4365 count
[coef_count_node_start
+ 1];
4367 count
[coef_count_node_start
+ 3];
4371 count
[coef_count_node_start
+ 0];
4373 count
[coef_count_node_start
+ 1]
4374 + count
[coef_count_node_start
+ 3];
4378 count
[coef_count_node_start
+ 2];
4380 count
[coef_count_node_start
+ 0]
4381 + count
[coef_count_node_start
+ 1]
4382 + count
[coef_count_node_start
+ 3];
4387 vp9_tree_merge_probs(prev_prob
, cur_prob
,
4388 coef_node_start
, tree_left
, tree_right
,
4391 coef_node_start
= coef_node_start
+ 1;
4393 coef_count_node_start
= coef_count_node_start
+ 4;
4395 if (debug
& VP9_DEBUG_MERGE
)
4396 pr_info(" # merge_vp9_intra_mode_tree\n");
4397 coef_node_start
= VP9_IF_Y_MODE_START
;
4398 coef_count_node_start
= VP9_IF_Y_MODE_COUNT_START
;
4399 for (tree_i
= 0; tree_i
< 14; tree_i
++) {
4400 for (node
= 0; node
< 9; node
++) {
4404 count
[coef_count_node_start
+D153_PRED
];
4406 count
[coef_count_node_start
+D207_PRED
];
4410 count
[coef_count_node_start
+D63_PRED
];
4412 count
[coef_count_node_start
+D207_PRED
] +
4413 count
[coef_count_node_start
+D153_PRED
];
4417 count
[coef_count_node_start
+ D45_PRED
];
4419 count
[coef_count_node_start
+D207_PRED
] +
4420 count
[coef_count_node_start
+D153_PRED
] +
4421 count
[coef_count_node_start
+D63_PRED
];
4425 count
[coef_count_node_start
+D135_PRED
];
4427 count
[coef_count_node_start
+D117_PRED
];
4431 count
[coef_count_node_start
+H_PRED
];
4433 count
[coef_count_node_start
+D117_PRED
] +
4434 count
[coef_count_node_start
+D135_PRED
];
4438 count
[coef_count_node_start
+H_PRED
] +
4439 count
[coef_count_node_start
+D117_PRED
] +
4440 count
[coef_count_node_start
+D135_PRED
];
4442 count
[coef_count_node_start
+D45_PRED
] +
4443 count
[coef_count_node_start
+D207_PRED
] +
4444 count
[coef_count_node_start
+D153_PRED
] +
4445 count
[coef_count_node_start
+D63_PRED
];
4449 count
[coef_count_node_start
+V_PRED
];
4451 count
[coef_count_node_start
+H_PRED
] +
4452 count
[coef_count_node_start
+D117_PRED
] +
4453 count
[coef_count_node_start
+D135_PRED
] +
4454 count
[coef_count_node_start
+D45_PRED
] +
4455 count
[coef_count_node_start
+D207_PRED
] +
4456 count
[coef_count_node_start
+D153_PRED
] +
4457 count
[coef_count_node_start
+D63_PRED
];
4461 count
[coef_count_node_start
+TM_PRED
];
4463 count
[coef_count_node_start
+V_PRED
] +
4464 count
[coef_count_node_start
+H_PRED
] +
4465 count
[coef_count_node_start
+D117_PRED
] +
4466 count
[coef_count_node_start
+D135_PRED
] +
4467 count
[coef_count_node_start
+D45_PRED
] +
4468 count
[coef_count_node_start
+D207_PRED
] +
4469 count
[coef_count_node_start
+D153_PRED
] +
4470 count
[coef_count_node_start
+D63_PRED
];
4474 count
[coef_count_node_start
+DC_PRED
];
4476 count
[coef_count_node_start
+TM_PRED
] +
4477 count
[coef_count_node_start
+V_PRED
] +
4478 count
[coef_count_node_start
+H_PRED
] +
4479 count
[coef_count_node_start
+D117_PRED
] +
4480 count
[coef_count_node_start
+D135_PRED
] +
4481 count
[coef_count_node_start
+D45_PRED
] +
4482 count
[coef_count_node_start
+D207_PRED
] +
4483 count
[coef_count_node_start
+D153_PRED
] +
4484 count
[coef_count_node_start
+D63_PRED
];
4489 vp9_tree_merge_probs(prev_prob
, cur_prob
,
4490 coef_node_start
, tree_left
, tree_right
,
4493 coef_node_start
= coef_node_start
+ 1;
4495 coef_count_node_start
= coef_count_node_start
+ 10;
4498 if (debug
& VP9_DEBUG_MERGE
)
4499 pr_info(" # merge_vp9_partition_tree\n");
4500 coef_node_start
= VP9_PARTITION_P_START
;
4501 coef_count_node_start
= VP9_PARTITION_P_COUNT_START
;
4502 for (tree_i
= 0; tree_i
< 16; tree_i
++) {
4503 for (node
= 0; node
< 3; node
++) {
4507 count
[coef_count_node_start
+ 2];
4509 count
[coef_count_node_start
+ 3];
4513 count
[coef_count_node_start
+ 1];
4515 count
[coef_count_node_start
+ 2] +
4516 count
[coef_count_node_start
+ 3];
4520 count
[coef_count_node_start
+ 0];
4522 count
[coef_count_node_start
+ 1] +
4523 count
[coef_count_node_start
+ 2] +
4524 count
[coef_count_node_start
+ 3];
4529 vp9_tree_merge_probs(prev_prob
, cur_prob
,
4531 tree_left
, tree_right
, tree_i
, node
);
4533 coef_node_start
= coef_node_start
+ 1;
4535 coef_count_node_start
= coef_count_node_start
+ 4;
4538 if (debug
& VP9_DEBUG_MERGE
)
4539 pr_info(" # merge_vp9_switchable_interp_tree\n");
4540 coef_node_start
= VP9_INTERP_START
;
4541 coef_count_node_start
= VP9_INTERP_COUNT_START
;
4542 for (tree_i
= 0; tree_i
< 4; tree_i
++) {
4543 for (node
= 0; node
< 2; node
++) {
4547 count
[coef_count_node_start
+ 1];
4549 count
[coef_count_node_start
+ 2];
4553 count
[coef_count_node_start
+ 0];
4555 count
[coef_count_node_start
+ 1] +
4556 count
[coef_count_node_start
+ 2];
4561 vp9_tree_merge_probs(prev_prob
, cur_prob
,
4563 tree_left
, tree_right
, tree_i
, node
);
4565 coef_node_start
= coef_node_start
+ 1;
4567 coef_count_node_start
= coef_count_node_start
+ 3;
4570 if (debug
& VP9_DEBUG_MERGE
)
4571 pr_info("# merge_vp9_mv_joint_tree\n");
4572 coef_node_start
= VP9_MV_JOINTS_START
;
4573 coef_count_node_start
= VP9_MV_JOINTS_COUNT_START
;
4574 for (tree_i
= 0; tree_i
< 1; tree_i
++) {
4575 for (node
= 0; node
< 3; node
++) {
4579 count
[coef_count_node_start
+ 2];
4581 count
[coef_count_node_start
+ 3];
4585 count
[coef_count_node_start
+ 1];
4587 count
[coef_count_node_start
+ 2] +
4588 count
[coef_count_node_start
+ 3];
4592 count
[coef_count_node_start
+ 0];
4594 count
[coef_count_node_start
+ 1] +
4595 count
[coef_count_node_start
+ 2] +
4596 count
[coef_count_node_start
+ 3];
4600 vp9_tree_merge_probs(prev_prob
, cur_prob
,
4602 tree_left
, tree_right
, tree_i
, node
);
4604 coef_node_start
= coef_node_start
+ 1;
4606 coef_count_node_start
= coef_count_node_start
+ 4;
4609 for (mvd_i
= 0; mvd_i
< 2; mvd_i
++) {
4610 if (debug
& VP9_DEBUG_MERGE
)
4611 pr_info(" # merge_vp9_mv_class_tree [%d] -\n", mvd_i
);
4613 mvd_i
? VP9_MV_CLASSES_1_START
: VP9_MV_CLASSES_0_START
;
4614 coef_count_node_start
=
4615 mvd_i
? VP9_MV_CLASSES_1_COUNT_START
4616 : VP9_MV_CLASSES_0_COUNT_START
;
4618 for (node
= 0; node
< 10; node
++) {
4622 count
[coef_count_node_start
+ 9];
4624 count
[coef_count_node_start
+ 10];
4628 count
[coef_count_node_start
+ 7];
4630 count
[coef_count_node_start
+ 8];
4634 count
[coef_count_node_start
+ 7] +
4635 count
[coef_count_node_start
+ 8];
4637 count
[coef_count_node_start
+ 9] +
4638 count
[coef_count_node_start
+ 10];
4642 count
[coef_count_node_start
+ 6];
4644 count
[coef_count_node_start
+ 7] +
4645 count
[coef_count_node_start
+ 8] +
4646 count
[coef_count_node_start
+ 9] +
4647 count
[coef_count_node_start
+ 10];
4651 count
[coef_count_node_start
+ 4];
4653 count
[coef_count_node_start
+ 5];
4657 count
[coef_count_node_start
+ 4] +
4658 count
[coef_count_node_start
+ 5];
4660 count
[coef_count_node_start
+ 6] +
4661 count
[coef_count_node_start
+ 7] +
4662 count
[coef_count_node_start
+ 8] +
4663 count
[coef_count_node_start
+ 9] +
4664 count
[coef_count_node_start
+ 10];
4668 count
[coef_count_node_start
+ 2];
4670 count
[coef_count_node_start
+ 3];
4674 count
[coef_count_node_start
+ 2] +
4675 count
[coef_count_node_start
+ 3];
4677 count
[coef_count_node_start
+ 4] +
4678 count
[coef_count_node_start
+ 5] +
4679 count
[coef_count_node_start
+ 6] +
4680 count
[coef_count_node_start
+ 7] +
4681 count
[coef_count_node_start
+ 8] +
4682 count
[coef_count_node_start
+ 9] +
4683 count
[coef_count_node_start
+ 10];
4687 count
[coef_count_node_start
+ 1];
4689 count
[coef_count_node_start
+ 2] +
4690 count
[coef_count_node_start
+ 3] +
4691 count
[coef_count_node_start
+ 4] +
4692 count
[coef_count_node_start
+ 5] +
4693 count
[coef_count_node_start
+ 6] +
4694 count
[coef_count_node_start
+ 7] +
4695 count
[coef_count_node_start
+ 8] +
4696 count
[coef_count_node_start
+ 9] +
4697 count
[coef_count_node_start
+ 10];
4701 count
[coef_count_node_start
+ 0];
4703 count
[coef_count_node_start
+ 1] +
4704 count
[coef_count_node_start
+ 2] +
4705 count
[coef_count_node_start
+ 3] +
4706 count
[coef_count_node_start
+ 4] +
4707 count
[coef_count_node_start
+ 5] +
4708 count
[coef_count_node_start
+ 6] +
4709 count
[coef_count_node_start
+ 7] +
4710 count
[coef_count_node_start
+ 8] +
4711 count
[coef_count_node_start
+ 9] +
4712 count
[coef_count_node_start
+ 10];
4717 vp9_tree_merge_probs(prev_prob
, cur_prob
,
4718 coef_node_start
, tree_left
, tree_right
,
4721 coef_node_start
= coef_node_start
+ 1;
4724 if (debug
& VP9_DEBUG_MERGE
)
4725 pr_info(" # merge_vp9_mv_class0_tree [%d] -\n", mvd_i
);
4727 mvd_i
? VP9_MV_CLASS0_1_START
: VP9_MV_CLASS0_0_START
;
4728 coef_count_node_start
=
4729 mvd_i
? VP9_MV_CLASS0_1_COUNT_START
:
4730 VP9_MV_CLASS0_0_COUNT_START
;
4733 tree_left
= count
[coef_count_node_start
+ 0];
4734 tree_right
= count
[coef_count_node_start
+ 1];
4736 vp9_tree_merge_probs(prev_prob
, cur_prob
, coef_node_start
,
4737 tree_left
, tree_right
, tree_i
, node
);
4738 if (debug
& VP9_DEBUG_MERGE
)
4739 pr_info(" # merge_vp9_mv_fp_tree_class0_fp [%d] -\n",
4742 mvd_i
? VP9_MV_CLASS0_FP_1_START
:
4743 VP9_MV_CLASS0_FP_0_START
;
4744 coef_count_node_start
=
4745 mvd_i
? VP9_MV_CLASS0_FP_1_COUNT_START
:
4746 VP9_MV_CLASS0_FP_0_COUNT_START
;
4747 for (tree_i
= 0; tree_i
< 3; tree_i
++) {
4748 for (node
= 0; node
< 3; node
++) {
4752 count
[coef_count_node_start
+ 2];
4754 count
[coef_count_node_start
+ 3];
4758 count
[coef_count_node_start
+ 1];
4760 count
[coef_count_node_start
+ 2]
4761 + count
[coef_count_node_start
+ 3];
4765 count
[coef_count_node_start
+ 0];
4767 count
[coef_count_node_start
+ 1]
4768 + count
[coef_count_node_start
+ 2]
4769 + count
[coef_count_node_start
+ 3];
4774 vp9_tree_merge_probs(prev_prob
, cur_prob
,
4775 coef_node_start
, tree_left
, tree_right
,
4778 coef_node_start
= coef_node_start
+ 1;
4780 coef_count_node_start
= coef_count_node_start
+ 4;
4783 } /* for mvd_i (mvd_y or mvd_x)*/
4788 static bool v4l_is_there_vframe_bound(struct VP9Decoder_s
*pbi
)
4791 struct VP9_Common_s
*const cm
= &pbi
->common
;
4792 struct RefCntBuffer_s
*frame_bufs
= cm
->buffer_pool
->frame_bufs
;
4794 for (i
= 0; i
< pbi
->used_buf_num
; ++i
) {
4795 if (frame_bufs
[i
].buf
.vframe_bound
)
4802 static void v4l_mmu_buffer_release(struct VP9Decoder_s
*pbi
)
4804 struct VP9_Common_s
*const cm
= &pbi
->common
;
4805 struct RefCntBuffer_s
*frame_bufs
= cm
->buffer_pool
->frame_bufs
;
4808 /* release workspace */
4810 decoder_bmmu_box_free_idx(pbi
->bmmu_box
,
4813 * it's only when vframe get back to driver, right now we can be sure
4814 * that vframe and fd are related. if the playback exits, the capture
4815 * requires the upper app to release when the fd is closed, and others
4816 * buffers drivers are released by driver.
4818 for (i
= 0; i
< pbi
->used_buf_num
; ++i
) {
4819 if (!frame_bufs
[i
].buf
.vframe_bound
) {
4821 decoder_bmmu_box_free_idx(pbi
->bmmu_box
,
4822 HEADER_BUFFER_IDX(i
));
4824 decoder_mmu_box_free_idx(pbi
->mmu_box
, i
);
4826 vp9_print(pbi
, PRINT_FLAG_V4L_DETAIL
,
4827 "%s free buffer[%d], bmmu_box: %p, mmu_box: %p\n",
4828 __func__
, i
, pbi
->bmmu_box
, pbi
->mmu_box
);
4833 static void uninit_mmu_buffers(struct VP9Decoder_s
*pbi
)
4835 #ifndef MV_USE_FIXED_BUF
4836 dealloc_mv_bufs(pbi
);
4838 if (pbi
->is_used_v4l
&&
4839 v4l_is_there_vframe_bound(pbi
)) {
4840 if (get_double_write_mode(pbi
) != 0x10) {
4841 v4l_mmu_buffer_release(pbi
);
4847 decoder_mmu_box_free(pbi
->mmu_box
);
4848 pbi
->mmu_box
= NULL
;
4851 decoder_bmmu_box_free(pbi
->bmmu_box
);
4852 pbi
->bmmu_box
= NULL
;
4855 static int calc_luc_quantity(u32 w
, u32 h
)
4857 int lcu_size
= 64; /*fixed 64*/
4858 int pic_width_64
= (w
+ 63) & (~0x3f);
4859 int pic_height_32
= (h
+ 31) & (~0x1f);
4860 int pic_width_lcu
= (pic_width_64
% lcu_size
) ?
4861 pic_width_64
/ lcu_size
+ 1 : pic_width_64
/ lcu_size
;
4862 int pic_height_lcu
= (pic_height_32
% lcu_size
) ?
4863 pic_height_32
/ lcu_size
+ 1 : pic_height_32
/ lcu_size
;
4865 return pic_width_lcu
* pic_height_lcu
;
4868 static int v4l_alloc_and_config_pic(struct VP9Decoder_s
*pbi
,
4869 struct PIC_BUFFER_CONFIG_s
*pic
)
4873 int dw_mode
= get_double_write_mode_init(pbi
);
4874 int lcu_total
= calc_luc_quantity(pbi
->frame_width
, pbi
->frame_height
);
4875 #ifdef MV_USE_FIXED_BUF
4876 u32 mpred_mv_end
= pbi
->work_space_buf
->mpred_mv
.buf_start
+
4877 pbi
->work_space_buf
->mpred_mv
.buf_size
;
4879 struct vdec_v4l2_buffer
*fb
= NULL
;
4884 ret
= vdec_v4l_get_buffer(pbi
->v4l2_ctx
, &fb
);
4886 vp9_print(pbi
, 0, "[%d] VP9 get buffer fail.\n",
4887 ((struct aml_vcodec_ctx
*) (pbi
->v4l2_ctx
))->id
);
4891 if (pbi
->mmu_enable
) {
4892 pbi
->m_BUF
[i
].header_addr
= decoder_bmmu_box_get_phy_addr(
4893 pbi
->bmmu_box
, HEADER_BUFFER_IDX(i
));
4894 if (debug
& VP9_DEBUG_BUFMGR_MORE
) {
4895 pr_info("MMU header_adr %d: %ld\n",
4896 i
, pbi
->m_BUF
[i
].header_addr
);
4900 #ifdef MV_USE_FIXED_BUF
4901 if ((pbi
->work_space_buf
->mpred_mv
.buf_start
+
4902 (((i
+ 1) * lcu_total
) * MV_MEM_UNIT
))
4905 pbi
->m_BUF
[i
].v4l_ref_buf_addr
= (ulong
)fb
;
4906 pic
->cma_alloc_addr
= fb
->m
.mem
[0].addr
;
4907 if (fb
->num_planes
== 1) {
4908 pbi
->m_BUF
[i
].start_adr
= fb
->m
.mem
[0].addr
;
4909 pbi
->m_BUF
[i
].luma_size
= fb
->m
.mem
[0].offset
;
4910 pbi
->m_BUF
[i
].size
= fb
->m
.mem
[0].size
;
4911 fb
->m
.mem
[0].bytes_used
= fb
->m
.mem
[0].size
;
4912 pic
->dw_y_adr
= pbi
->m_BUF
[i
].start_adr
;
4913 pic
->dw_u_v_adr
= pic
->dw_y_adr
+ pbi
->m_BUF
[i
].luma_size
;
4914 } else if (fb
->num_planes
== 2) {
4915 pbi
->m_BUF
[i
].start_adr
= fb
->m
.mem
[0].addr
;
4916 pbi
->m_BUF
[i
].size
= fb
->m
.mem
[0].size
;
4917 pbi
->m_BUF
[i
].chroma_addr
= fb
->m
.mem
[1].addr
;
4918 pbi
->m_BUF
[i
].chroma_size
= fb
->m
.mem
[1].size
;
4919 fb
->m
.mem
[0].bytes_used
= fb
->m
.mem
[0].size
;
4920 fb
->m
.mem
[1].bytes_used
= fb
->m
.mem
[1].size
;
4921 pic
->dw_y_adr
= pbi
->m_BUF
[i
].start_adr
;
4922 pic
->dw_u_v_adr
= pbi
->m_BUF
[i
].chroma_addr
;
4925 /* config frame buffer */
4926 if (pbi
->mmu_enable
)
4927 pic
->header_adr
= pbi
->m_BUF
[i
].header_addr
;
4930 pic
->lcu_total
= lcu_total
;
4931 pic
->mc_canvas_y
= pic
->index
;
4932 pic
->mc_canvas_u_v
= pic
->index
;
4934 if (dw_mode
& 0x10) {
4935 pic
->mc_canvas_y
= (pic
->index
<< 1);
4936 pic
->mc_canvas_u_v
= (pic
->index
<< 1) + 1;
4939 #ifdef MV_USE_FIXED_BUF
4940 pic
->mpred_mv_wr_start_addr
=
4941 pbi
->work_space_buf
->mpred_mv
.buf_start
+
4942 ((pic
->index
* lcu_total
) * MV_MEM_UNIT
);
4945 pr_info("%s index %d BUF_index %d ",
4946 __func__
, pic
->index
,
4948 pr_info("comp_body_size %x comp_buf_size %x ",
4949 pic
->comp_body_size
,
4951 pr_info("mpred_mv_wr_start_adr %ld\n",
4952 pic
->mpred_mv_wr_start_addr
);
4953 pr_info("dw_y_adr %d, pic_config->dw_u_v_adr =%d\n",
4957 #ifdef MV_USE_FIXED_BUF
4963 static int config_pic(struct VP9Decoder_s
*pbi
,
4964 struct PIC_BUFFER_CONFIG_s
*pic_config
)
4968 int pic_width
= pbi
->init_pic_w
;
4969 int pic_height
= pbi
->init_pic_h
;
4970 int lcu_size
= 64; /*fixed 64*/
4971 int pic_width_64
= (pic_width
+ 63) & (~0x3f);
4972 int pic_height_32
= (pic_height
+ 31) & (~0x1f);
4973 int pic_width_lcu
= (pic_width_64
% lcu_size
) ?
4974 pic_width_64
/ lcu_size
+ 1
4975 : pic_width_64
/ lcu_size
;
4976 int pic_height_lcu
= (pic_height_32
% lcu_size
) ?
4977 pic_height_32
/ lcu_size
+ 1
4978 : pic_height_32
/ lcu_size
;
4979 int lcu_total
= pic_width_lcu
* pic_height_lcu
;
4980 #ifdef MV_USE_FIXED_BUF
4981 u32 mpred_mv_end
= pbi
->work_space_buf
->mpred_mv
.buf_start
+
4982 pbi
->work_space_buf
->mpred_mv
.buf_size
;
4987 int losless_comp_header_size
=
4988 compute_losless_comp_header_size(pic_width
,
4990 int losless_comp_body_size
= compute_losless_comp_body_size(pic_width
,
4991 pic_height
, buf_alloc_depth
== 10);
4992 int mc_buffer_size
= losless_comp_header_size
+ losless_comp_body_size
;
4993 int mc_buffer_size_h
= (mc_buffer_size
+ 0xffff) >> 16;
4994 int mc_buffer_size_u_v
= 0;
4995 int mc_buffer_size_u_v_h
= 0;
4996 int dw_mode
= get_double_write_mode_init(pbi
);
4998 pbi
->lcu_total
= lcu_total
;
5001 int pic_width_dw
= pic_width
/
5002 get_double_write_ratio(pbi
, dw_mode
);
5003 int pic_height_dw
= pic_height
/
5004 get_double_write_ratio(pbi
, dw_mode
);
5006 int pic_width_64_dw
= (pic_width_dw
+ 63) & (~0x3f);
5007 int pic_height_32_dw
= (pic_height_dw
+ 31) & (~0x1f);
5008 int pic_width_lcu_dw
= (pic_width_64_dw
% lcu_size
) ?
5009 pic_width_64_dw
/ lcu_size
+ 1
5010 : pic_width_64_dw
/ lcu_size
;
5011 int pic_height_lcu_dw
= (pic_height_32_dw
% lcu_size
) ?
5012 pic_height_32_dw
/ lcu_size
+ 1
5013 : pic_height_32_dw
/ lcu_size
;
5014 int lcu_total_dw
= pic_width_lcu_dw
* pic_height_lcu_dw
;
5015 mc_buffer_size_u_v
= lcu_total_dw
* lcu_size
* lcu_size
/ 2;
5016 mc_buffer_size_u_v_h
= (mc_buffer_size_u_v
+ 0xffff) >> 16;
5018 buf_size
= ((mc_buffer_size_u_v_h
<< 16) * 3);
5019 buf_size
= ((buf_size
+ 0xffff) >> 16) << 16;
5022 if (mc_buffer_size
& 0xffff) /*64k alignment*/
5023 mc_buffer_size_h
+= 1;
5024 if ((!pbi
->mmu_enable
) && ((dw_mode
& 0x10) == 0))
5025 buf_size
+= (mc_buffer_size_h
<< 16);
5027 if (pbi
->mmu_enable
) {
5028 pic_config
->header_adr
= decoder_bmmu_box_get_phy_addr(
5029 pbi
->bmmu_box
, HEADER_BUFFER_IDX(pic_config
->index
));
5031 if (debug
& VP9_DEBUG_BUFMGR_MORE
) {
5032 pr_info("MMU header_adr %d: %ld\n",
5033 pic_config
->index
, pic_config
->header_adr
);
5037 i
= pic_config
->index
;
5038 #ifdef MV_USE_FIXED_BUF
5039 if ((pbi
->work_space_buf
->mpred_mv
.buf_start
+
5040 (((i
+ 1) * lcu_total
) * MV_MEM_UNIT
))
5045 ret
= decoder_bmmu_box_alloc_buf_phy(pbi
->bmmu_box
,
5047 buf_size
, DRIVER_NAME
,
5048 &pic_config
->cma_alloc_addr
);
5051 "decoder_bmmu_box_alloc_buf_phy idx %d size %d fail\n",
5058 if (pic_config
->cma_alloc_addr
)
5059 y_adr
= pic_config
->cma_alloc_addr
;
5062 "decoder_bmmu_box_alloc_buf_phy idx %d size %d return null\n",
5070 /*ensure get_pic_by_POC()
5071 not get the buffer not decoded*/
5072 pic_config
->BUF_index
= i
;
5073 pic_config
->lcu_total
= lcu_total
;
5075 pic_config
->comp_body_size
= losless_comp_body_size
;
5076 pic_config
->buf_size
= buf_size
;
5078 pic_config
->mc_canvas_y
= pic_config
->index
;
5079 pic_config
->mc_canvas_u_v
= pic_config
->index
;
5080 if (dw_mode
& 0x10) {
5081 pic_config
->dw_y_adr
= y_adr
;
5082 pic_config
->dw_u_v_adr
= y_adr
+
5083 ((mc_buffer_size_u_v_h
<< 16) << 1);
5085 pic_config
->mc_canvas_y
=
5086 (pic_config
->index
<< 1);
5087 pic_config
->mc_canvas_u_v
=
5088 (pic_config
->index
<< 1) + 1;
5089 } else if (dw_mode
) {
5090 pic_config
->dw_y_adr
= y_adr
;
5091 pic_config
->dw_u_v_adr
= pic_config
->dw_y_adr
+
5092 ((mc_buffer_size_u_v_h
<< 16) << 1);
5094 #ifdef MV_USE_FIXED_BUF
5095 pic_config
->mpred_mv_wr_start_addr
=
5096 pbi
->work_space_buf
->mpred_mv
.buf_start
+
5097 ((pic_config
->index
* lcu_total
)
5102 ("%s index %d BUF_index %d ",
5103 __func__
, pic_config
->index
,
5104 pic_config
->BUF_index
);
5106 ("comp_body_size %x comp_buf_size %x ",
5107 pic_config
->comp_body_size
,
5108 pic_config
->buf_size
);
5110 ("mpred_mv_wr_start_adr %ld\n",
5111 pic_config
->mpred_mv_wr_start_addr
);
5112 pr_info("dw_y_adr %d, pic_config->dw_u_v_adr =%d\n",
5113 pic_config
->dw_y_adr
,
5114 pic_config
->dw_u_v_adr
);
5118 #ifdef MV_USE_FIXED_BUF
5124 static int vvp9_mmu_compress_header_size(struct VP9Decoder_s
*pbi
)
5126 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1
) &&
5127 IS_8K_SIZE(pbi
->max_pic_w
, pbi
->max_pic_h
))
5128 return (MMU_COMPRESS_8K_HEADER_SIZE
);
5130 return (MMU_COMPRESS_HEADER_SIZE
);
5133 /*#define FRAME_MMU_MAP_SIZE (MAX_FRAME_4K_NUM * 4)*/
5134 static int vvp9_frame_mmu_map_size(struct VP9Decoder_s
*pbi
)
5136 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1
) &&
5137 IS_8K_SIZE(pbi
->max_pic_w
, pbi
->max_pic_h
))
5138 return (MAX_FRAME_8K_NUM
* 4);
5140 return (MAX_FRAME_4K_NUM
* 4);
5143 static void init_pic_list(struct VP9Decoder_s
*pbi
)
5146 struct VP9_Common_s
*cm
= &pbi
->common
;
5147 struct PIC_BUFFER_CONFIG_s
*pic_config
;
5149 struct vdec_s
*vdec
= hw_to_vdec(pbi
);
5151 if (pbi
->mmu_enable
&& ((pbi
->double_write_mode
& 0x10) == 0)) {
5152 header_size
= vvp9_mmu_compress_header_size(pbi
);
5153 /*alloc VP9 compress header first*/
5154 for (i
= 0; i
< pbi
->used_buf_num
; i
++) {
5155 unsigned long buf_addr
;
5156 if (decoder_bmmu_box_alloc_buf_phy
5158 HEADER_BUFFER_IDX(i
), header_size
,
5161 pr_info("%s malloc compress header failed %d\n",
5162 DRIVER_HEADER_NAME
, i
);
5163 pbi
->fatal_error
|= DECODER_FATAL_ERROR_NO_MEM
;
5168 for (i
= 0; i
< pbi
->used_buf_num
; i
++) {
5169 pic_config
= &cm
->buffer_pool
->frame_bufs
[i
].buf
;
5170 pic_config
->index
= i
;
5171 pic_config
->BUF_index
= -1;
5172 pic_config
->mv_buf_index
= -1;
5173 if (vdec
->parallel_dec
== 1) {
5174 pic_config
->y_canvas_index
= -1;
5175 pic_config
->uv_canvas_index
= -1;
5177 pic_config
->y_crop_width
= pbi
->init_pic_w
;
5178 pic_config
->y_crop_height
= pbi
->init_pic_h
;
5179 pic_config
->double_write_mode
= get_double_write_mode(pbi
);
5181 if (!pbi
->is_used_v4l
) {
5182 if (config_pic(pbi
, pic_config
) < 0) {
5184 pr_info("Config_pic %d fail\n",
5186 pic_config
->index
= -1;
5190 if (pic_config
->double_write_mode
) {
5191 set_canvas(pbi
, pic_config
);
5195 for (; i
< pbi
->used_buf_num
; i
++) {
5196 pic_config
= &cm
->buffer_pool
->frame_bufs
[i
].buf
;
5197 pic_config
->index
= -1;
5198 pic_config
->BUF_index
= -1;
5199 pic_config
->mv_buf_index
= -1;
5200 if (vdec
->parallel_dec
== 1) {
5201 pic_config
->y_canvas_index
= -1;
5202 pic_config
->uv_canvas_index
= -1;
5205 pr_info("%s ok, used_buf_num = %d\n",
5206 __func__
, pbi
->used_buf_num
);
5209 static void init_pic_list_hw(struct VP9Decoder_s
*pbi
)
5212 struct VP9_Common_s
*cm
= &pbi
->common
;
5213 struct PIC_BUFFER_CONFIG_s
*pic_config
;
5214 /*WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CONF_ADDR, 0x0);*/
5215 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CONF_ADDR
,
5216 (0x1 << 1) | (0x1 << 2));
5219 for (i
= 0; i
< pbi
->used_buf_num
; i
++) {
5220 pic_config
= &cm
->buffer_pool
->frame_bufs
[i
].buf
;
5221 if (pic_config
->index
< 0)
5224 if (pbi
->mmu_enable
&& ((pic_config
->double_write_mode
& 0x10) == 0)) {
5226 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA
,
5227 pic_config
->header_adr
>> 5);
5229 /*WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CMD_ADDR,
5230 * pic_config->mc_y_adr
5231 * | (pic_config->mc_canvas_y << 8) | 0x1);
5233 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA
,
5234 pic_config
->dw_y_adr
>> 5);
5236 #ifndef LOSLESS_COMPRESS_MODE
5237 /*WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CMD_ADDR,
5238 * pic_config->mc_u_v_adr
5239 * | (pic_config->mc_canvas_u_v << 8)| 0x1);
5241 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA
,
5242 pic_config
->header_adr
>> 5);
5244 if (pic_config
->double_write_mode
& 0x10) {
5245 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA
,
5246 pic_config
->dw_u_v_adr
>> 5);
5250 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CONF_ADDR
, 0x1);
5252 /*Zero out canvas registers in IPP -- avoid simulation X*/
5253 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR
,
5254 (0 << 8) | (0 << 1) | 1);
5255 for (i
= 0; i
< 32; i
++)
5256 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR
, 0);
5260 static void dump_pic_list(struct VP9Decoder_s
*pbi
)
5262 struct VP9_Common_s
*const cm
= &pbi
->common
;
5263 struct PIC_BUFFER_CONFIG_s
*pic_config
;
5265 for (i
= 0; i
< FRAME_BUFFERS
; i
++) {
5266 pic_config
= &cm
->buffer_pool
->frame_bufs
[i
].buf
;
5268 "Buf(%d) index %d mv_buf_index %d ref_count %d vf_ref %d dec_idx %d slice_type %d w/h %d/%d adr%ld\n",
5271 #ifndef MV_USE_FIXED_BUF
5272 pic_config
->mv_buf_index
,
5277 frame_bufs
[i
].ref_count
,
5279 pic_config
->decode_idx
,
5280 pic_config
->slice_type
,
5281 pic_config
->y_crop_width
,
5282 pic_config
->y_crop_height
,
5283 pic_config
->cma_alloc_addr
5289 static int config_pic_size(struct VP9Decoder_s
*pbi
, unsigned short bit_depth
)
5291 #ifdef LOSLESS_COMPRESS_MODE
5292 unsigned int data32
;
5294 int losless_comp_header_size
, losless_comp_body_size
;
5295 struct VP9_Common_s
*cm
= &pbi
->common
;
5296 struct PIC_BUFFER_CONFIG_s
*cur_pic_config
= &cm
->cur_frame
->buf
;
5298 frame_width
= cur_pic_config
->y_crop_width
;
5299 frame_height
= cur_pic_config
->y_crop_height
;
5300 cur_pic_config
->bit_depth
= bit_depth
;
5301 cur_pic_config
->double_write_mode
= get_double_write_mode(pbi
);
5302 losless_comp_header_size
=
5303 compute_losless_comp_header_size(cur_pic_config
->y_crop_width
,
5304 cur_pic_config
->y_crop_height
);
5305 losless_comp_body_size
=
5306 compute_losless_comp_body_size(cur_pic_config
->y_crop_width
,
5307 cur_pic_config
->y_crop_height
, (bit_depth
== VPX_BITS_10
));
5308 cur_pic_config
->comp_body_size
= losless_comp_body_size
;
5309 #ifdef LOSLESS_COMPRESS_MODE
5310 data32
= READ_VREG(HEVC_SAO_CTRL5
);
5311 if (bit_depth
== VPX_BITS_10
)
5312 data32
&= ~(1 << 9);
5316 WRITE_VREG(HEVC_SAO_CTRL5
, data32
);
5318 if (pbi
->mmu_enable
) {
5319 /*bit[4] : paged_mem_mode*/
5320 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1
, (0x1 << 4));
5322 /*bit[3] smem mdoe*/
5323 if (bit_depth
== VPX_BITS_10
)
5324 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1
, (0 << 3));
5326 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1
, (1 << 3));
5328 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_SM1
)
5329 WRITE_VREG(HEVCD_MPP_DECOMP_CTL2
, (losless_comp_body_size
>> 5));
5330 /*WRITE_VREG(HEVCD_MPP_DECOMP_CTL3,(0xff<<20) | (0xff<<10) | 0xff);*/
5331 WRITE_VREG(HEVC_CM_BODY_LENGTH
, losless_comp_body_size
);
5332 WRITE_VREG(HEVC_CM_HEADER_OFFSET
, losless_comp_body_size
);
5333 WRITE_VREG(HEVC_CM_HEADER_LENGTH
, losless_comp_header_size
);
5334 if (get_double_write_mode(pbi
) & 0x10)
5335 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1
, 0x1 << 31);
5337 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1
, 0x1 << 31);
5342 static int config_mc_buffer(struct VP9Decoder_s
*pbi
, unsigned short bit_depth
)
5345 struct VP9_Common_s
*cm
= &pbi
->common
;
5346 struct PIC_BUFFER_CONFIG_s
*cur_pic_config
= &cm
->cur_frame
->buf
;
5347 uint8_t scale_enable
= 0;
5349 if (debug
&VP9_DEBUG_BUFMGR_MORE
)
5350 pr_info("config_mc_buffer entered .....\n");
5352 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR
,
5353 (0 << 8) | (0 << 1) | 1);
5354 for (i
= 0; i
< REFS_PER_FRAME
; ++i
) {
5355 struct PIC_BUFFER_CONFIG_s
*pic_config
= cm
->frame_refs
[i
].buf
;
5358 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR
,
5359 (pic_config
->mc_canvas_u_v
<< 16)
5360 | (pic_config
->mc_canvas_u_v
<< 8)
5361 | pic_config
->mc_canvas_y
);
5362 if (debug
& VP9_DEBUG_BUFMGR_MORE
)
5363 pr_info("refid %x mc_canvas_u_v %x mc_canvas_y %x\n",
5364 i
, pic_config
->mc_canvas_u_v
,
5365 pic_config
->mc_canvas_y
);
5367 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR
,
5368 (16 << 8) | (0 << 1) | 1);
5369 for (i
= 0; i
< REFS_PER_FRAME
; ++i
) {
5370 struct PIC_BUFFER_CONFIG_s
*pic_config
= cm
->frame_refs
[i
].buf
;
5373 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR
,
5374 (pic_config
->mc_canvas_u_v
<< 16)
5375 | (pic_config
->mc_canvas_u_v
<< 8)
5376 | pic_config
->mc_canvas_y
);
5379 /*auto_inc start index:0 field:0*/
5380 WRITE_VREG(VP9D_MPP_REFINFO_TBL_ACCCONFIG
, 0x1 << 2);
5381 /*index 0:last 1:golden 2:altref*/
5382 for (i
= 0; i
< REFS_PER_FRAME
; i
++) {
5383 int ref_pic_body_size
;
5384 struct PIC_BUFFER_CONFIG_s
*pic_config
= cm
->frame_refs
[i
].buf
;
5387 WRITE_VREG(VP9D_MPP_REFINFO_DATA
, pic_config
->y_crop_width
);
5388 WRITE_VREG(VP9D_MPP_REFINFO_DATA
, pic_config
->y_crop_height
);
5390 if (pic_config
->y_crop_width
!= cur_pic_config
->y_crop_width
||
5391 pic_config
->y_crop_height
!= cur_pic_config
->y_crop_height
) {
5392 scale_enable
|= (1 << i
);
5395 compute_losless_comp_body_size(pic_config
->y_crop_width
,
5396 pic_config
->y_crop_height
, (bit_depth
== VPX_BITS_10
));
5397 WRITE_VREG(VP9D_MPP_REFINFO_DATA
,
5398 (pic_config
->y_crop_width
<< 14)
5399 / cur_pic_config
->y_crop_width
);
5400 WRITE_VREG(VP9D_MPP_REFINFO_DATA
,
5401 (pic_config
->y_crop_height
<< 14)
5402 / cur_pic_config
->y_crop_height
);
5403 if (pbi
->mmu_enable
)
5404 WRITE_VREG(VP9D_MPP_REFINFO_DATA
, 0);
5406 WRITE_VREG(VP9D_MPP_REFINFO_DATA
, ref_pic_body_size
>> 5);
5408 WRITE_VREG(VP9D_MPP_REF_SCALE_ENBL
, scale_enable
);
5412 static void clear_mpred_hw(struct VP9Decoder_s
*pbi
)
5414 unsigned int data32
;
5416 data32
= READ_VREG(HEVC_MPRED_CTRL4
);
5417 data32
&= (~(1 << 6));
5418 WRITE_VREG(HEVC_MPRED_CTRL4
, data32
);
5421 static void config_mpred_hw(struct VP9Decoder_s
*pbi
)
5423 struct VP9_Common_s
*cm
= &pbi
->common
;
5424 struct PIC_BUFFER_CONFIG_s
*cur_pic_config
= &cm
->cur_frame
->buf
;
5425 struct PIC_BUFFER_CONFIG_s
*last_frame_pic_config
=
5426 &cm
->prev_frame
->buf
;
5428 unsigned int data32
;
5429 int mpred_curr_lcu_x
;
5430 int mpred_curr_lcu_y
;
5431 int mpred_mv_rd_end_addr
;
5434 mpred_mv_rd_end_addr
= last_frame_pic_config
->mpred_mv_wr_start_addr
5435 + (last_frame_pic_config
->lcu_total
* MV_MEM_UNIT
);
5437 data32
= READ_VREG(HEVC_MPRED_CURR_LCU
);
5438 mpred_curr_lcu_x
= data32
& 0xffff;
5439 mpred_curr_lcu_y
= (data32
>> 16) & 0xffff;
5441 if (debug
& VP9_DEBUG_BUFMGR
)
5442 pr_info("cur pic_config index %d col pic_config index %d\n",
5443 cur_pic_config
->index
, last_frame_pic_config
->index
);
5444 WRITE_VREG(HEVC_MPRED_CTRL3
, 0x24122412);
5445 WRITE_VREG(HEVC_MPRED_ABV_START_ADDR
,
5446 pbi
->work_space_buf
->mpred_above
.buf_start
);
5448 data32
= READ_VREG(HEVC_MPRED_CTRL4
);
5450 data32
&= (~(1 << 6));
5451 data32
|= (cm
->use_prev_frame_mvs
<< 6);
5452 WRITE_VREG(HEVC_MPRED_CTRL4
, data32
);
5454 WRITE_VREG(HEVC_MPRED_MV_WR_START_ADDR
,
5455 cur_pic_config
->mpred_mv_wr_start_addr
);
5456 WRITE_VREG(HEVC_MPRED_MV_WPTR
, cur_pic_config
->mpred_mv_wr_start_addr
);
5458 WRITE_VREG(HEVC_MPRED_MV_RD_START_ADDR
,
5459 last_frame_pic_config
->mpred_mv_wr_start_addr
);
5460 WRITE_VREG(HEVC_MPRED_MV_RPTR
,
5461 last_frame_pic_config
->mpred_mv_wr_start_addr
);
5462 /*data32 = ((pbi->lcu_x_num - pbi->tile_width_lcu)*MV_MEM_UNIT);*/
5463 /*WRITE_VREG(HEVC_MPRED_MV_WR_ROW_JUMP,data32);*/
5464 /*WRITE_VREG(HEVC_MPRED_MV_RD_ROW_JUMP,data32);*/
5465 WRITE_VREG(HEVC_MPRED_MV_RD_END_ADDR
, mpred_mv_rd_end_addr
);
5469 static void config_sao_hw(struct VP9Decoder_s
*pbi
, union param_u
*params
)
5471 struct VP9_Common_s
*cm
= &pbi
->common
;
5472 struct PIC_BUFFER_CONFIG_s
*pic_config
= &cm
->cur_frame
->buf
;
5474 unsigned int data32
;
5476 int mc_buffer_size_u_v
=
5477 pic_config
->lcu_total
* lcu_size
*lcu_size
/2;
5478 int mc_buffer_size_u_v_h
=
5479 (mc_buffer_size_u_v
+ 0xffff) >> 16;/*64k alignment*/
5480 struct aml_vcodec_ctx
* v4l2_ctx
= pbi
->v4l2_ctx
;
5482 if (get_double_write_mode(pbi
)) {
5483 WRITE_VREG(HEVC_SAO_Y_START_ADDR
, pic_config
->dw_y_adr
);
5484 WRITE_VREG(HEVC_SAO_C_START_ADDR
, pic_config
->dw_u_v_adr
);
5485 WRITE_VREG(HEVC_SAO_Y_WPTR
, pic_config
->dw_y_adr
);
5486 WRITE_VREG(HEVC_SAO_C_WPTR
, pic_config
->dw_u_v_adr
);
5488 WRITE_VREG(HEVC_SAO_Y_START_ADDR
, 0xffffffff);
5489 WRITE_VREG(HEVC_SAO_C_START_ADDR
, 0xffffffff);
5491 if (pbi
->mmu_enable
)
5492 WRITE_VREG(HEVC_CM_HEADER_START_ADDR
, pic_config
->header_adr
);
5494 data32
= (mc_buffer_size_u_v_h
<< 16) << 1;
5495 /*pr_info("data32=%x,mc_buffer_size_u_v_h=%x,lcu_total=%x\n",
5496 * data32, mc_buffer_size_u_v_h, pic_config->lcu_total);
5498 WRITE_VREG(HEVC_SAO_Y_LENGTH
, data32
);
5500 data32
= (mc_buffer_size_u_v_h
<< 16);
5501 WRITE_VREG(HEVC_SAO_C_LENGTH
, data32
);
5505 data32
= READ_VREG(HEVC_SAO_CTRL1
);
5506 data32
&= (~0x3000);
5507 /*[13:12] axi_aformat, 0-Linear, 1-32x32, 2-64x32*/
5508 data32
|= (pbi
->mem_map_mode
<< 12);
5510 data32
|= 0x1; /* [1]:dw_disable [0]:cm_disable*/
5511 WRITE_VREG(HEVC_SAO_CTRL1
, data32
);
5512 /*[23:22] dw_v1_ctrl [21:20] dw_v0_ctrl [19:18] dw_h1_ctrl
5513 * [17:16] dw_h0_ctrl
5515 data32
= READ_VREG(HEVC_SAO_CTRL5
);
5516 /*set them all 0 for H265_NV21 (no down-scale)*/
5517 data32
&= ~(0xff << 16);
5518 WRITE_VREG(HEVC_SAO_CTRL5
, data32
);
5519 data32
= READ_VREG(HEVCD_IPP_AXIIF_CONFIG
);
5521 /*[5:4] address_format 00:linear 01:32x32 10:64x32*/
5522 data32
|= (pbi
->mem_map_mode
<< 4);
5523 WRITE_VREG(HEVCD_IPP_AXIIF_CONFIG
, data32
);
5526 data32
= READ_VREG(HEVC_SAO_CTRL1
);
5527 data32
&= (~0x3000);
5528 /*[13:12] axi_aformat, 0-Linear, 1-32x32, 2-64x32*/
5529 data32
|= (pbi
->mem_map_mode
<< 12);
5531 /*data32 |= 0x670;*/ /*Big-Endian per 64-bit*/
5532 data32
|= 0x880; /*.Big-Endian per 64-bit */
5534 data32
|= 0x1; /*[1]:dw_disable [0]:cm_disable*/
5535 WRITE_VREG(HEVC_SAO_CTRL1
, data32
);
5536 /* [23:22] dw_v1_ctrl [21:20] dw_v0_ctrl
5537 *[19:18] dw_h1_ctrl [17:16] dw_h0_ctrl
5539 data32
= READ_VREG(HEVC_SAO_CTRL5
);
5540 /* set them all 0 for H265_NV21 (no down-scale)*/
5541 data32
&= ~(0xff << 16);
5542 WRITE_VREG(HEVC_SAO_CTRL5
, data32
);
5544 data32
= READ_VREG(HEVCD_IPP_AXIIF_CONFIG
);
5546 /*[5:4] address_format 00:linear 01:32x32 10:64x32*/
5547 data32
|= (pbi
->mem_map_mode
<< 4);
5549 data32
|= 0x8; /*Big-Endian per 64-bit*/
5550 WRITE_VREG(HEVCD_IPP_AXIIF_CONFIG
, data32
);
5553 data32
= READ_VREG(HEVC_SAO_CTRL1
);
5554 data32
&= (~0x3000);
5555 data32
|= (pbi
->mem_map_mode
<<
5558 /* [13:12] axi_aformat, 0-Linear,
5562 /* data32 |= 0x670; // Big-Endian per 64-bit */
5563 data32
|= endian
; /* Big-Endian per 64-bit */
5564 data32
&= (~0x3); /*[1]:dw_disable [0]:cm_disable*/
5565 if (get_double_write_mode(pbi
) == 0)
5566 data32
|= 0x2; /*disable double write*/
5567 else if (get_double_write_mode(pbi
) & 0x10)
5568 data32
|= 0x1; /*disable cm*/
5569 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A
) { /* >= G12A dw write control */
5571 data
= READ_VREG(HEVC_DBLK_CFGB
);
5572 data
&= (~0x300); /*[8]:first write enable (compress) [9]:double write enable (uncompress)*/
5573 if (get_double_write_mode(pbi
) == 0)
5574 data
|= (0x1 << 8); /*enable first write*/
5575 else if (get_double_write_mode(pbi
) & 0x10)
5576 data
|= (0x1 << 9); /*double write only*/
5578 data
|= ((0x1 << 8) |(0x1 << 9));
5579 WRITE_VREG(HEVC_DBLK_CFGB
, data
);
5583 if (pbi
->is_used_v4l
) {
5584 if ((v4l2_ctx
->q_data
[AML_Q_DATA_DST
].fmt
->fourcc
== V4L2_PIX_FMT_NV21
) ||
5585 (v4l2_ctx
->q_data
[AML_Q_DATA_DST
].fmt
->fourcc
== V4L2_PIX_FMT_NV21M
))
5586 data32
&= ~(1 << 8); /* NV21 */
5588 data32
|= (1 << 8); /* NV12 */
5592 * [31:24] ar_fifo1_axi_thred
5593 * [23:16] ar_fifo0_axi_thred
5594 * [15:14] axi_linealign, 0-16bytes, 1-32bytes, 2-64bytes
5595 * [13:12] axi_aformat, 0-Linear, 1-32x32, 2-64x32
5596 * [11:08] axi_lendian_C
5597 * [07:04] axi_lendian_Y
5600 * [1] dw_disable:disable double write output
5601 * [0] cm_disable:disable compress output
5603 WRITE_VREG(HEVC_SAO_CTRL1
, data32
);
5605 if (get_double_write_mode(pbi
) & 0x10) {
5606 /* [23:22] dw_v1_ctrl
5611 data32
= READ_VREG(HEVC_SAO_CTRL5
);
5612 /*set them all 0 for H265_NV21 (no down-scale)*/
5613 data32
&= ~(0xff << 16);
5614 WRITE_VREG(HEVC_SAO_CTRL5
, data32
);
5616 data32
= READ_VREG(HEVC_SAO_CTRL5
);
5617 data32
&= (~(0xff << 16));
5618 if (get_double_write_mode(pbi
) == 2 ||
5619 get_double_write_mode(pbi
) == 3)
5620 data32
|= (0xff<<16);
5621 else if (get_double_write_mode(pbi
) == 4)
5622 data32
|= (0x33<<16);
5623 WRITE_VREG(HEVC_SAO_CTRL5
, data32
);
5626 data32
= READ_VREG(HEVCD_IPP_AXIIF_CONFIG
);
5628 /* [5:4] -- address_format 00:linear 01:32x32 10:64x32 */
5629 data32
|= (pbi
->mem_map_mode
<<
5632 data32
|= 0xf; /* valid only when double write only */
5633 /*data32 |= 0x8;*/ /* Big-Endian per 64-bit */
5636 if (pbi
->is_used_v4l
) {
5637 if ((v4l2_ctx
->q_data
[AML_Q_DATA_DST
].fmt
->fourcc
== V4L2_PIX_FMT_NV21
) ||
5638 (v4l2_ctx
->q_data
[AML_Q_DATA_DST
].fmt
->fourcc
== V4L2_PIX_FMT_NV21M
))
5639 data32
|= (1 << 12); /* NV21 */
5641 data32
&= ~(1 << 12); /* NV12 */
5645 * [3:0] little_endian
5646 * [5:4] address_format 00:linear 01:32x32 10:64x32
5648 * [9:8] Linear_LineAlignment 00:16byte 01:32byte 10:64byte
5650 * [12] CbCr_byte_swap
5653 WRITE_VREG(HEVCD_IPP_AXIIF_CONFIG
, data32
);
5657 static void vp9_config_work_space_hw(struct VP9Decoder_s
*pbi
, u32 mask
)
5659 struct BuffInfo_s
*buf_spec
= pbi
->work_space_buf
;
5660 unsigned int data32
;
5662 if (debug
&& pbi
->init_flag
== 0)
5663 pr_info("%s %x %x %x %x %x %x %x %x %x %x %x %x\n",
5665 buf_spec
->ipp
.buf_start
,
5666 buf_spec
->start_adr
,
5667 buf_spec
->short_term_rps
.buf_start
,
5668 buf_spec
->vps
.buf_start
,
5669 buf_spec
->sps
.buf_start
,
5670 buf_spec
->pps
.buf_start
,
5671 buf_spec
->sao_up
.buf_start
,
5672 buf_spec
->swap_buf
.buf_start
,
5673 buf_spec
->swap_buf2
.buf_start
,
5674 buf_spec
->scalelut
.buf_start
,
5675 buf_spec
->dblk_para
.buf_start
,
5676 buf_spec
->dblk_data
.buf_start
);
5678 if (mask
& HW_MASK_FRONT
) {
5679 if ((debug
& VP9_DEBUG_SEND_PARAM_WITH_REG
) == 0)
5680 WRITE_VREG(HEVC_RPM_BUFFER
, (u32
)pbi
->rpm_phy_addr
);
5682 WRITE_VREG(HEVC_SHORT_TERM_RPS
,
5683 buf_spec
->short_term_rps
.buf_start
);
5684 /*WRITE_VREG(HEVC_VPS_BUFFER, buf_spec->vps.buf_start);*/
5685 /*WRITE_VREG(HEVC_SPS_BUFFER, buf_spec->sps.buf_start);*/
5686 WRITE_VREG(HEVC_PPS_BUFFER
, buf_spec
->pps
.buf_start
);
5687 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER
,
5688 buf_spec
->swap_buf
.buf_start
);
5689 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2
,
5690 buf_spec
->swap_buf2
.buf_start
);
5691 WRITE_VREG(LMEM_DUMP_ADR
, (u32
)pbi
->lmem_phy_addr
);
5695 if (mask
& HW_MASK_BACK
) {
5696 #ifdef LOSLESS_COMPRESS_MODE
5697 int losless_comp_header_size
=
5698 compute_losless_comp_header_size(pbi
->init_pic_w
,
5700 int losless_comp_body_size
=
5701 compute_losless_comp_body_size(pbi
->init_pic_w
,
5702 pbi
->init_pic_h
, buf_alloc_depth
== 10);
5704 WRITE_VREG(HEVCD_IPP_LINEBUFF_BASE
,
5705 buf_spec
->ipp
.buf_start
);
5706 WRITE_VREG(HEVC_SAO_UP
, buf_spec
->sao_up
.buf_start
);
5707 WRITE_VREG(HEVC_SCALELUT
, buf_spec
->scalelut
.buf_start
);
5708 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A
) {
5710 WRITE_VREG(HEVC_DBLK_CFGE
, buf_spec
->dblk_para
.buf_start
);
5711 if (debug
& VP9_DEBUG_BUFMGR_MORE
)
5712 pr_info("Write HEVC_DBLK_CFGE\n");
5715 WRITE_VREG(HEVC_DBLK_CFG4
, buf_spec
->dblk_para
.buf_start
);
5717 WRITE_VREG(HEVC_DBLK_CFG5
, buf_spec
->dblk_data
.buf_start
);
5719 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1
) {
5721 * data32 = (READ_VREG(P_HEVC_DBLK_CFG3)>>8) & 0xff; // xio left offset, default is 0x40
5722 * data32 = data32 * 2;
5723 * data32 = (READ_VREG(P_HEVC_DBLK_CFG3)>>16) & 0xff; // adp left offset, default is 0x040
5724 * data32 = data32 * 2;
5726 WRITE_VREG(HEVC_DBLK_CFG3
, 0x808010); // make left storage 2 x 4k]
5728 #ifdef LOSLESS_COMPRESS_MODE
5729 if (pbi
->mmu_enable
) {
5730 /*bit[4] : paged_mem_mode*/
5731 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1
, (0x1 << 4));
5732 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_SM1
)
5733 WRITE_VREG(HEVCD_MPP_DECOMP_CTL2
, 0);
5735 /*if(cur_pic_config->bit_depth == VPX_BITS_10)
5736 * WRITE_VREG(P_HEVCD_MPP_DECOMP_CTL1, (0<<3));
5738 /*bit[3] smem mdoe*/
5739 /*else WRITE_VREG(P_HEVCD_MPP_DECOMP_CTL1, (1<<3));*/
5740 /*bit[3] smem mdoe*/
5741 WRITE_VREG(HEVCD_MPP_DECOMP_CTL2
,
5742 (losless_comp_body_size
>> 5));
5744 /*WRITE_VREG(HEVCD_MPP_DECOMP_CTL2,
5745 (losless_comp_body_size >> 5));*/
5746 /*WRITE_VREG(HEVCD_MPP_DECOMP_CTL3,
5747 (0xff<<20) | (0xff<<10) | 0xff);*/
5749 WRITE_VREG(HEVC_CM_BODY_LENGTH
, losless_comp_body_size
);
5750 WRITE_VREG(HEVC_CM_HEADER_OFFSET
, losless_comp_body_size
);
5751 WRITE_VREG(HEVC_CM_HEADER_LENGTH
, losless_comp_header_size
);
5752 if (get_double_write_mode(pbi
) & 0x10)
5753 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1
, 0x1 << 31);
5755 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1
, 0x1 << 31);
5758 if (pbi
->mmu_enable
) {
5759 WRITE_VREG(HEVC_SAO_MMU_VH0_ADDR
, buf_spec
->mmu_vbh
.buf_start
);
5760 WRITE_VREG(HEVC_SAO_MMU_VH1_ADDR
, buf_spec
->mmu_vbh
.buf_start
5761 + buf_spec
->mmu_vbh
.buf_size
/2);
5762 /*data32 = READ_VREG(P_HEVC_SAO_CTRL9);*/
5764 /*WRITE_VREG(P_HEVC_SAO_CTRL9, data32);*/
5766 /* use HEVC_CM_HEADER_START_ADDR */
5767 data32
= READ_VREG(HEVC_SAO_CTRL5
);
5769 WRITE_VREG(HEVC_SAO_CTRL5
, data32
);
5772 WRITE_VREG(VP9_SEG_MAP_BUFFER
, buf_spec
->seg_map
.buf_start
);
5774 WRITE_VREG(LMEM_DUMP_ADR
, (u32
)pbi
->lmem_phy_addr
);
5776 WRITE_VREG(VP9_PROB_SWAP_BUFFER
, pbi
->prob_buffer_phy_addr
);
5777 WRITE_VREG(VP9_COUNT_SWAP_BUFFER
, pbi
->count_buffer_phy_addr
);
5778 if (pbi
->mmu_enable
) {
5779 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A
)
5780 WRITE_VREG(HEVC_ASSIST_MMU_MAP_ADDR
, pbi
->frame_mmu_map_phy_addr
);
5782 WRITE_VREG(VP9_MMU_MAP_BUFFER
, pbi
->frame_mmu_map_phy_addr
);
5788 #ifdef VP9_LPF_LVL_UPDATE
5790 * Defines, declarations, sub-functions for vp9 de-block loop
5791 filter Thr/Lvl table update
5792 * - struct segmentation is for loop filter only (removed something)
5793 * - function "vp9_loop_filter_init" and "vp9_loop_filter_frame_init" will
5794 be instantiated in C_Entry
5795 * - vp9_loop_filter_init run once before decoding start
5796 * - vp9_loop_filter_frame_init run before every frame decoding start
5797 * - set video format to VP9 is in vp9_loop_filter_init
5799 #define MAX_LOOP_FILTER 63
5800 #define MAX_REF_LF_DELTAS 4
5801 #define MAX_MODE_LF_DELTAS 2
5802 /*#define INTRA_FRAME 0*/
5803 /*#define LAST_FRAME 1*/
5804 /*#define MAX_REF_FRAMES 4*/
5805 #define SEGMENT_DELTADATA 0
5806 #define SEGMENT_ABSDATA 1
5807 #define MAX_SEGMENTS 8
5808 /*.#define SEG_TREE_PROBS (MAX_SEGMENTS-1)*/
5809 /*no use for loop filter, if this struct for common use, pls add it back*/
5810 /*#define PREDICTION_PROBS 3*/
5811 /* no use for loop filter, if this struct for common use, pls add it back*/
5813 enum SEG_LVL_FEATURES
{
5814 SEG_LVL_ALT_Q
= 0, /*Use alternate Quantizer ....*/
5815 SEG_LVL_ALT_LF
= 1, /*Use alternate loop filter value...*/
5816 SEG_LVL_REF_FRAME
= 2, /*Optional Segment reference frame*/
5817 SEG_LVL_SKIP
= 3, /*Optional Segment (0,0) + skip mode*/
5818 SEG_LVL_MAX
= 4 /*Number of features supported*/
5821 struct segmentation
{
5824 uint8_t update_data
;
5826 uint8_t temporal_update
;
5828 /*no use for loop filter, if this struct
5829 *for common use, pls add it back
5831 /*vp9_prob tree_probs[SEG_TREE_PROBS]; */
5832 /* no use for loop filter, if this struct
5833 * for common use, pls add it back
5835 /*vp9_prob pred_probs[PREDICTION_PROBS];*/
5837 int16_t feature_data
[MAX_SEGMENTS
][SEG_LVL_MAX
];
5838 unsigned int feature_mask
[MAX_SEGMENTS
];
5841 struct loop_filter_thresh
{
5847 struct loop_filter_info_n
{
5848 struct loop_filter_thresh lfthr
[MAX_LOOP_FILTER
+ 1];
5849 uint8_t lvl
[MAX_SEGMENTS
][MAX_REF_FRAMES
][MAX_MODE_LF_DELTAS
];
5855 int sharpness_level
;
5856 int last_sharpness_level
;
5858 uint8_t mode_ref_delta_enabled
;
5859 uint8_t mode_ref_delta_update
;
5861 /*0 = Intra, Last, GF, ARF*/
5862 signed char ref_deltas
[MAX_REF_LF_DELTAS
];
5863 signed char last_ref_deltas
[MAX_REF_LF_DELTAS
];
5866 signed char mode_deltas
[MAX_MODE_LF_DELTAS
];
5867 signed char last_mode_deltas
[MAX_MODE_LF_DELTAS
];
/* Clamp @value to the inclusive range [@low, @high]. */
static int vp9_clamp(int value, int low, int high)
{
	if (value < low)
		return low;
	if (value > high)
		return high;
	return value;
}
5875 int segfeature_active(struct segmentation
*seg
,
5877 enum SEG_LVL_FEATURES feature_id
) {
5878 return seg
->enabled
&&
5879 (seg
->feature_mask
[segment_id
] & (1 << feature_id
));
5882 int get_segdata(struct segmentation
*seg
, int segment_id
,
5883 enum SEG_LVL_FEATURES feature_id
) {
5884 return seg
->feature_data
[segment_id
][feature_id
];
5887 static void vp9_update_sharpness(struct loop_filter_info_n
*lfi
,
5891 /*For each possible value for the loop filter fill out limits*/
5892 for (lvl
= 0; lvl
<= MAX_LOOP_FILTER
; lvl
++) {
5893 /*Set loop filter parameters that control sharpness.*/
5894 int block_inside_limit
= lvl
>> ((sharpness_lvl
> 0) +
5895 (sharpness_lvl
> 4));
5897 if (sharpness_lvl
> 0) {
5898 if (block_inside_limit
> (9 - sharpness_lvl
))
5899 block_inside_limit
= (9 - sharpness_lvl
);
5902 if (block_inside_limit
< 1)
5903 block_inside_limit
= 1;
5905 lfi
->lfthr
[lvl
].lim
= (uint8_t)block_inside_limit
;
5906 lfi
->lfthr
[lvl
].mblim
= (uint8_t)(2 * (lvl
+ 2) +
5907 block_inside_limit
);
5911 /*instantiate this function once when decode is started*/
5912 void vp9_loop_filter_init(struct VP9Decoder_s
*pbi
)
5914 struct loop_filter_info_n
*lfi
= pbi
->lfi
;
5915 struct loopfilter
*lf
= pbi
->lf
;
5916 struct segmentation
*seg_4lf
= pbi
->seg_4lf
;
5918 unsigned int data32
;
5920 memset(lfi
, 0, sizeof(struct loop_filter_info_n
));
5921 memset(lf
, 0, sizeof(struct loopfilter
));
5922 memset(seg_4lf
, 0, sizeof(struct segmentation
));
5923 lf
->sharpness_level
= 0; /*init to 0 */
5924 /*init limits for given sharpness*/
5925 vp9_update_sharpness(lfi
, lf
->sharpness_level
);
5926 lf
->last_sharpness_level
= lf
->sharpness_level
;
5927 /*init hev threshold const vectors (actually no use)
5928 *for (i = 0; i <= MAX_LOOP_FILTER; i++)
5929 * lfi->lfthr[i].hev_thr = (uint8_t)(i >> 4);
5932 /*Write to register*/
5933 for (i
= 0; i
< 32; i
++) {
5936 thr
= ((lfi
->lfthr
[i
* 2 + 1].lim
& 0x3f)<<8) |
5937 (lfi
->lfthr
[i
* 2 + 1].mblim
& 0xff);
5938 thr
= (thr
<<16) | ((lfi
->lfthr
[i
*2].lim
& 0x3f)<<8) |
5939 (lfi
->lfthr
[i
* 2].mblim
& 0xff);
5940 WRITE_VREG(HEVC_DBLK_CFG9
, thr
);
5943 /*video format is VP9*/
5944 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1
) {
5945 data32
= (0x3 << 14) | // (dw fifo thres r and b)
5946 (0x3 << 12) | // (dw fifo thres r or b)
5947 (0x3 << 10) | // (dw fifo thres not r/b)
5948 (0x3 << 8) | // 1st/2nd write both enable
5949 (0x1 << 0); // vp9 video format
5950 if (get_double_write_mode(pbi
) == 0x10)
5952 } else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A
) {
5953 data32
= (0x57 << 8) | /*1st/2nd write both enable*/
5954 (0x1 << 0); /*vp9 video format*/
5955 if (get_double_write_mode(pbi
) == 0x10)
5958 data32
= 0x40400001;
5960 WRITE_VREG(HEVC_DBLK_CFGB
, data32
);
5961 if (debug
& VP9_DEBUG_BUFMGR_MORE
)
5962 pr_info("[DBLK DEBUG] CFGB : 0x%x\n", data32
);
5964 /* perform this function per frame*/
5965 void vp9_loop_filter_frame_init(struct segmentation
*seg
,
5966 struct loop_filter_info_n
*lfi
, struct loopfilter
*lf
,
5967 int default_filt_lvl
) {
5970 /*n_shift is the multiplier for lf_deltas
5971 *the multiplier is 1 for when filter_lvl is between 0 and 31;
5972 *2 when filter_lvl is between 32 and 63
5974 const int scale
= 1 << (default_filt_lvl
>> 5);
5976 /*update limits if sharpness has changed*/
5977 if (lf
->last_sharpness_level
!= lf
->sharpness_level
) {
5978 vp9_update_sharpness(lfi
, lf
->sharpness_level
);
5979 lf
->last_sharpness_level
= lf
->sharpness_level
;
5981 /*Write to register*/
5982 for (i
= 0; i
< 32; i
++) {
5985 thr
= ((lfi
->lfthr
[i
* 2 + 1].lim
& 0x3f) << 8)
5986 | (lfi
->lfthr
[i
* 2 + 1].mblim
& 0xff);
5987 thr
= (thr
<< 16) | ((lfi
->lfthr
[i
* 2].lim
& 0x3f) << 8)
5988 | (lfi
->lfthr
[i
* 2].mblim
& 0xff);
5989 WRITE_VREG(HEVC_DBLK_CFG9
, thr
);
5993 for (seg_id
= 0; seg_id
< MAX_SEGMENTS
; seg_id
++) {/*MAX_SEGMENTS = 8*/
5994 int lvl_seg
= default_filt_lvl
;
5996 if (segfeature_active(seg
, seg_id
, SEG_LVL_ALT_LF
)) {
5997 const int data
= get_segdata(seg
, seg_id
,
5999 lvl_seg
= vp9_clamp(seg
->abs_delta
== SEGMENT_ABSDATA
?
6000 data
: default_filt_lvl
+ data
,
6001 0, MAX_LOOP_FILTER
);
6003 pr_info("segfeature_active!!!seg_id=%d,lvl_seg=%d\n", seg_id
, lvl_seg
);
6007 if (!lf
->mode_ref_delta_enabled
) {
6008 /*we could get rid of this if we assume that deltas are set to
6009 *zero when not in use; encoder always uses deltas
6011 memset(lfi
->lvl
[seg_id
], lvl_seg
, sizeof(lfi
->lvl
[seg_id
]));
6014 const int intra_lvl
= lvl_seg
+ lf
->ref_deltas
[INTRA_FRAME
]
6017 pr_info("LF_PRINT:vp9_loop_filter_frame_init,seg_id=%d\n", seg_id
);
6018 pr_info("ref_deltas[INTRA_FRAME]=%d\n", lf
->ref_deltas
[INTRA_FRAME
]);
6020 lfi
->lvl
[seg_id
][INTRA_FRAME
][0] =
6021 vp9_clamp(intra_lvl
, 0, MAX_LOOP_FILTER
);
6023 for (ref
= LAST_FRAME
; ref
< MAX_REF_FRAMES
; ++ref
) {
6024 /* LAST_FRAME = 1, MAX_REF_FRAMES = 4*/
6025 for (mode
= 0; mode
< MAX_MODE_LF_DELTAS
; ++mode
) {
6026 /*MAX_MODE_LF_DELTAS = 2*/
6027 const int inter_lvl
=
6028 lvl_seg
+ lf
->ref_deltas
[ref
] * scale
6029 + lf
->mode_deltas
[mode
] * scale
;
6032 lfi
->lvl
[seg_id
][ref
][mode
] =
6033 vp9_clamp(inter_lvl
, 0,
6041 /*print out thr/lvl table per frame*/
6042 for (i
= 0; i
<= MAX_LOOP_FILTER
; i
++) {
6043 pr_info("LF_PRINT:(%d)thr=%d,blim=%d,lim=%d\n",
6044 i
, lfi
->lfthr
[i
].hev_thr
, lfi
->lfthr
[i
].mblim
,
6047 for (seg_id
= 0; seg_id
< MAX_SEGMENTS
; seg_id
++) {
6048 pr_info("LF_PRINT:lvl(seg_id=%d)(mode=0,%d,%d,%d,%d)\n",
6049 seg_id
, lfi
->lvl
[seg_id
][0][0],
6050 lfi
->lvl
[seg_id
][1][0], lfi
->lvl
[seg_id
][2][0],
6051 lfi
->lvl
[seg_id
][3][0]);
6052 pr_info("i(mode=1,%d,%d,%d,%d)\n", lfi
->lvl
[seg_id
][0][1],
6053 lfi
->lvl
[seg_id
][1][1], lfi
->lvl
[seg_id
][2][1],
6054 lfi
->lvl
[seg_id
][3][1]);
6058 /*Write to register */
6059 for (i
= 0; i
< 16; i
++) {
6062 level
= ((lfi
->lvl
[i
>> 1][3][i
& 1] & 0x3f) << 24) |
6063 ((lfi
->lvl
[i
>> 1][2][i
& 1] & 0x3f) << 16) |
6064 ((lfi
->lvl
[i
>> 1][1][i
& 1] & 0x3f) << 8) |
6065 (lfi
->lvl
[i
>> 1][0][i
& 1] & 0x3f);
6066 if (!default_filt_lvl
)
6068 WRITE_VREG(HEVC_DBLK_CFGA
, level
);
6071 /* VP9_LPF_LVL_UPDATE */
6074 static void vp9_init_decoder_hw(struct VP9Decoder_s
*pbi
, u32 mask
)
6076 unsigned int data32
;
6078 const unsigned short parser_cmd
[PARSER_CMD_NUMBER
] = {
6079 0x0401, 0x8401, 0x0800, 0x0402, 0x9002, 0x1423,
6080 0x8CC3, 0x1423, 0x8804, 0x9825, 0x0800, 0x04FE,
6081 0x8406, 0x8411, 0x1800, 0x8408, 0x8409, 0x8C2A,
6082 0x9C2B, 0x1C00, 0x840F, 0x8407, 0x8000, 0x8408,
6083 0x2000, 0xA800, 0x8410, 0x04DE, 0x840C, 0x840D,
6084 0xAC00, 0xA000, 0x08C0, 0x08E0, 0xA40E, 0xFC00,
6088 if (get_cpu_major_id() >= MESON_CPU_MAJOR_ID_G12A
) {
6089 /* Set MCR fetch priorities*/
6090 data32
= 0x1 | (0x1 << 2) | (0x1 <<3) |
6091 (24 << 4) | (32 << 11) | (24 << 18) | (32 << 25);
6092 WRITE_VREG(HEVCD_MPP_DECOMP_AXIURG_CTL
, data32
);
6095 /*if (debug & VP9_DEBUG_BUFMGR_MORE)
6096 pr_info("%s\n", __func__);*/
6097 if (mask
& HW_MASK_FRONT
) {
6098 data32
= READ_VREG(HEVC_PARSER_INT_CONTROL
);
6100 /* set bit 31~29 to 3 if HEVC_STREAM_FIFO_CTL[29] is 1 */
6101 data32
&= ~(7 << 29);
6102 data32
|= (3 << 29);
6105 (1 << 24) |/*stream_buffer_empty_int_amrisc_enable*/
6106 (1 << 22) |/*stream_fifo_empty_int_amrisc_enable*/
6107 (1 << 7) |/*dec_done_int_cpu_enable*/
6108 (1 << 4) |/*startcode_found_int_cpu_enable*/
6109 (0 << 3) |/*startcode_found_int_amrisc_enable*/
6110 (1 << 0) /*parser_int_enable*/
6112 #ifdef SUPPORT_FB_DECODING
6113 #ifndef FB_DECODING_TEST_SCHEDULE
6114 /*fed_fb_slice_done_int_cpu_enable*/
6115 if (pbi
->used_stage_buf_num
> 0)
6116 data32
|= (1 << 10);
6119 WRITE_VREG(HEVC_PARSER_INT_CONTROL
, data32
);
6121 data32
= READ_VREG(HEVC_SHIFT_STATUS
);
6123 (0 << 1) |/*emulation_check_off VP9
6124 do not have emulation*/
6125 (1 << 0)/*startcode_check_on*/
6127 WRITE_VREG(HEVC_SHIFT_STATUS
, data32
);
6128 WRITE_VREG(HEVC_SHIFT_CONTROL
,
6129 (0 << 14) | /*disable_start_code_protect*/
6130 (1 << 10) | /*length_zero_startcode_en for VP9*/
6131 (1 << 9) | /*length_valid_startcode_en for VP9*/
6132 (3 << 6) | /*sft_valid_wr_position*/
6133 (2 << 4) | /*emulate_code_length_sub_1*/
6134 (3 << 1) | /*start_code_length_sub_1
6135 VP9 use 0x00000001 as startcode (4 Bytes)*/
6136 (1 << 0) /*stream_shift_enable*/
6139 WRITE_VREG(HEVC_CABAC_CONTROL
,
6140 (1 << 0)/*cabac_enable*/
6143 WRITE_VREG(HEVC_PARSER_CORE_CONTROL
,
6144 (1 << 0)/* hevc_parser_core_clk_en*/
6148 WRITE_VREG(HEVC_DEC_STATUS_REG
, 0);
6152 if (mask
& HW_MASK_BACK
) {
6153 /*Initial IQIT_SCALELUT memory
6154 -- just to avoid X in simulation*/
6156 WRITE_VREG(HEVC_IQIT_SCALELUT_WR_ADDR
, 0);/*cfg_p_addr*/
6157 for (i
= 0; i
< 1024; i
++)
6158 WRITE_VREG(HEVC_IQIT_SCALELUT_DATA
, 0);
6161 if (mask
& HW_MASK_FRONT
) {
6163 #ifdef ENABLE_SWAP_TEST
6164 WRITE_VREG(HEVC_STREAM_SWAP_TEST
, 100);
6166 WRITE_VREG(HEVC_STREAM_SWAP_TEST
, 0);
6168 #ifdef MULTI_INSTANCE_SUPPORT
6169 if (!pbi
->m_ins_flag
) {
6170 if (pbi
->low_latency_flag
)
6171 decode_mode
= DECODE_MODE_SINGLE_LOW_LATENCY
;
6173 decode_mode
= DECODE_MODE_SINGLE
;
6174 } else if (vdec_frame_based(hw_to_vdec(pbi
)))
6175 decode_mode
= pbi
->no_head
?
6176 DECODE_MODE_MULTI_FRAMEBASE_NOHEAD
:
6177 DECODE_MODE_MULTI_FRAMEBASE
;
6179 decode_mode
= DECODE_MODE_MULTI_STREAMBASE
;
6180 #ifdef SUPPORT_FB_DECODING
6181 #ifndef FB_DECODING_TEST_SCHEDULE
6182 if (pbi
->used_stage_buf_num
> 0)
6183 decode_mode
|= (0x01 << 24);
6186 WRITE_VREG(DECODE_MODE
, decode_mode
);
6187 WRITE_VREG(HEVC_DECODE_SIZE
, 0);
6188 WRITE_VREG(HEVC_DECODE_COUNT
, 0);
6190 WRITE_VREG(DECODE_MODE
, DECODE_MODE_SINGLE
);
6191 WRITE_VREG(HEVC_DECODE_PIC_BEGIN_REG
, 0);
6192 WRITE_VREG(HEVC_DECODE_PIC_NUM_REG
, 0x7fffffff); /*to remove*/
6195 WRITE_VREG(HEVC_PARSER_CMD_WRITE
, (1 << 16) | (0 << 0));
6196 for (i
= 0; i
< PARSER_CMD_NUMBER
; i
++)
6197 WRITE_VREG(HEVC_PARSER_CMD_WRITE
, parser_cmd
[i
]);
6198 WRITE_VREG(HEVC_PARSER_CMD_SKIP_0
, PARSER_CMD_SKIP_CFG_0
);
6199 WRITE_VREG(HEVC_PARSER_CMD_SKIP_1
, PARSER_CMD_SKIP_CFG_1
);
6200 WRITE_VREG(HEVC_PARSER_CMD_SKIP_2
, PARSER_CMD_SKIP_CFG_2
);
6203 WRITE_VREG(HEVC_PARSER_IF_CONTROL
,
6204 /* (1 << 8) |*/ /*sao_sw_pred_enable*/
6205 (1 << 5) | /*parser_sao_if_en*/
6206 (1 << 2) | /*parser_mpred_if_en*/
6207 (1 << 0) /*parser_scaler_if_en*/
6211 if (mask
& HW_MASK_BACK
) {
6212 /*Changed to Start MPRED in microcode*/
6214 pr_info("[test.c] Start MPRED\n");
6215 WRITE_VREG(HEVC_MPRED_INT_STATUS,
6219 WRITE_VREG(HEVCD_IPP_TOP_CNTL
,
6220 (0 << 1) | /*enable ipp*/
6221 (1 << 0) /*software reset ipp and mpp*/
6223 WRITE_VREG(HEVCD_IPP_TOP_CNTL
,
6224 (1 << 1) | /*enable ipp*/
6225 (0 << 0) /*software reset ipp and mpp*/
6227 if (get_double_write_mode(pbi
) & 0x10) {
6228 /*Enable NV21 reference read mode for MC*/
6229 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1
, 0x1 << 31);
6232 /*Initialize mcrcc and decomp perf counters*/
6233 if (mcrcc_cache_alg_flag
&&
6234 pbi
->init_flag
== 0) {
6235 mcrcc_perfcount_reset();
6236 decomp_perfcount_reset();
#ifdef CONFIG_HEVC_CLK_FORCED_ON
/*
 * Force the dynamic clock gates of every HEVC/VP9 core sub-block on
 * (IQIT, DBLK, SAO, MPRED, the parser path and MCRCC) by OR-ing the
 * per-block "clock forced on" bit into each control register.
 * Debug/bring-up aid only; compiled in via CONFIG_HEVC_CLK_FORCED_ON.
 */
static void config_vp9_clk_forced_on(void)
{
	unsigned int rdata32;
	/* IQIT (inverse quant / inverse transform) */
	rdata32 = READ_VREG(HEVC_IQIT_CLK_RST_CTRL);
	WRITE_VREG(HEVC_IQIT_CLK_RST_CTRL, rdata32 | (0x1 << 2));

	/* DBLK (deblocking filter) */
	rdata32 = READ_VREG(HEVC_DBLK_CFG0);
	WRITE_VREG(HEVC_DBLK_CFG0, rdata32 | (0x1 << 2));

	/* SAO (sample adaptive offset) */
	rdata32 = READ_VREG(HEVC_SAO_CTRL1);
	WRITE_VREG(HEVC_SAO_CTRL1, rdata32 | (0x1 << 2));

	/* MPRED (motion prediction) — note: bit 24, not bit 2 */
	rdata32 = READ_VREG(HEVC_MPRED_CTRL1);
	WRITE_VREG(HEVC_MPRED_CTRL1, rdata32 | (0x1 << 24));

	/* PARSER path: stream shift, CABAC, parser core/int/if */
	rdata32 = READ_VREG(HEVC_STREAM_CONTROL);
	WRITE_VREG(HEVC_STREAM_CONTROL, rdata32 | (0x1 << 15));
	rdata32 = READ_VREG(HEVC_SHIFT_CONTROL);
	WRITE_VREG(HEVC_SHIFT_CONTROL, rdata32 | (0x1 << 15));
	rdata32 = READ_VREG(HEVC_CABAC_CONTROL);
	WRITE_VREG(HEVC_CABAC_CONTROL, rdata32 | (0x1 << 13));
	rdata32 = READ_VREG(HEVC_PARSER_CORE_CONTROL);
	WRITE_VREG(HEVC_PARSER_CORE_CONTROL, rdata32 | (0x1 << 15));
	rdata32 = READ_VREG(HEVC_PARSER_INT_CONTROL);
	WRITE_VREG(HEVC_PARSER_INT_CONTROL, rdata32 | (0x1 << 15));
	rdata32 = READ_VREG(HEVC_PARSER_IF_CONTROL);
	WRITE_VREG(HEVC_PARSER_IF_CONTROL,
			rdata32 | (0x1 << 6) | (0x1 << 3) | (0x1 << 1));

	/* IPP: disable all dynamic clock gating */
	rdata32 = READ_VREG(HEVCD_IPP_DYNCLKGATE_CONFIG);
	WRITE_VREG(HEVCD_IPP_DYNCLKGATE_CONFIG, rdata32 | 0xffffffff);

	/* MCRCC (motion-comp reference cache) */
	rdata32 = READ_VREG(HEVCD_MCRCC_CTL1);
	WRITE_VREG(HEVCD_MCRCC_CTL1, rdata32 | (0x1 << 3));
}
#endif
/*
 * Dump MCRCC/decompression cache statistics when the
 * VP9_DEBUG_CACHE_HIT_RATE debug flag is set; otherwise a no-op.
 */
static void dump_hit_rate(struct VP9Decoder_s *pbi)
{
	if (debug & VP9_DEBUG_CACHE_HIT_RATE) {
		mcrcc_get_hitrate(pbi->m_ins_flag);
		decomp_get_hitrate();
		decomp_get_comprate();
	}
}
/*
 * Configure the motion-compensation reference cache (MCRCC) for the
 * frame about to be decoded. For intra frames the cache is simply left
 * in reset (clock disabled); for inter frames canvas0/canvas1 are read
 * back from the ANC canvas table and programmed into CTL2/CTL3 before
 * enabling progressive mode.
 */
static void config_mcrcc_axi_hw(struct VP9Decoder_s *pbi)
{
	unsigned int rdata32;
	unsigned short is_inter;
	/*pr_info("Entered config_mcrcc_axi_hw...\n");*/
	WRITE_VREG(HEVCD_MCRCC_CTL1, 0x2);/* reset mcrcc*/
	is_inter = ((pbi->common.frame_type != KEY_FRAME) &&
			(!pbi->common.intra_only)) ? 1 : 0;
	if (!is_inter) { /* I-PIC*/
		/*remove reset -- disables clock*/
		WRITE_VREG(HEVCD_MCRCC_CTL1, 0x0);
		return;
	}

	/* NOTE(review): SM1+ only — presumably newer parts expose these
	 * perf counters; confirm against chip docs. */
	if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1) {
		mcrcc_get_hitrate(pbi->m_ins_flag);
		decomp_get_hitrate();
		decomp_get_comprate();
	}

	/* Programme canvas0: read canvas id, duplicate into both halves */
	WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
		(0 << 8) | (1 << 1) | 0);
	rdata32 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
	rdata32 = rdata32 & 0xffff;
	rdata32 = rdata32 | (rdata32 << 16);
	WRITE_VREG(HEVCD_MCRCC_CTL2, rdata32);
	/*Programme canvas1 */
	rdata32 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
	rdata32 = rdata32 & 0xffff;
	rdata32 = rdata32 | (rdata32 << 16);
	WRITE_VREG(HEVCD_MCRCC_CTL3, rdata32);
	/*enable mcrcc progressive-mode*/
	WRITE_VREG(HEVCD_MCRCC_CTL1, 0xff0);
}
/*
 * Newer MCRCC configuration: instead of always caching LAST/GOLDEN,
 * this variant can (THODIYIL_MCRCC_CANVAS_ALGX) cache the two reference
 * frames whose decode index is closest to the current picture. It
 * computes |decode_idx(ref) - decode_idx(cur)| for LAST/GOLDEN/ALTREF,
 * drops references that are invalid or share a canvas with a
 * higher-priority one, sorts the three by distance with a 3-element
 * bubble sort, and programs the two nearest canvases into CTL2/CTL3.
 */
static void config_mcrcc_axi_hw_new(struct VP9Decoder_s *pbi)
{
	u32 curr_picnum = -1;
	u32 lastref_picnum = -1;
	u32 goldenref_picnum = -1;
	u32 altref_picnum = -1;

	u32 lastref_delta_picnum;
	u32 goldenref_delta_picnum;
	u32 altref_delta_picnum;

	u32 rdata32;

	u32 lastcanvas;
	u32 goldencanvas;
	u32 altrefcanvas;

	u16 lastref_inref;
	u16 goldenref_inref;
	u16 altref_inref;

	u16 is_inter;

	u32 refcanvas_array[3], utmp;
	int deltapicnum_array[3], tmp;

	struct VP9_Common_s *cm = &pbi->common;
	struct PIC_BUFFER_CONFIG_s *cur_pic_config
		= &cm->cur_frame->buf;
	curr_picnum = cur_pic_config->decode_idx;
	if (cm->frame_refs[0].buf)
		lastref_picnum = cm->frame_refs[0].buf->decode_idx;
	if (cm->frame_refs[1].buf)
		goldenref_picnum = cm->frame_refs[1].buf->decode_idx;
	if (cm->frame_refs[2].buf)
		altref_picnum = cm->frame_refs[2].buf->decode_idx;

	/* absolute decode-order distance of each reference */
	lastref_delta_picnum = (lastref_picnum >= curr_picnum) ?
		(lastref_picnum - curr_picnum) : (curr_picnum - lastref_picnum);
	goldenref_delta_picnum = (goldenref_picnum >= curr_picnum) ?
		(goldenref_picnum - curr_picnum) :
		(curr_picnum - goldenref_picnum);
	altref_delta_picnum =
		(altref_picnum >= curr_picnum) ?
		(altref_picnum - curr_picnum) : (curr_picnum - altref_picnum);

	lastref_inref = (cm->frame_refs[0].idx != INVALID_IDX) ? 1 : 0;
	goldenref_inref = (cm->frame_refs[1].idx != INVALID_IDX) ? 1 : 0;
	altref_inref = (cm->frame_refs[2].idx != INVALID_IDX) ? 1 : 0;

	if (debug & VP9_DEBUG_CACHE)
		pr_info("%s--0--lastref_inref:%d goldenref_inref:%d altref_inref:%d\n",
			__func__, lastref_inref, goldenref_inref, altref_inref);

	WRITE_VREG(HEVCD_MCRCC_CTL1, 0x2); /* reset mcrcc */

	is_inter = ((pbi->common.frame_type != KEY_FRAME)
		&& (!pbi->common.intra_only)) ? 1 : 0;

	if (!is_inter) { /* I-PIC */
		/* remove reset -- disables clock */
		WRITE_VREG(HEVCD_MCRCC_CTL1, 0x0);
		return;
	}

	if (!pbi->m_ins_flag)
		dump_hit_rate(pbi);

	/* read the three reference canvas ids back from the ANC table */
	WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR, (0 << 8) | (1<<1) | 0);
	lastcanvas = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
	goldencanvas = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
	altrefcanvas = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);

	if (debug & VP9_DEBUG_CACHE)
		pr_info("[test.c] lastref_canv:%x goldenref_canv:%x altref_canv:%x\n",
			lastcanvas, goldencanvas, altrefcanvas);

	/* drop a reference if it shares a canvas with a closer-priority one
	 * (caching the same canvas twice would waste a cache way) */
	altref_inref = ((altref_inref == 1) &&
		(altrefcanvas != (goldenref_inref
		? goldencanvas : 0xffffffff)) &&
		(altrefcanvas != (lastref_inref ?
		lastcanvas : 0xffffffff))) ? 1 : 0;
	goldenref_inref = ((goldenref_inref == 1) &&
		(goldencanvas != (lastref_inref ?
		lastcanvas : 0xffffffff))) ? 1 : 0;
	if (debug & VP9_DEBUG_CACHE)
		pr_info("[test.c]--1--lastref_inref:%d goldenref_inref:%d altref_inref:%d\n",
			lastref_inref, goldenref_inref, altref_inref);

	/* invalid references sort last (distance forced to INT_MAX) */
	altref_delta_picnum = altref_inref ? altref_delta_picnum : 0x7fffffff;
	goldenref_delta_picnum = goldenref_inref ?
		goldenref_delta_picnum : 0x7fffffff;
	lastref_delta_picnum = lastref_inref ?
		lastref_delta_picnum : 0x7fffffff;
	if (debug & VP9_DEBUG_CACHE)
		pr_info("[test.c]--1--lastref_delta_picnum:%d goldenref_delta_picnum:%d altref_delta_picnum:%d\n",
			lastref_delta_picnum, goldenref_delta_picnum,
			altref_delta_picnum);
	/*ARRAY SORT HERE DELTA/CANVAS ARRAY SORT -- use DELTA*/

	refcanvas_array[0] = lastcanvas;
	refcanvas_array[1] = goldencanvas;
	refcanvas_array[2] = altrefcanvas;

	deltapicnum_array[0] = lastref_delta_picnum;
	deltapicnum_array[1] = goldenref_delta_picnum;
	deltapicnum_array[2] = altref_delta_picnum;

	/* 3-element bubble sort, ascending by delta, canvases follow */
	/* sort0 : 2-to-1 */
	if (deltapicnum_array[2] < deltapicnum_array[1]) {
		utmp = refcanvas_array[2];
		refcanvas_array[2] = refcanvas_array[1];
		refcanvas_array[1] = utmp;
		tmp = deltapicnum_array[2];
		deltapicnum_array[2] = deltapicnum_array[1];
		deltapicnum_array[1] = tmp;
	}
	/* sort1 : 1-to-0 */
	if (deltapicnum_array[1] < deltapicnum_array[0]) {
		utmp = refcanvas_array[1];
		refcanvas_array[1] = refcanvas_array[0];
		refcanvas_array[0] = utmp;
		tmp = deltapicnum_array[1];
		deltapicnum_array[1] = deltapicnum_array[0];
		deltapicnum_array[0] = tmp;
	}
	/* sort2 : 2-to-1 */
	if (deltapicnum_array[2] < deltapicnum_array[1]) {
		utmp = refcanvas_array[2]; refcanvas_array[2] =
			refcanvas_array[1]; refcanvas_array[1] = utmp;
		tmp = deltapicnum_array[2]; deltapicnum_array[2] =
			deltapicnum_array[1]; deltapicnum_array[1] = tmp;
	}

	if (mcrcc_cache_alg_flag ==
		THODIYIL_MCRCC_CANVAS_ALGX) { /*09/15/2017*/
		/* lowest delta_picnum */
		rdata32 = refcanvas_array[0];
		rdata32 = rdata32 & 0xffff;
		rdata32 = rdata32 | (rdata32 << 16);
		WRITE_VREG(HEVCD_MCRCC_CTL2, rdata32);

		/* 2nd-lowest delta_picnum */
		rdata32 = refcanvas_array[1];
		rdata32 = rdata32 & 0xffff;
		rdata32 = rdata32 | (rdata32 << 16);
		WRITE_VREG(HEVCD_MCRCC_CTL3, rdata32);
	} else {
		/* previous version -- LAST/GOLDEN ALWAYS -- before 09/13/2017*/
		WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR,
			(0 << 8) | (1<<1) | 0);
		rdata32 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
		rdata32 = rdata32 & 0xffff;
		rdata32 = rdata32 | (rdata32 << 16);
		WRITE_VREG(HEVCD_MCRCC_CTL2, rdata32);

		/* Programme canvas1 */
		rdata32 = READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR);
		rdata32 = rdata32 & 0xffff;
		rdata32 = rdata32 | (rdata32 << 16);
		WRITE_VREG(HEVCD_MCRCC_CTL3, rdata32);
	}

	WRITE_VREG(HEVCD_MCRCC_CTL1, 0xff0); /* enable mcrcc progressive-mode */
}
/*
 * Release the loop-filter working buffers allocated by alloc_lf_buf()
 * and clear the pointers so a double free is harmless.
 */
static void free_lf_buf(struct VP9Decoder_s *pbi)
{
	if (pbi->lfi)
		vfree(pbi->lfi);
	if (pbi->lf)
		vfree(pbi->lf);
	if (pbi->seg_4lf)
		vfree(pbi->seg_4lf);
	pbi->lfi = NULL;
	pbi->lf = NULL;
	pbi->seg_4lf = NULL;
}
/*
 * Allocate the three loop-filter working structures (lfi, lf, seg_4lf)
 * with vmalloc. On any failure all three are released via free_lf_buf()
 * (safe: it NULL-checks each pointer).
 *
 * Return: 0 on success, -1 if any allocation failed.
 */
static int alloc_lf_buf(struct VP9Decoder_s *pbi)
{
	pbi->lfi = vmalloc(sizeof(struct loop_filter_info_n));
	pbi->lf = vmalloc(sizeof(struct loopfilter));
	pbi->seg_4lf = vmalloc(sizeof(struct segmentation));
	if (pbi->lfi == NULL
		|| pbi->lf == NULL || pbi->seg_4lf == NULL) {
		free_lf_buf(pbi);
		pr_err("[test.c] vp9_loop_filter init malloc error!!!\n");
		return -1;
	}
	return 0;
}
6528 static void vp9_local_uninit(struct VP9Decoder_s
*pbi
)
6530 pbi
->rpm_ptr
= NULL
;
6531 pbi
->lmem_ptr
= NULL
;
6532 if (pbi
->rpm_addr
) {
6533 dma_free_coherent(amports_get_dma_device(),
6537 pbi
->rpm_addr
= NULL
;
6539 if (pbi
->lmem_addr
) {
6540 if (pbi
->lmem_phy_addr
)
6541 dma_free_coherent(amports_get_dma_device(),
6542 LMEM_BUF_SIZE
, pbi
->lmem_addr
,
6543 pbi
->lmem_phy_addr
);
6544 pbi
->lmem_addr
= NULL
;
6546 if (pbi
->prob_buffer_addr
) {
6547 if (pbi
->prob_buffer_phy_addr
)
6548 dma_free_coherent(amports_get_dma_device(),
6549 PROB_BUF_SIZE
, pbi
->prob_buffer_addr
,
6550 pbi
->prob_buffer_phy_addr
);
6552 pbi
->prob_buffer_addr
= NULL
;
6554 if (pbi
->count_buffer_addr
) {
6555 if (pbi
->count_buffer_phy_addr
)
6556 dma_free_coherent(amports_get_dma_device(),
6557 COUNT_BUF_SIZE
, pbi
->count_buffer_addr
,
6558 pbi
->count_buffer_phy_addr
);
6560 pbi
->count_buffer_addr
= NULL
;
6562 if (pbi
->mmu_enable
) {
6563 u32 mmu_map_size
= vvp9_frame_mmu_map_size(pbi
);
6564 if (pbi
->frame_mmu_map_addr
) {
6565 if (pbi
->frame_mmu_map_phy_addr
)
6566 dma_free_coherent(amports_get_dma_device(),
6568 pbi
->frame_mmu_map_addr
,
6569 pbi
->frame_mmu_map_phy_addr
);
6570 pbi
->frame_mmu_map_addr
= NULL
;
6573 #ifdef SUPPORT_FB_DECODING
6574 if (pbi
->stage_mmu_map_addr
) {
6575 if (pbi
->stage_mmu_map_phy_addr
)
6576 dma_free_coherent(amports_get_dma_device(),
6577 STAGE_MMU_MAP_SIZE
* STAGE_MAX_BUFFERS
,
6578 pbi
->stage_mmu_map_addr
,
6579 pbi
->stage_mmu_map_phy_addr
);
6580 pbi
->stage_mmu_map_addr
= NULL
;
6583 uninit_stage_buf(pbi
);
6586 #ifdef VP9_LPF_LVL_UPDATE
6594 static int vp9_local_init(struct VP9Decoder_s
*pbi
)
6597 /*int losless_comp_header_size, losless_comp_body_size;*/
6599 struct BuffInfo_s
*cur_buf_info
= NULL
;
6601 memset(&pbi
->param
, 0, sizeof(union param_u
));
6602 memset(&pbi
->common
, 0, sizeof(struct VP9_Common_s
));
6603 #ifdef MULTI_INSTANCE_SUPPORT
6604 cur_buf_info
= &pbi
->work_space_buf_store
;
6606 if (vdec_is_support_4k()) {
6607 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1
) {
6608 memcpy(cur_buf_info
, &amvvp9_workbuff_spec
[2], /* 8k */
6609 sizeof(struct BuffInfo_s
));
6611 memcpy(cur_buf_info
, &amvvp9_workbuff_spec
[1], /* 4k */
6612 sizeof(struct BuffInfo_s
));
6614 memcpy(cur_buf_info
, &amvvp9_workbuff_spec
[0],/* 1080p */
6615 sizeof(struct BuffInfo_s
));
6617 cur_buf_info
->start_adr
= pbi
->buf_start
;
6618 if (!pbi
->mmu_enable
)
6619 pbi
->mc_buf_spec
.buf_end
= pbi
->buf_start
+ pbi
->buf_size
;
6622 /*! MULTI_INSTANCE_SUPPORT*/
6623 if (vdec_is_support_4k()) {
6624 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1
)
6625 cur_buf_info
= &amvvp9_workbuff_spec
[2];/* 8k work space */
6627 cur_buf_info
= &amvvp9_workbuff_spec
[1];/* 4k2k work space */
6629 cur_buf_info
= &amvvp9_workbuff_spec
[0];/* 1080p work space */
6633 init_buff_spec(pbi
, cur_buf_info
);
6634 vp9_bufmgr_init(pbi
, cur_buf_info
, NULL
);
6636 if (!vdec_is_support_4k()
6637 && (buf_alloc_width
> 1920 && buf_alloc_height
> 1088)) {
6638 buf_alloc_width
= 1920;
6639 buf_alloc_height
= 1088;
6640 if (pbi
->max_pic_w
> 1920 && pbi
->max_pic_h
> 1088) {
6641 pbi
->max_pic_w
= 1920;
6642 pbi
->max_pic_h
= 1088;
6644 } else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1
) {
6645 buf_alloc_width
= 8192;
6646 buf_alloc_height
= 4608;
6648 pbi
->init_pic_w
= pbi
->max_pic_w
? pbi
->max_pic_w
:
6649 (buf_alloc_width
? buf_alloc_width
:
6650 (pbi
->vvp9_amstream_dec_info
.width
?
6651 pbi
->vvp9_amstream_dec_info
.width
:
6652 pbi
->work_space_buf
->max_width
));
6653 pbi
->init_pic_h
= pbi
->max_pic_h
? pbi
->max_pic_h
:
6654 (buf_alloc_height
? buf_alloc_height
:
6655 (pbi
->vvp9_amstream_dec_info
.height
?
6656 pbi
->vvp9_amstream_dec_info
.height
:
6657 pbi
->work_space_buf
->max_height
));
6659 /* video is not support unaligned with 64 in tl1
6660 ** vdec canvas mode will be linear when dump yuv is set
6662 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A
) &&
6663 (pbi
->double_write_mode
!= 0) &&
6664 (((pbi
->max_pic_w
% 64) != 0) ||
6665 (pbi
->vvp9_amstream_dec_info
.width
% 64) != 0)) {
6666 if (hw_to_vdec(pbi
)->canvas_mode
!=
6667 CANVAS_BLKMODE_LINEAR
)
6668 pbi
->mem_map_mode
= 2;
6670 pbi
->mem_map_mode
= 0;
6671 pr_info("vdec blkmod linear, force mem_map_mode 0\n");
6675 #ifndef MV_USE_FIXED_BUF
6676 if (init_mv_buf_list(pbi
) < 0) {
6677 pr_err("%s: init_mv_buf_list fail\n", __func__
);
6681 if (pbi
->save_buffer_mode
)
6682 pbi
->used_buf_num
= MAX_BUF_NUM_SAVE_BUF
;
6684 if (pbi
->is_used_v4l
)
6685 pbi
->used_buf_num
= 5 + pbi
->dynamic_buf_num_margin
;
6687 pbi
->used_buf_num
= max_buf_num
;
6690 if (pbi
->used_buf_num
> MAX_BUF_NUM
)
6691 pbi
->used_buf_num
= MAX_BUF_NUM
;
6692 if (pbi
->used_buf_num
> FRAME_BUFFERS
)
6693 pbi
->used_buf_num
= FRAME_BUFFERS
;
6695 pbi
->pts_unstable
= ((unsigned long)(pbi
->vvp9_amstream_dec_info
.param
)
6698 if ((debug
& VP9_DEBUG_SEND_PARAM_WITH_REG
) == 0) {
6699 pbi
->rpm_addr
= dma_alloc_coherent(amports_get_dma_device(),
6701 &pbi
->rpm_phy_addr
, GFP_KERNEL
);
6702 if (pbi
->rpm_addr
== NULL
) {
6703 pr_err("%s: failed to alloc rpm buffer\n", __func__
);
6707 pbi
->rpm_ptr
= pbi
->rpm_addr
;
6710 pbi
->lmem_addr
= dma_alloc_coherent(amports_get_dma_device(),
6712 &pbi
->lmem_phy_addr
, GFP_KERNEL
);
6713 if (pbi
->lmem_addr
== NULL
) {
6714 pr_err("%s: failed to alloc lmem buffer\n", __func__
);
6717 pbi
->lmem_ptr
= pbi
->lmem_addr
;
6719 pbi
->prob_buffer_addr
= dma_alloc_coherent(amports_get_dma_device(),
6721 &pbi
->prob_buffer_phy_addr
, GFP_KERNEL
);
6722 if (pbi
->prob_buffer_addr
== NULL
) {
6723 pr_err("%s: failed to alloc prob_buffer\n", __func__
);
6726 memset(pbi
->prob_buffer_addr
, 0, PROB_BUF_SIZE
);
6727 pbi
->count_buffer_addr
= dma_alloc_coherent(amports_get_dma_device(),
6729 &pbi
->count_buffer_phy_addr
, GFP_KERNEL
);
6730 if (pbi
->count_buffer_addr
== NULL
) {
6731 pr_err("%s: failed to alloc count_buffer\n", __func__
);
6734 memset(pbi
->count_buffer_addr
, 0, COUNT_BUF_SIZE
);
6736 if (pbi
->mmu_enable
) {
6737 u32 mmu_map_size
= vvp9_frame_mmu_map_size(pbi
);
6738 pbi
->frame_mmu_map_addr
=
6739 dma_alloc_coherent(amports_get_dma_device(),
6741 &pbi
->frame_mmu_map_phy_addr
, GFP_KERNEL
);
6742 if (pbi
->frame_mmu_map_addr
== NULL
) {
6743 pr_err("%s: failed to alloc count_buffer\n", __func__
);
6746 memset(pbi
->frame_mmu_map_addr
, 0, COUNT_BUF_SIZE
);
6748 #ifdef SUPPORT_FB_DECODING
6749 if (pbi
->m_ins_flag
&& stage_buf_num
> 0) {
6750 pbi
->stage_mmu_map_addr
=
6751 dma_alloc_coherent(amports_get_dma_device(),
6752 STAGE_MMU_MAP_SIZE
* STAGE_MAX_BUFFERS
,
6753 &pbi
->stage_mmu_map_phy_addr
, GFP_KERNEL
);
6754 if (pbi
->stage_mmu_map_addr
== NULL
) {
6755 pr_err("%s: failed to alloc count_buffer\n", __func__
);
6758 memset(pbi
->stage_mmu_map_addr
,
6759 0, STAGE_MMU_MAP_SIZE
* STAGE_MAX_BUFFERS
);
6761 init_stage_buf(pbi
);
6769 /********************************************
6771 ********************************************/
/* worklist command codes exchanged with the decoder firmware */
#define CMD_FINISHED 0
#define CMD_ALLOC_VIEW 1
#define CMD_FRAME_DISPLAY 3
#define CMD_DEBUG 10

#define DECODE_BUFFER_NUM_MAX 32
#define DISPLAY_BUFFER_NUM 6

/* strip bit 31 to map a bus address into the video memory domain;
 * argument parenthesized so expressions like a|b expand correctly */
#define video_domain_addr(adr) ((adr)&0x7fffffff)
#define DECODER_WORK_SPACE_SIZE 0x800000

/* pack y/uv canvas indices into the 3-plane canvas word (uv reused
 * for both chroma slots — NV21 layout) */
#define spec2canvas(x) \
	(((x)->uv_canvas_index << 16) | \
	((x)->uv_canvas_index << 8) | \
	((x)->y_canvas_index << 0))
6790 static void set_canvas(struct VP9Decoder_s
*pbi
,
6791 struct PIC_BUFFER_CONFIG_s
*pic_config
)
6793 struct vdec_s
*vdec
= hw_to_vdec(pbi
);
6794 int canvas_w
= ALIGN(pic_config
->y_crop_width
, 64)/4;
6795 int canvas_h
= ALIGN(pic_config
->y_crop_height
, 32)/4;
6796 int blkmode
= pbi
->mem_map_mode
;
6797 /*CANVAS_BLKMODE_64X32*/
6798 if (pic_config
->double_write_mode
) {
6799 canvas_w
= pic_config
->y_crop_width
/
6800 get_double_write_ratio(pbi
,
6801 pic_config
->double_write_mode
);
6802 canvas_h
= pic_config
->y_crop_height
/
6803 get_double_write_ratio(pbi
,
6804 pic_config
->double_write_mode
);
6806 if (pbi
->mem_map_mode
== 0)
6807 canvas_w
= ALIGN(canvas_w
, 32);
6809 canvas_w
= ALIGN(canvas_w
, 64);
6810 canvas_h
= ALIGN(canvas_h
, 32);
6812 if (vdec
->parallel_dec
== 1) {
6813 if (pic_config
->y_canvas_index
== -1)
6814 pic_config
->y_canvas_index
=
6815 vdec
->get_canvas_ex(CORE_MASK_HEVC
, vdec
->id
);
6816 if (pic_config
->uv_canvas_index
== -1)
6817 pic_config
->uv_canvas_index
=
6818 vdec
->get_canvas_ex(CORE_MASK_HEVC
, vdec
->id
);
6820 pic_config
->y_canvas_index
= 128 + pic_config
->index
* 2;
6821 pic_config
->uv_canvas_index
= 128 + pic_config
->index
* 2 + 1;
6824 canvas_config_ex(pic_config
->y_canvas_index
,
6825 pic_config
->dw_y_adr
, canvas_w
, canvas_h
,
6826 CANVAS_ADDR_NOWRAP
, blkmode
, pbi
->is_used_v4l
? 0 : 7);
6827 canvas_config_ex(pic_config
->uv_canvas_index
,
6828 pic_config
->dw_u_v_adr
, canvas_w
, canvas_h
,
6829 CANVAS_ADDR_NOWRAP
, blkmode
, pbi
->is_used_v4l
? 0 : 7);
6831 #ifdef MULTI_INSTANCE_SUPPORT
6832 pic_config
->canvas_config
[0].phy_addr
=
6833 pic_config
->dw_y_adr
;
6834 pic_config
->canvas_config
[0].width
=
6836 pic_config
->canvas_config
[0].height
=
6838 pic_config
->canvas_config
[0].block_mode
=
6840 pic_config
->canvas_config
[0].endian
= pbi
->is_used_v4l
? 0 : 7;
6842 pic_config
->canvas_config
[1].phy_addr
=
6843 pic_config
->dw_u_v_adr
;
6844 pic_config
->canvas_config
[1].width
=
6846 pic_config
->canvas_config
[1].height
=
6848 pic_config
->canvas_config
[1].block_mode
=
6850 pic_config
->canvas_config
[1].endian
= pbi
->is_used_v4l
? 0 : 7;
/*
 * Fill per-frame presentation metadata on a vframe about to be queued:
 * duration, signal type, HDR mastering info, aspect ratio and sidebind
 * data. For V4L2 use with HDR present, the HDR info is also pushed to
 * the v4l2 context via vdec_v4l_set_hdr_infos().
 */
static void set_frame_info(struct VP9Decoder_s *pbi, struct vframe_s *vf)
{
	unsigned int ar;

	vf->duration = pbi->frame_dur;
	vf->duration_pulldown = 0;
	vf->flag = 0;
	vf->prop.master_display_colour = pbi->vf_dp;
	vf->signal_type = pbi->video_signal_type;
	/* derive aspect ratio from compressed dimensions when available */
	if (vf->compWidth && vf->compHeight)
		pbi->frame_ar = vf->compHeight * 0x100 / vf->compWidth;
	ar = min_t(u32, pbi->frame_ar, DISP_RATIO_ASPECT_RATIO_MAX);
	vf->ratio_control = (ar << DISP_RATIO_ASPECT_RATIO_BIT);

	if (pbi->is_used_v4l && pbi->vf_dp.present_flag) {
		struct aml_vdec_hdr_infos hdr;
		struct aml_vcodec_ctx *ctx =
			(struct aml_vcodec_ctx *)(pbi->v4l2_ctx);

		memset(&hdr, 0, sizeof(hdr));
		hdr.signal_type = vf->signal_type;
		hdr.color_parms = pbi->vf_dp;
		vdec_v4l_set_hdr_infos(ctx, &hdr);
	}

	vf->sidebind_type = pbi->sidebind_type;
	vf->sidebind_channel_id = pbi->sidebind_channel_id;
}
/*
 * vframe-provider .vf_states callback: report pool size and the current
 * free/available frame counts from the newframe/display kfifos.
 */
static int vvp9_vf_states(struct vframe_states *states, void *op_arg)
{
	struct VP9Decoder_s *pbi = (struct VP9Decoder_s *)op_arg;

	states->vf_pool_size = VF_POOL_SIZE;
	states->buf_free_num = kfifo_len(&pbi->newframe_q);
	states->buf_avail_num = kfifo_len(&pbi->display_q);

	/* NOTE(review): single-step debug mode — presumably 'step' is the
	 * module debug parameter; while stepping, report nothing available */
	if (step == 2)
		states->buf_avail_num = 0;
	return 0;
}
/*
 * vframe-provider .peek callback: return the head of the display queue
 * without dequeuing it. Peeks two entries so the head frame can be
 * annotated with the pts of the frame that follows it.
 */
static struct vframe_s *vvp9_vf_peek(void *op_arg)
{
	struct vframe_s *vf[2] = {0, 0};
	struct VP9Decoder_s *pbi = (struct VP9Decoder_s *)op_arg;

	/* single-step debug mode: expose nothing to the receiver */
	if (step == 2)
		return NULL;

	if (kfifo_out_peek(&pbi->display_q, (void *)&vf, 2)) {
		if (vf[1]) {
			vf[0]->next_vf_pts_valid = true;
			vf[0]->next_vf_pts = vf[1]->pts;
		} else
			vf[0]->next_vf_pts_valid = false;
		return vf[0];
	}

	return NULL;
}
6917 static struct vframe_s
*vvp9_vf_get(void *op_arg
)
6919 struct vframe_s
*vf
;
6920 struct VP9Decoder_s
*pbi
= (struct VP9Decoder_s
*)op_arg
;
6927 if (kfifo_get(&pbi
->display_q
, &vf
)) {
6928 struct vframe_s
*next_vf
;
6929 uint8_t index
= vf
->index
& 0xff;
6930 if (index
< pbi
->used_buf_num
||
6931 (vf
->type
& VIDTYPE_V4L_EOS
)) {
6932 vf
->index_disp
= pbi
->vf_get_count
;
6933 pbi
->vf_get_count
++;
6934 if (debug
& VP9_DEBUG_BUFMGR
)
6935 pr_info("%s type 0x%x w/h %d/%d, pts %d, %lld\n",
6937 vf
->width
, vf
->height
,
6941 if (kfifo_peek(&pbi
->display_q
, &next_vf
)) {
6942 vf
->next_vf_pts_valid
= true;
6943 vf
->next_vf_pts
= next_vf
->pts
;
6945 vf
->next_vf_pts_valid
= false;
/*
 * vframe-provider .put callback: the display path returns a vframe.
 * Recycle it into newframe_q, drop the frame buffer's vf_ref under the
 * buffer-pool lock, and kick the decoder mailbox if it was waiting for
 * a free buffer. The dummy EOS vframe is ignored.
 */
static void vvp9_vf_put(struct vframe_s *vf, void *op_arg)
{
	struct VP9Decoder_s *pbi = (struct VP9Decoder_s *)op_arg;
	uint8_t index = vf->index & 0xff;

	if (vf == (&pbi->vframe_dummy))
		return;

	if (pbi->enable_fence && vf->fence) {
		vdec_fence_put(vf->fence);
		vf->fence = NULL;
	}

	kfifo_put(&pbi->newframe_q, (const struct vframe_s *)vf);
	pbi->vf_put_count++;
	if (index < pbi->used_buf_num) {
		struct VP9_Common_s *cm = &pbi->common;
		struct BufferPool_s *pool = cm->buffer_pool;
		unsigned long flags;

		lock_buffer_pool(pool, flags);
		if (pool->frame_bufs[index].buf.vf_ref > 0)
			pool->frame_bufs[index].buf.vf_ref--;

		if (pbi->is_used_v4l)
			pool->frame_bufs[index].buf.vframe_bound = true;

		/* wake the decoder if it stalled waiting for a buffer */
		if (pbi->wait_buf)
			WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG,
				0x1);
		pbi->last_put_idx = index;
		pbi->new_frame_displayed++;
		unlock_buffer_pool(pool, flags);
#ifdef SUPPORT_FB_DECODING
		if (pbi->used_stage_buf_num > 0 &&
			pbi->back_not_run_ready)
			trigger_schedule(pbi);
#endif
	}
}
6995 static int vvp9_event_cb(int type
, void *data
, void *private_data
)
6997 if (type
& VFRAME_EVENT_RECEIVER_RESET
) {
6999 unsigned long flags
;
7002 #ifndef CONFIG_AMLOGIC_POST_PROCESS_MANAGER
7003 vf_light_unreg_provider(&vvp9_vf_prov
);
7005 spin_lock_irqsave(&pbi
->lock
, flags
);
7008 spin_unlock_irqrestore(&pbi
->lock
, flags
);
7009 #ifndef CONFIG_AMLOGIC_POST_PROCESS_MANAGER
7010 vf_reg_provider(&vvp9_vf_prov
);
/*
 * Increment the vframe reference count on frame buffer @index; the
 * matching decrement happens in vvp9_vf_put() when the display path
 * returns the frame.
 */
void inc_vf_ref(struct VP9Decoder_s *pbi, int index)
{
	struct VP9_Common_s *cm = &pbi->common;

	cm->buffer_pool->frame_bufs[index].buf.vf_ref++;

	if (debug & VP9_DEBUG_BUFMGR_MORE)
		pr_info("%s index = %d new vf_ref = %d\r\n",
			__func__, index,
			cm->buffer_pool->frame_bufs[index].buf.vf_ref);
}
7031 static int frame_duration_adapt(struct VP9Decoder_s
*pbi
, struct vframe_s
*vf
, u32 valid
)
7033 u32 old_duration
, pts_duration
= 0;
7036 if (pbi
->get_frame_dur
== true)
7039 pbi
->frame_cnt_window
++;
7040 if (!(pbi
->vp9_first_pts_ready
== 1)) {
7043 pbi
->frame_cnt_window
= 0;
7044 pbi
->duration_from_pts_done
= 0;
7045 pbi
->vp9_first_pts_ready
= 1;
7050 if (pts
< pbi
->pts1
) {
7051 if (pbi
->frame_cnt_window
> FRAME_CNT_WINDOW_SIZE
) {
7053 pbi
->frame_cnt_window
= 0;
7057 if (valid
&& (pbi
->frame_cnt_window
> FRAME_CNT_WINDOW_SIZE
) &&
7058 (pts
> pbi
->pts1
) && (pbi
->duration_from_pts_done
== 0)) {
7059 old_duration
= pbi
->frame_dur
;
7061 pts_duration
= (((pbi
->pts2
- pbi
->pts1
) * 16) /
7062 (pbi
->frame_cnt_window
* 15));
7064 if (close_to(pts_duration
, old_duration
, 2000)) {
7065 pbi
->frame_dur
= pts_duration
;
7066 if ((debug
& VP9_DEBUG_OUT_PTS
) != 0)
7067 pr_info("use calc duration %d\n", pts_duration
);
7070 if (pbi
->duration_from_pts_done
== 0) {
7071 if (close_to(pts_duration
, old_duration
, RATE_CORRECTION_THRESHOLD
)) {
7072 pbi
->duration_from_pts_done
= 1;
7074 if (!close_to(pts_duration
,
7075 old_duration
, 1000) &&
7076 !close_to(pts_duration
,
7077 pbi
->frame_dur
, 1000) &&
7078 close_to(pts_duration
,
7079 pbi
->last_duration
, 200)) {
7083 pbi
->frame_dur
= pts_duration
;
7085 pbi
->pts1
= pbi
->pts2
;
7086 pbi
->frame_cnt_window
= 0;
7087 pbi
->duration_from_pts_done
= 0;
7090 pbi
->last_duration
= pts_duration
;
7096 static void update_vf_memhandle(struct VP9Decoder_s
*pbi
,
7097 struct vframe_s
*vf
, struct PIC_BUFFER_CONFIG_s
*pic
)
7099 if (pic
->index
< 0) {
7100 vf
->mem_handle
= NULL
;
7101 vf
->mem_head_handle
= NULL
;
7102 vf
->mem_dw_handle
= NULL
;
7103 } else if (vf
->type
& VIDTYPE_SCATTER
) {
7105 decoder_mmu_box_get_mem_handle(
7106 pbi
->mmu_box
, pic
->index
);
7107 vf
->mem_head_handle
=
7108 decoder_bmmu_box_get_mem_handle(
7110 HEADER_BUFFER_IDX(pic
->BUF_index
));
7111 if (pbi
->double_write_mode
== 3)
7113 decoder_bmmu_box_get_mem_handle(
7115 VF_BUFFER_IDX(pic
->BUF_index
));
7117 vf
->mem_dw_handle
= NULL
;
7120 decoder_bmmu_box_get_mem_handle(
7121 pbi
->bmmu_box
, VF_BUFFER_IDX(pic
->BUF_index
));
7122 vf
->mem_head_handle
= NULL
;
7123 vf
->mem_dw_handle
= NULL
;
7124 /*vf->mem_head_handle =
7125 *decoder_bmmu_box_get_mem_handle(
7126 *hevc->bmmu_box, VF_BUFFER_IDX(BUF_index));
/*
 * Refresh the decoder's global video statistics (gvs): frame geometry,
 * duration, derived frame rate (96000 ticks/sec over frame_dur, -1 when
 * duration is unknown) and the combined status word.
 */
static inline void pbi_update_gvs(struct VP9Decoder_s *pbi)
{
	if (pbi->gvs->frame_height != frame_height) {
		pbi->gvs->frame_width = frame_width;
		pbi->gvs->frame_height = frame_height;
	}
	if (pbi->gvs->frame_dur != pbi->frame_dur) {
		pbi->gvs->frame_dur = pbi->frame_dur;
		if (pbi->frame_dur != 0)
			pbi->gvs->frame_rate = 96000 / pbi->frame_dur;
		else
			pbi->gvs->frame_rate = -1;
	}
	pbi->gvs->status = pbi->stat | pbi->fatal_error;
}
7147 static int prepare_display_buf(struct VP9Decoder_s
*pbi
,
7148 struct PIC_BUFFER_CONFIG_s
*pic_config
)
7150 struct vframe_s
*vf
= NULL
;
7151 struct vdec_s
*pvdec
= hw_to_vdec(pbi
);
7152 int stream_offset
= pic_config
->stream_offset
;
7153 unsigned short slice_type
= pic_config
->slice_type
;
7154 struct aml_vcodec_ctx
* v4l2_ctx
= pbi
->v4l2_ctx
;
7155 ulong nv_order
= VIDTYPE_VIU_NV21
;
7156 u32 pts_valid
= 0, pts_us64_valid
= 0;
7163 if (debug
& VP9_DEBUG_BUFMGR
)
7164 pr_info("%s index = %d\r\n", __func__
, pic_config
->index
);
7165 if (kfifo_get(&pbi
->newframe_q
, &vf
) == 0) {
7166 pr_info("fatal error, no available buffer slot.");
7171 if (pbi
->is_used_v4l
) {
7172 if ((v4l2_ctx
->cap_pix_fmt
== V4L2_PIX_FMT_NV12
) ||
7173 (v4l2_ctx
->cap_pix_fmt
== V4L2_PIX_FMT_NV12M
))
7174 nv_order
= VIDTYPE_VIU_NV12
;
7177 if (pic_config
->double_write_mode
)
7178 set_canvas(pbi
, pic_config
);
7180 display_frame_count
[pbi
->index
]++;
7182 if (!force_pts_unstable
) {
7183 if ((pic_config
->pts
== 0) || (pic_config
->pts
<= pbi
->last_pts
)) {
7184 for (i
= (FRAME_BUFFERS
- 1); i
> 0; i
--) {
7185 if ((pbi
->last_pts
== pbi
->frame_mode_pts_save
[i
]) ||
7186 (pbi
->last_pts_us64
== pbi
->frame_mode_pts64_save
[i
])) {
7187 pic_config
->pts
= pbi
->frame_mode_pts_save
[i
- 1];
7188 pic_config
->pts64
= pbi
->frame_mode_pts64_save
[i
- 1];
7192 if ((i
== 0) || (pic_config
->pts
<= pbi
->last_pts
)) {
7193 vp9_print(pbi
, VP9_DEBUG_OUT_PTS
,
7194 "no found pts %d, set 0. %d, %d\n",
7195 i
, pic_config
->pts
, pbi
->last_pts
);
7196 pic_config
->pts
= 0;
7197 pic_config
->pts64
= 0;
7202 if (pbi
->is_used_v4l
) {
7204 = pbi
->m_BUF
[pic_config
->BUF_index
].v4l_ref_buf_addr
;
7205 if (pbi
->mmu_enable
) {
7206 vf
->mm_box
.bmmu_box
= pbi
->bmmu_box
;
7207 vf
->mm_box
.bmmu_idx
= HEADER_BUFFER_IDX(pic_config
->BUF_index
);
7208 vf
->mm_box
.mmu_box
= pbi
->mmu_box
;
7209 vf
->mm_box
.mmu_idx
= pic_config
->index
;
7213 if (pbi
->enable_fence
) {
7214 /* fill fence information. */
7215 if (pbi
->fence_usage
== FENCE_USE_FOR_DRIVER
)
7216 vf
->fence
= pic_config
->fence
;
7219 #ifdef MULTI_INSTANCE_SUPPORT
7220 if (vdec_frame_based(pvdec
)) {
7221 vf
->pts
= pic_config
->pts
;
7222 vf
->pts_us64
= pic_config
->pts64
;
7223 vf
->timestamp
= pic_config
->timestamp
;
7224 if (vf
->pts
!= 0 || vf
->pts_us64
!= 0) {
7233 /* if (pts_lookup_offset(PTS_TYPE_VIDEO,
7234 * stream_offset, &vf->pts, 0) != 0) {
7236 if (pts_lookup_offset_us64
7237 (PTS_TYPE_VIDEO
, stream_offset
, &vf
->pts
,
7239 &vf
->pts_us64
) != 0) {
7255 fill_frame_info(pbi
, pic_config
, frame_size
, vf
->pts
);
7258 pts_us64_save
= vf
->pts_us64
;
7259 if (pbi
->pts_unstable
) {
7260 frame_duration_adapt(pbi
, vf
, pts_valid
);
7261 if (pbi
->duration_from_pts_done
) {
7262 pbi
->pts_mode
= PTS_NONE_REF_USE_DURATION
;
7264 if (pts_valid
|| pts_us64_valid
)
7265 pbi
->pts_mode
= PTS_NORMAL
;
7269 if ((pbi
->pts_mode
== PTS_NORMAL
) && (vf
->pts
!= 0)
7270 && pbi
->get_frame_dur
) {
7271 int pts_diff
= (int)vf
->pts
- pbi
->last_lookup_pts
;
7274 pbi
->pts_mode_switching_count
++;
7275 pbi
->pts_mode_recovery_count
= 0;
7277 if (pbi
->pts_mode_switching_count
>=
7278 PTS_MODE_SWITCHING_THRESHOLD
) {
7280 PTS_NONE_REF_USE_DURATION
;
7282 ("HEVC: switch to n_d mode.\n");
7286 int p
= PTS_MODE_SWITCHING_RECOVERY_THREASHOLD
;
7288 pbi
->pts_mode_recovery_count
++;
7289 if (pbi
->pts_mode_recovery_count
> p
) {
7290 pbi
->pts_mode_switching_count
= 0;
7291 pbi
->pts_mode_recovery_count
= 0;
7297 pbi
->last_lookup_pts
= vf
->pts
;
7299 if ((pbi
->pts_mode
== PTS_NONE_REF_USE_DURATION
)
7300 && (slice_type
!= KEY_FRAME
))
7301 vf
->pts
= pbi
->last_pts
+ DUR2PTS(pbi
->frame_dur
);
7302 pbi
->last_pts
= vf
->pts
;
7304 if (vf
->pts_us64
!= 0)
7305 pbi
->last_lookup_pts_us64
= vf
->pts_us64
;
7307 if ((pbi
->pts_mode
== PTS_NONE_REF_USE_DURATION
)
7308 && (slice_type
!= KEY_FRAME
)) {
7310 pbi
->last_pts_us64
+
7311 (DUR2PTS(pbi
->frame_dur
) * 100 / 9);
7313 pbi
->last_pts_us64
= vf
->pts_us64
;
7314 if ((debug
& VP9_DEBUG_OUT_PTS
) != 0) {
7316 ("VP9 dec out pts: pts_mode=%d,dur=%d,pts(%d,%lld)(%d,%lld)\n",
7317 pbi
->pts_mode
, pbi
->frame_dur
, vf
->pts
,
7318 vf
->pts_us64
, pts_save
, pts_us64_save
);
7321 if (pbi
->pts_mode
== PTS_NONE_REF_USE_DURATION
) {
7322 vf
->disp_pts
= vf
->pts
;
7323 vf
->disp_pts_us64
= vf
->pts_us64
;
7325 vf
->pts_us64
= pts_us64_save
;
7328 vf
->disp_pts_us64
= 0;
7331 vf
->index
= 0xff00 | pic_config
->index
;
7333 if (pic_config
->double_write_mode
& 0x10) {
7334 /* double write only */
7335 vf
->compBodyAddr
= 0;
7336 vf
->compHeadAddr
= 0;
7338 if (pbi
->mmu_enable
) {
7339 vf
->compBodyAddr
= 0;
7340 vf
->compHeadAddr
= pic_config
->header_adr
;
7342 /*vf->compBodyAddr = pic_config->mc_y_adr;
7343 *vf->compHeadAddr = pic_config->mc_y_adr +
7344 *pic_config->comp_body_size; */
7347 vf
->canvas0Addr
= vf
->canvas1Addr
= 0;
7349 if (pic_config
->double_write_mode
) {
7350 vf
->type
= VIDTYPE_PROGRESSIVE
|
7352 vf
->type
|= nv_order
;
7353 if ((pic_config
->double_write_mode
== 3) &&
7354 (!IS_8K_SIZE(pic_config
->y_crop_width
,
7355 pic_config
->y_crop_height
))) {
7356 vf
->type
|= VIDTYPE_COMPRESS
;
7357 if (pbi
->mmu_enable
)
7358 vf
->type
|= VIDTYPE_SCATTER
;
7360 #ifdef MULTI_INSTANCE_SUPPORT
7361 if (pbi
->m_ins_flag
) {
7362 vf
->canvas0Addr
= vf
->canvas1Addr
= -1;
7364 vf
->canvas0_config
[0] =
7365 pic_config
->canvas_config
[0];
7366 vf
->canvas0_config
[1] =
7367 pic_config
->canvas_config
[1];
7368 vf
->canvas1_config
[0] =
7369 pic_config
->canvas_config
[0];
7370 vf
->canvas1_config
[1] =
7371 pic_config
->canvas_config
[1];
7375 vf
->canvas0Addr
= vf
->canvas1Addr
=
7376 spec2canvas(pic_config
);
7378 vf
->canvas0Addr
= vf
->canvas1Addr
= 0;
7379 vf
->type
= VIDTYPE_COMPRESS
| VIDTYPE_VIU_FIELD
;
7380 if (pbi
->mmu_enable
)
7381 vf
->type
|= VIDTYPE_SCATTER
;
7384 switch (pic_config
->bit_depth
) {
7386 vf
->bitdepth
= BITDEPTH_Y8
|
7387 BITDEPTH_U8
| BITDEPTH_V8
;
7391 vf
->bitdepth
= BITDEPTH_Y10
|
7392 BITDEPTH_U10
| BITDEPTH_V10
;
7395 vf
->bitdepth
= BITDEPTH_Y10
|
7396 BITDEPTH_U10
| BITDEPTH_V10
;
7399 if ((vf
->type
& VIDTYPE_COMPRESS
) == 0)
7401 BITDEPTH_Y8
| BITDEPTH_U8
| BITDEPTH_V8
;
7402 if (pic_config
->bit_depth
== VPX_BITS_8
)
7403 vf
->bitdepth
|= BITDEPTH_SAVING_MODE
;
7405 /* if((vf->width!=pic_config->width)|
7406 * (vf->height!=pic_config->height))
7408 /* pr_info("aaa: %d/%d, %d/%d\n",
7409 vf->width,vf->height, pic_config->width,
7410 pic_config->height); */
7411 vf
->width
= pic_config
->y_crop_width
/
7412 get_double_write_ratio(pbi
,
7413 pic_config
->double_write_mode
);
7414 vf
->height
= pic_config
->y_crop_height
/
7415 get_double_write_ratio(pbi
,
7416 pic_config
->double_write_mode
);
7417 if (force_w_h
!= 0) {
7418 vf
->width
= (force_w_h
>> 16) & 0xffff;
7419 vf
->height
= force_w_h
& 0xffff;
7421 vf
->compWidth
= pic_config
->y_crop_width
;
7422 vf
->compHeight
= pic_config
->y_crop_height
;
7423 set_frame_info(pbi
, vf
);
7424 if (force_fps
& 0x100) {
7425 u32 rate
= force_fps
& 0xff;
7428 vf
->duration
= 96000/rate
;
7432 update_vf_memhandle(pbi
, vf
, pic_config
);
7434 if (!(pic_config
->y_crop_width
== 196
7435 && pic_config
->y_crop_height
== 196
7436 && (debug
& VP9_DEBUG_NO_TRIGGER_FRAME
) == 0
7437 && (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_TXLX
))) {
7438 struct vdec_info tmp4x
;
7440 inc_vf_ref(pbi
, pic_config
->index
);
7441 decoder_do_frame_check(pvdec
, vf
);
7442 kfifo_put(&pbi
->display_q
, (const struct vframe_s
*)vf
);
7443 ATRACE_COUNTER(MODULE_NAME
, vf
->pts
);
7444 pbi
->vf_pre_count
++;
7445 pbi_update_gvs(pbi
);
7447 vdec_count_info(pbi
->gvs
, 0, stream_offset
);
7448 memcpy(&tmp4x
, pbi
->gvs
, sizeof(struct vdec_info
));
7449 tmp4x
.bit_depth_luma
= pbi
->vp9_param
.p
.bit_depth
;
7450 tmp4x
.bit_depth_chroma
= pbi
->vp9_param
.p
.bit_depth
;
7451 tmp4x
.double_write_mode
= get_double_write_mode(pbi
);
7452 vdec_fill_vdec_frame(pvdec
, &pbi
->vframe_qos
, &tmp4x
,
7453 vf
, pic_config
->hw_decode_time
);
7454 pvdec
->vdec_fps_detec(pvdec
->id
);
7455 if (without_display_mode
== 0) {
7456 vf_notify_receiver(pbi
->provider_name
,
7457 VFRAME_EVENT_PROVIDER_VFRAME_READY
, NULL
);
7459 vvp9_vf_put(vvp9_vf_get(pbi
), pbi
);
7461 pbi
->stat
|= VP9_TRIGGER_FRAME_DONE
;
7462 hevc_source_changed(VFORMAT_VP9
, 196, 196, 30);
7463 pr_debug("[%s %d] drop trigger frame width %d height %d state 0x%x\n",
7464 __func__
, __LINE__
, vf
->width
,
7465 vf
->height
, pbi
->stat
);
7472 static int notify_v4l_eos(struct vdec_s
*vdec
)
7474 struct VP9Decoder_s
*hw
= (struct VP9Decoder_s
*)vdec
->private;
7475 struct aml_vcodec_ctx
*ctx
= (struct aml_vcodec_ctx
*)(hw
->v4l2_ctx
);
7476 struct vframe_s
*vf
= &hw
->vframe_dummy
;
7477 struct vdec_v4l2_buffer
*fb
= NULL
;
7478 int index
= INVALID_IDX
;
7482 if (hw
->is_used_v4l
) {
7483 expires
= jiffies
+ msecs_to_jiffies(2000);
7484 while (INVALID_IDX
== (index
= v4l_get_free_fb(hw
))) {
7485 if (time_after(jiffies
, expires
) ||
7486 v4l2_m2m_num_dst_bufs_ready(ctx
->m2m_ctx
))
7490 if (index
== INVALID_IDX
) {
7491 if (vdec_v4l_get_buffer(hw
->v4l2_ctx
, &fb
) < 0) {
7492 pr_err("[%d] EOS get free buff fail.\n", ctx
->id
);
7498 vf
->type
|= VIDTYPE_V4L_EOS
;
7499 vf
->timestamp
= ULONG_MAX
;
7500 vf
->flag
= VFRAME_FLAG_EMPTY_FRAME_V4L
;
7501 vf
->v4l_mem_handle
= (index
== INVALID_IDX
) ? (ulong
)fb
:
7502 hw
->m_BUF
[index
].v4l_ref_buf_addr
;
7504 kfifo_put(&hw
->display_q
, (const struct vframe_s
*)vf
);
7505 vf_notify_receiver(vdec
->vf_provider_name
,
7506 VFRAME_EVENT_PROVIDER_VFRAME_READY
, NULL
);
7508 pr_info("[%d] VP9 EOS notify.\n", (hw
->is_used_v4l
)?ctx
->id
:vdec
->id
);
7514 static void get_rpm_param(union param_u
*params
)
7517 unsigned int data32
;
7519 if (debug
& VP9_DEBUG_BUFMGR
)
7520 pr_info("enter %s\r\n", __func__
);
7521 for (i
= 0; i
< 128; i
++) {
7523 data32
= READ_VREG(RPM_CMD_REG
);
7524 /*pr_info("%x\n", data32);*/
7525 } while ((data32
& 0x10000) == 0);
7526 params
->l
.data
[i
] = data32
&0xffff;
7527 /*pr_info("%x\n", data32);*/
7528 WRITE_VREG(RPM_CMD_REG
, 0);
7530 if (debug
& VP9_DEBUG_BUFMGR
)
7531 pr_info("leave %s\r\n", __func__
);
7533 static void debug_buffer_mgr_more(struct VP9Decoder_s
*pbi
)
7537 if (!(debug
& VP9_DEBUG_BUFMGR_MORE
))
7539 pr_info("vp9_param: (%d)\n", pbi
->slice_idx
);
7540 for (i
= 0; i
< (RPM_END
-RPM_BEGIN
); i
++) {
7541 pr_info("%04x ", pbi
->vp9_param
.l
.data
[i
]);
7542 if (((i
+ 1) & 0xf) == 0)
7545 pr_info("=============param==========\r\n");
7546 pr_info("profile %x\r\n", pbi
->vp9_param
.p
.profile
);
7547 pr_info("show_existing_frame %x\r\n",
7548 pbi
->vp9_param
.p
.show_existing_frame
);
7549 pr_info("frame_to_show_idx %x\r\n",
7550 pbi
->vp9_param
.p
.frame_to_show_idx
);
7551 pr_info("frame_type %x\r\n", pbi
->vp9_param
.p
.frame_type
);
7552 pr_info("show_frame %x\r\n", pbi
->vp9_param
.p
.show_frame
);
7553 pr_info("e.r.r.o.r_resilient_mode %x\r\n",
7554 pbi
->vp9_param
.p
.error_resilient_mode
);
7555 pr_info("intra_only %x\r\n", pbi
->vp9_param
.p
.intra_only
);
7556 pr_info("display_size_present %x\r\n",
7557 pbi
->vp9_param
.p
.display_size_present
);
7558 pr_info("reset_frame_context %x\r\n",
7559 pbi
->vp9_param
.p
.reset_frame_context
);
7560 pr_info("refresh_frame_flags %x\r\n",
7561 pbi
->vp9_param
.p
.refresh_frame_flags
);
7562 pr_info("bit_depth %x\r\n", pbi
->vp9_param
.p
.bit_depth
);
7563 pr_info("width %x\r\n", pbi
->vp9_param
.p
.width
);
7564 pr_info("height %x\r\n", pbi
->vp9_param
.p
.height
);
7565 pr_info("display_width %x\r\n", pbi
->vp9_param
.p
.display_width
);
7566 pr_info("display_height %x\r\n", pbi
->vp9_param
.p
.display_height
);
7567 pr_info("ref_info %x\r\n", pbi
->vp9_param
.p
.ref_info
);
7568 pr_info("same_frame_size %x\r\n", pbi
->vp9_param
.p
.same_frame_size
);
7569 if (!(debug
& VP9_DEBUG_DBG_LF_PRINT
))
7571 pr_info("mode_ref_delta_enabled: 0x%x\r\n",
7572 pbi
->vp9_param
.p
.mode_ref_delta_enabled
);
7573 pr_info("sharpness_level: 0x%x\r\n",
7574 pbi
->vp9_param
.p
.sharpness_level
);
7575 pr_info("ref_deltas: 0x%x, 0x%x, 0x%x, 0x%x\r\n",
7576 pbi
->vp9_param
.p
.ref_deltas
[0], pbi
->vp9_param
.p
.ref_deltas
[1],
7577 pbi
->vp9_param
.p
.ref_deltas
[2], pbi
->vp9_param
.p
.ref_deltas
[3]);
7578 pr_info("mode_deltas: 0x%x, 0x%x\r\n", pbi
->vp9_param
.p
.mode_deltas
[0],
7579 pbi
->vp9_param
.p
.mode_deltas
[1]);
7580 pr_info("filter_level: 0x%x\r\n", pbi
->vp9_param
.p
.filter_level
);
7581 pr_info("seg_enabled: 0x%x\r\n", pbi
->vp9_param
.p
.seg_enabled
);
7582 pr_info("seg_abs_delta: 0x%x\r\n", pbi
->vp9_param
.p
.seg_abs_delta
);
7583 pr_info("seg_lf_feature_enabled: 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x\r\n",
7584 (pbi
->vp9_param
.p
.seg_lf_info
[0]>>15 & 1),
7585 (pbi
->vp9_param
.p
.seg_lf_info
[1]>>15 & 1),
7586 (pbi
->vp9_param
.p
.seg_lf_info
[2]>>15 & 1),
7587 (pbi
->vp9_param
.p
.seg_lf_info
[3]>>15 & 1),
7588 (pbi
->vp9_param
.p
.seg_lf_info
[4]>>15 & 1),
7589 (pbi
->vp9_param
.p
.seg_lf_info
[5]>>15 & 1),
7590 (pbi
->vp9_param
.p
.seg_lf_info
[6]>>15 & 1),
7591 (pbi
->vp9_param
.p
.seg_lf_info
[7]>>15 & 1));
7592 pr_info("seg_lf_feature_data: 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x\r\n",
7593 (pbi
->vp9_param
.p
.seg_lf_info
[0] & 0x13f),
7594 (pbi
->vp9_param
.p
.seg_lf_info
[1] & 0x13f),
7595 (pbi
->vp9_param
.p
.seg_lf_info
[2] & 0x13f),
7596 (pbi
->vp9_param
.p
.seg_lf_info
[3] & 0x13f),
7597 (pbi
->vp9_param
.p
.seg_lf_info
[4] & 0x13f),
7598 (pbi
->vp9_param
.p
.seg_lf_info
[5] & 0x13f),
7599 (pbi
->vp9_param
.p
.seg_lf_info
[6] & 0x13f),
7600 (pbi
->vp9_param
.p
.seg_lf_info
[7] & 0x13f));
7605 static void vp9_recycle_mmu_buf_tail(struct VP9Decoder_s
*pbi
)
7607 struct VP9_Common_s
*const cm
= &pbi
->common
;
7608 if (pbi
->double_write_mode
& 0x10)
7610 if (cm
->cur_fb_idx_mmu
!= INVALID_IDX
) {
7611 if (pbi
->used_4k_num
== -1) {
7613 (READ_VREG(HEVC_SAO_MMU_STATUS
) >> 16);
7614 if (pbi
->m_ins_flag
)
7615 hevc_mmu_dma_check(hw_to_vdec(pbi
));
7617 decoder_mmu_box_free_idx_tail(pbi
->mmu_box
,
7618 cm
->cur_fb_idx_mmu
, pbi
->used_4k_num
);
7619 cm
->cur_fb_idx_mmu
= INVALID_IDX
;
7620 pbi
->used_4k_num
= -1;
7624 #ifdef MULTI_INSTANCE_SUPPORT
7625 static void vp9_recycle_mmu_buf(struct VP9Decoder_s
*pbi
)
7627 struct VP9_Common_s
*const cm
= &pbi
->common
;
7628 if (pbi
->double_write_mode
& 0x10)
7630 if (cm
->cur_fb_idx_mmu
!= INVALID_IDX
) {
7631 decoder_mmu_box_free_idx(pbi
->mmu_box
,
7632 cm
->cur_fb_idx_mmu
);
7634 cm
->cur_fb_idx_mmu
= INVALID_IDX
;
7635 pbi
->used_4k_num
= -1;
7639 void vp9_recycle_mmu_work(struct work_struct
*work
)
7641 struct VP9Decoder_s
*pbi
= container_of(work
,
7642 struct VP9Decoder_s
, recycle_mmu_work
);
7644 vp9_recycle_mmu_buf(pbi
);
7649 static void dec_again_process(struct VP9Decoder_s
*pbi
)
7652 pbi
->dec_result
= DEC_RESULT_AGAIN
;
7653 if (pbi
->process_state
==
7654 PROC_STATE_DECODESLICE
) {
7655 pbi
->process_state
=
7656 PROC_STATE_SENDAGAIN
;
7657 if (pbi
->mmu_enable
) {
7659 * Because vp9_recycle_mmu_buf has sleep function,we can't
7660 * call it directly. Use a recycle_mmu_work to substitude it.
7662 vdec_schedule_work(&pbi
->recycle_mmu_work
);
7665 reset_process_time(pbi
);
7666 vdec_schedule_work(&pbi
->work
);
7669 int continue_decoding(struct VP9Decoder_s
*pbi
)
7673 struct VP9_Common_s
*const cm
= &pbi
->common
;
7674 struct aml_vcodec_ctx
*ctx
= (struct aml_vcodec_ctx
*)(pbi
->v4l2_ctx
);
7675 debug_buffer_mgr_more(pbi
);
7677 if (pbi
->is_used_v4l
&& ctx
->param_sets_from_ucode
)
7678 pbi
->res_ch_flag
= 0;
7679 bit_depth_luma
= pbi
->vp9_param
.p
.bit_depth
;
7680 bit_depth_chroma
= pbi
->vp9_param
.p
.bit_depth
;
7682 if ((pbi
->vp9_param
.p
.bit_depth
>= VPX_BITS_10
) &&
7683 (get_double_write_mode(pbi
) == 0x10)) {
7684 pbi
->fatal_error
|= DECODER_FATAL_ERROR_SIZE_OVERFLOW
;
7685 pr_err("fatal err, bit_depth %d, unsupport dw 0x10\n",
7686 pbi
->vp9_param
.p
.bit_depth
);
7690 if (pbi
->process_state
!= PROC_STATE_SENDAGAIN
) {
7691 ret
= vp9_bufmgr_process(pbi
, &pbi
->vp9_param
);
7692 if (!pbi
->m_ins_flag
)
7695 union param_u
*params
= &pbi
->vp9_param
;
7696 if (pbi
->mmu_enable
&& ((pbi
->double_write_mode
& 0x10) == 0)) {
7697 ret
= vp9_alloc_mmu(pbi
,
7701 params
->p
.bit_depth
,
7702 pbi
->frame_mmu_map_addr
);
7704 cm
->cur_fb_idx_mmu
= cm
->new_fb_idx
;
7706 pr_err("can't alloc need mmu1,idx %d ret =%d\n",
7712 WRITE_VREG(HEVC_PARSER_PICTURE_SIZE
,
7713 (params
->p
.height
<< 16) | params
->p
.width
);
7716 pr_info("vp9_bufmgr_process=> %d, VP9_10B_DISCARD_NAL\r\n",
7718 WRITE_VREG(HEVC_DEC_STATUS_REG
, VP9_10B_DISCARD_NAL
);
7720 if (pbi
->mmu_enable
)
7721 vp9_recycle_mmu_buf(pbi
);
7722 #ifdef MULTI_INSTANCE_SUPPORT
7723 if (pbi
->m_ins_flag
) {
7724 pbi
->dec_result
= DEC_RESULT_DONE
;
7725 #ifdef SUPPORT_FB_DECODING
7726 if (pbi
->used_stage_buf_num
== 0)
7729 vdec_schedule_work(&pbi
->work
);
7733 } else if (ret
== 0) {
7734 struct PIC_BUFFER_CONFIG_s
*cur_pic_config
7735 = &cm
->cur_frame
->buf
;
7736 cur_pic_config
->decode_idx
= pbi
->frame_count
;
7738 if (pbi
->process_state
!= PROC_STATE_SENDAGAIN
) {
7739 if (!pbi
->m_ins_flag
) {
7741 decode_frame_count
[pbi
->index
]
7744 #ifdef MULTI_INSTANCE_SUPPORT
7746 cur_pic_config
->pts
= pbi
->chunk
->pts
;
7747 cur_pic_config
->pts64
= pbi
->chunk
->pts64
;
7748 cur_pic_config
->timestamp
= pbi
->chunk
->timestamp
;
7752 /*pr_info("Decode Frame Data %d\n", pbi->frame_count);*/
7753 config_pic_size(pbi
, pbi
->vp9_param
.p
.bit_depth
);
7755 if ((pbi
->common
.frame_type
!= KEY_FRAME
)
7756 && (!pbi
->common
.intra_only
)) {
7757 config_mc_buffer(pbi
, pbi
->vp9_param
.p
.bit_depth
);
7758 #ifdef SUPPORT_FB_DECODING
7759 if (pbi
->used_stage_buf_num
== 0)
7761 config_mpred_hw(pbi
);
7763 #ifdef SUPPORT_FB_DECODING
7764 if (pbi
->used_stage_buf_num
== 0)
7766 clear_mpred_hw(pbi
);
7769 if (mcrcc_cache_alg_flag
)
7770 config_mcrcc_axi_hw_new(pbi
);
7772 config_mcrcc_axi_hw(pbi
);
7774 config_sao_hw(pbi
, &pbi
->vp9_param
);
7776 #ifdef VP9_LPF_LVL_UPDATE
7778 * Get loop filter related picture level parameters from Parser
7780 pbi
->lf
->mode_ref_delta_enabled
= pbi
->vp9_param
.p
.mode_ref_delta_enabled
;
7781 pbi
->lf
->sharpness_level
= pbi
->vp9_param
.p
.sharpness_level
;
7782 for (i
= 0; i
< 4; i
++)
7783 pbi
->lf
->ref_deltas
[i
] = pbi
->vp9_param
.p
.ref_deltas
[i
];
7784 for (i
= 0; i
< 2; i
++)
7785 pbi
->lf
->mode_deltas
[i
] = pbi
->vp9_param
.p
.mode_deltas
[i
];
7786 pbi
->default_filt_lvl
= pbi
->vp9_param
.p
.filter_level
;
7787 pbi
->seg_4lf
->enabled
= pbi
->vp9_param
.p
.seg_enabled
;
7788 pbi
->seg_4lf
->abs_delta
= pbi
->vp9_param
.p
.seg_abs_delta
;
7789 for (i
= 0; i
< MAX_SEGMENTS
; i
++)
7790 pbi
->seg_4lf
->feature_mask
[i
] = (pbi
->vp9_param
.p
.seg_lf_info
[i
] &
7791 0x8000) ? (1 << SEG_LVL_ALT_LF
) : 0;
7792 for (i
= 0; i
< MAX_SEGMENTS
; i
++)
7793 pbi
->seg_4lf
->feature_data
[i
][SEG_LVL_ALT_LF
]
7794 = (pbi
->vp9_param
.p
.seg_lf_info
[i
]
7795 & 0x100) ? -(pbi
->vp9_param
.p
.seg_lf_info
[i
]
7796 & 0x3f) : (pbi
->vp9_param
.p
.seg_lf_info
[i
] & 0x3f);
7797 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A
) {
7798 /*Set pipeline mode*/
7799 uint32_t lpf_data32
= READ_VREG(HEVC_DBLK_CFGB
);
7800 /*dblk pipeline mode=1 for performance*/
7801 if (pbi
->vp9_param
.p
.width
>= 1280)
7802 lpf_data32
|= (0x1 << 4);
7804 lpf_data32
&= ~(0x3 << 4);
7805 WRITE_VREG(HEVC_DBLK_CFGB
, lpf_data32
);
7808 * Update loop filter Thr/Lvl table for every frame
7811 ("vp9_loop_filter (run before every frame decoding start)\n");*/
7812 vp9_loop_filter_frame_init(pbi
->seg_4lf
,
7813 pbi
->lfi
, pbi
->lf
, pbi
->default_filt_lvl
);
7815 /*pr_info("HEVC_DEC_STATUS_REG <= VP9_10B_DECODE_SLICE\n");*/
7816 WRITE_VREG(HEVC_DEC_STATUS_REG
, VP9_10B_DECODE_SLICE
);
7818 pr_info("Skip search next start code\n");
7819 cm
->prev_fb_idx
= INVALID_IDX
;
7820 /*skip, search next start code*/
7821 WRITE_VREG(HEVC_DEC_STATUS_REG
, VP9_10B_DECODE_SLICE
);
7823 pbi
->process_state
= PROC_STATE_DECODESLICE
;
7824 if (pbi
->mmu_enable
&& ((pbi
->double_write_mode
& 0x10) == 0)) {
7825 if (pbi
->last_put_idx
< pbi
->used_buf_num
) {
7826 struct RefCntBuffer_s
*frame_bufs
=
7827 cm
->buffer_pool
->frame_bufs
;
7828 int i
= pbi
->last_put_idx
;
7829 /*free not used buffers.*/
7830 if ((frame_bufs
[i
].ref_count
== 0) &&
7831 (frame_bufs
[i
].buf
.vf_ref
== 0) &&
7832 (frame_bufs
[i
].buf
.index
!= -1)) {
7833 decoder_mmu_box_free_idx(pbi
->mmu_box
, i
);
7835 pbi
->last_put_idx
= -1;
7841 static void fill_frame_info(struct VP9Decoder_s
*pbi
,
7842 struct PIC_BUFFER_CONFIG_s
*frame
,
7843 unsigned int framesize
,
7846 struct vframe_qos_s
*vframe_qos
= &pbi
->vframe_qos
;
7848 if (frame
->slice_type
== KEY_FRAME
)
7849 vframe_qos
->type
= 1;
7850 else if (frame
->slice_type
== INTER_FRAME
)
7851 vframe_qos
->type
= 2;
7853 #define SHOW_QOS_INFO
7855 if (input_frame_based(hw_to_vdec(pbi
)))
7856 vframe_qos
->size
= frame
->frame_size2
;
7858 vframe_qos
->size
= framesize
;
7859 vframe_qos
->pts
= pts
;
7860 #ifdef SHOW_QOS_INFO
7861 vp9_print(pbi
, 0, "slice:%d\n", frame
->slice_type
);
7863 vframe_qos
->max_mv
= frame
->max_mv
;
7864 vframe_qos
->avg_mv
= frame
->avg_mv
;
7865 vframe_qos
->min_mv
= frame
->min_mv
;
7866 #ifdef SHOW_QOS_INFO
7867 vp9_print(pbi
, 0, "mv: max:%d, avg:%d, min:%d\n",
7870 vframe_qos
->min_mv
);
7872 vframe_qos
->max_qp
= frame
->max_qp
;
7873 vframe_qos
->avg_qp
= frame
->avg_qp
;
7874 vframe_qos
->min_qp
= frame
->min_qp
;
7875 #ifdef SHOW_QOS_INFO
7876 vp9_print(pbi
, 0, "qp: max:%d, avg:%d, min:%d\n",
7879 vframe_qos
->min_qp
);
7881 vframe_qos
->max_skip
= frame
->max_skip
;
7882 vframe_qos
->avg_skip
= frame
->avg_skip
;
7883 vframe_qos
->min_skip
= frame
->min_skip
;
7884 #ifdef SHOW_QOS_INFO
7885 vp9_print(pbi
, 0, "skip: max:%d, avg:%d, min:%d\n",
7886 vframe_qos
->max_skip
,
7887 vframe_qos
->avg_skip
,
7888 vframe_qos
->min_skip
);
7893 /* only when we decoded one field or one frame,
7894 we can call this function to get qos info*/
7895 static void get_picture_qos_info(struct VP9Decoder_s
*pbi
)
7897 struct PIC_BUFFER_CONFIG_s
*frame
= &pbi
->cur_buf
->buf
;
7898 struct vdec_s
*vdec
= hw_to_vdec(pbi
);
7903 frame
->frame_size2
= vdec
->mvfrm
->frame_size
;
7904 frame
->hw_decode_time
=
7905 local_clock() - vdec
->mvfrm
->hw_decode_start
;
7908 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_G12A
) {
7910 unsigned char i
, j
, t
;
7913 data
= READ_VREG(HEVC_MV_INFO
);
7914 if (frame
->slice_type
== KEY_FRAME
)
7917 a
[1] = (data
>> 8) & 0xff;
7918 a
[2] = (data
>> 16) & 0xff;
7920 for (i
= 0; i
< 3; i
++) {
7921 for (j
= i
+1; j
< 3; j
++) {
7926 } else if (a
[j
] == a
[i
]) {
7934 frame
->max_mv
= a
[2];
7935 frame
->avg_mv
= a
[1];
7936 frame
->min_mv
= a
[0];
7938 vp9_print(pbi
, VP9_DEBUG_QOS_INFO
,
7939 "mv data %x a[0]= %x a[1]= %x a[2]= %x\n",
7940 data
, a
[0], a
[1], a
[2]);
7942 data
= READ_VREG(HEVC_QP_INFO
);
7944 a
[1] = (data
>> 8) & 0x3f;
7945 a
[2] = (data
>> 16) & 0x7f;
7947 for (i
= 0; i
< 3; i
++) {
7948 for (j
= i
+1; j
< 3; j
++) {
7953 } else if (a
[j
] == a
[i
]) {
7961 frame
->max_qp
= a
[2];
7962 frame
->avg_qp
= a
[1];
7963 frame
->min_qp
= a
[0];
7965 vp9_print(pbi
, VP9_DEBUG_QOS_INFO
,
7966 "qp data %x a[0]= %x a[1]= %x a[2]= %x\n",
7967 data
, a
[0], a
[1], a
[2]);
7969 data
= READ_VREG(HEVC_SKIP_INFO
);
7971 a
[1] = (data
>> 8) & 0x3f;
7972 a
[2] = (data
>> 16) & 0x7f;
7974 for (i
= 0; i
< 3; i
++) {
7975 for (j
= i
+1; j
< 3; j
++) {
7980 } else if (a
[j
] == a
[i
]) {
7988 frame
->max_skip
= a
[2];
7989 frame
->avg_skip
= a
[1];
7990 frame
->min_skip
= a
[0];
7992 vp9_print(pbi
, VP9_DEBUG_QOS_INFO
,
7993 "skip data %x a[0]= %x a[1]= %x a[2]= %x\n",
7994 data
, a
[0], a
[1], a
[2]);
7996 uint32_t blk88_y_count
;
7997 uint32_t blk88_c_count
;
7998 uint32_t blk22_mv_count
;
8008 uint64_t temp_value
;
8009 int pic_number
= frame
->decode_idx
;
8015 frame
->max_skip
= 0;
8016 frame
->avg_skip
= 0;
8017 frame
->min_skip
= 0;
8023 vp9_print(pbi
, VP9_DEBUG_QOS_INFO
, "slice_type:%d, poc:%d\n",
8027 /* set rd_idx to 0 */
8028 WRITE_VREG(HEVC_PIC_QUALITY_CTRL
, 0);
8030 blk88_y_count
= READ_VREG(HEVC_PIC_QUALITY_DATA
);
8031 if (blk88_y_count
== 0) {
8033 vp9_print(pbi
, VP9_DEBUG_QOS_INFO
,
8034 "[Picture %d Quality] NO Data yet.\n",
8037 /* reset all counts */
8038 WRITE_VREG(HEVC_PIC_QUALITY_CTRL
, (1<<8));
8042 rdata32
= READ_VREG(HEVC_PIC_QUALITY_DATA
);
8044 vp9_print(pbi
, VP9_DEBUG_QOS_INFO
,
8045 "[Picture %d Quality] Y QP AVG : %d (%d/%d)\n",
8046 pic_number
, rdata32
/blk88_y_count
,
8047 rdata32
, blk88_y_count
);
8049 frame
->avg_qp
= rdata32
/blk88_y_count
;
8051 rdata32
= READ_VREG(HEVC_PIC_QUALITY_DATA
);
8053 vp9_print(pbi
, VP9_DEBUG_QOS_INFO
,
8054 "[Picture %d Quality] Y intra rate : %d%c (%d)\n",
8055 pic_number
, rdata32
*100/blk88_y_count
,
8058 /* skipped_y_count */
8059 rdata32
= READ_VREG(HEVC_PIC_QUALITY_DATA
);
8061 vp9_print(pbi
, VP9_DEBUG_QOS_INFO
,
8062 "[Picture %d Quality] Y skipped rate : %d%c (%d)\n",
8063 pic_number
, rdata32
*100/blk88_y_count
,
8066 frame
->avg_skip
= rdata32
*100/blk88_y_count
;
8067 /* coeff_non_zero_y_count */
8068 rdata32
= READ_VREG(HEVC_PIC_QUALITY_DATA
);
8070 vp9_print(pbi
, VP9_DEBUG_QOS_INFO
,
8071 "[Picture %d Quality] Y ZERO_Coeff rate : %d%c (%d)\n",
8072 pic_number
, (100 - rdata32
*100/(blk88_y_count
*1)),
8076 blk88_c_count
= READ_VREG(HEVC_PIC_QUALITY_DATA
);
8077 if (blk88_c_count
== 0) {
8078 vp9_print(pbi
, VP9_DEBUG_QOS_INFO
,
8079 "[Picture %d Quality] NO Data yet.\n",
8081 /* reset all counts */
8082 WRITE_VREG(HEVC_PIC_QUALITY_CTRL
, (1<<8));
8086 rdata32
= READ_VREG(HEVC_PIC_QUALITY_DATA
);
8088 vp9_print(pbi
, VP9_DEBUG_QOS_INFO
,
8089 "[Picture %d Quality] C QP AVG : %d (%d/%d)\n",
8090 pic_number
, rdata32
/blk88_c_count
,
8091 rdata32
, blk88_c_count
);
8094 rdata32
= READ_VREG(HEVC_PIC_QUALITY_DATA
);
8096 vp9_print(pbi
, VP9_DEBUG_QOS_INFO
,
8097 "[Picture %d Quality] C intra rate : %d%c (%d)\n",
8098 pic_number
, rdata32
*100/blk88_c_count
,
8101 /* skipped_cu_c_count */
8102 rdata32
= READ_VREG(HEVC_PIC_QUALITY_DATA
);
8104 vp9_print(pbi
, VP9_DEBUG_QOS_INFO
,
8105 "[Picture %d Quality] C skipped rate : %d%c (%d)\n",
8106 pic_number
, rdata32
*100/blk88_c_count
,
8109 /* coeff_non_zero_c_count */
8110 rdata32
= READ_VREG(HEVC_PIC_QUALITY_DATA
);
8112 vp9_print(pbi
, VP9_DEBUG_QOS_INFO
,
8113 "[Picture %d Quality] C ZERO_Coeff rate : %d%c (%d)\n",
8114 pic_number
, (100 - rdata32
*100/(blk88_c_count
*1)),
8117 /* 1'h0, qp_c_max[6:0], 1'h0, qp_c_min[6:0],
8118 1'h0, qp_y_max[6:0], 1'h0, qp_y_min[6:0] */
8119 rdata32
= READ_VREG(HEVC_PIC_QUALITY_DATA
);
8121 vp9_print(pbi
, VP9_DEBUG_QOS_INFO
,
8122 "[Picture %d Quality] Y QP min : %d\n",
8123 pic_number
, (rdata32
>>0)&0xff);
8125 frame
->min_qp
= (rdata32
>>0)&0xff;
8127 vp9_print(pbi
, VP9_DEBUG_QOS_INFO
,
8128 "[Picture %d Quality] Y QP max : %d\n",
8129 pic_number
, (rdata32
>>8)&0xff);
8131 frame
->max_qp
= (rdata32
>>8)&0xff;
8133 vp9_print(pbi
, VP9_DEBUG_QOS_INFO
,
8134 "[Picture %d Quality] C QP min : %d\n",
8135 pic_number
, (rdata32
>>16)&0xff);
8136 vp9_print(pbi
, VP9_DEBUG_QOS_INFO
,
8137 "[Picture %d Quality] C QP max : %d\n",
8138 pic_number
, (rdata32
>>24)&0xff);
8140 /* blk22_mv_count */
8141 blk22_mv_count
= READ_VREG(HEVC_PIC_QUALITY_DATA
);
8142 if (blk22_mv_count
== 0) {
8143 vp9_print(pbi
, VP9_DEBUG_QOS_INFO
,
8144 "[Picture %d Quality] NO MV Data yet.\n",
8146 /* reset all counts */
8147 WRITE_VREG(HEVC_PIC_QUALITY_CTRL
, (1<<8));
8150 /* mvy_L1_count[39:32], mvx_L1_count[39:32],
8151 mvy_L0_count[39:32], mvx_L0_count[39:32] */
8152 rdata32
= READ_VREG(HEVC_PIC_QUALITY_DATA
);
8153 /* should all be 0x00 or 0xff */
8154 vp9_print(pbi
, VP9_DEBUG_QOS_INFO
,
8155 "[Picture %d Quality] MV AVG High Bits: 0x%X\n",
8156 pic_number
, rdata32
);
8158 mvx_L0_hi
= ((rdata32
>>0)&0xff);
8159 mvy_L0_hi
= ((rdata32
>>8)&0xff);
8160 mvx_L1_hi
= ((rdata32
>>16)&0xff);
8161 mvy_L1_hi
= ((rdata32
>>24)&0xff);
8163 /* mvx_L0_count[31:0] */
8164 rdata32_l
= READ_VREG(HEVC_PIC_QUALITY_DATA
);
8165 temp_value
= mvx_L0_hi
;
8166 temp_value
= (temp_value
<< 32) | rdata32_l
;
8168 if (mvx_L0_hi
& 0x80)
8169 value
= 0xFFFFFFF000000000 | temp_value
;
8173 value
= div_s64(value
, blk22_mv_count
);
8175 vp9_print(pbi
, VP9_DEBUG_QOS_INFO
,
8176 "[Picture %d Quality] MVX_L0 AVG : %d (%lld/%d)\n",
8177 pic_number
, (int)value
,
8178 value
, blk22_mv_count
);
8180 frame
->avg_mv
= value
;
8182 /* mvy_L0_count[31:0] */
8183 rdata32_l
= READ_VREG(HEVC_PIC_QUALITY_DATA
);
8184 temp_value
= mvy_L0_hi
;
8185 temp_value
= (temp_value
<< 32) | rdata32_l
;
8187 if (mvy_L0_hi
& 0x80)
8188 value
= 0xFFFFFFF000000000 | temp_value
;
8192 vp9_print(pbi
, VP9_DEBUG_QOS_INFO
,
8193 "[Picture %d Quality] MVY_L0 AVG : %d (%lld/%d)\n",
8194 pic_number
, rdata32_l
/blk22_mv_count
,
8195 value
, blk22_mv_count
);
8197 /* mvx_L1_count[31:0] */
8198 rdata32_l
= READ_VREG(HEVC_PIC_QUALITY_DATA
);
8199 temp_value
= mvx_L1_hi
;
8200 temp_value
= (temp_value
<< 32) | rdata32_l
;
8201 if (mvx_L1_hi
& 0x80)
8202 value
= 0xFFFFFFF000000000 | temp_value
;
8206 vp9_print(pbi
, VP9_DEBUG_QOS_INFO
,
8207 "[Picture %d Quality] MVX_L1 AVG : %d (%lld/%d)\n",
8208 pic_number
, rdata32_l
/blk22_mv_count
,
8209 value
, blk22_mv_count
);
8211 /* mvy_L1_count[31:0] */
8212 rdata32_l
= READ_VREG(HEVC_PIC_QUALITY_DATA
);
8213 temp_value
= mvy_L1_hi
;
8214 temp_value
= (temp_value
<< 32) | rdata32_l
;
8215 if (mvy_L1_hi
& 0x80)
8216 value
= 0xFFFFFFF000000000 | temp_value
;
8220 vp9_print(pbi
, VP9_DEBUG_QOS_INFO
,
8221 "[Picture %d Quality] MVY_L1 AVG : %d (%lld/%d)\n",
8222 pic_number
, rdata32_l
/blk22_mv_count
,
8223 value
, blk22_mv_count
);
8225 /* {mvx_L0_max, mvx_L0_min} // format : {sign, abs[14:0]} */
8226 rdata32
= READ_VREG(HEVC_PIC_QUALITY_DATA
);
8227 mv_hi
= (rdata32
>>16)&0xffff;
8229 mv_hi
= 0x8000 - mv_hi
;
8231 vp9_print(pbi
, VP9_DEBUG_QOS_INFO
,
8232 "[Picture %d Quality] MVX_L0 MAX : %d\n",
8235 frame
->max_mv
= mv_hi
;
8237 mv_lo
= (rdata32
>>0)&0xffff;
8239 mv_lo
= 0x8000 - mv_lo
;
8241 vp9_print(pbi
, VP9_DEBUG_QOS_INFO
,
8242 "[Picture %d Quality] MVX_L0 MIN : %d\n",
8245 frame
->min_mv
= mv_lo
;
8247 /* {mvy_L0_max, mvy_L0_min} */
8248 rdata32
= READ_VREG(HEVC_PIC_QUALITY_DATA
);
8249 mv_hi
= (rdata32
>>16)&0xffff;
8251 mv_hi
= 0x8000 - mv_hi
;
8253 vp9_print(pbi
, VP9_DEBUG_QOS_INFO
,
8254 "[Picture %d Quality] MVY_L0 MAX : %d\n",
8257 mv_lo
= (rdata32
>>0)&0xffff;
8259 mv_lo
= 0x8000 - mv_lo
;
8261 vp9_print(pbi
, VP9_DEBUG_QOS_INFO
,
8262 "[Picture %d Quality] MVY_L0 MIN : %d\n",
8265 /* {mvx_L1_max, mvx_L1_min} */
8266 rdata32
= READ_VREG(HEVC_PIC_QUALITY_DATA
);
8267 mv_hi
= (rdata32
>>16)&0xffff;
8269 mv_hi
= 0x8000 - mv_hi
;
8271 vp9_print(pbi
, VP9_DEBUG_QOS_INFO
,
8272 "[Picture %d Quality] MVX_L1 MAX : %d\n",
8275 mv_lo
= (rdata32
>>0)&0xffff;
8277 mv_lo
= 0x8000 - mv_lo
;
8279 vp9_print(pbi
, VP9_DEBUG_QOS_INFO
,
8280 "[Picture %d Quality] MVX_L1 MIN : %d\n",
8283 /* {mvy_L1_max, mvy_L1_min} */
8284 rdata32
= READ_VREG(HEVC_PIC_QUALITY_DATA
);
8285 mv_hi
= (rdata32
>>16)&0xffff;
8287 mv_hi
= 0x8000 - mv_hi
;
8289 vp9_print(pbi
, VP9_DEBUG_QOS_INFO
,
8290 "[Picture %d Quality] MVY_L1 MAX : %d\n",
8293 mv_lo
= (rdata32
>>0)&0xffff;
8295 mv_lo
= 0x8000 - mv_lo
;
8297 vp9_print(pbi
, VP9_DEBUG_QOS_INFO
,
8298 "[Picture %d Quality] MVY_L1 MIN : %d\n",
8301 rdata32
= READ_VREG(HEVC_PIC_QUALITY_CTRL
);
8303 vp9_print(pbi
, VP9_DEBUG_QOS_INFO
,
8304 "[Picture %d Quality] After Read : VDEC_PIC_QUALITY_CTRL : 0x%x\n",
8305 pic_number
, rdata32
);
8307 /* reset all counts */
8308 WRITE_VREG(HEVC_PIC_QUALITY_CTRL
, (1<<8));
8312 static int vvp9_get_ps_info(struct VP9Decoder_s
*pbi
, struct aml_vdec_ps_infos
*ps
)
8314 int dw_mode
= v4l_parser_get_double_write_mode(pbi
);
8316 ps
->visible_width
= pbi
->frame_width
/ get_double_write_ratio(pbi
, dw_mode
);
8317 ps
->visible_height
= pbi
->frame_height
/ get_double_write_ratio(pbi
, dw_mode
);
8318 ps
->coded_width
= ALIGN(pbi
->frame_width
, 32) / get_double_write_ratio(pbi
, dw_mode
);
8319 ps
->coded_height
= ALIGN(pbi
->frame_height
, 32) / get_double_write_ratio(pbi
, dw_mode
);
8320 ps
->dpb_size
= pbi
->used_buf_num
;
8326 static int v4l_res_change(struct VP9Decoder_s
*pbi
)
8328 struct aml_vcodec_ctx
*ctx
=
8329 (struct aml_vcodec_ctx
*)(pbi
->v4l2_ctx
);
8330 struct VP9_Common_s
*const cm
= &pbi
->common
;
8333 if (ctx
->param_sets_from_ucode
&&
8334 pbi
->res_ch_flag
== 0) {
8335 struct aml_vdec_ps_infos ps
;
8336 if ((cm
->width
!= 0 &&
8338 (pbi
->frame_width
!= cm
->width
||
8339 pbi
->frame_height
!= cm
->height
)) {
8341 vp9_print(pbi
, 0, "%s (%d,%d)=>(%d,%d)\r\n", __func__
, cm
->width
,
8342 cm
->height
, pbi
->frame_width
, pbi
->frame_height
);
8343 vvp9_get_ps_info(pbi
, &ps
);
8344 vdec_v4l_set_ps_infos(ctx
, &ps
);
8345 vdec_v4l_res_ch_event(ctx
);
8346 pbi
->v4l_params_parsed
= false;
8347 pbi
->res_ch_flag
= 1;
8349 vp9_bufmgr_postproc(pbi
);
8350 //del_timer_sync(&pbi->timer);
8351 notify_v4l_eos(hw_to_vdec(pbi
));
8360 static irqreturn_t
vvp9_isr_thread_fn(int irq
, void *data
)
8362 struct VP9Decoder_s
*pbi
= (struct VP9Decoder_s
*)data
;
8363 unsigned int dec_status
= pbi
->dec_status
;
8366 /*if (pbi->wait_buf)
8367 * pr_info("set wait_buf to 0\r\n");
8372 #ifdef MULTI_INSTANCE_SUPPORT
8373 #ifdef SUPPORT_FB_DECODING
8374 #ifdef FB_DECODING_TEST_SCHEDULE
8375 if (pbi
->s1_test_cmd
== TEST_SET_PIC_DONE
)
8376 dec_status
= HEVC_DECPIC_DATA_DONE
;
8377 else if (pbi
->s1_test_cmd
== TEST_SET_S2_DONE
8378 && dec_status
== HEVC_DECPIC_DATA_DONE
)
8379 dec_status
= HEVC_S2_DECODING_DONE
;
8380 pbi
->s1_test_cmd
= TEST_SET_NONE
;
8382 /*if (irq != VDEC_IRQ_0)
8383 dec_status = HEVC_S2_DECODING_DONE;*/
8385 if (dec_status
== HEVC_S2_DECODING_DONE
) {
8386 pbi
->dec_result
= DEC_RESULT_DONE
;
8387 vdec_schedule_work(&pbi
->work
);
8388 #ifdef FB_DECODING_TEST_SCHEDULE
8390 pbi
->dec_s1_result
= DEC_S1_RESULT_DONE
;
8391 vdec_schedule_work(&pbi
->s1_work
);
8395 if ((dec_status
== HEVC_NAL_DECODE_DONE
) ||
8396 (dec_status
== HEVC_SEARCH_BUFEMPTY
) ||
8397 (dec_status
== HEVC_DECODE_BUFEMPTY
)
8399 if (pbi
->m_ins_flag
) {
8400 reset_process_time(pbi
);
8401 if (!vdec_frame_based(hw_to_vdec(pbi
)))
8402 dec_again_process(pbi
);
8404 pbi
->dec_result
= DEC_RESULT_GET_DATA
;
8405 vdec_schedule_work(&pbi
->work
);
8408 pbi
->process_busy
= 0;
8410 } else if (dec_status
== HEVC_DECPIC_DATA_DONE
) {
8411 if (pbi
->m_ins_flag
) {
8412 get_picture_qos_info(pbi
);
8413 #ifdef SUPPORT_FB_DECODING
8414 if (pbi
->used_stage_buf_num
> 0) {
8415 reset_process_time(pbi
);
8417 trigger_schedule(pbi
);
8418 #ifdef FB_DECODING_TEST_SCHEDULE
8419 pbi
->s1_test_cmd
= TEST_SET_S2_DONE
;
8422 pbi
->dec_s1_result
= DEC_S1_RESULT_DONE
;
8423 vdec_schedule_work(&pbi
->s1_work
);
8428 reset_process_time(pbi
);
8429 if (pbi
->vf_pre_count
== 0 || pbi
->low_latency_flag
)
8430 vp9_bufmgr_postproc(pbi
);
8432 pbi
->dec_result
= DEC_RESULT_DONE
;
8434 if (mcrcc_cache_alg_flag
)
8436 vdec_schedule_work(&pbi
->work
);
8439 if (pbi
->low_latency_flag
) {
8440 vp9_bufmgr_postproc(pbi
);
8441 WRITE_VREG(HEVC_DEC_STATUS_REG
, HEVC_ACTION_DONE
);
8442 #ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
8443 vdec_profile(hw_to_vdec(pbi
), VDEC_PROFILE_EVENT_CB
);
8444 if (debug
& PRINT_FLAG_VDEC_DETAIL
)
8445 pr_info("%s VP9 frame done \n", __func__
);
8450 pbi
->process_busy
= 0;
8455 if (dec_status
== VP9_EOS
) {
8456 #ifdef MULTI_INSTANCE_SUPPORT
8457 if (pbi
->m_ins_flag
)
8458 reset_process_time(pbi
);
8461 pr_info("VP9_EOS, flush buffer\r\n");
8463 vp9_bufmgr_postproc(pbi
);
8465 pr_info("send VP9_10B_DISCARD_NAL\r\n");
8466 WRITE_VREG(HEVC_DEC_STATUS_REG
, VP9_10B_DISCARD_NAL
);
8467 pbi
->process_busy
= 0;
8468 #ifdef MULTI_INSTANCE_SUPPORT
8469 if (pbi
->m_ins_flag
) {
8470 pbi
->dec_result
= DEC_RESULT_DONE
;
8472 vdec_schedule_work(&pbi
->work
);
8476 } else if (dec_status
== HEVC_DECODE_OVER_SIZE
) {
8477 pr_info("vp9 decode oversize !!\n");
8478 debug
|= (VP9_DEBUG_DIS_LOC_ERROR_PROC
|
8479 VP9_DEBUG_DIS_SYS_ERROR_PROC
);
8480 pbi
->fatal_error
|= DECODER_FATAL_ERROR_SIZE_OVERFLOW
;
8481 #ifdef MULTI_INSTANCE_SUPPORT
8482 if (pbi
->m_ins_flag
)
8483 reset_process_time(pbi
);
8488 if (dec_status
!= VP9_HEAD_PARSER_DONE
) {
8489 pbi
->process_busy
= 0;
8493 if (pbi
->m_ins_flag
&&
8494 !get_free_buf_count(pbi
)) {
8495 pbi
->run_ready_min_buf_num
= pbi
->one_package_frame_cnt
+ 1;
8496 pr_err("need buffer, one package frame count = %d\n", pbi
->one_package_frame_cnt
+ 1);
8497 pbi
->dec_result
= DEC_RESULT_NEED_MORE_BUFFER
;
8498 vdec_schedule_work(&pbi
->work
);
8502 pbi
->one_package_frame_cnt
++;
8504 #ifdef MULTI_INSTANCE_SUPPORT
8505 #ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
8506 if (pbi
->m_ins_flag
==0 && pbi
->low_latency_flag
) {
8507 vdec_profile(hw_to_vdec(pbi
), VDEC_PROFILE_EVENT_RUN
);
8508 if (debug
& PRINT_FLAG_VDEC_DETAIL
)
8509 pr_info("%s VP9 frame header found \n", __func__
);
8512 if (pbi
->m_ins_flag
)
8513 reset_process_time(pbi
);
8515 if (pbi
->process_state
!= PROC_STATE_SENDAGAIN
8516 #ifdef SUPPORT_FB_DECODING
8517 && pbi
->used_stage_buf_num
== 0
8520 if (pbi
->mmu_enable
)
8521 vp9_recycle_mmu_buf_tail(pbi
);
8524 if (pbi
->frame_count
> 0)
8525 vp9_bufmgr_postproc(pbi
);
8528 if (debug
& VP9_DEBUG_SEND_PARAM_WITH_REG
) {
8529 get_rpm_param(&pbi
->vp9_param
);
8531 #ifdef SUPPORT_FB_DECODING
8532 if (pbi
->used_stage_buf_num
> 0) {
8533 reset_process_time(pbi
);
8537 &pbi
->s1_mv_buf_index
,
8538 &pbi
->s1_mpred_mv_wr_start_addr
8541 "%s: Error get_mv_buf fail\n",
8545 if (pbi
->s1_buf
== NULL
) {
8547 "%s: Error get_s1_buf fail\n",
8549 pbi
->process_busy
= 0;
8553 for (i
= 0; i
< (RPM_END
- RPM_BEGIN
); i
+= 4) {
8555 for (ii
= 0; ii
< 4; ii
++) {
8556 pbi
->s1_buf
->rpm
[i
+ 3 - ii
] =
8557 pbi
->rpm_ptr
[i
+ 3 - ii
];
8558 pbi
->s1_param
.l
.data
[i
+ ii
] =
8559 pbi
->rpm_ptr
[i
+ 3 - ii
];
8564 #ifdef FB_DECODING_TEST_SCHEDULE
8565 pbi
->dec_s1_result
=
8566 DEC_S1_RESULT_TEST_TRIGGER_DONE
;
8567 vdec_schedule_work(&pbi
->s1_work
);
8569 WRITE_VREG(HEVC_ASSIST_FB_MMU_MAP_ADDR
,
8570 pbi
->stage_mmu_map_phy_addr
+
8571 pbi
->s1_buf
->index
* STAGE_MMU_MAP_SIZE
);
8573 start_s1_decoding(pbi
);
8575 start_process_time(pbi
);
8576 pbi
->process_busy
= 0;
8581 for (i
= 0; i
< (RPM_END
- RPM_BEGIN
); i
+= 4) {
8583 for (ii
= 0; ii
< 4; ii
++)
8584 pbi
->vp9_param
.l
.data
[i
+ ii
] =
8585 pbi
->rpm_ptr
[i
+ 3 - ii
];
8590 if (pbi
->is_used_v4l
) {
8591 struct aml_vcodec_ctx
*ctx
=
8592 (struct aml_vcodec_ctx
*)(pbi
->v4l2_ctx
);
8594 pbi
->frame_width
= pbi
->vp9_param
.p
.width
;
8595 pbi
->frame_height
= pbi
->vp9_param
.p
.height
;
8597 if (!v4l_res_change(pbi
)) {
8598 if (ctx
->param_sets_from_ucode
&& !pbi
->v4l_params_parsed
) {
8599 struct aml_vdec_ps_infos ps
;
8601 pr_debug("set ucode parse\n");
8602 vvp9_get_ps_info(pbi
, &ps
);
8603 /*notice the v4l2 codec.*/
8604 vdec_v4l_set_ps_infos(ctx
, &ps
);
8605 pbi
->v4l_params_parsed
= true;
8606 pbi
->postproc_done
= 0;
8607 pbi
->process_busy
= 0;
8608 dec_again_process(pbi
);
8612 pbi
->postproc_done
= 0;
8613 pbi
->process_busy
= 0;
8614 dec_again_process(pbi
);
8619 continue_decoding(pbi
);
8620 pbi
->postproc_done
= 0;
8621 pbi
->process_busy
= 0;
8623 #ifdef MULTI_INSTANCE_SUPPORT
8624 if (pbi
->m_ins_flag
)
8625 start_process_time(pbi
);
8631 static irqreturn_t
vvp9_isr(int irq
, void *data
)
8634 unsigned int dec_status
;
8635 struct VP9Decoder_s
*pbi
= (struct VP9Decoder_s
*)data
;
8636 unsigned int adapt_prob_status
;
8637 struct VP9_Common_s
*const cm
= &pbi
->common
;
8640 WRITE_VREG(HEVC_ASSIST_MBOX0_CLR_REG
, 1);
8642 dec_status
= READ_VREG(HEVC_DEC_STATUS_REG
);
8643 adapt_prob_status
= READ_VREG(VP9_ADAPT_PROB_REG
);
8646 if (pbi
->init_flag
== 0)
8648 if (pbi
->process_busy
)/*on process.*/
8650 pbi
->dec_status
= dec_status
;
8651 pbi
->process_busy
= 1;
8652 if (debug
& VP9_DEBUG_BUFMGR
)
8653 pr_info("vp9 isr (%d) dec status = 0x%x, lcu 0x%x shiftbyte 0x%x (%x %x lev %x, wr %x, rd %x)\n",
8655 dec_status
, READ_VREG(HEVC_PARSER_LCU_START
),
8656 READ_VREG(HEVC_SHIFT_BYTE_COUNT
),
8657 READ_VREG(HEVC_STREAM_START_ADDR
),
8658 READ_VREG(HEVC_STREAM_END_ADDR
),
8659 READ_VREG(HEVC_STREAM_LEVEL
),
8660 READ_VREG(HEVC_STREAM_WR_PTR
),
8661 READ_VREG(HEVC_STREAM_RD_PTR
)
8663 #ifdef SUPPORT_FB_DECODING
8664 /*if (irq != VDEC_IRQ_0)
8665 return IRQ_WAKE_THREAD;*/
8668 debug_tag
= READ_HREG(DEBUG_REG1
);
8669 if (debug_tag
& 0x10000) {
8670 pr_info("LMEM<tag %x>:\n", READ_HREG(DEBUG_REG1
));
8671 for (i
= 0; i
< 0x400; i
+= 4) {
8674 pr_info("%03x: ", i
);
8675 for (ii
= 0; ii
< 4; ii
++) {
8677 pbi
->lmem_ptr
[i
+ 3 - ii
]);
8679 if (((i
+ ii
) & 0xf) == 0)
8683 if ((udebug_pause_pos
== (debug_tag
& 0xffff)) &&
8684 (udebug_pause_decode_idx
== 0 ||
8685 udebug_pause_decode_idx
== pbi
->slice_idx
) &&
8686 (udebug_pause_val
== 0 ||
8687 udebug_pause_val
== READ_HREG(DEBUG_REG2
)))
8688 pbi
->ucode_pause_pos
= udebug_pause_pos
;
8689 else if (debug_tag
& 0x20000)
8690 pbi
->ucode_pause_pos
= 0xffffffff;
8691 if (pbi
->ucode_pause_pos
)
8692 reset_process_time(pbi
);
8694 WRITE_HREG(DEBUG_REG1
, 0);
8695 } else if (debug_tag
!= 0) {
8697 "dbg%x: %x lcu %x\n", READ_HREG(DEBUG_REG1
),
8698 READ_HREG(DEBUG_REG2
),
8699 READ_VREG(HEVC_PARSER_LCU_START
));
8700 if ((udebug_pause_pos
== (debug_tag
& 0xffff)) &&
8701 (udebug_pause_decode_idx
== 0 ||
8702 udebug_pause_decode_idx
== pbi
->slice_idx
) &&
8703 (udebug_pause_val
== 0 ||
8704 udebug_pause_val
== READ_HREG(DEBUG_REG2
)))
8705 pbi
->ucode_pause_pos
= udebug_pause_pos
;
8706 if (pbi
->ucode_pause_pos
)
8707 reset_process_time(pbi
);
8709 WRITE_HREG(DEBUG_REG1
, 0);
8710 pbi
->process_busy
= 0;
8714 #ifdef MULTI_INSTANCE_SUPPORT
8715 if (!pbi
->m_ins_flag
) {
8717 if (pbi
->error_flag
== 1) {
8718 pbi
->error_flag
= 2;
8719 pbi
->process_busy
= 0;
8721 } else if (pbi
->error_flag
== 3) {
8722 pbi
->process_busy
= 0;
8726 if (get_free_buf_count(pbi
) <= 0) {
8728 if (pbi->wait_buf == 0)
8729 pr_info("set wait_buf to 1\r\n");
8732 pbi
->process_busy
= 0;
8735 #ifdef MULTI_INSTANCE_SUPPORT
8738 if ((adapt_prob_status
& 0xff) == 0xfd) {
8739 /*VP9_REQ_ADAPT_PROB*/
8740 int pre_fc
= (cm
->frame_type
== KEY_FRAME
) ? 1 : 0;
8741 uint8_t *prev_prob_b
=
8742 ((uint8_t *)pbi
->prob_buffer_addr
) +
8743 ((adapt_prob_status
>> 8) * 0x1000);
8744 uint8_t *cur_prob_b
=
8745 ((uint8_t *)pbi
->prob_buffer_addr
) + 0x4000;
8746 uint8_t *count_b
= (uint8_t *)pbi
->count_buffer_addr
;
8747 #ifdef MULTI_INSTANCE_SUPPORT
8748 if (pbi
->m_ins_flag
)
8749 reset_process_time(pbi
);
8751 adapt_coef_probs(pbi
->pic_count
,
8752 (cm
->last_frame_type
== KEY_FRAME
),
8753 pre_fc
, (adapt_prob_status
>> 8),
8754 (unsigned int *)prev_prob_b
,
8755 (unsigned int *)cur_prob_b
, (unsigned int *)count_b
);
8757 memcpy(prev_prob_b
, cur_prob_b
, PROB_SIZE
);
8758 WRITE_VREG(VP9_ADAPT_PROB_REG
, 0);
8759 pbi
->pic_count
+= 1;
8760 #ifdef MULTI_INSTANCE_SUPPORT
8761 if (pbi
->m_ins_flag
)
8762 start_process_time(pbi
);
8765 /*return IRQ_HANDLED;*/
8767 return IRQ_WAKE_THREAD
;
8770 static void vp9_set_clk(struct work_struct
*work
)
8772 struct VP9Decoder_s
*pbi
= container_of(work
,
8773 struct VP9Decoder_s
, set_clk_work
);
8774 int fps
= 96000 / pbi
->frame_dur
;
8776 if (hevc_source_changed(VFORMAT_VP9
,
8777 frame_width
, frame_height
, fps
) > 0)
8778 pbi
->saved_resolution
= frame_width
*
8782 static void vvp9_put_timer_func(unsigned long arg
)
8784 struct VP9Decoder_s
*pbi
= (struct VP9Decoder_s
*)arg
;
8785 struct timer_list
*timer
= &pbi
->timer
;
8787 unsigned int buf_level
;
8789 enum receviver_start_e state
= RECEIVER_INACTIVE
;
8791 if (pbi
->m_ins_flag
) {
8792 if (hw_to_vdec(pbi
)->next_status
8793 == VDEC_STATUS_DISCONNECTED
&&
8794 !pbi
->is_used_v4l
) {
8795 #ifdef SUPPORT_FB_DECODING
8799 pbi
->dec_s1_result
= DEC_S1_RESULT_FORCE_EXIT
;
8800 vdec_schedule_work(&pbi
->s1_work
);
8802 pbi
->dec_result
= DEC_RESULT_FORCE_EXIT
;
8803 vdec_schedule_work(&pbi
->work
);
8805 "vdec requested to be disconnected\n");
8809 if (pbi
->init_flag
== 0) {
8810 if (pbi
->stat
& STAT_TIMER_ARM
) {
8811 timer
->expires
= jiffies
+ PUT_INTERVAL
;
8812 add_timer(&pbi
->timer
);
8816 if (pbi
->m_ins_flag
== 0) {
8817 if (vf_get_receiver(pbi
->provider_name
)) {
8819 vf_notify_receiver(pbi
->provider_name
,
8820 VFRAME_EVENT_PROVIDER_QUREY_STATE
,
8822 if ((state
== RECEIVER_STATE_NULL
)
8823 || (state
== RECEIVER_STATE_NONE
))
8824 state
= RECEIVER_INACTIVE
;
8826 state
= RECEIVER_INACTIVE
;
8828 empty_flag
= (READ_VREG(HEVC_PARSER_INT_STATUS
) >> 6) & 0x1;
8829 /* error watchdog */
8830 if (empty_flag
== 0) {
8831 /* decoder has input */
8832 if ((debug
& VP9_DEBUG_DIS_LOC_ERROR_PROC
) == 0) {
8834 buf_level
= READ_VREG(HEVC_STREAM_LEVEL
);
8835 /* receiver has no buffer to recycle */
8836 if ((state
== RECEIVER_INACTIVE
) &&
8837 (kfifo_is_empty(&pbi
->display_q
) &&
8841 (HEVC_ASSIST_MBOX0_IRQ_REG
,
8846 if ((debug
& VP9_DEBUG_DIS_SYS_ERROR_PROC
) == 0) {
8847 /* receiver has no buffer to recycle */
8848 /*if ((state == RECEIVER_INACTIVE) &&
8849 * (kfifo_is_empty(&pbi->display_q))) {
8850 *pr_info("vp9 something error,need reset\n");
8856 #ifdef MULTI_INSTANCE_SUPPORT
8859 (decode_timeout_val
> 0) &&
8860 (pbi
->start_process_time
> 0) &&
8861 ((1000 * (jiffies
- pbi
->start_process_time
) / HZ
)
8862 > decode_timeout_val
)
8864 int current_lcu_idx
=
8865 READ_VREG(HEVC_PARSER_LCU_START
)
8867 if (pbi
->last_lcu_idx
== current_lcu_idx
) {
8868 if (pbi
->decode_timeout_count
> 0)
8869 pbi
->decode_timeout_count
--;
8870 if (pbi
->decode_timeout_count
== 0) {
8871 if (input_frame_based(
8873 (READ_VREG(HEVC_STREAM_LEVEL
) > 0x200))
8874 timeout_process(pbi
);
8877 "timeout & empty, again\n");
8878 dec_again_process(pbi
);
8882 start_process_time(pbi
);
8883 pbi
->last_lcu_idx
= current_lcu_idx
;
8889 if ((pbi
->ucode_pause_pos
!= 0) &&
8890 (pbi
->ucode_pause_pos
!= 0xffffffff) &&
8891 udebug_pause_pos
!= pbi
->ucode_pause_pos
) {
8892 pbi
->ucode_pause_pos
= 0;
8893 WRITE_HREG(DEBUG_REG1
, 0);
8895 #ifdef MULTI_INSTANCE_SUPPORT
8896 if (debug
& VP9_DEBUG_FORCE_SEND_AGAIN
) {
8898 "Force Send Again\r\n");
8899 debug
&= ~VP9_DEBUG_FORCE_SEND_AGAIN
;
8900 reset_process_time(pbi
);
8901 pbi
->dec_result
= DEC_RESULT_AGAIN
;
8902 if (pbi
->process_state
==
8903 PROC_STATE_DECODESLICE
) {
8904 if (pbi
->mmu_enable
)
8905 vp9_recycle_mmu_buf(pbi
);
8906 pbi
->process_state
=
8907 PROC_STATE_SENDAGAIN
;
8911 vdec_schedule_work(&pbi
->work
);
8914 if (debug
& VP9_DEBUG_DUMP_DATA
) {
8915 debug
&= ~VP9_DEBUG_DUMP_DATA
;
8917 "%s: chunk size 0x%x off 0x%x sum 0x%x\n",
8921 get_data_check_sum(pbi
, pbi
->chunk
->size
)
8923 dump_data(pbi
, pbi
->chunk
->size
);
8926 if (debug
& VP9_DEBUG_DUMP_PIC_LIST
) {
8928 debug
&= ~VP9_DEBUG_DUMP_PIC_LIST
;
8930 if (debug
& VP9_DEBUG_TRIG_SLICE_SEGMENT_PROC
) {
8931 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG
, 0x1);
8932 debug
&= ~VP9_DEBUG_TRIG_SLICE_SEGMENT_PROC
;
8934 /*if (debug & VP9_DEBUG_HW_RESET) {
8939 WRITE_VREG(radr
, rval
);
8940 pr_info("WRITE_VREG(%x,%x)\n", radr
, rval
);
8942 pr_info("READ_VREG(%x)=%x\n", radr
, READ_VREG(radr
));
8946 if (pop_shorts
!= 0) {
8950 pr_info("pop stream 0x%x shorts\r\n", pop_shorts
);
8951 for (i
= 0; i
< pop_shorts
; i
++) {
8953 (READ_HREG(HEVC_SHIFTED_DATA
) >> 16);
8954 WRITE_HREG(HEVC_SHIFT_COMMAND
,
8957 pr_info("%04x:", i
);
8958 pr_info("%04x ", data
);
8959 if (((i
+ 1) & 0xf) == 0)
8963 pr_info("\r\nsum = %x\r\n", sum
);
8970 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXBB
&&
8971 get_double_write_mode(pbi
) == 0) {
8973 READ_VCBUS_REG(AFBC_BODY_BADDR
) << 4;
8975 struct canvas_s cur_canvas
;
8977 canvas_read((READ_VCBUS_REG(VD1_IF0_CANVAS0
)
8978 & 0xff), &cur_canvas
);
8979 disp_laddr
= cur_canvas
.addr
;
8981 pr_info("current displayed buffer address %x\r\n",
8986 /*don't changed at start.*/
8987 if (pbi
->get_frame_dur
&& pbi
->show_frame_num
> 60 &&
8988 pbi
->frame_dur
> 0 && pbi
->saved_resolution
!=
8989 frame_width
* frame_height
*
8990 (96000 / pbi
->frame_dur
))
8991 vdec_schedule_work(&pbi
->set_clk_work
);
8993 timer
->expires
= jiffies
+ PUT_INTERVAL
;
8998 int vvp9_dec_status(struct vdec_s
*vdec
, struct vdec_info
*vstatus
)
9000 struct VP9Decoder_s
*vp9
=
9001 (struct VP9Decoder_s
*)vdec
->private;
9006 vstatus
->frame_width
= frame_width
;
9007 vstatus
->frame_height
= frame_height
;
9008 if (vp9
->frame_dur
!= 0)
9009 vstatus
->frame_rate
= 96000 / vp9
->frame_dur
;
9011 vstatus
->frame_rate
= -1;
9012 vstatus
->error_count
= 0;
9013 vstatus
->status
= vp9
->stat
| vp9
->fatal_error
;
9014 vstatus
->frame_dur
= vp9
->frame_dur
;
9015 vstatus
->bit_rate
= vp9
->gvs
->bit_rate
;
9016 vstatus
->frame_data
= vp9
->gvs
->frame_data
;
9017 vstatus
->total_data
= vp9
->gvs
->total_data
;
9018 vstatus
->frame_count
= vp9
->gvs
->frame_count
;
9019 vstatus
->error_frame_count
= vp9
->gvs
->error_frame_count
;
9020 vstatus
->drop_frame_count
= vp9
->gvs
->drop_frame_count
;
9021 vstatus
->total_data
= vp9
->gvs
->total_data
;
9022 vstatus
->samp_cnt
= vp9
->gvs
->samp_cnt
;
9023 vstatus
->offset
= vp9
->gvs
->offset
;
9024 snprintf(vstatus
->vdec_name
, sizeof(vstatus
->vdec_name
),
9029 int vvp9_set_isreset(struct vdec_s
*vdec
, int isreset
)
9036 static void VP9_DECODE_INIT(void)
9038 /* enable vp9 clocks */
9039 WRITE_VREG(DOS_GCLK_EN3
, 0xffffffff);
9040 /* *************************************************************** */
9042 /* *************************************************************** */
9044 WRITE_VREG(AO_RTI_GEN_PWR_SLEEP0
,
9045 READ_VREG(AO_RTI_GEN_PWR_SLEEP0
) & (~(0x3 << 6)));
9046 WRITE_VREG(DOS_MEM_PD_HEVC
, 0x0);
9047 WRITE_VREG(DOS_SW_RESET3
, READ_VREG(DOS_SW_RESET3
) | (0x3ffff << 2));
9048 WRITE_VREG(DOS_SW_RESET3
, READ_VREG(DOS_SW_RESET3
) & (~(0x3ffff << 2)));
9049 /* remove isolations */
9050 WRITE_VREG(AO_RTI_GEN_PWR_ISO0
,
9051 READ_VREG(AO_RTI_GEN_PWR_ISO0
) & (~(0x3 << 10)));
9056 static void vvp9_prot_init(struct VP9Decoder_s
*pbi
, u32 mask
)
9058 unsigned int data32
;
9059 /* VP9_DECODE_INIT(); */
9060 vp9_config_work_space_hw(pbi
, mask
);
9061 if (mask
& HW_MASK_BACK
)
9062 init_pic_list_hw(pbi
);
9064 vp9_init_decoder_hw(pbi
, mask
);
9066 #ifdef VP9_LPF_LVL_UPDATE
9067 if (mask
& HW_MASK_BACK
)
9068 vp9_loop_filter_init(pbi
);
9071 if ((mask
& HW_MASK_FRONT
) == 0)
9074 if (debug
& VP9_DEBUG_BUFMGR_MORE
)
9075 pr_info("%s\n", __func__
);
9076 data32
= READ_VREG(HEVC_STREAM_CONTROL
);
9078 (1 << 0)/*stream_fetch_enable*/
9080 WRITE_VREG(HEVC_STREAM_CONTROL
, data32
);
9082 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A
) {
9083 if (debug
& VP9_DEBUG_BUFMGR
)
9084 pr_info("[test.c] Config STREAM_FIFO_CTL\n");
9085 data32
= READ_VREG(HEVC_STREAM_FIFO_CTL
);
9087 (1 << 29) // stream_fifo_hole
9089 WRITE_VREG(HEVC_STREAM_FIFO_CTL
, data32
);
9092 data32
= READ_VREG(HEVC_SHIFT_STARTCODE
);
9093 if (data32
!= 0x00000100) {
9094 pr_info("vp9 prot init error %d\n", __LINE__
);
9097 data32
= READ_VREG(HEVC_SHIFT_EMULATECODE
);
9098 if (data32
!= 0x00000300) {
9099 pr_info("vp9 prot init error %d\n", __LINE__
);
9102 WRITE_VREG(HEVC_SHIFT_STARTCODE
, 0x12345678);
9103 WRITE_VREG(HEVC_SHIFT_EMULATECODE
, 0x9abcdef0);
9104 data32
= READ_VREG(HEVC_SHIFT_STARTCODE
);
9105 if (data32
!= 0x12345678) {
9106 pr_info("vp9 prot init error %d\n", __LINE__
);
9109 data32
= READ_VREG(HEVC_SHIFT_EMULATECODE
);
9110 if (data32
!= 0x9abcdef0) {
9111 pr_info("vp9 prot init error %d\n", __LINE__
);
9115 WRITE_VREG(HEVC_SHIFT_STARTCODE
, 0x000000001);
9116 WRITE_VREG(HEVC_SHIFT_EMULATECODE
, 0x00000300);
9121 WRITE_VREG(HEVC_WAIT_FLAG
, 1);
9123 /* WRITE_VREG(HEVC_MPSR, 1); */
9125 /* clear mailbox interrupt */
9126 WRITE_VREG(HEVC_ASSIST_MBOX0_CLR_REG
, 1);
9128 /* enable mailbox interrupt */
9129 WRITE_VREG(HEVC_ASSIST_MBOX0_MASK
, 1);
9131 /* disable PSCALE for hardware sharing */
9132 WRITE_VREG(HEVC_PSCALE_CTRL
, 0);
9134 WRITE_VREG(DEBUG_REG1
, 0x0);
9135 /*check vps/sps/pps/i-slice in ucode*/
9136 WRITE_VREG(NAL_SEARCH_CTL
, 0x8);
9138 WRITE_VREG(DECODE_STOP_POS
, udebug_flag
);
9139 #ifdef SUPPORT_FB_DECODING
9140 #ifndef FB_DECODING_TEST_SCHEDULE
9141 if (pbi
->used_stage_buf_num
> 0) {
9142 if (mask
& HW_MASK_FRONT
) {
9144 HEVC_ASSIST_HED_FB_W_CTL
);
9146 (1 << 0) /*hed_fb_wr_en*/
9148 WRITE_VREG(HEVC_ASSIST_HED_FB_W_CTL
,
9151 if (mask
& HW_MASK_BACK
) {
9153 HEVC_ASSIST_HED_FB_R_CTL
);
9154 while (data32
& (1 << 7)) {
9157 HEVC_ASSIST_HED_FB_R_CTL
);
9159 data32
&= (~(0x1 << 0));
9160 /*hed_fb_rd_addr_auto_rd*/
9161 data32
&= (~(0x1 << 1));
9162 /*rd_id = 0, hed_rd_map_auto_halt_num,
9163 after wr 2 ready, then start reading*/
9164 data32
|= (0x2 << 16);
9165 WRITE_VREG(HEVC_ASSIST_HED_FB_R_CTL
,
9168 data32
|= (0x1 << 11); /*hed_rd_map_auto_halt_en*/
9169 data32
|= (0x1 << 1); /*hed_fb_rd_addr_auto_rd*/
9170 data32
|= (0x1 << 0); /*hed_fb_rd_en*/
9171 WRITE_VREG(HEVC_ASSIST_HED_FB_R_CTL
,
9180 static int vvp9_local_init(struct VP9Decoder_s
*pbi
)
9185 if (alloc_lf_buf(pbi
) < 0)
9188 pbi
->gvs
= vzalloc(sizeof(struct vdec_info
));
9189 if (NULL
== pbi
->gvs
) {
9190 pr_info("the struct of vdec status malloc failed.\n");
9193 vdec_set_vframe_comm(hw_to_vdec(pbi
), DRIVER_NAME
);
9195 pbi
->pts_missed
= 0;
9198 pbi
->new_frame_displayed
= 0;
9199 pbi
->last_put_idx
= -1;
9200 pbi
->saved_resolution
= 0;
9201 pbi
->get_frame_dur
= false;
9202 on_no_keyframe_skiped
= 0;
9203 pbi
->duration_from_pts_done
= 0;
9204 pbi
->vp9_first_pts_ready
= 0;
9205 pbi
->frame_cnt_window
= 0;
9206 width
= pbi
->vvp9_amstream_dec_info
.width
;
9207 height
= pbi
->vvp9_amstream_dec_info
.height
;
9209 (pbi
->vvp9_amstream_dec_info
.rate
==
9210 0) ? 3200 : pbi
->vvp9_amstream_dec_info
.rate
;
9211 if (width
&& height
)
9212 pbi
->frame_ar
= height
* 0x100 / width
;
9216 pr_info("vp9: ver (%d,%d) decinfo: %dx%d rate=%d\n", vp9_version
,
9217 0, width
, height
, pbi
->frame_dur
);
9219 if (pbi
->frame_dur
== 0)
9220 pbi
->frame_dur
= 96000 / 24;
9222 INIT_KFIFO(pbi
->display_q
);
9223 INIT_KFIFO(pbi
->newframe_q
);
9226 for (i
= 0; i
< VF_POOL_SIZE
; i
++) {
9227 const struct vframe_s
*vf
= &pbi
->vfpool
[i
];
9229 pbi
->vfpool
[i
].index
= -1;
9230 kfifo_put(&pbi
->newframe_q
, vf
);
9234 ret
= vp9_local_init(pbi
);
9236 if (!pbi
->pts_unstable
) {
9238 (pbi
->vvp9_amstream_dec_info
.rate
== 0)?1:0;
9239 pr_info("set pts unstable\n");
9246 #ifdef MULTI_INSTANCE_SUPPORT
9247 static s32
vvp9_init(struct vdec_s
*vdec
)
9249 struct VP9Decoder_s
*pbi
= (struct VP9Decoder_s
*)vdec
->private;
9251 static s32
vvp9_init(struct VP9Decoder_s
*pbi
)
9255 int fw_size
= 0x1000 * 16;
9256 struct firmware_s
*fw
= NULL
;
9258 pbi
->stat
|= STAT_TIMER_INIT
;
9260 if (vvp9_local_init(pbi
) < 0)
9263 fw
= vmalloc(sizeof(struct firmware_s
) + fw_size
);
9264 if (IS_ERR_OR_NULL(fw
))
9267 if (get_firmware_data(VIDEO_DEC_VP9_MMU
, fw
->data
) < 0) {
9268 pr_err("get firmware fail.\n");
9275 INIT_WORK(&pbi
->set_clk_work
, vp9_set_clk
);
9276 init_timer(&pbi
->timer
);
9278 #ifdef MULTI_INSTANCE_SUPPORT
9279 if (pbi
->m_ins_flag
) {
9280 pbi
->timer
.data
= (ulong
) pbi
;
9281 pbi
->timer
.function
= vvp9_put_timer_func
;
9282 pbi
->timer
.expires
= jiffies
+ PUT_INTERVAL
;
9284 /*add_timer(&pbi->timer);
9286 pbi->stat |= STAT_TIMER_ARM;
9287 pbi->stat |= STAT_ISR_REG;*/
9289 INIT_WORK(&pbi
->work
, vp9_work
);
9290 INIT_WORK(&pbi
->recycle_mmu_work
, vp9_recycle_mmu_work
);
9291 #ifdef SUPPORT_FB_DECODING
9292 if (pbi
->used_stage_buf_num
> 0)
9293 INIT_WORK(&pbi
->s1_work
, vp9_s1_work
);
9297 /* picture list init.*/
9298 pbi
->dec_result
= DEC_INIT_PICLIST
;
9299 vdec_schedule_work(&pbi
->work
);
9308 ret
= amhevc_loadmc_ex(VFORMAT_VP9
, NULL
, fw
->data
);
9312 pr_err("VP9: the %s fw loading failed, err: %x\n",
9313 tee_enabled() ? "TEE" : "local", ret
);
9319 pbi
->stat
|= STAT_MC_LOAD
;
9321 /* enable AMRISC side protocol */
9322 vvp9_prot_init(pbi
, HW_MASK_FRONT
| HW_MASK_BACK
);
9324 if (vdec_request_threaded_irq(VDEC_IRQ_0
,
9327 IRQF_ONESHOT
,/*run thread on this irq disabled*/
9328 "vvp9-irq", (void *)pbi
)) {
9329 pr_info("vvp9 irq register error.\n");
9334 pbi
->stat
|= STAT_ISR_REG
;
9336 pbi
->provider_name
= PROVIDER_NAME
;
9337 #ifdef MULTI_INSTANCE_SUPPORT
9338 vf_provider_init(&vvp9_vf_prov
, PROVIDER_NAME
,
9339 &vvp9_vf_provider
, pbi
);
9340 vf_reg_provider(&vvp9_vf_prov
);
9341 vf_notify_receiver(PROVIDER_NAME
, VFRAME_EVENT_PROVIDER_START
, NULL
);
9342 if (pbi
->frame_dur
!= 0) {
9344 vf_notify_receiver(pbi
->provider_name
,
9345 VFRAME_EVENT_PROVIDER_FR_HINT
,
9347 ((unsigned long)pbi
->frame_dur
));
9350 vf_provider_init(&vvp9_vf_prov
, PROVIDER_NAME
, &vvp9_vf_provider
,
9352 vf_reg_provider(&vvp9_vf_prov
);
9353 vf_notify_receiver(PROVIDER_NAME
, VFRAME_EVENT_PROVIDER_START
, NULL
);
9355 vf_notify_receiver(PROVIDER_NAME
, VFRAME_EVENT_PROVIDER_FR_HINT
,
9356 (void *)((unsigned long)pbi
->frame_dur
));
9358 pbi
->stat
|= STAT_VF_HOOK
;
9360 pbi
->timer
.data
= (ulong
)pbi
;
9361 pbi
->timer
.function
= vvp9_put_timer_func
;
9362 pbi
->timer
.expires
= jiffies
+ PUT_INTERVAL
;
9364 pbi
->stat
|= STAT_VDEC_RUN
;
9366 add_timer(&pbi
->timer
);
9368 pbi
->stat
|= STAT_TIMER_ARM
;
9373 pbi
->process_busy
= 0;
9374 pr_info("%d, vvp9_init, RP=0x%x\n",
9375 __LINE__
, READ_VREG(HEVC_STREAM_RD_PTR
));
9379 static int vmvp9_stop(struct VP9Decoder_s
*pbi
)
9383 if (pbi
->stat
& STAT_VDEC_RUN
) {
9385 pbi
->stat
&= ~STAT_VDEC_RUN
;
9387 if (pbi
->stat
& STAT_ISR_REG
) {
9388 vdec_free_irq(VDEC_IRQ_0
, (void *)pbi
);
9389 pbi
->stat
&= ~STAT_ISR_REG
;
9391 if (pbi
->stat
& STAT_TIMER_ARM
) {
9392 del_timer_sync(&pbi
->timer
);
9393 pbi
->stat
&= ~STAT_TIMER_ARM
;
9396 if (pbi
->stat
& STAT_VF_HOOK
) {
9398 vf_notify_receiver(pbi
->provider_name
,
9399 VFRAME_EVENT_PROVIDER_FR_END_HINT
,
9402 vf_unreg_provider(&vvp9_vf_prov
);
9403 pbi
->stat
&= ~STAT_VF_HOOK
;
9405 vp9_local_uninit(pbi
);
9406 reset_process_time(pbi
);
9407 cancel_work_sync(&pbi
->work
);
9408 cancel_work_sync(&pbi
->recycle_mmu_work
);
9409 #ifdef SUPPORT_FB_DECODING
9410 if (pbi
->used_stage_buf_num
> 0)
9411 cancel_work_sync(&pbi
->s1_work
);
9413 cancel_work_sync(&pbi
->set_clk_work
);
9414 uninit_mmu_buffers(pbi
);
9421 static int vvp9_stop(struct VP9Decoder_s
*pbi
)
9425 pbi
->first_sc_checked
= 0;
9426 if (pbi
->stat
& STAT_VDEC_RUN
) {
9428 pbi
->stat
&= ~STAT_VDEC_RUN
;
9431 if (pbi
->stat
& STAT_ISR_REG
) {
9432 #ifdef MULTI_INSTANCE_SUPPORT
9433 if (!pbi
->m_ins_flag
)
9435 WRITE_VREG(HEVC_ASSIST_MBOX0_MASK
, 0);
9436 vdec_free_irq(VDEC_IRQ_0
, (void *)pbi
);
9437 pbi
->stat
&= ~STAT_ISR_REG
;
9440 if (pbi
->stat
& STAT_TIMER_ARM
) {
9441 del_timer_sync(&pbi
->timer
);
9442 pbi
->stat
&= ~STAT_TIMER_ARM
;
9445 if (pbi
->stat
& STAT_VF_HOOK
) {
9447 vf_notify_receiver(pbi
->provider_name
,
9448 VFRAME_EVENT_PROVIDER_FR_END_HINT
,
9451 vf_unreg_provider(&vvp9_vf_prov
);
9452 pbi
->stat
&= ~STAT_VF_HOOK
;
9454 vp9_local_uninit(pbi
);
9456 cancel_work_sync(&pbi
->set_clk_work
);
9457 #ifdef MULTI_INSTANCE_SUPPORT
9458 if (pbi
->m_ins_flag
) {
9459 #ifdef SUPPORT_FB_DECODING
9460 if (pbi
->used_stage_buf_num
> 0)
9461 cancel_work_sync(&pbi
->s1_work
);
9463 cancel_work_sync(&pbi
->work
);
9464 cancel_work_sync(&pbi
->recycle_mmu_work
);
9470 uninit_mmu_buffers(pbi
);
9476 static int amvdec_vp9_mmu_init(struct VP9Decoder_s
*pbi
)
9478 int tvp_flag
= vdec_secure(hw_to_vdec(pbi
)) ?
9479 CODEC_MM_FLAGS_TVP
: 0;
9482 if ((pbi
->max_pic_w
* pbi
->max_pic_h
> 1280*736) &&
9483 (pbi
->max_pic_w
* pbi
->max_pic_h
<= 1920*1088)) {
9485 } else if ((pbi
->max_pic_w
* pbi
->max_pic_h
> 0) &&
9486 (pbi
->max_pic_w
* pbi
->max_pic_h
<= 1280*736)) {
9489 pbi
->need_cache_size
= buf_size
* SZ_1M
;
9490 pbi
->sc_start_time
= get_jiffies_64();
9491 if (pbi
->mmu_enable
&& ((pbi
->double_write_mode
& 0x10) == 0)) {
9492 pbi
->mmu_box
= decoder_mmu_box_alloc_box(DRIVER_NAME
,
9493 pbi
->index
, FRAME_BUFFERS
,
9494 pbi
->need_cache_size
,
9497 if (!pbi
->mmu_box
) {
9498 pr_err("vp9 alloc mmu box failed!!\n");
9502 pbi
->bmmu_box
= decoder_bmmu_box_alloc_box(
9505 MAX_BMMU_BUFFER_NUM
,
9507 CODEC_MM_FLAGS_CMA_CLEAR
|
9508 CODEC_MM_FLAGS_FOR_VDECODER
|
9510 if (!pbi
->bmmu_box
) {
9511 pr_err("vp9 alloc bmmu box failed!!\n");
9517 static struct VP9Decoder_s
*gHevc
;
9519 static int amvdec_vp9_probe(struct platform_device
*pdev
)
9521 struct vdec_s
*pdata
= *(struct vdec_s
**)pdev
->dev
.platform_data
;
9522 struct BUF_s BUF
[MAX_BUF_NUM
];
9523 struct VP9Decoder_s
*pbi
;
9525 #ifndef MULTI_INSTANCE_SUPPORT
9528 pr_debug("%s\n", __func__
);
9530 mutex_lock(&vvp9_mutex
);
9531 pbi
= vmalloc(sizeof(struct VP9Decoder_s
));
9533 pr_info("\namvdec_vp9 device data allocation failed\n");
9534 mutex_unlock(&vvp9_mutex
);
9539 memcpy(&BUF
[0], &pbi
->m_BUF
[0], sizeof(struct BUF_s
) * MAX_BUF_NUM
);
9540 memset(pbi
, 0, sizeof(struct VP9Decoder_s
));
9541 memcpy(&pbi
->m_BUF
[0], &BUF
[0], sizeof(struct BUF_s
) * MAX_BUF_NUM
);
9544 pbi
->first_sc_checked
= 0;
9545 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1
) {
9546 vp9_max_pic_w
= 8192;
9547 vp9_max_pic_h
= 4608;
9549 pbi
->max_pic_w
= vp9_max_pic_w
;
9550 pbi
->max_pic_h
= vp9_max_pic_h
;
9552 #ifdef MULTI_INSTANCE_SUPPORT
9554 pbi
->start_process_time
= 0;
9555 pbi
->timeout_num
= 0;
9557 pbi
->fatal_error
= 0;
9558 pbi
->show_frame_num
= 0;
9559 if (pdata
== NULL
) {
9560 pr_info("\namvdec_vp9 memory resource undefined.\n");
9562 mutex_unlock(&vvp9_mutex
);
9565 pbi
->m_ins_flag
= 0;
9566 #ifdef MULTI_INSTANCE_SUPPORT
9567 pbi
->platform_dev
= pdev
;
9568 platform_set_drvdata(pdev
, pdata
);
9570 pbi
->double_write_mode
= double_write_mode
;
9571 pbi
->mmu_enable
= 1;
9572 if (amvdec_vp9_mmu_init(pbi
) < 0) {
9574 mutex_unlock(&vvp9_mutex
);
9575 pr_err("vp9 alloc bmmu box failed!!\n");
9579 ret
= decoder_bmmu_box_alloc_buf_phy(pbi
->bmmu_box
, WORK_SPACE_BUF_ID
,
9580 work_buf_size
, DRIVER_NAME
, &pdata
->mem_start
);
9582 uninit_mmu_buffers(pbi
);
9584 mutex_unlock(&vvp9_mutex
);
9587 pbi
->buf_size
= work_buf_size
;
9589 #ifdef MULTI_INSTANCE_SUPPORT
9590 pbi
->buf_start
= pdata
->mem_start
;
9592 if (!pbi
->mmu_enable
)
9593 pbi
->mc_buf_spec
.buf_end
= pdata
->mem_start
+ pbi
->buf_size
;
9595 for (i
= 0; i
< WORK_BUF_SPEC_NUM
; i
++)
9596 amvvp9_workbuff_spec
[i
].start_adr
= pdata
->mem_start
;
9601 pr_info("===VP9 decoder mem resource 0x%lx size 0x%x\n",
9602 pdata
->mem_start
, pbi
->buf_size
);
9605 if (pdata
->sys_info
)
9606 pbi
->vvp9_amstream_dec_info
= *pdata
->sys_info
;
9608 pbi
->vvp9_amstream_dec_info
.width
= 0;
9609 pbi
->vvp9_amstream_dec_info
.height
= 0;
9610 pbi
->vvp9_amstream_dec_info
.rate
= 30;
9612 pbi
->no_head
= no_head
;
9613 #ifdef MULTI_INSTANCE_SUPPORT
9614 pbi
->cma_dev
= pdata
->cma_dev
;
9616 cma_dev
= pdata
->cma_dev
;
9619 #ifdef MULTI_INSTANCE_SUPPORT
9620 pdata
->private = pbi
;
9621 pdata
->dec_status
= vvp9_dec_status
;
9622 pdata
->set_isreset
= vvp9_set_isreset
;
9624 if (vvp9_init(pdata
) < 0) {
9626 if (vvp9_init(pbi
) < 0) {
9628 pr_info("\namvdec_vp9 init failed.\n");
9629 vp9_local_uninit(pbi
);
9630 uninit_mmu_buffers(pbi
);
9632 pdata
->dec_status
= NULL
;
9633 mutex_unlock(&vvp9_mutex
);
9636 /*set the max clk for smooth playing...*/
9637 hevc_source_changed(VFORMAT_VP9
,
9639 mutex_unlock(&vvp9_mutex
);
9644 static void vdec_fence_release(struct VP9Decoder_s
*pbi
,
9645 struct vdec_sync
*sync
)
9650 /* notify signal to wake up all fences. */
9651 vdec_timeline_increase(sync
, VF_POOL_SIZE
);
9653 expires
= jiffies
+ msecs_to_jiffies(2000);
9654 while (!check_objs_all_signaled(sync
)) {
9655 if (time_after(jiffies
, expires
)) {
9656 pr_err("wait fence signaled timeout.\n");
9661 for (i
= 0; i
< VF_POOL_SIZE
; i
++) {
9662 struct vframe_s
*vf
= &pbi
->vfpool
[i
];
9665 vdec_fence_put(vf
->fence
);
9670 /* decreases refcnt of timeline. */
9671 vdec_timeline_put(sync
);
9674 static int amvdec_vp9_remove(struct platform_device
*pdev
)
9676 struct VP9Decoder_s
*pbi
= gHevc
;
9677 struct vdec_s
*vdec
= hw_to_vdec(pbi
);
9681 pr_info("amvdec_vp9_remove\n");
9683 mutex_lock(&vvp9_mutex
);
9687 hevc_source_changed(VFORMAT_VP9
, 0, 0, 0);
9689 if (vdec
->parallel_dec
== 1) {
9690 for (i
= 0; i
< FRAME_BUFFERS
; i
++) {
9691 vdec
->free_canvas_ex(pbi
->common
.buffer_pool
->
9692 frame_bufs
[i
].buf
.y_canvas_index
, vdec
->id
);
9693 vdec
->free_canvas_ex(pbi
->common
.buffer_pool
->
9694 frame_bufs
[i
].buf
.uv_canvas_index
, vdec
->id
);
9699 pr_info("pts missed %ld, pts hit %ld, duration %d\n",
9700 pbi
->pts_missed
, pbi
->pts_hit
, pbi
->frame_dur
);
9704 if (pbi
->enable_fence
)
9705 vdec_fence_release(pbi
, &vdec
->sync
);
9708 mutex_unlock(&vvp9_mutex
);
9713 /****************************************/
9715 static int vp9_suspend(struct device
*dev
)
9717 amhevc_suspend(to_platform_device(dev
), dev
->power
.power_state
);
9721 static int vp9_resume(struct device
*dev
)
9723 amhevc_resume(to_platform_device(dev
));
9727 static const struct dev_pm_ops vp9_pm_ops
= {
9728 SET_SYSTEM_SLEEP_PM_OPS(vp9_suspend
, vp9_resume
)
9732 static struct platform_driver amvdec_vp9_driver
= {
9733 .probe
= amvdec_vp9_probe
,
9734 .remove
= amvdec_vp9_remove
,
9736 .name
= DRIVER_NAME
,
9743 static struct codec_profile_t amvdec_vp9_profile
= {
9748 static struct codec_profile_t amvdec_vp9_profile_mult
;
9750 static unsigned char get_data_check_sum
9751 (struct VP9Decoder_s
*pbi
, int size
)
9757 if (!pbi
->chunk
->block
->is_mapped
)
9758 data
= codec_mm_vmap(pbi
->chunk
->block
->start
+
9759 pbi
->chunk
->offset
, size
);
9761 data
= ((u8
*)pbi
->chunk
->block
->start_virt
) +
9764 for (jj
= 0; jj
< size
; jj
++)
9767 if (!pbi
->chunk
->block
->is_mapped
)
9768 codec_mm_unmap_phyaddr(data
);
9772 static void dump_data(struct VP9Decoder_s
*pbi
, int size
)
9776 int padding_size
= pbi
->chunk
->offset
&
9777 (VDEC_FIFO_ALIGN
- 1);
9779 if (!pbi
->chunk
->block
->is_mapped
)
9780 data
= codec_mm_vmap(pbi
->chunk
->block
->start
+
9781 pbi
->chunk
->offset
, size
);
9783 data
= ((u8
*)pbi
->chunk
->block
->start_virt
) +
9786 vp9_print(pbi
, 0, "padding: ");
9787 for (jj
= padding_size
; jj
> 0; jj
--)
9790 "%02x ", *(data
- jj
));
9791 vp9_print_cont(pbi
, 0, "data adr %p\n",
9794 for (jj
= 0; jj
< size
; jj
++) {
9795 if ((jj
& 0xf) == 0)
9802 if (((jj
+ 1) & 0xf) == 0)
9811 if (!pbi
->chunk
->block
->is_mapped
)
9812 codec_mm_unmap_phyaddr(data
);
9815 static void vp9_work(struct work_struct
*work
)
9817 struct VP9Decoder_s
*pbi
= container_of(work
,
9818 struct VP9Decoder_s
, work
);
9819 struct vdec_s
*vdec
= hw_to_vdec(pbi
);
9820 /* finished decoding one frame or error,
9821 * notify vdec core to switch context
9823 vp9_print(pbi
, PRINT_FLAG_VDEC_DETAIL
,
9824 "%s dec_result %d %x %x %x\n",
9827 READ_VREG(HEVC_STREAM_LEVEL
),
9828 READ_VREG(HEVC_STREAM_WR_PTR
),
9829 READ_VREG(HEVC_STREAM_RD_PTR
));
9831 if (pbi
->dec_result
== DEC_INIT_PICLIST
) {
9833 pbi
->pic_list_init_done
= true;
9837 if (pbi
->dec_result
== DEC_RESULT_NEED_MORE_BUFFER
) {
9838 reset_process_time(pbi
);
9839 if (!get_free_buf_count(pbi
)) {
9840 pbi
->dec_result
= DEC_RESULT_NEED_MORE_BUFFER
;
9841 vdec_schedule_work(&pbi
->work
);
9845 if (pbi
->mmu_enable
)
9846 vp9_recycle_mmu_buf_tail(pbi
);
9848 if (pbi
->frame_count
> 0)
9849 vp9_bufmgr_postproc(pbi
);
9851 for (i
= 0; i
< (RPM_END
- RPM_BEGIN
); i
+= 4) {
9853 for (ii
= 0; ii
< 4; ii
++)
9854 pbi
->vp9_param
.l
.data
[i
+ ii
] =
9855 pbi
->rpm_ptr
[i
+ 3 - ii
];
9857 continue_decoding(pbi
);
9858 pbi
->postproc_done
= 0;
9859 pbi
->process_busy
= 0;
9861 start_process_time(pbi
);
9866 if (((pbi
->dec_result
== DEC_RESULT_GET_DATA
) ||
9867 (pbi
->dec_result
== DEC_RESULT_GET_DATA_RETRY
))
9868 && (hw_to_vdec(pbi
)->next_status
!=
9869 VDEC_STATUS_DISCONNECTED
)) {
9870 if (!vdec_has_more_input(vdec
)) {
9871 pbi
->dec_result
= DEC_RESULT_EOS
;
9872 vdec_schedule_work(&pbi
->work
);
9876 if (pbi
->dec_result
== DEC_RESULT_GET_DATA
) {
9877 vp9_print(pbi
, PRINT_FLAG_VDEC_STATUS
,
9878 "%s DEC_RESULT_GET_DATA %x %x %x\n",
9880 READ_VREG(HEVC_STREAM_LEVEL
),
9881 READ_VREG(HEVC_STREAM_WR_PTR
),
9882 READ_VREG(HEVC_STREAM_RD_PTR
));
9883 vdec_vframe_dirty(vdec
, pbi
->chunk
);
9884 vdec_clean_input(vdec
);
9887 if (get_free_buf_count(pbi
) >=
9888 pbi
->run_ready_min_buf_num
) {
9891 r
= vdec_prepare_input(vdec
, &pbi
->chunk
);
9893 pbi
->dec_result
= DEC_RESULT_GET_DATA_RETRY
;
9896 PRINT_FLAG_VDEC_DETAIL
,
9897 "amvdec_vh265: Insufficient data\n");
9899 vdec_schedule_work(&pbi
->work
);
9902 pbi
->dec_result
= DEC_RESULT_NONE
;
9903 vp9_print(pbi
, PRINT_FLAG_VDEC_STATUS
,
9904 "%s: chunk size 0x%x sum 0x%x\n",
9906 (debug
& PRINT_FLAG_VDEC_STATUS
) ?
9907 get_data_check_sum(pbi
, r
) : 0
9910 if (debug
& PRINT_FLAG_VDEC_DATA
)
9911 dump_data(pbi
, pbi
->chunk
->size
);
9913 decode_size
= pbi
->chunk
->size
+
9914 (pbi
->chunk
->offset
& (VDEC_FIFO_ALIGN
- 1));
9916 WRITE_VREG(HEVC_DECODE_SIZE
,
9917 READ_VREG(HEVC_DECODE_SIZE
) + decode_size
);
9919 vdec_enable_input(vdec
);
9921 WRITE_VREG(HEVC_DEC_STATUS_REG
, HEVC_ACTION_DONE
);
9923 start_process_time(pbi
);
9926 pbi
->dec_result
= DEC_RESULT_GET_DATA_RETRY
;
9928 vp9_print(pbi
, PRINT_FLAG_VDEC_DETAIL
,
9929 "amvdec_vh265: Insufficient data\n");
9931 vdec_schedule_work(&pbi
->work
);
9934 } else if (pbi
->dec_result
== DEC_RESULT_DONE
) {
9935 #ifdef SUPPORT_FB_DECODING
9936 if (pbi
->used_stage_buf_num
> 0) {
9937 #ifndef FB_DECODING_TEST_SCHEDULE
9938 if (!is_s2_decoding_finished(pbi
)) {
9939 vp9_print(pbi
, PRINT_FLAG_VDEC_DETAIL
,
9940 "s2 decoding not done, check again later\n");
9941 vdec_schedule_work(&pbi
->work
);
9945 if (mcrcc_cache_alg_flag
)
9949 /* if (!pbi->ctx_valid)
9950 pbi->ctx_valid = 1; */
9953 pbi
->process_state
= PROC_STATE_INIT
;
9954 decode_frame_count
[pbi
->index
] = pbi
->frame_count
;
9956 if (pbi
->mmu_enable
)
9958 (READ_VREG(HEVC_SAO_MMU_STATUS
) >> 16);
9959 vp9_print(pbi
, PRINT_FLAG_VDEC_STATUS
,
9960 "%s (===> %d) dec_result %d %x %x %x shiftbytes 0x%x decbytes 0x%x\n",
9964 READ_VREG(HEVC_STREAM_LEVEL
),
9965 READ_VREG(HEVC_STREAM_WR_PTR
),
9966 READ_VREG(HEVC_STREAM_RD_PTR
),
9967 READ_VREG(HEVC_SHIFT_BYTE_COUNT
),
9968 READ_VREG(HEVC_SHIFT_BYTE_COUNT
) -
9969 pbi
->start_shift_bytes
9971 vdec_vframe_dirty(hw_to_vdec(pbi
), pbi
->chunk
);
9972 } else if (pbi
->dec_result
== DEC_RESULT_AGAIN
) {
9974 stream base: stream buf empty or timeout
9975 frame base: vdec_prepare_input fail
9977 if (!vdec_has_more_input(vdec
)) {
9978 pbi
->dec_result
= DEC_RESULT_EOS
;
9979 vdec_schedule_work(&pbi
->work
);
9982 } else if (pbi
->dec_result
== DEC_RESULT_EOS
) {
9983 vp9_print(pbi
, PRINT_FLAG_VDEC_STATUS
,
9984 "%s: end of stream\n",
9987 vp9_bufmgr_postproc(pbi
);
9989 notify_v4l_eos(hw_to_vdec(pbi
));
9991 vdec_vframe_dirty(hw_to_vdec(pbi
), pbi
->chunk
);
9992 } else if (pbi
->dec_result
== DEC_RESULT_FORCE_EXIT
) {
9993 vp9_print(pbi
, PRINT_FLAG_VDEC_STATUS
,
9996 if (pbi
->stat
& STAT_VDEC_RUN
) {
9998 pbi
->stat
&= ~STAT_VDEC_RUN
;
10001 if (pbi
->stat
& STAT_ISR_REG
) {
10002 #ifdef MULTI_INSTANCE_SUPPORT
10003 if (!pbi
->m_ins_flag
)
10005 WRITE_VREG(HEVC_ASSIST_MBOX0_MASK
, 0);
10006 vdec_free_irq(VDEC_IRQ_0
, (void *)pbi
);
10007 pbi
->stat
&= ~STAT_ISR_REG
;
10010 if (pbi
->stat
& STAT_VDEC_RUN
) {
10012 pbi
->stat
&= ~STAT_VDEC_RUN
;
10015 if (pbi
->stat
& STAT_TIMER_ARM
) {
10016 del_timer_sync(&pbi
->timer
);
10017 pbi
->stat
&= ~STAT_TIMER_ARM
;
10019 /* mark itself has all HW resource released and input released */
10020 #ifdef SUPPORT_FB_DECODING
10021 if (pbi
->used_stage_buf_num
> 0)
10022 vdec_core_finish_run(hw_to_vdec(pbi
), CORE_MASK_HEVC_BACK
);
10024 vdec_core_finish_run(hw_to_vdec(pbi
), CORE_MASK_VDEC_1
10026 | CORE_MASK_HEVC_FRONT
10027 | CORE_MASK_HEVC_BACK
10030 if (vdec
->parallel_dec
== 1)
10031 vdec_core_finish_run(vdec
, CORE_MASK_HEVC
);
10033 vdec_core_finish_run(hw_to_vdec(pbi
), CORE_MASK_VDEC_1
10036 trigger_schedule(pbi
);
10039 static int vp9_hw_ctx_restore(struct VP9Decoder_s
*pbi
)
10041 /* new to do ... */
10042 #if (!defined SUPPORT_FB_DECODING)
10043 vvp9_prot_init(pbi
, HW_MASK_FRONT
| HW_MASK_BACK
);
10044 #elif (defined FB_DECODING_TEST_SCHEDULE)
10045 vvp9_prot_init(pbi
, HW_MASK_FRONT
| HW_MASK_BACK
);
10047 if (pbi
->used_stage_buf_num
> 0)
10048 vvp9_prot_init(pbi
, HW_MASK_FRONT
);
10050 vvp9_prot_init(pbi
, HW_MASK_FRONT
| HW_MASK_BACK
);
10054 static unsigned long run_ready(struct vdec_s
*vdec
, unsigned long mask
)
10056 struct VP9Decoder_s
*pbi
=
10057 (struct VP9Decoder_s
*)vdec
->private;
10058 int tvp
= vdec_secure(hw_to_vdec(pbi
)) ?
10059 CODEC_MM_FLAGS_TVP
: 0;
10060 unsigned long ret
= 0;
10062 if (!(pbi
->pic_list_init_done
&& pbi
->pic_list_init_done2
) || pbi
->eos
)
10064 if (!pbi
->first_sc_checked
&& pbi
->mmu_enable
) {
10065 int size
= decoder_mmu_box_sc_check(pbi
->mmu_box
, tvp
);
10066 pbi
->first_sc_checked
= 1;
10067 vp9_print(pbi
, 0, "vp9 cached=%d need_size=%d speed= %d ms\n",
10068 size
, (pbi
->need_cache_size
>> PAGE_SHIFT
),
10069 (int)(get_jiffies_64() - pbi
->sc_start_time
) * 1000/HZ
);
10072 #ifdef SUPPORT_FB_DECODING
10073 if (pbi
->used_stage_buf_num
> 0) {
10074 if (mask
& CORE_MASK_HEVC_FRONT
) {
10075 if (get_free_stage_buf_num(pbi
) > 0
10076 && mv_buf_available(pbi
))
10077 ret
|= CORE_MASK_HEVC_FRONT
;
10079 if (mask
& CORE_MASK_HEVC_BACK
) {
10080 if (s2_buf_available(pbi
) &&
10081 (get_free_buf_count(pbi
) >=
10082 pbi
->run_ready_min_buf_num
)) {
10083 ret
|= CORE_MASK_HEVC_BACK
;
10084 pbi
->back_not_run_ready
= 0;
10086 pbi
->back_not_run_ready
= 1;
10088 if (get_free_buf_count(pbi
) <
10089 run_ready_min_buf_num
)
10090 dump_pic_list(pbi
);
10093 } else if (get_free_buf_count(pbi
) >=
10094 pbi
->run_ready_min_buf_num
)
10095 ret
= CORE_MASK_VDEC_1
| CORE_MASK_HEVC
10096 | CORE_MASK_HEVC_FRONT
10097 | CORE_MASK_HEVC_BACK
;
10099 if (ret
& CORE_MASK_HEVC_FRONT
)
10100 not_run_ready
[pbi
->index
] = 0;
10102 not_run_ready
[pbi
->index
]++;
10104 if (ret
& CORE_MASK_HEVC_BACK
)
10105 not_run2_ready
[pbi
->index
] = 0;
10107 not_run2_ready
[pbi
->index
]++;
10110 PRINT_FLAG_VDEC_DETAIL
, "%s mask %lx=>%lx (%d %d %d %d)\r\n",
10111 __func__
, mask
, ret
,
10112 get_free_stage_buf_num(pbi
),
10113 mv_buf_available(pbi
),
10114 s2_buf_available(pbi
),
10115 get_free_buf_count(pbi
)
10121 if (get_free_buf_count(pbi
) >=
10122 pbi
->run_ready_min_buf_num
) {
10123 if (vdec
->parallel_dec
== 1)
10124 ret
= CORE_MASK_HEVC
;
10126 ret
= CORE_MASK_VDEC_1
| CORE_MASK_HEVC
;
10129 if (pbi
->is_used_v4l
) {
10130 struct aml_vcodec_ctx
*ctx
=
10131 (struct aml_vcodec_ctx
*)(pbi
->v4l2_ctx
);
10133 if (ctx
->param_sets_from_ucode
) {
10134 if (pbi
->v4l_params_parsed
) {
10135 if ((ctx
->cap_pool
.in
< pbi
->used_buf_num
) &&
10136 v4l2_m2m_num_dst_bufs_ready(ctx
->m2m_ctx
) <
10137 pbi
->run_ready_min_buf_num
)
10140 if ((pbi
->res_ch_flag
== 1) &&
10141 ((ctx
->state
<= AML_STATE_INIT
) ||
10142 (ctx
->state
>= AML_STATE_FLUSHING
)))
10145 } else if (ctx
->cap_pool
.in
< ctx
->dpb_size
) {
10146 if (v4l2_m2m_num_dst_bufs_ready(ctx
->m2m_ctx
) <
10147 pbi
->run_ready_min_buf_num
)
10153 not_run_ready
[pbi
->index
] = 0;
10155 not_run_ready
[pbi
->index
]++;
10158 PRINT_FLAG_VDEC_DETAIL
, "%s mask %lx=>%lx\r\n",
10159 __func__
, mask
, ret
);
10164 static void vp9_frame_mode_pts_save(struct VP9Decoder_s
*pbi
)
10168 if (pbi
->chunk
== NULL
)
10170 vp9_print(pbi
, VP9_DEBUG_OUT_PTS
,
10171 "run front: pts %d, pts64 %lld\n", pbi
->chunk
->pts
, pbi
->chunk
->pts64
);
10172 for (i
= (FRAME_BUFFERS
- 1); i
> 0; i
--) {
10173 pbi
->frame_mode_pts_save
[i
] = pbi
->frame_mode_pts_save
[i
- 1];
10174 pbi
->frame_mode_pts64_save
[i
] = pbi
->frame_mode_pts64_save
[i
- 1];
10176 pbi
->frame_mode_pts_save
[0] = pbi
->chunk
->pts
;
10177 pbi
->frame_mode_pts64_save
[0] = pbi
->chunk
->pts64
;
10180 static void run_front(struct vdec_s
*vdec
)
10182 struct VP9Decoder_s
*pbi
=
10183 (struct VP9Decoder_s
*)vdec
->private;
10186 run_count
[pbi
->index
]++;
10187 /* pbi->chunk = vdec_prepare_input(vdec); */
10188 #if (!defined SUPPORT_FB_DECODING)
10189 hevc_reset_core(vdec
);
10190 #elif (defined FB_DECODING_TEST_SCHEDULE)
10191 hevc_reset_core(vdec
);
10193 if (pbi
->used_stage_buf_num
> 0)
10194 fb_reset_core(vdec
, HW_MASK_FRONT
);
10196 hevc_reset_core(vdec
);
10199 size
= vdec_prepare_input(vdec
, &pbi
->chunk
);
10201 input_empty
[pbi
->index
]++;
10203 pbi
->dec_result
= DEC_RESULT_AGAIN
;
10205 vp9_print(pbi
, PRINT_FLAG_VDEC_DETAIL
,
10206 "ammvdec_vh265: Insufficient data\n");
10208 vdec_schedule_work(&pbi
->work
);
10212 input_empty
[pbi
->index
] = 0;
10213 pbi
->dec_result
= DEC_RESULT_NONE
;
10214 pbi
->start_shift_bytes
= READ_VREG(HEVC_SHIFT_BYTE_COUNT
);
10216 vp9_frame_mode_pts_save(pbi
);
10218 if (debug
& PRINT_FLAG_VDEC_STATUS
) {
10221 "%s (%d): size 0x%x (0x%x 0x%x) sum 0x%x (%x %x %x %x %x) bytes 0x%x",
10223 pbi
->frame_count
, size
,
10224 pbi
->chunk
? pbi
->chunk
->size
: 0,
10225 pbi
->chunk
? pbi
->chunk
->offset
: 0,
10226 pbi
->chunk
? ((vdec_frame_based(vdec
) &&
10227 (debug
& PRINT_FLAG_VDEC_STATUS
)) ?
10228 get_data_check_sum(pbi
, size
) : 0) : 0,
10229 READ_VREG(HEVC_STREAM_START_ADDR
),
10230 READ_VREG(HEVC_STREAM_END_ADDR
),
10231 READ_VREG(HEVC_STREAM_LEVEL
),
10232 READ_VREG(HEVC_STREAM_WR_PTR
),
10233 READ_VREG(HEVC_STREAM_RD_PTR
),
10234 pbi
->start_shift_bytes
);
10235 if (vdec_frame_based(vdec
) && pbi
->chunk
) {
10238 if (!pbi
->chunk
->block
->is_mapped
)
10239 data
= codec_mm_vmap(pbi
->chunk
->block
->start
+
10240 pbi
->chunk
->offset
, 8);
10242 data
= ((u8
*)pbi
->chunk
->block
->start_virt
) +
10243 pbi
->chunk
->offset
;
10245 vp9_print_cont(pbi
, 0, "data adr %p:",
10247 for (ii
= 0; ii
< 8; ii
++)
10248 vp9_print_cont(pbi
, 0, "%02x ",
10251 if (!pbi
->chunk
->block
->is_mapped
)
10252 codec_mm_unmap_phyaddr(data
);
10254 vp9_print_cont(pbi
, 0, "\r\n");
10256 if (vdec
->mc_loaded
) {
10257 /*firmware have load before,
10258 and not changes to another.
10262 ret
= amhevc_loadmc_ex(VFORMAT_VP9
, NULL
, pbi
->fw
->data
);
10265 vp9_print(pbi
, PRINT_FLAG_ERROR
,
10266 "VP9: the %s fw loading failed, err: %x\n",
10267 tee_enabled() ? "TEE" : "local", ret
);
10268 pbi
->dec_result
= DEC_RESULT_FORCE_EXIT
;
10269 vdec_schedule_work(&pbi
->work
);
10272 vdec
->mc_loaded
= 1;
10273 vdec
->mc_type
= VFORMAT_VP9
;
10276 if (vp9_hw_ctx_restore(pbi
) < 0) {
10277 vdec_schedule_work(&pbi
->work
);
10281 vdec_enable_input(vdec
);
10283 WRITE_VREG(HEVC_DEC_STATUS_REG
, HEVC_ACTION_DONE
);
10285 if (vdec_frame_based(vdec
)) {
10286 if (debug
& PRINT_FLAG_VDEC_DATA
)
10287 dump_data(pbi
, pbi
->chunk
->size
);
10289 WRITE_VREG(HEVC_SHIFT_BYTE_COUNT
, 0);
10290 size
= pbi
->chunk
->size
+
10291 (pbi
->chunk
->offset
& (VDEC_FIFO_ALIGN
- 1));
10293 vdec
->mvfrm
->frame_size
= pbi
->chunk
->size
;
10295 WRITE_VREG(HEVC_DECODE_SIZE
, size
);
10296 WRITE_VREG(HEVC_DECODE_COUNT
, pbi
->slice_idx
);
10297 pbi
->init_flag
= 1;
10299 vp9_print(pbi
, PRINT_FLAG_VDEC_DETAIL
,
10300 "%s: start hevc (%x %x %x)\n",
10302 READ_VREG(HEVC_DEC_STATUS_REG
),
10303 READ_VREG(HEVC_MPC_E
),
10304 READ_VREG(HEVC_MPSR
));
10306 start_process_time(pbi
);
10307 mod_timer(&pbi
->timer
, jiffies
);
10308 pbi
->stat
|= STAT_TIMER_ARM
;
10309 pbi
->stat
|= STAT_ISR_REG
;
10311 pbi
->stat
|= STAT_VDEC_RUN
;
10314 #ifdef SUPPORT_FB_DECODING
10315 static void mpred_process(struct VP9Decoder_s
*pbi
)
10317 union param_u
*params
= &pbi
->s1_param
;
10318 unsigned char use_prev_frame_mvs
=
10319 !params
->p
.error_resilient_mode
&&
10320 params
->p
.width
== pbi
->s1_width
&&
10321 params
->p
.height
== pbi
->s1_height
&&
10322 !pbi
->s1_intra_only
&&
10323 pbi
->s1_last_show_frame
&&
10324 (pbi
->s1_frame_type
!= KEY_FRAME
);
10325 pbi
->s1_width
= params
->p
.width
;
10326 pbi
->s1_height
= params
->p
.height
;
10327 pbi
->s1_frame_type
= params
->p
.frame_type
;
10328 pbi
->s1_intra_only
=
10329 (params
->p
.show_frame
||
10330 params
->p
.show_existing_frame
)
10331 ? 0 : params
->p
.intra_only
;
10332 if ((pbi
->s1_frame_type
!= KEY_FRAME
)
10333 && (!pbi
->s1_intra_only
)) {
10334 unsigned int data32
;
10335 int mpred_mv_rd_end_addr
;
10337 mpred_mv_rd_end_addr
=
10338 pbi
->s1_mpred_mv_wr_start_addr_pre
10339 + (pbi
->lcu_total
* MV_MEM_UNIT
);
10341 WRITE_VREG(HEVC_MPRED_CTRL3
, 0x24122412);
10342 WRITE_VREG(HEVC_MPRED_ABV_START_ADDR
,
10343 pbi
->work_space_buf
->
10344 mpred_above
.buf_start
);
10346 data32
= READ_VREG(HEVC_MPRED_CTRL4
);
10348 data32
&= (~(1 << 6));
10349 data32
|= (use_prev_frame_mvs
<< 6);
10350 WRITE_VREG(HEVC_MPRED_CTRL4
, data32
);
10352 WRITE_VREG(HEVC_MPRED_MV_WR_START_ADDR
,
10353 pbi
->s1_mpred_mv_wr_start_addr
);
10354 WRITE_VREG(HEVC_MPRED_MV_WPTR
,
10355 pbi
->s1_mpred_mv_wr_start_addr
);
10357 WRITE_VREG(HEVC_MPRED_MV_RD_START_ADDR
,
10358 pbi
->s1_mpred_mv_wr_start_addr_pre
);
10359 WRITE_VREG(HEVC_MPRED_MV_RPTR
,
10360 pbi
->s1_mpred_mv_wr_start_addr_pre
);
10362 WRITE_VREG(HEVC_MPRED_MV_RD_END_ADDR
,
10363 mpred_mv_rd_end_addr
);
10366 clear_mpred_hw(pbi
);
10368 if (!params
->p
.show_existing_frame
) {
10369 pbi
->s1_mpred_mv_wr_start_addr_pre
=
10370 pbi
->s1_mpred_mv_wr_start_addr
;
10371 pbi
->s1_last_show_frame
=
10372 params
->p
.show_frame
;
10373 if (pbi
->s1_mv_buf_index_pre_pre
!= MV_BUFFER_NUM
)
10374 put_mv_buf(pbi
, &pbi
->s1_mv_buf_index_pre_pre
);
10375 pbi
->s1_mv_buf_index_pre_pre
=
10376 pbi
->s1_mv_buf_index_pre
;
10377 pbi
->s1_mv_buf_index_pre
= pbi
->s1_mv_buf_index
;
10379 put_mv_buf(pbi
, &pbi
->s1_mv_buf_index
);
10382 static void vp9_s1_work(struct work_struct
*s1_work
)
10384 struct VP9Decoder_s
*pbi
= container_of(s1_work
,
10385 struct VP9Decoder_s
, s1_work
);
10386 vp9_print(pbi
, PRINT_FLAG_VDEC_DETAIL
,
10387 "%s dec_s1_result %d\n",
10389 pbi
->dec_s1_result
);
10391 #ifdef FB_DECODING_TEST_SCHEDULE
10392 if (pbi
->dec_s1_result
==
10393 DEC_S1_RESULT_TEST_TRIGGER_DONE
) {
10394 pbi
->s1_test_cmd
= TEST_SET_PIC_DONE
;
10395 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG
, 0x1);
10398 if (pbi
->dec_s1_result
== DEC_S1_RESULT_DONE
||
10399 pbi
->dec_s1_result
== DEC_S1_RESULT_FORCE_EXIT
) {
10401 vdec_core_finish_run(hw_to_vdec(pbi
),
10402 CORE_MASK_HEVC_FRONT
);
10404 trigger_schedule(pbi
);
10405 /*pbi->dec_s1_result = DEC_S1_RESULT_NONE;*/
10410 static void run_back(struct vdec_s
*vdec
)
10412 struct VP9Decoder_s
*pbi
=
10413 (struct VP9Decoder_s
*)vdec
->private;
10415 run2_count
[pbi
->index
]++;
10416 if (debug
& PRINT_FLAG_VDEC_STATUS
) {
10420 pbi
->run2_busy
= 1;
10421 #ifndef FB_DECODING_TEST_SCHEDULE
10422 fb_reset_core(vdec
, HW_MASK_BACK
);
10424 vvp9_prot_init(pbi
, HW_MASK_BACK
);
10426 vp9_recycle_mmu_buf_tail(pbi
);
10428 if (pbi
->frame_count
> 0)
10429 vp9_bufmgr_postproc(pbi
);
10431 if (get_s2_buf(pbi
) >= 0) {
10432 for (i
= 0; i
< (RPM_END
- RPM_BEGIN
); i
+= 4) {
10434 for (ii
= 0; ii
< 4; ii
++)
10435 pbi
->vp9_param
.l
.data
[i
+ ii
] =
10436 pbi
->s2_buf
->rpm
[i
+ 3 - ii
];
10438 #ifndef FB_DECODING_TEST_SCHEDULE
10439 WRITE_VREG(HEVC_ASSIST_FBD_MMU_MAP_ADDR
,
10440 pbi
->stage_mmu_map_phy_addr
+
10441 pbi
->s2_buf
->index
* STAGE_MMU_MAP_SIZE
);
10443 continue_decoding(pbi
);
10445 pbi
->run2_busy
= 0;
10449 static void run(struct vdec_s
*vdec
, unsigned long mask
,
10450 void (*callback
)(struct vdec_s
*, void *), void *arg
)
10452 struct VP9Decoder_s
*pbi
=
10453 (struct VP9Decoder_s
*)vdec
->private;
10456 PRINT_FLAG_VDEC_DETAIL
, "%s mask %lx\r\n",
10460 vdec
->mvfrm
->hw_decode_start
= local_clock();
10461 run_count
[pbi
->index
]++;
10462 pbi
->vdec_cb_arg
= arg
;
10463 pbi
->vdec_cb
= callback
;
10464 pbi
->one_package_frame_cnt
= 0;
10465 #ifdef SUPPORT_FB_DECODING
10466 if ((mask
& CORE_MASK_HEVC
) ||
10467 (mask
& CORE_MASK_HEVC_FRONT
))
10470 if ((pbi
->used_stage_buf_num
> 0)
10471 && (mask
& CORE_MASK_HEVC_BACK
))
10479 static void init_frame_bufs(struct VP9Decoder_s
*pbi
)
10481 struct vdec_s
*vdec
= hw_to_vdec(pbi
);
10482 struct VP9_Common_s
*const cm
= &pbi
->common
;
10483 struct RefCntBuffer_s
*const frame_bufs
= cm
->buffer_pool
->frame_bufs
;
10486 for (i
= 0; i
< pbi
->used_buf_num
; ++i
) {
10487 frame_bufs
[i
].ref_count
= 0;
10488 frame_bufs
[i
].buf
.vf_ref
= 0;
10489 frame_bufs
[i
].buf
.decode_idx
= 0;
10490 frame_bufs
[i
].buf
.cma_alloc_addr
= 0;
10491 frame_bufs
[i
].buf
.index
= i
;
10492 frame_bufs
[i
].buf
.vframe_bound
= 0;
10495 if (vdec
->parallel_dec
== 1) {
10496 for (i
= 0; i
< FRAME_BUFFERS
; i
++) {
10497 vdec
->free_canvas_ex
10498 (pbi
->common
.buffer_pool
->frame_bufs
[i
].buf
.y_canvas_index
,
10500 vdec
->free_canvas_ex
10501 (pbi
->common
.buffer_pool
->frame_bufs
[i
].buf
.uv_canvas_index
,
10507 static void reset(struct vdec_s
*vdec
)
10509 struct VP9Decoder_s
*pbi
=
10510 (struct VP9Decoder_s
*)vdec
->private;
10512 cancel_work_sync(&pbi
->work
);
10513 if (pbi
->stat
& STAT_VDEC_RUN
) {
10515 pbi
->stat
&= ~STAT_VDEC_RUN
;
10518 if (pbi
->stat
& STAT_TIMER_ARM
) {
10519 del_timer_sync(&pbi
->timer
);
10520 pbi
->stat
&= ~STAT_TIMER_ARM
;
10522 pbi
->dec_result
= DEC_RESULT_NONE
;
10523 reset_process_time(pbi
);
10524 vp9_local_uninit(pbi
);
10525 if (vvp9_local_init(pbi
) < 0)
10526 vp9_print(pbi
, 0, "%s local_init failed \r\n", __func__
);
10527 init_frame_bufs(pbi
);
10531 vp9_print(pbi
, PRINT_FLAG_VDEC_DETAIL
, "%s\r\n", __func__
);
10534 static irqreturn_t
vp9_irq_cb(struct vdec_s
*vdec
, int irq
)
10536 struct VP9Decoder_s
*pbi
=
10537 (struct VP9Decoder_s
*)vdec
->private;
10538 return vvp9_isr(0, pbi
);
10541 static irqreturn_t
vp9_threaded_irq_cb(struct vdec_s
*vdec
, int irq
)
10543 struct VP9Decoder_s
*pbi
=
10544 (struct VP9Decoder_s
*)vdec
->private;
10545 return vvp9_isr_thread_fn(0, pbi
);
10548 static void vp9_dump_state(struct vdec_s
*vdec
)
10550 struct VP9Decoder_s
*pbi
=
10551 (struct VP9Decoder_s
*)vdec
->private;
10552 struct VP9_Common_s
*const cm
= &pbi
->common
;
10554 vp9_print(pbi
, 0, "====== %s\n", __func__
);
10557 "width/height (%d/%d), used_buf_num %d\n",
10564 "is_framebase(%d), eos %d, dec_result 0x%x dec_frm %d disp_frm %d run %d not_run_ready %d input_empty %d low_latency %d no_head %d \n",
10565 input_frame_based(vdec
),
10568 decode_frame_count
[pbi
->index
],
10569 display_frame_count
[pbi
->index
],
10570 run_count
[pbi
->index
],
10571 not_run_ready
[pbi
->index
],
10572 input_empty
[pbi
->index
],
10573 pbi
->low_latency_flag
,
10577 if (vf_get_receiver(vdec
->vf_provider_name
)) {
10578 enum receviver_start_e state
=
10579 vf_notify_receiver(vdec
->vf_provider_name
,
10580 VFRAME_EVENT_PROVIDER_QUREY_STATE
,
10583 "\nreceiver(%s) state %d\n",
10584 vdec
->vf_provider_name
,
10589 "%s, newq(%d/%d), dispq(%d/%d), vf prepare/get/put (%d/%d/%d), free_buf_count %d (min %d for run_ready)\n",
10591 kfifo_len(&pbi
->newframe_q
),
10593 kfifo_len(&pbi
->display_q
),
10598 get_free_buf_count(pbi
),
10599 pbi
->run_ready_min_buf_num
10602 dump_pic_list(pbi
);
10604 for (i
= 0; i
< MAX_BUF_NUM
; i
++) {
10606 "mv_Buf(%d) start_adr 0x%x size 0x%x used %d\n",
10608 pbi
->m_mv_BUF
[i
].start_adr
,
10609 pbi
->m_mv_BUF
[i
].size
,
10610 pbi
->m_mv_BUF
[i
].used_flag
);
10614 "HEVC_DEC_STATUS_REG=0x%x\n",
10615 READ_VREG(HEVC_DEC_STATUS_REG
));
10617 "HEVC_MPC_E=0x%x\n",
10618 READ_VREG(HEVC_MPC_E
));
10620 "DECODE_MODE=0x%x\n",
10621 READ_VREG(DECODE_MODE
));
10623 "NAL_SEARCH_CTL=0x%x\n",
10624 READ_VREG(NAL_SEARCH_CTL
));
10626 "HEVC_PARSER_LCU_START=0x%x\n",
10627 READ_VREG(HEVC_PARSER_LCU_START
));
10629 "HEVC_DECODE_SIZE=0x%x\n",
10630 READ_VREG(HEVC_DECODE_SIZE
));
10632 "HEVC_SHIFT_BYTE_COUNT=0x%x\n",
10633 READ_VREG(HEVC_SHIFT_BYTE_COUNT
));
10635 "HEVC_STREAM_START_ADDR=0x%x\n",
10636 READ_VREG(HEVC_STREAM_START_ADDR
));
10638 "HEVC_STREAM_END_ADDR=0x%x\n",
10639 READ_VREG(HEVC_STREAM_END_ADDR
));
10641 "HEVC_STREAM_LEVEL=0x%x\n",
10642 READ_VREG(HEVC_STREAM_LEVEL
));
10644 "HEVC_STREAM_WR_PTR=0x%x\n",
10645 READ_VREG(HEVC_STREAM_WR_PTR
));
10647 "HEVC_STREAM_RD_PTR=0x%x\n",
10648 READ_VREG(HEVC_STREAM_RD_PTR
));
10650 "PARSER_VIDEO_RP=0x%x\n",
10651 STBUF_READ(&vdec
->vbuf
, get_rp
));
10653 "PARSER_VIDEO_WP=0x%x\n",
10654 STBUF_READ(&vdec
->vbuf
, get_wp
));
10656 if (input_frame_based(vdec
) &&
10657 (debug
& PRINT_FLAG_VDEC_DATA
)
10660 if (pbi
->chunk
&& pbi
->chunk
->block
&&
10661 pbi
->chunk
->size
> 0) {
10664 if (!pbi
->chunk
->block
->is_mapped
)
10665 data
= codec_mm_vmap(
10666 pbi
->chunk
->block
->start
+
10667 pbi
->chunk
->offset
,
10670 data
= ((u8
*)pbi
->chunk
->block
->start_virt
)
10671 + pbi
->chunk
->offset
;
10673 "frame data size 0x%x\n",
10675 for (jj
= 0; jj
< pbi
->chunk
->size
; jj
++) {
10676 if ((jj
& 0xf) == 0)
10679 vp9_print_cont(pbi
, 0,
10680 "%02x ", data
[jj
]);
10681 if (((jj
+ 1) & 0xf) == 0)
10682 vp9_print_cont(pbi
, 0,
10686 if (!pbi
->chunk
->block
->is_mapped
)
10687 codec_mm_unmap_phyaddr(data
);
10693 static int ammvdec_vp9_probe(struct platform_device
*pdev
)
10695 struct vdec_s
*pdata
= *(struct vdec_s
**)pdev
->dev
.platform_data
;
10698 struct vframe_content_light_level_s content_light_level
;
10699 struct vframe_master_display_colour_s vf_dp
;
10701 struct BUF_s BUF
[MAX_BUF_NUM
];
10702 struct VP9Decoder_s
*pbi
= NULL
;
10703 pr_debug("%s\n", __func__
);
10705 if (pdata
== NULL
) {
10706 pr_info("\nammvdec_vp9 memory resource undefined.\n");
10709 /*pbi = (struct VP9Decoder_s *)devm_kzalloc(&pdev->dev,
10710 sizeof(struct VP9Decoder_s), GFP_KERNEL);*/
10711 memset(&vf_dp
, 0, sizeof(struct vframe_master_display_colour_s
));
10712 pbi
= vmalloc(sizeof(struct VP9Decoder_s
));
10714 pr_info("\nammvdec_vp9 device data allocation failed\n");
10717 memset(pbi
, 0, sizeof(struct VP9Decoder_s
));
10719 /* the ctx from v4l2 driver. */
10720 pbi
->v4l2_ctx
= pdata
->private;
10722 pdata
->private = pbi
;
10723 pdata
->dec_status
= vvp9_dec_status
;
10724 /* pdata->set_trickmode = set_trickmode; */
10725 pdata
->run_ready
= run_ready
;
10727 pdata
->reset
= reset
;
10728 pdata
->irq_handler
= vp9_irq_cb
;
10729 pdata
->threaded_irq_handler
= vp9_threaded_irq_cb
;
10730 pdata
->dump_state
= vp9_dump_state
;
10732 memcpy(&BUF
[0], &pbi
->m_BUF
[0], sizeof(struct BUF_s
) * MAX_BUF_NUM
);
10733 memcpy(&pbi
->m_BUF
[0], &BUF
[0], sizeof(struct BUF_s
) * MAX_BUF_NUM
);
10735 pbi
->index
= pdev
->id
;
10737 if (pdata
->use_vfm_path
)
10738 snprintf(pdata
->vf_provider_name
, VDEC_PROVIDER_NAME_SIZE
,
10739 VFM_DEC_PROVIDER_NAME
);
10741 snprintf(pdata
->vf_provider_name
, VDEC_PROVIDER_NAME_SIZE
,
10742 MULTI_INSTANCE_PROVIDER_NAME
".%02x", pdev
->id
& 0xff);
10744 vf_provider_init(&pdata
->vframe_provider
, pdata
->vf_provider_name
,
10745 &vvp9_vf_provider
, pbi
);
10747 pbi
->provider_name
= pdata
->vf_provider_name
;
10748 platform_set_drvdata(pdev
, pdata
);
10750 pbi
->platform_dev
= pdev
;
10751 pbi
->video_signal_type
= 0;
10752 pbi
->m_ins_flag
= 1;
10753 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_TXLX
)
10754 pbi
->stat
|= VP9_TRIGGER_FRAME_ENABLE
;
10756 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1
) {
10757 pbi
->max_pic_w
= 8192;
10758 pbi
->max_pic_h
= 4608;
10760 pbi
->max_pic_w
= 4096;
10761 pbi
->max_pic_h
= 2304;
10764 if ((debug
& IGNORE_PARAM_FROM_CONFIG
) == 0 &&
10765 pdata
->config_len
) {
10766 #ifdef MULTI_INSTANCE_SUPPORT
10767 int vp9_buf_width
= 0;
10768 int vp9_buf_height
= 0;
10769 /*use ptr config for doubel_write_mode, etc*/
10770 vp9_print(pbi
, 0, "pdata->config=%s\n", pdata
->config
);
10771 if (get_config_int(pdata
->config
, "vp9_double_write_mode",
10773 pbi
->double_write_mode
= config_val
;
10775 pbi
->double_write_mode
= double_write_mode
;
10777 if (get_config_int(pdata
->config
, "save_buffer_mode",
10779 pbi
->save_buffer_mode
= config_val
;
10781 pbi
->save_buffer_mode
= 0;
10782 if (get_config_int(pdata
->config
, "vp9_buf_width",
10783 &config_val
) == 0) {
10784 vp9_buf_width
= config_val
;
10786 if (get_config_int(pdata
->config
, "vp9_buf_height",
10787 &config_val
) == 0) {
10788 vp9_buf_height
= config_val
;
10791 if (get_config_int(pdata
->config
, "no_head",
10793 pbi
->no_head
= config_val
;
10795 pbi
->no_head
= no_head
;
10797 /*use ptr config for max_pic_w, etc*/
10798 if (get_config_int(pdata
->config
, "vp9_max_pic_w",
10799 &config_val
) == 0) {
10800 pbi
->max_pic_w
= config_val
;
10802 if (get_config_int(pdata
->config
, "vp9_max_pic_h",
10803 &config_val
) == 0) {
10804 pbi
->max_pic_h
= config_val
;
10806 if ((pbi
->max_pic_w
* pbi
->max_pic_h
)
10807 < (vp9_buf_width
* vp9_buf_height
)) {
10808 pbi
->max_pic_w
= vp9_buf_width
;
10809 pbi
->max_pic_h
= vp9_buf_height
;
10810 vp9_print(pbi
, 0, "use buf resolution\n");
10813 if (get_config_int(pdata
->config
, "sidebind_type",
10815 pbi
->sidebind_type
= config_val
;
10817 if (get_config_int(pdata
->config
, "sidebind_channel_id",
10819 pbi
->sidebind_channel_id
= config_val
;
10821 if (get_config_int(pdata
->config
,
10822 "parm_v4l_codec_enable",
10824 pbi
->is_used_v4l
= config_val
;
10826 if (get_config_int(pdata
->config
,
10827 "parm_v4l_buffer_margin",
10829 pbi
->dynamic_buf_num_margin
= config_val
;
10831 if (get_config_int(pdata
->config
,
10832 "parm_v4l_canvas_mem_mode",
10834 pbi
->mem_map_mode
= config_val
;
10836 if (get_config_int(pdata
->config
,
10837 "parm_enable_fence",
10839 pbi
->enable_fence
= config_val
;
10841 if (get_config_int(pdata
->config
,
10842 "parm_fence_usage",
10844 pbi
->fence_usage
= config_val
;
10846 if (get_config_int(pdata
->config
, "HDRStaticInfo",
10847 &vf_dp
.present_flag
) == 0
10848 && vf_dp
.present_flag
== 1) {
10849 get_config_int(pdata
->config
, "mG.x",
10850 &vf_dp
.primaries
[0][0]);
10851 get_config_int(pdata
->config
, "mG.y",
10852 &vf_dp
.primaries
[0][1]);
10853 get_config_int(pdata
->config
, "mB.x",
10854 &vf_dp
.primaries
[1][0]);
10855 get_config_int(pdata
->config
, "mB.y",
10856 &vf_dp
.primaries
[1][1]);
10857 get_config_int(pdata
->config
, "mR.x",
10858 &vf_dp
.primaries
[2][0]);
10859 get_config_int(pdata
->config
, "mR.y",
10860 &vf_dp
.primaries
[2][1]);
10861 get_config_int(pdata
->config
, "mW.x",
10862 &vf_dp
.white_point
[0]);
10863 get_config_int(pdata
->config
, "mW.y",
10864 &vf_dp
.white_point
[1]);
10865 get_config_int(pdata
->config
, "mMaxDL",
10866 &vf_dp
.luminance
[0]);
10867 get_config_int(pdata
->config
, "mMinDL",
10868 &vf_dp
.luminance
[1]);
10869 vf_dp
.content_light_level
.present_flag
= 1;
10870 get_config_int(pdata
->config
, "mMaxCLL",
10871 &content_light_level
.max_content
);
10872 get_config_int(pdata
->config
, "mMaxFALL",
10873 &content_light_level
.max_pic_average
);
10874 vf_dp
.content_light_level
= content_light_level
;
10875 pbi
->video_signal_type
= (1 << 29)
10876 | (5 << 26) /* unspecified */
10877 | (0 << 25) /* limit */
10878 | (1 << 24) /* color available */
10879 | (9 << 16) /* 2020 */
10880 | (16 << 8) /* 2084 */
10881 | (9 << 0); /* 2020 */
10883 pbi
->vf_dp
= vf_dp
;
10887 /*pbi->vvp9_amstream_dec_info.width = 0;
10888 pbi->vvp9_amstream_dec_info.height = 0;
10889 pbi->vvp9_amstream_dec_info.rate = 30;*/
10890 pbi
->double_write_mode
= double_write_mode
;
10893 if (no_head
& 0x10) {
10894 pbi
->no_head
= (no_head
& 0xf);
10897 if (!pbi
->is_used_v4l
) {
10898 pbi
->mem_map_mode
= mem_map_mode
;
10900 pbi
->run_ready_min_buf_num
= run_ready_min_buf_num
;
10901 if (is_oversize(pbi
->max_pic_w
, pbi
->max_pic_h
)) {
10902 pr_err("over size: %dx%d, probe failed\n",
10903 pbi
->max_pic_w
, pbi
->max_pic_h
);
10907 if (force_config_fence
) {
10908 pbi
->enable_fence
= true;
10910 (force_config_fence
>> 4) & 0xf;
10911 if (force_config_fence
& 0x2)
10912 pbi
->enable_fence
= false;
10913 vp9_print(pbi
, 0, "enable fence: %d, fence usage: %d\n",
10914 pbi
->enable_fence
, pbi
->fence_usage
);
10917 if (pbi
->enable_fence
)
10918 pdata
->sync
.usage
= pbi
->fence_usage
;
10920 pbi
->mmu_enable
= 1;
10921 video_signal_type
= pbi
->video_signal_type
;
10923 if (pdata
->sys_info
) {
10924 pbi
->vvp9_amstream_dec_info
= *pdata
->sys_info
;
10926 pbi
->vvp9_amstream_dec_info
.width
= 0;
10927 pbi
->vvp9_amstream_dec_info
.height
= 0;
10928 pbi
->vvp9_amstream_dec_info
.rate
= 30;
10930 pbi
->low_latency_flag
= 1;
10933 "no_head %d low_latency %d\n",
10934 pbi
->no_head
, pbi
->low_latency_flag
);
10936 pbi
->buf_start
= pdata
->mem_start
;
10937 pbi
->buf_size
= pdata
->mem_end
- pdata
->mem_start
+ 1;
10939 if (amvdec_vp9_mmu_init(pbi
) < 0) {
10940 pr_err("vp9 alloc bmmu box failed!!\n");
10941 /* devm_kfree(&pdev->dev, (void *)pbi); */
10942 vfree((void *)pbi
);
10943 pdata
->dec_status
= NULL
;
10947 pbi
->cma_alloc_count
= PAGE_ALIGN(work_buf_size
) / PAGE_SIZE
;
10948 ret
= decoder_bmmu_box_alloc_buf_phy(pbi
->bmmu_box
, WORK_SPACE_BUF_ID
,
10949 pbi
->cma_alloc_count
* PAGE_SIZE
, DRIVER_NAME
,
10950 &pbi
->cma_alloc_addr
);
10952 uninit_mmu_buffers(pbi
);
10953 /* devm_kfree(&pdev->dev, (void *)pbi); */
10954 vfree((void *)pbi
);
10955 pdata
->dec_status
= NULL
;
10958 pbi
->buf_start
= pbi
->cma_alloc_addr
;
10959 pbi
->buf_size
= work_buf_size
;
10962 pbi
->init_flag
= 0;
10963 pbi
->first_sc_checked
= 0;
10964 pbi
->fatal_error
= 0;
10965 pbi
->show_frame_num
= 0;
10968 pr_info("===VP9 decoder mem resource 0x%lx size 0x%x\n",
10973 pbi
->cma_dev
= pdata
->cma_dev
;
10974 if (vvp9_init(pdata
) < 0) {
10975 pr_info("\namvdec_vp9 init failed.\n");
10976 vp9_local_uninit(pbi
);
10977 uninit_mmu_buffers(pbi
);
10978 /* devm_kfree(&pdev->dev, (void *)pbi); */
10979 vfree((void *)pbi
);
10980 pdata
->dec_status
= NULL
;
10983 vdec_set_prepare_level(pdata
, start_decode_buf_level
);
10984 hevc_source_changed(VFORMAT_VP9
,
10986 #ifdef SUPPORT_FB_DECODING
10987 if (pbi
->used_stage_buf_num
> 0)
10988 vdec_core_request(pdata
,
10989 CORE_MASK_HEVC_FRONT
| CORE_MASK_HEVC_BACK
);
10991 vdec_core_request(pdata
, CORE_MASK_VDEC_1
| CORE_MASK_HEVC
10992 | CORE_MASK_HEVC_FRONT
| CORE_MASK_HEVC_BACK
10993 | CORE_MASK_COMBINE
);
10995 if (pdata
->parallel_dec
== 1)
10996 vdec_core_request(pdata
, CORE_MASK_HEVC
);
10998 vdec_core_request(pdata
, CORE_MASK_VDEC_1
| CORE_MASK_HEVC
10999 | CORE_MASK_COMBINE
);
11001 pbi
->pic_list_init_done2
= true;
11003 if (pbi
->enable_fence
) {
11004 /* creat timeline. */
11005 vdec_timeline_create(&pdata
->sync
, DRIVER_NAME
);
11011 static int ammvdec_vp9_remove(struct platform_device
*pdev
)
11013 struct VP9Decoder_s
*pbi
= (struct VP9Decoder_s
*)
11014 (((struct vdec_s
*)(platform_get_drvdata(pdev
)))->private);
11015 struct vdec_s
*vdec
= hw_to_vdec(pbi
);
11018 pr_info("amvdec_vp9_remove\n");
11022 #ifdef SUPPORT_FB_DECODING
11023 vdec_core_release(hw_to_vdec(pbi
), CORE_MASK_VDEC_1
| CORE_MASK_HEVC
11024 | CORE_MASK_HEVC_FRONT
| CORE_MASK_HEVC_BACK
11027 if (vdec
->parallel_dec
== 1)
11028 vdec_core_release(hw_to_vdec(pbi
), CORE_MASK_HEVC
);
11030 vdec_core_release(hw_to_vdec(pbi
), CORE_MASK_VDEC_1
| CORE_MASK_HEVC
);
11032 vdec_set_status(hw_to_vdec(pbi
), VDEC_STATUS_DISCONNECTED
);
11034 if (vdec
->parallel_dec
== 1) {
11035 for (i
= 0; i
< FRAME_BUFFERS
; i
++) {
11036 vdec
->free_canvas_ex
11037 (pbi
->common
.buffer_pool
->frame_bufs
[i
].buf
.y_canvas_index
,
11039 vdec
->free_canvas_ex
11040 (pbi
->common
.buffer_pool
->frame_bufs
[i
].buf
.uv_canvas_index
,
11045 if (pbi
->enable_fence
)
11046 vdec_fence_release(pbi
, &vdec
->sync
);
11049 pr_info("pts missed %ld, pts hit %ld, duration %d\n",
11050 pbi
->pts_missed
, pbi
->pts_hit
, pbi
->frame_dur
);
11054 /* devm_kfree(&pdev->dev, (void *)pbi); */
11055 vfree((void *)pbi
);
11059 static struct platform_driver ammvdec_vp9_driver
= {
11060 .probe
= ammvdec_vp9_probe
,
11061 .remove
= ammvdec_vp9_remove
,
11063 .name
= MULTI_DRIVER_NAME
,
11070 static struct mconfig vp9_configs
[] = {
11071 MC_PU32("bit_depth_luma", &bit_depth_luma
),
11072 MC_PU32("bit_depth_chroma", &bit_depth_chroma
),
11073 MC_PU32("frame_width", &frame_width
),
11074 MC_PU32("frame_height", &frame_height
),
11075 MC_PU32("debug", &debug
),
11076 MC_PU32("radr", &radr
),
11077 MC_PU32("rval", &rval
),
11078 MC_PU32("pop_shorts", &pop_shorts
),
11079 MC_PU32("dbg_cmd", &dbg_cmd
),
11080 MC_PU32("dbg_skip_decode_index", &dbg_skip_decode_index
),
11081 MC_PU32("endian", &endian
),
11082 MC_PU32("step", &step
),
11083 MC_PU32("udebug_flag", &udebug_flag
),
11084 MC_PU32("decode_pic_begin", &decode_pic_begin
),
11085 MC_PU32("slice_parse_begin", &slice_parse_begin
),
11086 MC_PU32("i_only_flag", &i_only_flag
),
11087 MC_PU32("error_handle_policy", &error_handle_policy
),
11088 MC_PU32("buf_alloc_width", &buf_alloc_width
),
11089 MC_PU32("buf_alloc_height", &buf_alloc_height
),
11090 MC_PU32("buf_alloc_depth", &buf_alloc_depth
),
11091 MC_PU32("buf_alloc_size", &buf_alloc_size
),
11092 MC_PU32("buffer_mode", &buffer_mode
),
11093 MC_PU32("buffer_mode_dbg", &buffer_mode_dbg
),
11094 MC_PU32("max_buf_num", &max_buf_num
),
11095 MC_PU32("dynamic_buf_num_margin", &dynamic_buf_num_margin
),
11096 MC_PU32("mem_map_mode", &mem_map_mode
),
11097 MC_PU32("double_write_mode", &double_write_mode
),
11098 MC_PU32("enable_mem_saving", &enable_mem_saving
),
11099 MC_PU32("force_w_h", &force_w_h
),
11100 MC_PU32("force_fps", &force_fps
),
11101 MC_PU32("max_decoding_time", &max_decoding_time
),
11102 MC_PU32("on_no_keyframe_skiped", &on_no_keyframe_skiped
),
11103 MC_PU32("start_decode_buf_level", &start_decode_buf_level
),
11104 MC_PU32("decode_timeout_val", &decode_timeout_val
),
11105 MC_PU32("vp9_max_pic_w", &vp9_max_pic_w
),
11106 MC_PU32("vp9_max_pic_h", &vp9_max_pic_h
),
11108 static struct mconfig_node vp9_node
;
11110 static int __init
amvdec_vp9_driver_init_module(void)
11113 struct BuffInfo_s
*p_buf_info
;
11115 if (vdec_is_support_4k()) {
11116 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1
)
11117 p_buf_info
= &amvvp9_workbuff_spec
[2];
11119 p_buf_info
= &amvvp9_workbuff_spec
[1];
11121 p_buf_info
= &amvvp9_workbuff_spec
[0];
11123 init_buff_spec(NULL
, p_buf_info
);
11125 (p_buf_info
->end_adr
- p_buf_info
->start_adr
11126 + 0xffff) & (~0xffff);
11128 pr_debug("amvdec_vp9 module init\n");
11130 error_handle_policy
= 0;
11132 #ifdef ERROR_HANDLE_DEBUG
11133 dbg_nal_skip_flag
= 0;
11134 dbg_nal_skip_count
= 0;
11137 decode_pic_begin
= 0;
11138 slice_parse_begin
= 0;
11140 buf_alloc_size
= 0;
11141 #ifdef MULTI_INSTANCE_SUPPORT
11142 if (platform_driver_register(&ammvdec_vp9_driver
))
11143 pr_err("failed to register ammvdec_vp9 driver\n");
11146 if (platform_driver_register(&amvdec_vp9_driver
)) {
11147 pr_err("failed to register amvdec_vp9 driver\n");
11151 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1
) {
11152 amvdec_vp9_profile
.profile
=
11153 "8k, 10bit, dwrite, compressed";
11154 } else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXL
11155 /*&& get_cpu_major_id() != MESON_CPU_MAJOR_ID_GXLX*/
11156 && get_cpu_major_id() != AM_MESON_CPU_MAJOR_ID_TXL
) {
11157 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_TXLX
) {
11158 if (vdec_is_support_4k())
11159 amvdec_vp9_profile
.profile
=
11160 "4k, 10bit, dwrite, compressed";
11162 amvdec_vp9_profile
.profile
=
11163 "10bit, dwrite, compressed";
11165 if (vdec_is_support_4k())
11166 amvdec_vp9_profile
.profile
=
11167 "4k, 10bit, dwrite, compressed";
11169 amvdec_vp9_profile
.profile
=
11170 "10bit, dwrite, compressed";
11174 amvdec_vp9_profile
.name
= "vp9_unsupport";
11177 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A
)
11178 max_buf_num
= MAX_BUF_NUM_LESS
;
11180 vcodec_profile_register(&amvdec_vp9_profile
);
11181 amvdec_vp9_profile_mult
= amvdec_vp9_profile
;
11182 amvdec_vp9_profile_mult
.name
= "mvp9";
11183 vcodec_profile_register(&amvdec_vp9_profile_mult
);
11184 INIT_REG_NODE_CONFIGS("media.decoder", &vp9_node
,
11185 "vp9", vp9_configs
, CONFIG_FOR_RW
);
11190 static void __exit
amvdec_vp9_driver_remove_module(void)
11192 pr_debug("amvdec_vp9 module remove.\n");
11193 #ifdef MULTI_INSTANCE_SUPPORT
11194 platform_driver_unregister(&ammvdec_vp9_driver
);
11196 platform_driver_unregister(&amvdec_vp9_driver
);
11199 /****************************************/
11201 module_param(bit_depth_luma
, uint
, 0664);
11202 MODULE_PARM_DESC(bit_depth_luma
, "\n amvdec_vp9 bit_depth_luma\n");
11204 module_param(bit_depth_chroma
, uint
, 0664);
11205 MODULE_PARM_DESC(bit_depth_chroma
, "\n amvdec_vp9 bit_depth_chroma\n");
11207 module_param(frame_width
, uint
, 0664);
11208 MODULE_PARM_DESC(frame_width
, "\n amvdec_vp9 frame_width\n");
11210 module_param(frame_height
, uint
, 0664);
11211 MODULE_PARM_DESC(frame_height
, "\n amvdec_vp9 frame_height\n");
11213 module_param(debug
, uint
, 0664);
11214 MODULE_PARM_DESC(debug
, "\n amvdec_vp9 debug\n");
11216 module_param(radr
, uint
, 0664);
11217 MODULE_PARM_DESC(radr
, "\n radr\n");
11219 module_param(rval
, uint
, 0664);
11220 MODULE_PARM_DESC(rval
, "\n rval\n");
11222 module_param(pop_shorts
, uint
, 0664);
11223 MODULE_PARM_DESC(pop_shorts
, "\n rval\n");
11225 module_param(dbg_cmd
, uint
, 0664);
11226 MODULE_PARM_DESC(dbg_cmd
, "\n dbg_cmd\n");
11228 module_param(dbg_skip_decode_index
, uint
, 0664);
11229 MODULE_PARM_DESC(dbg_skip_decode_index
, "\n dbg_skip_decode_index\n");
11231 module_param(endian
, uint
, 0664);
11232 MODULE_PARM_DESC(endian
, "\n rval\n");
11234 module_param(step
, uint
, 0664);
11235 MODULE_PARM_DESC(step
, "\n amvdec_vp9 step\n");
11237 module_param(decode_pic_begin
, uint
, 0664);
11238 MODULE_PARM_DESC(decode_pic_begin
, "\n amvdec_vp9 decode_pic_begin\n");
11240 module_param(slice_parse_begin
, uint
, 0664);
11241 MODULE_PARM_DESC(slice_parse_begin
, "\n amvdec_vp9 slice_parse_begin\n");
11243 module_param(i_only_flag
, uint
, 0664);
11244 MODULE_PARM_DESC(i_only_flag
, "\n amvdec_vp9 i_only_flag\n");
11246 module_param(low_latency_flag
, uint
, 0664);
11247 MODULE_PARM_DESC(low_latency_flag
, "\n amvdec_vp9 low_latency_flag\n");
11249 module_param(no_head
, uint
, 0664);
11250 MODULE_PARM_DESC(no_head
, "\n amvdec_vp9 no_head\n");
11252 module_param(error_handle_policy
, uint
, 0664);
11253 MODULE_PARM_DESC(error_handle_policy
, "\n amvdec_vp9 error_handle_policy\n");
11255 module_param(buf_alloc_width
, uint
, 0664);
11256 MODULE_PARM_DESC(buf_alloc_width
, "\n buf_alloc_width\n");
11258 module_param(buf_alloc_height
, uint
, 0664);
11259 MODULE_PARM_DESC(buf_alloc_height
, "\n buf_alloc_height\n");
11261 module_param(buf_alloc_depth
, uint
, 0664);
11262 MODULE_PARM_DESC(buf_alloc_depth
, "\n buf_alloc_depth\n");
11264 module_param(buf_alloc_size
, uint
, 0664);
11265 MODULE_PARM_DESC(buf_alloc_size
, "\n buf_alloc_size\n");
11267 module_param(buffer_mode
, uint
, 0664);
11268 MODULE_PARM_DESC(buffer_mode
, "\n buffer_mode\n");
11270 module_param(buffer_mode_dbg
, uint
, 0664);
11271 MODULE_PARM_DESC(buffer_mode_dbg
, "\n buffer_mode_dbg\n");
11273 module_param(max_buf_num
, uint
, 0664);
11274 MODULE_PARM_DESC(max_buf_num
, "\n max_buf_num\n");
11276 module_param(dynamic_buf_num_margin
, uint
, 0664);
11277 MODULE_PARM_DESC(dynamic_buf_num_margin
, "\n dynamic_buf_num_margin\n");
11279 module_param(mv_buf_margin
, uint
, 0664);
11280 MODULE_PARM_DESC(mv_buf_margin
, "\n mv_buf_margin\n");
11282 module_param(run_ready_min_buf_num
, uint
, 0664);
11283 MODULE_PARM_DESC(run_ready_min_buf_num
, "\n run_ready_min_buf_num\n");
11287 module_param(mem_map_mode
, uint
, 0664);
11288 MODULE_PARM_DESC(mem_map_mode
, "\n mem_map_mode\n");
11290 #ifdef SUPPORT_10BIT
11291 module_param(double_write_mode
, uint
, 0664);
11292 MODULE_PARM_DESC(double_write_mode
, "\n double_write_mode\n");
11294 module_param(enable_mem_saving
, uint
, 0664);
11295 MODULE_PARM_DESC(enable_mem_saving
, "\n enable_mem_saving\n");
11297 module_param(force_w_h
, uint
, 0664);
11298 MODULE_PARM_DESC(force_w_h
, "\n force_w_h\n");
11301 module_param(force_fps
, uint
, 0664);
11302 MODULE_PARM_DESC(force_fps
, "\n force_fps\n");
11304 module_param(max_decoding_time
, uint
, 0664);
11305 MODULE_PARM_DESC(max_decoding_time
, "\n max_decoding_time\n");
11307 module_param(on_no_keyframe_skiped
, uint
, 0664);
11308 MODULE_PARM_DESC(on_no_keyframe_skiped
, "\n on_no_keyframe_skiped\n");
11310 module_param(mcrcc_cache_alg_flag
, uint
, 0664);
11311 MODULE_PARM_DESC(mcrcc_cache_alg_flag
, "\n mcrcc_cache_alg_flag\n");
11313 #ifdef MULTI_INSTANCE_SUPPORT
11314 module_param(start_decode_buf_level
, int, 0664);
11315 MODULE_PARM_DESC(start_decode_buf_level
,
11316 "\n vp9 start_decode_buf_level\n");
11318 module_param(decode_timeout_val
, uint
, 0664);
11319 MODULE_PARM_DESC(decode_timeout_val
,
11320 "\n vp9 decode_timeout_val\n");
11322 module_param(vp9_max_pic_w
, uint
, 0664);
11323 MODULE_PARM_DESC(vp9_max_pic_w
, "\n vp9_max_pic_w\n");
11325 module_param(vp9_max_pic_h
, uint
, 0664);
11326 MODULE_PARM_DESC(vp9_max_pic_h
, "\n vp9_max_pic_h\n");
11328 module_param_array(decode_frame_count
, uint
,
11329 &max_decode_instance_num
, 0664);
11331 module_param_array(display_frame_count
, uint
,
11332 &max_decode_instance_num
, 0664);
11334 module_param_array(max_process_time
, uint
,
11335 &max_decode_instance_num
, 0664);
11337 module_param_array(run_count
, uint
,
11338 &max_decode_instance_num
, 0664);
11340 module_param_array(input_empty
, uint
,
11341 &max_decode_instance_num
, 0664);
11343 module_param_array(not_run_ready
, uint
,
11344 &max_decode_instance_num
, 0664);
11347 #ifdef SUPPORT_FB_DECODING
11348 module_param_array(not_run2_ready
, uint
,
11349 &max_decode_instance_num
, 0664);
11351 module_param_array(run2_count
, uint
,
11352 &max_decode_instance_num
, 0664);
11354 module_param(stage_buf_num
, uint
, 0664);
11355 MODULE_PARM_DESC(stage_buf_num
, "\n amvdec_h265 stage_buf_num\n");
11358 module_param(udebug_flag
, uint
, 0664);
11359 MODULE_PARM_DESC(udebug_flag
, "\n amvdec_h265 udebug_flag\n");
11361 module_param(udebug_pause_pos
, uint
, 0664);
11362 MODULE_PARM_DESC(udebug_pause_pos
, "\n udebug_pause_pos\n");
11364 module_param(udebug_pause_val
, uint
, 0664);
11365 MODULE_PARM_DESC(udebug_pause_val
, "\n udebug_pause_val\n");
11367 module_param(udebug_pause_decode_idx
, uint
, 0664);
11368 MODULE_PARM_DESC(udebug_pause_decode_idx
, "\n udebug_pause_decode_idx\n");
11370 module_param(without_display_mode
, uint
, 0664);
11371 MODULE_PARM_DESC(without_display_mode
, "\n without_display_mode\n");
11373 module_param(force_config_fence
, uint
, 0664);
11374 MODULE_PARM_DESC(force_config_fence
, "\n force enable fence\n");
11376 module_param(force_pts_unstable
, uint
, 0664);
11377 MODULE_PARM_DESC(force_pts_unstable
, "\n force_pts_unstable\n");
11379 module_init(amvdec_vp9_driver_init_module
);
11380 module_exit(amvdec_vp9_driver_remove_module
);
11382 MODULE_DESCRIPTION("AMLOGIC vp9 Video Decoder Driver");
11383 MODULE_LICENSE("GPL");