2 * drivers/amlogic/amports/vvp9.c
4 * Copyright (C) 2015 Amlogic, Inc. All rights reserved.
6 * This program is free software; you can redistribute it and/or modify
7 * it under the terms of the GNU General Public License as published by
8 * the Free Software Foundation; either version 2 of the License, or
9 * (at your option) any later version.
11 * This program is distributed in the hope that it will be useful, but WITHOUT
12 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
13 * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for
18 #include <linux/kernel.h>
19 #include <linux/module.h>
20 #include <linux/types.h>
21 #include <linux/errno.h>
22 #include <linux/interrupt.h>
23 #include <linux/semaphore.h>
24 #include <linux/delay.h>
25 #include <linux/timer.h>
26 #include <linux/kfifo.h>
27 #include <linux/kthread.h>
28 #include <linux/spinlock.h>
29 #include <linux/platform_device.h>
30 #include <linux/amlogic/media/vfm/vframe.h>
31 #include <linux/amlogic/media/utils/amstream.h>
32 #include <linux/amlogic/media/utils/vformat.h>
33 #include <linux/amlogic/media/frame_sync/ptsserv.h>
34 #include <linux/amlogic/media/canvas/canvas.h>
35 #include <linux/amlogic/media/vfm/vframe_provider.h>
36 #include <linux/amlogic/media/vfm/vframe_receiver.h>
37 #include <linux/dma-mapping.h>
38 #include <linux/dma-contiguous.h>
39 #include <linux/slab.h>
40 #include <linux/amlogic/tee.h>
41 #include "../../../stream_input/amports/amports_priv.h"
42 #include <linux/amlogic/media/codec_mm/codec_mm.h>
43 #include "../utils/decoder_mmu_box.h"
44 #include "../utils/decoder_bmmu_box.h"
46 #define MEM_NAME "codec_vp9"
47 /* #include <mach/am_regs.h> */
48 #include <linux/amlogic/media/utils/vdec_reg.h>
49 #include "../utils/vdec.h"
50 #include "../utils/amvdec.h"
51 #ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
52 #include "../utils/vdec_profile.h"
55 #include <linux/amlogic/media/video_sink/video.h>
56 #include <linux/amlogic/media/codec_mm/configs.h>
57 #include "../utils/config_parser.h"
58 #include "../utils/firmware.h"
59 #include "../../../common/chips/decoder_cpu_ver_info.h"
60 #include "../utils/vdec_v4l2_buffer_ops.h"
61 #include <media/v4l2-mem2mem.h>
63 #define MIX_STREAM_SUPPORT
68 /*#define SUPPORT_FB_DECODING*/
69 /*#define FB_DECODING_TEST_SCHEDULE*/
72 #define HW_MASK_FRONT 0x1
73 #define HW_MASK_BACK 0x2
75 #define VP9D_MPP_REFINFO_TBL_ACCCONFIG 0x3442
76 #define VP9D_MPP_REFINFO_DATA 0x3443
77 #define VP9D_MPP_REF_SCALE_ENBL 0x3441
78 #define HEVC_MPRED_CTRL4 0x324c
79 #define HEVC_CM_HEADER_START_ADDR 0x3628
80 #define HEVC_DBLK_CFGB 0x350b
81 #define HEVCD_MPP_ANC2AXI_TBL_DATA 0x3464
82 #define HEVC_SAO_MMU_VH1_ADDR 0x363b
83 #define HEVC_SAO_MMU_VH0_ADDR 0x363a
85 #define HEVC_MV_INFO 0x310d
86 #define HEVC_QP_INFO 0x3137
87 #define HEVC_SKIP_INFO 0x3136
89 #define VP9_10B_DEC_IDLE 0
90 #define VP9_10B_DEC_FRAME_HEADER 1
91 #define VP9_10B_DEC_SLICE_SEGMENT 2
92 #define VP9_10B_DECODE_SLICE 5
93 #define VP9_10B_DISCARD_NAL 6
94 #define VP9_DUMP_LMEM 7
95 #define HEVC_DECPIC_DATA_DONE 0xa
96 #define HEVC_DECPIC_DATA_ERROR 0xb
97 #define HEVC_NAL_DECODE_DONE 0xe
98 #define HEVC_DECODE_BUFEMPTY 0x20
99 #define HEVC_DECODE_TIMEOUT 0x21
100 #define HEVC_SEARCH_BUFEMPTY 0x22
101 #define HEVC_DECODE_OVER_SIZE 0x23
102 #define HEVC_S2_DECODING_DONE 0x50
103 #define VP9_HEAD_PARSER_DONE 0xf0
104 #define VP9_HEAD_SEARCH_DONE 0xf1
106 #define HEVC_ACTION_DONE 0xff
108 #define VF_POOL_SIZE 32
111 #define pr_info printk
113 #define DECODE_MODE_SINGLE ((0x80 << 24) | 0)
114 #define DECODE_MODE_MULTI_STREAMBASE ((0x80 << 24) | 1)
115 #define DECODE_MODE_MULTI_FRAMEBASE ((0x80 << 24) | 2)
116 #define DECODE_MODE_SINGLE_LOW_LATENCY ((0x80 << 24) | 3)
117 #define DECODE_MODE_MULTI_FRAMEBASE_NOHEAD ((0x80 << 24) | 4)
119 #define VP9_TRIGGER_FRAME_DONE 0x100
120 #define VP9_TRIGGER_FRAME_ENABLE 0x200
122 #define MV_MEM_UNIT 0x240
123 /*---------------------------------------------------
124 * Include "parser_cmd.h"
125 *---------------------------------------------------
127 #define PARSER_CMD_SKIP_CFG_0 0x0000090b
129 #define PARSER_CMD_SKIP_CFG_1 0x1b14140f
131 #define PARSER_CMD_SKIP_CFG_2 0x001b1910
133 #define PARSER_CMD_NUMBER 37
135 /*#define HEVC_PIC_STRUCT_SUPPORT*/
136 /* to remove, fix build error */
138 /*#define CODEC_MM_FLAGS_FOR_VDECODER 0*/
140 #define MULTI_INSTANCE_SUPPORT
141 #define SUPPORT_10BIT
142 /* #define ERROR_HANDLE_DEBUG */
145 #define STAT_KTHREAD 0x40
148 #ifdef MULTI_INSTANCE_SUPPORT
149 #define MAX_DECODE_INSTANCE_NUM 9
150 #define MULTI_DRIVER_NAME "ammvdec_vp9"
151 static unsigned int max_decode_instance_num
152 = MAX_DECODE_INSTANCE_NUM
;
153 static unsigned int decode_frame_count
[MAX_DECODE_INSTANCE_NUM
];
154 static unsigned int display_frame_count
[MAX_DECODE_INSTANCE_NUM
];
155 static unsigned int max_process_time
[MAX_DECODE_INSTANCE_NUM
];
156 static unsigned int run_count
[MAX_DECODE_INSTANCE_NUM
];
157 static unsigned int input_empty
[MAX_DECODE_INSTANCE_NUM
];
158 static unsigned int not_run_ready
[MAX_DECODE_INSTANCE_NUM
];
160 static u32 decode_timeout_val
= 200;
161 static int start_decode_buf_level
= 0x8000;
162 static u32 work_buf_size
;
164 static u32 mv_buf_margin
;
166 /* DOUBLE_WRITE_MODE is enabled only when NV21 8 bit output is needed */
167 /* double_write_mode:
168 * 0, no double write;
170 * 2, (1/4):(1/4) ratio;
171 * 3, (1/4):(1/4) ratio, with both compressed frame included
172 * 4, (1/2):(1/2) ratio;
173 * 0x10, double write only
174 * 0x100, if > 1080p,use mode 4,else use mode 1;
175 * 0x200, if > 1080p,use mode 2,else use mode 1;
176 * 0x300, if > 720p, use mode 4, else use mode 1;
178 static u32 double_write_mode
;
180 #define DRIVER_NAME "amvdec_vp9"
181 #define MODULE_NAME "amvdec_vp9"
182 #define DRIVER_HEADER_NAME "amvdec_vp9_header"
185 #define PUT_INTERVAL (HZ/100)
186 #define ERROR_SYSTEM_RESET_COUNT 200
189 #define PTS_NONE_REF_USE_DURATION 1
191 #define PTS_MODE_SWITCHING_THRESHOLD 3
192 #define PTS_MODE_SWITCHING_RECOVERY_THREASHOLD 3
194 #define DUR2PTS(x) ((x)*90/96)
197 static int vvp9_vf_states(struct vframe_states
*states
, void *);
198 static struct vframe_s
*vvp9_vf_peek(void *);
199 static struct vframe_s
*vvp9_vf_get(void *);
200 static void vvp9_vf_put(struct vframe_s
*, void *);
201 static int vvp9_event_cb(int type
, void *data
, void *private_data
);
203 static int vvp9_stop(struct VP9Decoder_s
*pbi
);
204 #ifdef MULTI_INSTANCE_SUPPORT
205 static s32
vvp9_init(struct vdec_s
*vdec
);
207 static s32
vvp9_init(struct VP9Decoder_s
*pbi
);
209 static void vvp9_prot_init(struct VP9Decoder_s
*pbi
, u32 mask
);
210 static int vvp9_local_init(struct VP9Decoder_s
*pbi
);
211 static void vvp9_put_timer_func(unsigned long arg
);
212 static void dump_data(struct VP9Decoder_s
*pbi
, int size
);
213 static unsigned char get_data_check_sum
214 (struct VP9Decoder_s
*pbi
, int size
);
215 static void dump_pic_list(struct VP9Decoder_s
*pbi
);
216 static int vp9_alloc_mmu(
217 struct VP9Decoder_s
*pbi
,
221 unsigned short bit_depth
,
222 unsigned int *mmu_index_adr
);
225 static const char vvp9_dec_id
[] = "vvp9-dev";
227 #define PROVIDER_NAME "decoder.vp9"
228 #define MULTI_INSTANCE_PROVIDER_NAME "vdec.vp9"
230 static const struct vframe_operations_s vvp9_vf_provider
= {
231 .peek
= vvp9_vf_peek
,
234 .event_cb
= vvp9_event_cb
,
235 .vf_states
= vvp9_vf_states
,
238 static struct vframe_provider_s vvp9_vf_prov
;
240 static u32 bit_depth_luma
;
241 static u32 bit_depth_chroma
;
242 static u32 frame_width
;
243 static u32 frame_height
;
244 static u32 video_signal_type
;
246 static u32 on_no_keyframe_skiped
;
248 #define PROB_SIZE (496 * 2 * 4)
249 #define PROB_BUF_SIZE (0x5000)
250 #define COUNT_BUF_SIZE (0x300 * 4 * 4)
251 /*compute_losless_comp_body_size(4096, 2304, 1) = 18874368(0x1200000)*/
252 #define MAX_FRAME_4K_NUM 0x1200
253 #define MAX_FRAME_8K_NUM 0x4800
255 #define HEVC_ASSIST_MMU_MAP_ADDR 0x3009
257 #ifdef SUPPORT_FB_DECODING
258 /* register define */
259 #define HEVC_ASSIST_HED_FB_W_CTL 0x3006
260 #define HEVC_ASSIST_HED_FB_R_CTL 0x3007
261 #define HEVC_ASSIST_HED_FB_ADDR 0x3008
262 #define HEVC_ASSIST_FB_MMU_MAP_ADDR 0x300a
263 #define HEVC_ASSIST_FBD_MMU_MAP_ADDR 0x300b
266 #define MAX_STAGE_PAGE_NUM 0x1200
267 #define STAGE_MMU_MAP_SIZE (MAX_STAGE_PAGE_NUM * 4)
269 static inline int div_r32(int64_t m
, int n
)
286 unsigned int alloc_flag
;
288 unsigned int cma_page_count
;
289 unsigned long alloc_addr
;
290 unsigned long start_adr
;
293 unsigned int free_start_adr
;
294 ulong v4l_ref_buf_addr
;
301 unsigned long start_adr
;
306 /* #undef BUFMGR_ONLY to enable hardware configuration */
308 /*#define TEST_WR_PTR_INC*/
309 /*#define WR_PTR_INC_NUM 128*/
310 #define WR_PTR_INC_NUM 1
314 #undef MEMORY_MAP_IN_REAL_CHIP
316 /*#undef DOS_PROJECT*/
317 /*#define MEMORY_MAP_IN_REAL_CHIP*/
319 /*#define BUFFER_MGR_ONLY*/
320 /*#define CONFIG_HEVC_CLK_FORCED_ON*/
321 /*#define ENABLE_SWAP_TEST*/
324 #define VP9_LPF_LVL_UPDATE
325 /*#define DBG_LF_PRINT*/
329 #define LOSLESS_COMPRESS_MODE
332 #define DOUBLE_WRITE_YSTART_TEMP 0x02000000
333 #define DOUBLE_WRITE_CSTART_TEMP 0x02900000
337 typedef unsigned int u32
;
338 typedef unsigned short u16
;
340 #define VP9_DEBUG_BUFMGR 0x01
341 #define VP9_DEBUG_BUFMGR_MORE 0x02
342 #define VP9_DEBUG_BUFMGR_DETAIL 0x04
343 #define VP9_DEBUG_OUT_PTS 0x10
344 #define VP9_DEBUG_SEND_PARAM_WITH_REG 0x100
345 #define VP9_DEBUG_MERGE 0x200
346 #define VP9_DEBUG_DBG_LF_PRINT 0x400
347 #define VP9_DEBUG_REG 0x800
348 #define VP9_DEBUG_2_STAGE 0x1000
349 #define VP9_DEBUG_2_STAGE_MORE 0x2000
350 #define VP9_DEBUG_QOS_INFO 0x4000
351 #define VP9_DEBUG_DIS_LOC_ERROR_PROC 0x10000
352 #define VP9_DEBUG_DIS_SYS_ERROR_PROC 0x20000
353 #define VP9_DEBUG_DUMP_PIC_LIST 0x40000
354 #define VP9_DEBUG_TRIG_SLICE_SEGMENT_PROC 0x80000
355 #define VP9_DEBUG_NO_TRIGGER_FRAME 0x100000
356 #define VP9_DEBUG_LOAD_UCODE_FROM_FILE 0x200000
357 #define VP9_DEBUG_FORCE_SEND_AGAIN 0x400000
358 #define VP9_DEBUG_DUMP_DATA 0x800000
359 #define VP9_DEBUG_CACHE 0x1000000
360 #define VP9_DEBUG_CACHE_HIT_RATE 0x2000000
361 #define IGNORE_PARAM_FROM_CONFIG 0x8000000
362 #ifdef MULTI_INSTANCE_SUPPORT
363 #define PRINT_FLAG_ERROR 0x0
364 #define PRINT_FLAG_V4L_DETAIL 0x10000000
365 #define PRINT_FLAG_VDEC_STATUS 0x20000000
366 #define PRINT_FLAG_VDEC_DETAIL 0x40000000
367 #define PRINT_FLAG_VDEC_DATA 0x80000000
371 static bool is_reset
;
375 bit 0, enable ucode print
376 bit 1, enable ucode detail print
377 bit [31:16] not 0, pos to dump lmem
378 bit 2, pop bits to lmem
379 bit [11:8], pre-pop bits for alignment (when bit 2 is 1)
381 static u32 udebug_flag
;
383 when udebug_flag[1:0] is not 0
384 udebug_pause_pos not 0,
387 static u32 udebug_pause_pos
;
389 when udebug_flag[1:0] is not 0
390 and udebug_pause_pos is not 0,
391 pause only when DEBUG_REG2 is equal to this val
393 static u32 udebug_pause_val
;
395 static u32 udebug_pause_decode_idx
;
397 static u32 without_display_mode
;
401 void WRITE_VREG_DBG2(unsigned int adr
, unsigned int val
)
403 if (debug
& VP9_DEBUG_REG
)
404 pr_info("%s(%x, %x)\n", __func__
, adr
, val
);
406 WRITE_VREG(adr
, val
);
410 #define WRITE_VREG WRITE_VREG_DBG2
413 #define FRAME_CNT_WINDOW_SIZE 59
414 #define RATE_CORRECTION_THRESHOLD 5
415 /**************************************************
417 VP9 buffer management start
419 ***************************************************/
421 #define MMU_COMPRESS_HEADER_SIZE 0x48000
422 #define MMU_COMPRESS_8K_HEADER_SIZE (0x48000*4)
423 #define MAX_SIZE_8K (8192 * 4608)
424 #define MAX_SIZE_4K (4096 * 2304)
425 #define IS_8K_SIZE(w, h) (((w) * (h)) > MAX_SIZE_4K)
427 #define INVALID_IDX -1 /* Invalid buffer index.*/
429 #define RPM_BEGIN 0x200
430 #define RPM_END 0x280
434 unsigned short data
[RPM_END
- RPM_BEGIN
];
437 /* from ucode lmem, do not change this struct */
438 unsigned short profile
;
439 unsigned short show_existing_frame
;
440 unsigned short frame_to_show_idx
;
441 unsigned short frame_type
; /*1 bit*/
442 unsigned short show_frame
; /*1 bit*/
443 unsigned short error_resilient_mode
; /*1 bit*/
444 unsigned short intra_only
; /*1 bit*/
445 unsigned short display_size_present
; /*1 bit*/
446 unsigned short reset_frame_context
;
447 unsigned short refresh_frame_flags
;
448 unsigned short width
;
449 unsigned short height
;
450 unsigned short display_width
;
451 unsigned short display_height
;
453 *bit[11:8] - ref_frame_info_0 (ref(3-bits), ref_frame_sign_bias(1-bit))
454 *bit[7:4] - ref_frame_info_1 (ref(3-bits), ref_frame_sign_bias(1-bit))
455 *bit[3:0] - ref_frame_info_2 (ref(3-bits), ref_frame_sign_bias(1-bit))
457 unsigned short ref_info
;
459 *bit[2]: same_frame_size0
460 *bit[1]: same_frame_size1
461 *bit[0]: same_frame_size2
463 unsigned short same_frame_size
;
465 unsigned short mode_ref_delta_enabled
;
466 unsigned short ref_deltas
[4];
467 unsigned short mode_deltas
[2];
468 unsigned short filter_level
;
469 unsigned short sharpness_level
;
470 unsigned short bit_depth
;
471 unsigned short seg_quant_info
[8];
472 unsigned short seg_enabled
;
473 unsigned short seg_abs_delta
;
474 /* bit 15: feature enabled; bit 8, sign; bit[5:0], data */
475 unsigned short seg_lf_info
[8];
480 struct vpx_codec_frame_buffer_s
{
481 uint8_t *data
; /**< Pointer to the data buffer */
482 size_t size
; /**< Size of data in bytes */
483 void *priv
; /**< Frame's private data */
486 enum vpx_color_space_t
{
487 VPX_CS_UNKNOWN
= 0, /**< Unknown */
488 VPX_CS_BT_601
= 1, /**< BT.601 */
489 VPX_CS_BT_709
= 2, /**< BT.709 */
490 VPX_CS_SMPTE_170
= 3, /**< SMPTE.170 */
491 VPX_CS_SMPTE_240
= 4, /**< SMPTE.240 */
492 VPX_CS_BT_2020
= 5, /**< BT.2020 */
493 VPX_CS_RESERVED
= 6, /**< Reserved */
494 VPX_CS_SRGB
= 7 /**< sRGB */
495 }; /**< alias for enum vpx_color_space */
497 enum vpx_bit_depth_t
{
498 VPX_BITS_8
= 8, /**< 8 bits */
499 VPX_BITS_10
= 10, /**< 10 bits */
500 VPX_BITS_12
= 12, /**< 12 bits */
503 #define MAX_SLICE_NUM 1024
504 struct PIC_BUFFER_CONFIG_s
{
513 #ifdef MULTI_INSTANCE_SUPPORT
514 struct canvas_config_s canvas_config
[2];
526 unsigned long header_adr
;
527 unsigned long mpred_mv_wr_start_addr
;
528 /*unsigned long mc_y_adr;
529 *unsigned long mc_u_v_adr;
531 unsigned int dw_y_adr
;
532 unsigned int dw_u_v_adr
;
557 uint8_t *alpha_buffer
;
559 uint8_t *buffer_alloc
;
565 unsigned int bit_depth
;
566 enum vpx_color_space_t color_space
;
570 unsigned long cma_alloc_addr
;
572 int double_write_mode
;
574 /* picture qos information*/
588 enum BITSTREAM_PROFILE
{
602 enum REFERENCE_MODE
{
603 SINGLE_REFERENCE
= 0,
604 COMPOUND_REFERENCE
= 1,
605 REFERENCE_MODE_SELECT
= 2,
610 #define INTRA_FRAME 0
612 #define GOLDEN_FRAME 2
613 #define ALTREF_FRAME 3
614 #define MAX_REF_FRAMES 4
616 #define REFS_PER_FRAME 3
618 #define REF_FRAMES_LOG2 3
619 #define REF_FRAMES (1 << REF_FRAMES_LOG2)
620 #define REF_FRAMES_4K (6)
622 /*4 scratch frames for the new frames to support a maximum of 4 cores decoding
623 *in parallel, 3 for scaled references on the encoder.
624 *TODO(hkuang): Add ondemand frame buffers instead of hardcoding the number
625 * // of framebuffers.
626 *TODO(jkoleszar): These 3 extra references could probably come from the
627 *normal reference pool.
629 #define FRAME_BUFFERS (REF_FRAMES + 16)
630 #define HEADER_FRAME_BUFFERS (FRAME_BUFFERS)
631 #define MAX_BUF_NUM (FRAME_BUFFERS)
632 #define MV_BUFFER_NUM FRAME_BUFFERS
633 #ifdef SUPPORT_FB_DECODING
634 #define STAGE_MAX_BUFFERS 16
636 #define STAGE_MAX_BUFFERS 0
639 #define FRAME_CONTEXTS_LOG2 2
640 #define FRAME_CONTEXTS (1 << FRAME_CONTEXTS_LOG2)
641 /*buffer + header buffer + workspace*/
642 #ifdef MV_USE_FIXED_BUF
643 #define MAX_BMMU_BUFFER_NUM (FRAME_BUFFERS + HEADER_FRAME_BUFFERS + 1)
644 #define VF_BUFFER_IDX(n) (n)
645 #define HEADER_BUFFER_IDX(n) (FRAME_BUFFERS + n)
646 #define WORK_SPACE_BUF_ID (FRAME_BUFFERS + HEADER_FRAME_BUFFERS)
648 #define MAX_BMMU_BUFFER_NUM \
649 (FRAME_BUFFERS + HEADER_FRAME_BUFFERS + MV_BUFFER_NUM + 1)
650 #define VF_BUFFER_IDX(n) (n)
651 #define HEADER_BUFFER_IDX(n) (FRAME_BUFFERS + n)
652 #define MV_BUFFER_IDX(n) (FRAME_BUFFERS + HEADER_FRAME_BUFFERS + n)
653 #define WORK_SPACE_BUF_ID \
654 (FRAME_BUFFERS + HEADER_FRAME_BUFFERS + MV_BUFFER_NUM)
657 struct RefCntBuffer_s
{
662 struct vpx_codec_frame_buffer_s raw_frame_buffer
;
663 struct PIC_BUFFER_CONFIG_s buf
;
665 /*The Following variables will only be used in frame parallel decode.
667 *frame_worker_owner indicates which FrameWorker owns this buffer. NULL means
668 *that no FrameWorker owns, or is decoding, this buffer.
669 *VP9Worker *frame_worker_owner;
671 *row and col indicate which position frame has been decoded to in real
672 *pixel unit. They are reset to -1 when decoding begins and set to INT_MAX
673 *when the frame is fully decoded.
680 /*TODO(dkovalev): idx is not really required and should be removed, now it
681 *is used in vp9_onyxd_if.c
684 struct PIC_BUFFER_CONFIG_s
*buf
;
685 /*struct scale_factors sf;*/
688 struct InternalFrameBuffer_s
{
692 } InternalFrameBuffer
;
694 struct InternalFrameBufferList_s
{
695 int num_internal_frame_buffers
;
696 struct InternalFrameBuffer_s
*int_fb
;
697 } InternalFrameBufferList
;
699 struct BufferPool_s
{
700 /*Protect BufferPool from being accessed by several FrameWorkers at
701 *the same time during frame parallel decode.
702 *TODO(hkuang): Try to use atomic variable instead of locking the whole pool.
704 *Private data associated with the frame buffer callbacks.
707 *vpx_get_frame_buffer_cb_fn_t get_fb_cb;
708 *vpx_release_frame_buffer_cb_fn_t release_fb_cb;
711 struct RefCntBuffer_s frame_bufs
[FRAME_BUFFERS
];
713 /*Frame buffers allocated internally by the codec.*/
714 struct InternalFrameBufferList_s int_frame_buffers
;
720 #define lock_buffer_pool(pool, flags) \
721 spin_lock_irqsave(&pool->lock, flags)
723 #define unlock_buffer_pool(pool, flags) \
724 spin_unlock_irqrestore(&pool->lock, flags)
726 struct VP9_Common_s
{
727 enum vpx_color_space_t color_space
;
738 int use_highbitdepth
;/*Marks if we need to use 16bit frame buffers.*/
740 struct PIC_BUFFER_CONFIG_s
*frame_to_show
;
741 struct RefCntBuffer_s
*prev_frame
;
743 /*TODO(hkuang): Combine this with cur_buf in macroblockd.*/
744 struct RefCntBuffer_s
*cur_frame
;
746 int ref_frame_map
[REF_FRAMES
]; /* maps fb_idx to reference slot */
748 /*Prepare ref_frame_map for the next frame.
749 *Only used in frame parallel decode.
751 int next_ref_frame_map
[REF_FRAMES
];
753 /* TODO(jkoleszar): could expand active_ref_idx to 4,
754 *with 0 as intra, and roll new_fb_idx into it.
757 /*Each frame can reference REFS_PER_FRAME buffers*/
758 struct RefBuffer_s frame_refs
[REFS_PER_FRAME
];
763 /*last frame's frame type for motion search*/
764 enum FRAME_TYPE last_frame_type
;
765 enum FRAME_TYPE frame_type
;
769 int show_existing_frame
;
771 /*Flag signaling that the frame is encoded using only INTRA modes.*/
773 uint8_t last_intra_only
;
775 int allow_high_precision_mv
;
777 /*Flag signaling that the frame context should be reset to default
778 *values. 0 or 1 implies don't reset, 2 reset just the context
779 *specified in the frame header, 3 reset all contexts.
781 int reset_frame_context
;
783 /*MBs, mb_rows/cols is in 16-pixel units; mi_rows/cols is in
784 * MODE_INFO (8-pixel) units.
787 int mb_rows
, mi_rows
;
788 int mb_cols
, mi_cols
;
791 /*Whether to use previous frame's motion vectors for prediction.*/
792 int use_prev_frame_mvs
;
794 int refresh_frame_context
; /* Two state 0 = NO, 1 = YES */
796 int ref_frame_sign_bias
[MAX_REF_FRAMES
]; /* Two state 0, 1 */
798 /*struct loopfilter lf;*/
799 /*struct segmentation seg;*/
801 /*TODO(hkuang):Remove this as it is the same as frame_parallel_decode*/
803 int frame_parallel_decode
; /* frame-based threading.*/
805 /*Context probabilities for reference frame prediction*/
806 /*MV_REFERENCE_FRAME comp_fixed_ref;*/
807 /*MV_REFERENCE_FRAME comp_var_ref[2];*/
808 enum REFERENCE_MODE reference_mode
;
810 /*FRAME_CONTEXT *fc; */ /* this frame entropy */
811 /*FRAME_CONTEXT *frame_contexts; */ /*FRAME_CONTEXTS*/
812 /*unsigned int frame_context_idx; *//* Context to use/update */
813 /*FRAME_COUNTS counts;*/
815 unsigned int current_video_frame
;
816 enum BITSTREAM_PROFILE profile
;
818 enum vpx_bit_depth_t bit_depth
;
820 int error_resilient_mode
;
821 int frame_parallel_decoding_mode
;
824 int skip_loop_filter
;
826 /*External BufferPool passed from outside.*/
827 struct BufferPool_s
*buffer_pool
;
829 int above_context_alloc_cols
;
833 static void set_canvas(struct VP9Decoder_s
*pbi
,
834 struct PIC_BUFFER_CONFIG_s
*pic_config
);
835 static int prepare_display_buf(struct VP9Decoder_s
*pbi
,
836 struct PIC_BUFFER_CONFIG_s
*pic_config
);
838 static void fill_frame_info(struct VP9Decoder_s
*pbi
,
839 struct PIC_BUFFER_CONFIG_s
*frame
,
840 unsigned int framesize
,
843 static struct PIC_BUFFER_CONFIG_s
*get_frame_new_buffer(struct VP9_Common_s
*cm
)
845 return &cm
->buffer_pool
->frame_bufs
[cm
->new_fb_idx
].buf
;
848 static void ref_cnt_fb(struct RefCntBuffer_s
*bufs
, int *idx
, int new_idx
)
850 const int ref_index
= *idx
;
852 if (ref_index
>= 0 && bufs
[ref_index
].ref_count
> 0) {
853 bufs
[ref_index
].ref_count
--;
854 /*pr_info("[MMU DEBUG 2] dec ref_count[%d] : %d\r\n",
855 * ref_index, bufs[ref_index].ref_count);
861 bufs
[new_idx
].ref_count
++;
862 /*pr_info("[MMU DEBUG 3] inc ref_count[%d] : %d\r\n",
863 * new_idx, bufs[new_idx].ref_count);
867 int vp9_release_frame_buffer(struct vpx_codec_frame_buffer_s
*fb
)
869 struct InternalFrameBuffer_s
*const int_fb
=
870 (struct InternalFrameBuffer_s
*)fb
->priv
;
876 static int compute_losless_comp_body_size(int width
, int height
,
877 uint8_t is_bit_depth_10
);
879 static void setup_display_size(struct VP9_Common_s
*cm
, union param_u
*params
,
880 int print_header_info
)
882 cm
->display_width
= cm
->width
;
883 cm
->display_height
= cm
->height
;
884 if (params
->p
.display_size_present
) {
885 if (print_header_info
)
886 pr_info(" * 1-bit display_size_present read : 1\n");
887 cm
->display_width
= params
->p
.display_width
;
888 cm
->display_height
= params
->p
.display_height
;
889 /*vp9_read_frame_size(rb, &cm->display_width,
890 * &cm->display_height);
893 if (print_header_info
)
894 pr_info(" * 1-bit display_size_present read : 0\n");
899 uint8_t print_header_info
= 0;
913 struct buff_s sao_abv
;
914 struct buff_s sao_vb
;
915 struct buff_s short_term_rps
;
919 struct buff_s sao_up
;
920 struct buff_s swap_buf
;
921 struct buff_s swap_buf2
;
922 struct buff_s scalelut
;
923 struct buff_s dblk_para
;
924 struct buff_s dblk_data
;
925 struct buff_s seg_map
;
926 struct buff_s mmu_vbh
;
927 struct buff_s cm_header
;
928 struct buff_s mpred_above
;
929 #ifdef MV_USE_FIXED_BUF
930 struct buff_s mpred_mv
;
935 #ifdef MULTI_INSTANCE_SUPPORT
936 #define DEC_RESULT_NONE 0
937 #define DEC_RESULT_DONE 1
938 #define DEC_RESULT_AGAIN 2
939 #define DEC_RESULT_CONFIG_PARAM 3
940 #define DEC_RESULT_ERROR 4
941 #define DEC_INIT_PICLIST 5
942 #define DEC_UNINIT_PICLIST 6
943 #define DEC_RESULT_GET_DATA 7
944 #define DEC_RESULT_GET_DATA_RETRY 8
945 #define DEC_RESULT_EOS 9
946 #define DEC_RESULT_FORCE_EXIT 10
947 #define DEC_V4L2_CONTINUE_DECODING 18
949 #define DEC_S1_RESULT_NONE 0
950 #define DEC_S1_RESULT_DONE 1
951 #define DEC_S1_RESULT_FORCE_EXIT 2
952 #define DEC_S1_RESULT_TEST_TRIGGER_DONE 0xf0
954 #ifdef FB_DECODING_TEST_SCHEDULE
955 #define TEST_SET_NONE 0
956 #define TEST_SET_PIC_DONE 1
957 #define TEST_SET_S2_DONE 2
960 static void vp9_work(struct work_struct
*work
);
962 struct loop_filter_info_n
;
966 #ifdef SUPPORT_FB_DECODING
967 static void mpred_process(struct VP9Decoder_s
*pbi
);
968 static void vp9_s1_work(struct work_struct
*work
);
972 unsigned short rpm
[RPM_END
- RPM_BEGIN
];
975 static unsigned int not_run2_ready
[MAX_DECODE_INSTANCE_NUM
];
977 static unsigned int run2_count
[MAX_DECODE_INSTANCE_NUM
];
979 #ifdef FB_DECODING_TEST_SCHEDULE
980 u32 stage_buf_num
; /* = 16;*/
986 struct VP9Decoder_s
{
987 #ifdef MULTI_INSTANCE_SUPPORT
990 struct device
*cma_dev
;
991 struct platform_device
*platform_dev
;
992 void (*vdec_cb
)(struct vdec_s
*, void *);
994 struct vframe_chunk_s
*chunk
;
996 struct work_struct work
;
997 struct work_struct set_clk_work
;
998 u32 start_shift_bytes
;
1000 struct BuffInfo_s work_space_buf_store
;
1001 unsigned long buf_start
;
1003 u32 cma_alloc_count
;
1004 unsigned long cma_alloc_addr
;
1006 unsigned long int start_process_time
;
1007 unsigned last_lcu_idx
;
1008 int decode_timeout_count
;
1009 unsigned timeout_num
;
1010 int save_buffer_mode
;
1012 int double_write_mode
;
1016 unsigned char m_ins_flag
;
1017 char *provider_name
;
1018 union param_u param
;
1022 struct timer_list timer
;
1027 uint8_t first_sc_checked
;
1028 uint8_t process_busy
;
1029 #define PROC_STATE_INIT 0
1030 #define PROC_STATE_DECODESLICE 1
1031 #define PROC_STATE_SENDAGAIN 2
1032 uint8_t process_state
;
1033 u32 ucode_pause_pos
;
1036 struct buff_s mc_buf_spec
;
1037 struct dec_sysinfo vvp9_amstream_dec_info
;
1040 dma_addr_t rpm_phy_addr
;
1041 dma_addr_t lmem_phy_addr
;
1042 unsigned short *lmem_ptr
;
1043 unsigned short *debug_ptr
;
1045 void *prob_buffer_addr
;
1046 void *count_buffer_addr
;
1047 dma_addr_t prob_buffer_phy_addr
;
1048 dma_addr_t count_buffer_phy_addr
;
1050 void *frame_mmu_map_addr
;
1051 dma_addr_t frame_mmu_map_phy_addr
;
1053 unsigned int use_cma_flag
;
1055 struct BUF_s m_BUF
[MAX_BUF_NUM
];
1056 struct MVBUF_s m_mv_BUF
[MV_BUFFER_NUM
];
1058 DECLARE_KFIFO(newframe_q
, struct vframe_s
*, VF_POOL_SIZE
);
1059 DECLARE_KFIFO(display_q
, struct vframe_s
*, VF_POOL_SIZE
);
1060 DECLARE_KFIFO(pending_q
, struct vframe_s
*, VF_POOL_SIZE
);
1061 struct vframe_s vfpool
[VF_POOL_SIZE
];
1068 unsigned int losless_comp_body_size
;
1070 u32 video_signal_type
;
1073 int last_lookup_pts
;
1075 u64 last_lookup_pts_us64
;
1077 u64 shift_byte_count
;
1080 u32 frame_cnt_window
;
1083 u32 duration_from_pts_done
;
1084 bool vp9_first_pts_ready
;
1086 u32 shift_byte_count_lo
;
1087 u32 shift_byte_count_hi
;
1088 int pts_mode_switching_count
;
1089 int pts_mode_recovery_count
;
1092 u32 saved_resolution
;
1095 struct VP9_Common_s common
;
1096 struct RefCntBuffer_s
*cur_buf
;
1097 int refresh_frame_flags
;
1098 uint8_t need_resync
;
1099 uint8_t hold_ref_buf
;
1100 uint8_t ready_for_new_data
;
1101 struct BufferPool_s vp9_buffer_pool
;
1103 struct BuffInfo_s
*work_space_buf
;
1105 struct buff_s
*mc_buf
;
1107 unsigned int frame_width
;
1108 unsigned int frame_height
;
1110 unsigned short *rpm_ptr
;
1121 uint8_t has_keyframe
;
1125 /* bit 0, for decoding; bit 1, for displaying */
1126 uint8_t ignore_bufmgr_error
;
1128 int PB_skip_count_after_decoding
;
1132 int default_filt_lvl
;
1133 struct loop_filter_info_n
*lfi
;
1134 struct loopfilter
*lf
;
1135 struct segmentation
*seg_4lf
;
1137 struct vdec_info
*gvs
;
1139 u32 pre_stream_offset
;
1141 unsigned int dec_status
;
1143 int new_frame_displayed
;
1147 struct vframe_master_display_colour_s vf_dp
;
1148 struct firmware_s
*fw
;
1151 #ifdef SUPPORT_FB_DECODING
1154 struct work_struct s1_work
;
1155 int used_stage_buf_num
;
1158 void *stage_mmu_map_addr
;
1159 dma_addr_t stage_mmu_map_phy_addr
;
1160 struct stage_buf_s
*s1_buf
;
1161 struct stage_buf_s
*s2_buf
;
1162 struct stage_buf_s
*stage_bufs
1163 [STAGE_MAX_BUFFERS
];
1164 unsigned char run2_busy
;
1166 int s1_mv_buf_index
;
1167 int s1_mv_buf_index_pre
;
1168 int s1_mv_buf_index_pre_pre
;
1169 unsigned long s1_mpred_mv_wr_start_addr
;
1170 unsigned long s1_mpred_mv_wr_start_addr_pre
;
1171 unsigned short s1_intra_only
;
1172 unsigned short s1_frame_type
;
1173 unsigned short s1_width
;
1174 unsigned short s1_height
;
1175 unsigned short s1_last_show_frame
;
1176 union param_u s1_param
;
1177 u8 back_not_run_ready
;
1179 int need_cache_size
;
1182 int low_latency_flag
;
1184 bool pic_list_init_done
;
1185 bool pic_list_init_done2
;
1188 bool v4l_params_parsed
;
1189 int frameinfo_enable
;
1190 struct vframe_qos_s vframe_qos
;
1192 u32 dynamic_buf_num_margin
;
1193 struct vframe_s vframe_dummy
;
1196 static int vp9_print(struct VP9Decoder_s
*pbi
,
1197 int flag
, const char *fmt
, ...)
1199 #define HEVC_PRINT_BUF 256
1200 unsigned char buf
[HEVC_PRINT_BUF
];
1208 va_start(args
, fmt
);
1210 len
= sprintf(buf
, "[%d]", pbi
->index
);
1211 vsnprintf(buf
+ len
, HEVC_PRINT_BUF
- len
, fmt
, args
);
1212 pr_debug("%s", buf
);
1218 static int is_oversize(int w
, int h
)
1220 int max
= (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1
)?
1221 MAX_SIZE_8K
: MAX_SIZE_4K
;
1223 if (w
<= 0 || h
<= 0)
1226 if (h
!= 0 && (w
> max
/ h
))
1232 static int v4l_alloc_and_config_pic(struct VP9Decoder_s
*pbi
,
1233 struct PIC_BUFFER_CONFIG_s
*pic
);
1235 static void resize_context_buffers(struct VP9Decoder_s
*pbi
,
1236 struct VP9_Common_s
*cm
, int width
, int height
)
1238 if (cm
->width
!= width
|| cm
->height
!= height
) {
1241 pbi
->vp9_first_pts_ready
= 0;
1242 pbi
->duration_from_pts_done
= 0;
1244 pr_info("%s (%d,%d)=>(%d,%d)\r\n", __func__
, cm
->width
,
1245 cm
->height
, width
, height
);
1247 cm
->height
= height
;
1250 *if (cm->cur_frame->mvs == NULL ||
1251 * cm->mi_rows > cm->cur_frame->mi_rows ||
1252 * cm->mi_cols > cm->cur_frame->mi_cols) {
1253 * resize_mv_buffer(cm);
/* A reference frame is usable for scaled prediction only when the
 * current frame is no more than 2x smaller and no more than 16x larger
 * than it in each dimension (VP9 scaling limits).  Returns 1 when the
 * pair of sizes is compatible, 0 otherwise.
 */
static int valid_ref_frame_size(int ref_width, int ref_height,
	int this_width, int this_height) {
	if (2 * this_width < ref_width)
		return 0;
	if (2 * this_height < ref_height)
		return 0;
	if (this_width > 16 * ref_width)
		return 0;
	return this_height <= 16 * ref_height;
}
1267 *static int valid_ref_frame_img_fmt(enum vpx_bit_depth_t ref_bit_depth,
1268 * int ref_xss, int ref_yss,
1269 * enum vpx_bit_depth_t this_bit_depth,
1270 * int this_xss, int this_yss) {
1271 * return ref_bit_depth == this_bit_depth && ref_xss == this_xss &&
1272 * ref_yss == this_yss;
1277 static int setup_frame_size(
1278 struct VP9Decoder_s
*pbi
,
1279 struct VP9_Common_s
*cm
, union param_u
*params
,
1280 unsigned int *mmu_index_adr
,
1281 int print_header_info
) {
1283 struct BufferPool_s
* const pool
= cm
->buffer_pool
;
1284 struct PIC_BUFFER_CONFIG_s
*ybf
;
1287 width
= params
->p
.width
;
1288 height
= params
->p
.height
;
1289 if (is_oversize(width
, height
)) {
1290 vp9_print(pbi
, 0, "%s, Error: Invalid frame size\n", __func__
);
1294 /*vp9_read_frame_size(rb, &width, &height);*/
1295 if (print_header_info
)
1296 pr_info(" * 16-bits w read : %d (width : %d)\n", width
, height
);
1297 if (print_header_info
)
1299 (" * 16-bits h read : %d (height : %d)\n", width
, height
);
1301 WRITE_VREG(HEVC_PARSER_PICTURE_SIZE
, (height
<< 16) | width
);
1302 #ifdef VP9_10B_HED_FB
1303 WRITE_VREG(HEVC_ASSIST_PIC_SIZE_FB_READ
, (height
<< 16) | width
);
1305 if (pbi
->mmu_enable
&& ((pbi
->double_write_mode
& 0x10) == 0)) {
1306 ret
= vp9_alloc_mmu(pbi
,
1310 params
->p
.bit_depth
,
1313 pr_err("can't alloc need mmu1,idx %d ret =%d\n",
1318 cm
->cur_fb_idx_mmu
= cm
->new_fb_idx
;
1321 resize_context_buffers(pbi
, cm
, width
, height
);
1322 setup_display_size(cm
, params
, print_header_info
);
1324 lock_buffer_pool(pool
);
1325 if (vp9_realloc_frame_buffer(
1326 get_frame_new_buffer(cm
), cm
->width
, cm
->height
,
1327 cm
->subsampling_x
, cm
->subsampling_y
,
1328 #if CONFIG_VP9_HIGHBITDEPTH
1329 cm
->use_highbitdepth
,
1331 VP9_DEC_BORDER_IN_PIXELS
,
1333 &pool
->frame_bufs
[cm
->new_fb_idx
].raw_frame_buffer
,
1334 pool
->get_fb_cb
, pool
->cb_priv
)) {
1335 unlock_buffer_pool(pool
);
1336 vpx_internal_error(&cm
->error
, VPX_CODEC_MEM_ERROR
,
1337 "Failed to allocate frame buffer");
1339 unlock_buffer_pool(pool
);
1342 ybf
= get_frame_new_buffer(cm
);
1346 ybf
->y_crop_width
= width
;
1347 ybf
->y_crop_height
= height
;
1348 ybf
->bit_depth
= params
->p
.bit_depth
;
1350 pool
->frame_bufs
[cm
->new_fb_idx
].buf
.subsampling_x
= cm
->subsampling_x
;
1351 pool
->frame_bufs
[cm
->new_fb_idx
].buf
.subsampling_y
= cm
->subsampling_y
;
1352 pool
->frame_bufs
[cm
->new_fb_idx
].buf
.bit_depth
=
1353 (unsigned int)cm
->bit_depth
;
1354 pool
->frame_bufs
[cm
->new_fb_idx
].buf
.color_space
= cm
->color_space
;
1358 static int setup_frame_size_with_refs(
1359 struct VP9Decoder_s
*pbi
,
1360 struct VP9_Common_s
*cm
,
1361 union param_u
*params
,
1362 unsigned int *mmu_index_adr
,
1363 int print_header_info
) {
1367 int has_valid_ref_frame
= 0;
1368 struct PIC_BUFFER_CONFIG_s
*ybf
;
1369 struct BufferPool_s
* const pool
= cm
->buffer_pool
;
1372 for (i
= 0; i
< REFS_PER_FRAME
; ++i
) {
1373 if ((params
->p
.same_frame_size
>>
1374 (REFS_PER_FRAME
- i
- 1)) & 0x1) {
1375 struct PIC_BUFFER_CONFIG_s
*const buf
=
1376 cm
->frame_refs
[i
].buf
;
1377 /*if (print_header_info)
1379 * ("1-bit same_frame_size[%d] read : 1\n", i);
1381 width
= buf
->y_crop_width
;
1382 height
= buf
->y_crop_height
;
1383 /*if (print_header_info)
1385 * (" - same_frame_size width : %d\n", width);
1387 /*if (print_header_info)
1389 * (" - same_frame_size height : %d\n", height);
1394 /*if (print_header_info)
1396 * ("1-bit same_frame_size[%d] read : 0\n", i);
1402 /*vp9_read_frame_size(rb, &width, &height);*/
1403 width
= params
->p
.width
;
1404 height
= params
->p
.height
;
1405 /*if (print_header_info)
1407 * (" * 16-bits w read : %d (width : %d)\n",
1409 *if (print_header_info)
1411 * (" * 16-bits h read : %d (height : %d)\n",
1416 if (is_oversize(width
, height
)) {
1417 vp9_print(pbi
, 0, "%s, Error: Invalid frame size\n", __func__
);
1421 params
->p
.width
= width
;
1422 params
->p
.height
= height
;
1424 WRITE_VREG(HEVC_PARSER_PICTURE_SIZE
, (height
<< 16) | width
);
1425 if (pbi
->mmu_enable
&& ((pbi
->double_write_mode
& 0x10) == 0)) {
1426 /*if(cm->prev_fb_idx >= 0) release_unused_4k(cm->prev_fb_idx);
1427 *cm->prev_fb_idx = cm->new_fb_idx;
1430 * ("[DEBUG DEBUG]Before alloc_mmu,
1431 * prev_fb_idx : %d, new_fb_idx : %d\r\n",
1432 * cm->prev_fb_idx, cm->new_fb_idx);
1434 ret
= vp9_alloc_mmu(pbi
, cm
->new_fb_idx
,
1435 params
->p
.width
, params
->p
.height
,
1436 params
->p
.bit_depth
, mmu_index_adr
);
1438 pr_err("can't alloc need mmu,idx %d\r\n",
1442 cm
->cur_fb_idx_mmu
= cm
->new_fb_idx
;
1445 /*Check to make sure at least one of frames that this frame references
1446 *has valid dimensions.
1448 for (i
= 0; i
< REFS_PER_FRAME
; ++i
) {
1449 struct RefBuffer_s
* const ref_frame
= &cm
->frame_refs
[i
];
1451 has_valid_ref_frame
|=
1452 valid_ref_frame_size(ref_frame
->buf
->y_crop_width
,
1453 ref_frame
->buf
->y_crop_height
,
1456 if (!has_valid_ref_frame
) {
1457 pr_err("Error: Referenced frame has invalid size\r\n");
1461 for (i
= 0; i
< REFS_PER_FRAME
; ++i
) {
1462 struct RefBuffer_s
* const ref_frame
=
1464 if (!valid_ref_frame_img_fmt(
1465 ref_frame
->buf
->bit_depth
,
1466 ref_frame
->buf
->subsampling_x
,
1467 ref_frame
->buf
->subsampling_y
,
1472 ("Referenced frame incompatible color fmt\r\n");
1476 resize_context_buffers(pbi
, cm
, width
, height
);
1477 setup_display_size(cm
, params
, print_header_info
);
1480 lock_buffer_pool(pool
);
1481 if (vp9_realloc_frame_buffer(
1482 get_frame_new_buffer(cm
), cm
->width
, cm
->height
,
1483 cm
->subsampling_x
, cm
->subsampling_y
,
1484 #if CONFIG_VP9_HIGHBITDEPTH
1485 cm
->use_highbitdepth
,
1487 VP9_DEC_BORDER_IN_PIXELS
,
1489 &pool
->frame_bufs
[cm
->new_fb_idx
].raw_frame_buffer
,
1492 unlock_buffer_pool(pool
);
1493 vpx_internal_error(&cm
->error
, VPX_CODEC_MEM_ERROR
,
1494 "Failed to allocate frame buffer");
1496 unlock_buffer_pool(pool
);
1499 ybf
= get_frame_new_buffer(cm
);
1503 ybf
->y_crop_width
= width
;
1504 ybf
->y_crop_height
= height
;
1505 ybf
->bit_depth
= params
->p
.bit_depth
;
1507 pool
->frame_bufs
[cm
->new_fb_idx
].buf
.subsampling_x
= cm
->subsampling_x
;
1508 pool
->frame_bufs
[cm
->new_fb_idx
].buf
.subsampling_y
= cm
->subsampling_y
;
1509 pool
->frame_bufs
[cm
->new_fb_idx
].buf
.bit_depth
=
1510 (unsigned int)cm
->bit_depth
;
1511 pool
->frame_bufs
[cm
->new_fb_idx
].buf
.color_space
= cm
->color_space
;
/*
 * close_to() - test whether two integers are within a given margin.
 * @a: first value
 * @b: second value
 * @m: margin (exclusive upper bound on the absolute difference)
 *
 * Returns true when |a - b| < m. The original spelled this as
 * "(...) ? true : false", which is redundant for a boolean expression.
 */
static inline bool close_to(int a, int b, int m)
{
	return abs(a - b) < m;
}
1520 #ifdef MULTI_INSTANCE_SUPPORT
1521 static int vp9_print_cont(struct VP9Decoder_s
*pbi
,
1522 int flag
, const char *fmt
, ...)
1524 unsigned char buf
[HEVC_PRINT_BUF
];
1532 va_start(args
, fmt
);
1533 vsnprintf(buf
+ len
, HEVC_PRINT_BUF
- len
, fmt
, args
);
1534 pr_debug("%s", buf
);
1540 static void trigger_schedule(struct VP9Decoder_s
*pbi
)
1542 if (pbi
->is_used_v4l
) {
1543 struct aml_vcodec_ctx
*ctx
=
1544 (struct aml_vcodec_ctx
*)(pbi
->v4l2_ctx
);
1546 if (ctx
->param_sets_from_ucode
&&
1547 !pbi
->v4l_params_parsed
)
1548 vdec_v4l_write_frame_sync(ctx
);
1552 pbi
->vdec_cb(hw_to_vdec(pbi
), pbi
->vdec_cb_arg
);
1555 static void reset_process_time(struct VP9Decoder_s
*pbi
)
1557 if (pbi
->start_process_time
) {
1558 unsigned process_time
=
1559 1000 * (jiffies
- pbi
->start_process_time
) / HZ
;
1560 pbi
->start_process_time
= 0;
1561 if (process_time
> max_process_time
[pbi
->index
])
1562 max_process_time
[pbi
->index
] = process_time
;
1566 static void start_process_time(struct VP9Decoder_s
*pbi
)
1568 pbi
->start_process_time
= jiffies
;
1569 pbi
->decode_timeout_count
= 0;
1570 pbi
->last_lcu_idx
= 0;
1573 static void timeout_process(struct VP9Decoder_s
*pbi
)
1578 0, "%s decoder timeout\n", __func__
);
1580 pbi
->dec_result
= DEC_RESULT_DONE
;
1581 reset_process_time(pbi
);
1582 vdec_schedule_work(&pbi
->work
);
1585 static u32
get_valid_double_write_mode(struct VP9Decoder_s
*pbi
)
1587 return ((double_write_mode
& 0x80000000) == 0) ?
1588 pbi
->double_write_mode
:
1589 (double_write_mode
& 0x7fffffff);
1592 static int get_double_write_mode(struct VP9Decoder_s
*pbi
)
1594 u32 valid_dw_mode
= get_valid_double_write_mode(pbi
);
1597 struct VP9_Common_s
*cm
= &pbi
->common
;
1598 struct PIC_BUFFER_CONFIG_s
*cur_pic_config
;
1600 /* mask for supporting double write value bigger than 0x100 */
1601 if (valid_dw_mode
& 0xffffff00) {
1603 return 1;/*no valid frame,*/
1604 cur_pic_config
= &cm
->cur_frame
->buf
;
1605 w
= cur_pic_config
->y_crop_width
;
1606 h
= cur_pic_config
->y_crop_height
;
1609 switch (valid_dw_mode
) {
1611 if (w
> 1920 && h
> 1088)
1615 if (w
> 1920 && h
> 1088)
1619 if (w
> 1280 && h
> 720)
1628 return valid_dw_mode
;
1631 /* for double write buf alloc */
1632 static int get_double_write_mode_init(struct VP9Decoder_s
*pbi
)
1634 u32 valid_dw_mode
= get_valid_double_write_mode(pbi
);
1636 int w
= pbi
->init_pic_w
;
1637 int h
= pbi
->init_pic_h
;
1640 switch (valid_dw_mode
) {
1642 if (w
> 1920 && h
> 1088)
1646 if (w
> 1920 && h
> 1088)
1650 if (w
> 1280 && h
> 720)
1661 static int get_double_write_ratio(struct VP9Decoder_s
*pbi
,
1665 if ((dw_mode
== 2) ||
1668 else if (dw_mode
== 4)
1673 //#define MAX_4K_NUM 0x1200
1676 struct VP9Decoder_s
*pbi
,
1680 unsigned short bit_depth
,
1681 unsigned int *mmu_index_adr
)
1683 int bit_depth_10
= (bit_depth
== VPX_BITS_10
);
1685 int cur_mmu_4k_number
, max_frame_num
;
1686 if (!pbi
->mmu_box
) {
1687 pr_err("error no mmu box!\n");
1690 if (pbi
->double_write_mode
& 0x10)
1692 if (bit_depth
>= VPX_BITS_12
) {
1693 pbi
->fatal_error
= DECODER_FATAL_ERROR_SIZE_OVERFLOW
;
1694 pr_err("fatal_error, un support bit depth 12!\n\n");
1697 picture_size
= compute_losless_comp_body_size(pic_width
, pic_height
,
1699 cur_mmu_4k_number
= ((picture_size
+ (1 << 12) - 1) >> 12);
1701 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1
)
1702 max_frame_num
= MAX_FRAME_8K_NUM
;
1704 max_frame_num
= MAX_FRAME_4K_NUM
;
1706 if (cur_mmu_4k_number
> max_frame_num
) {
1707 pr_err("over max !! cur_mmu_4k_number 0x%x width %d height %d\n",
1708 cur_mmu_4k_number
, pic_width
, pic_height
);
1711 return decoder_mmu_box_alloc_idx(
1719 #ifndef MV_USE_FIXED_BUF
1720 static void dealloc_mv_bufs(struct VP9Decoder_s
*pbi
)
1723 for (i
= 0; i
< MV_BUFFER_NUM
; i
++) {
1724 if (pbi
->m_mv_BUF
[i
].start_adr
) {
1727 "dealloc mv buf(%d) adr %ld size 0x%x used_flag %d\n",
1728 i
, pbi
->m_mv_BUF
[i
].start_adr
,
1729 pbi
->m_mv_BUF
[i
].size
,
1730 pbi
->m_mv_BUF
[i
].used_flag
);
1731 decoder_bmmu_box_free_idx(
1734 pbi
->m_mv_BUF
[i
].start_adr
= 0;
1735 pbi
->m_mv_BUF
[i
].size
= 0;
1736 pbi
->m_mv_BUF
[i
].used_flag
= 0;
1741 static int alloc_mv_buf(struct VP9Decoder_s
*pbi
,
1746 if (pbi
->m_mv_BUF
[i
].start_adr
&&
1747 size
> pbi
->m_mv_BUF
[i
].size
) {
1748 dealloc_mv_bufs(pbi
);
1749 } else if (pbi
->m_mv_BUF
[i
].start_adr
)
1752 if (decoder_bmmu_box_alloc_buf_phy
1754 MV_BUFFER_IDX(i
), size
,
1756 &pbi
->m_mv_BUF
[i
].start_adr
) < 0) {
1757 pbi
->m_mv_BUF
[i
].start_adr
= 0;
1760 pbi
->m_mv_BUF
[i
].size
= size
;
1761 pbi
->m_mv_BUF
[i
].used_flag
= 0;
1765 "MV Buffer %d: start_adr %p size %x\n",
1767 (void *)pbi
->m_mv_BUF
[i
].start_adr
,
1768 pbi
->m_mv_BUF
[i
].size
);
1774 static int init_mv_buf_list(struct VP9Decoder_s
*pbi
)
1778 int count
= MV_BUFFER_NUM
;
1779 int pic_width
= pbi
->init_pic_w
;
1780 int pic_height
= pbi
->init_pic_h
;
1781 int lcu_size
= 64; /*fixed 64*/
1782 int pic_width_64
= (pic_width
+ 63) & (~0x3f);
1783 int pic_height_32
= (pic_height
+ 31) & (~0x1f);
1784 int pic_width_lcu
= (pic_width_64
% lcu_size
) ?
1785 pic_width_64
/ lcu_size
+ 1
1786 : pic_width_64
/ lcu_size
;
1787 int pic_height_lcu
= (pic_height_32
% lcu_size
) ?
1788 pic_height_32
/ lcu_size
+ 1
1789 : pic_height_32
/ lcu_size
;
1790 int lcu_total
= pic_width_lcu
* pic_height_lcu
;
1791 int size
= ((lcu_total
* MV_MEM_UNIT
) + 0xffff) &
1793 if (mv_buf_margin
> 0)
1794 count
= REF_FRAMES
+ mv_buf_margin
;
1796 if (pbi
->init_pic_w
> 2048 && pbi
->init_pic_h
> 1088)
1797 count
= REF_FRAMES_4K
+ mv_buf_margin
;
1800 pr_info("%s w:%d, h:%d, count: %d\n",
1801 __func__
, pbi
->init_pic_w
, pbi
->init_pic_h
, count
);
1805 i
< count
&& i
< MV_BUFFER_NUM
; i
++) {
1806 if (alloc_mv_buf(pbi
, i
, size
) < 0) {
1814 static int get_mv_buf(struct VP9Decoder_s
*pbi
,
1816 unsigned long *mpred_mv_wr_start_addr
)
1820 for (i
= 0; i
< MV_BUFFER_NUM
; i
++) {
1821 if (pbi
->m_mv_BUF
[i
].start_adr
&&
1822 pbi
->m_mv_BUF
[i
].used_flag
== 0) {
1823 pbi
->m_mv_BUF
[i
].used_flag
= 1;
1830 *mv_buf_index
= ret
;
1831 *mpred_mv_wr_start_addr
=
1832 (pbi
->m_mv_BUF
[ret
].start_adr
+ 0xffff) &
1834 if (debug
& VP9_DEBUG_BUFMGR_MORE
)
1836 "%s => %d (%ld) size 0x%x\n",
1838 *mpred_mv_wr_start_addr
,
1839 pbi
->m_mv_BUF
[ret
].size
);
1842 "%s: Error, mv buf is not enough\n",
1848 static void put_mv_buf(struct VP9Decoder_s
*pbi
,
1851 int i
= *mv_buf_index
;
1852 if (i
>= MV_BUFFER_NUM
) {
1853 if (debug
& VP9_DEBUG_BUFMGR_MORE
)
1855 "%s: index %d beyond range\n",
1859 if (debug
& VP9_DEBUG_BUFMGR_MORE
)
1861 "%s(%d): used_flag(%d)\n",
1863 pbi
->m_mv_BUF
[i
].used_flag
);
1866 if (pbi
->m_mv_BUF
[i
].start_adr
&&
1867 pbi
->m_mv_BUF
[i
].used_flag
)
1868 pbi
->m_mv_BUF
[i
].used_flag
= 0;
1871 static void put_un_used_mv_bufs(struct VP9Decoder_s
*pbi
)
1873 struct VP9_Common_s
*const cm
= &pbi
->common
;
1874 struct RefCntBuffer_s
*const frame_bufs
= cm
->buffer_pool
->frame_bufs
;
1876 for (i
= 0; i
< pbi
->used_buf_num
; ++i
) {
1877 if ((frame_bufs
[i
].ref_count
== 0) &&
1878 (frame_bufs
[i
].buf
.index
!= -1) &&
1879 (frame_bufs
[i
].buf
.mv_buf_index
>= 0)
1881 put_mv_buf(pbi
, &frame_bufs
[i
].buf
.mv_buf_index
);
1885 #ifdef SUPPORT_FB_DECODING
1886 static bool mv_buf_available(struct VP9Decoder_s
*pbi
)
1890 for (i
= 0; i
< MV_BUFFER_NUM
; i
++) {
1891 if (pbi
->m_mv_BUF
[i
].start_adr
&&
1892 pbi
->m_mv_BUF
[i
].used_flag
== 0) {
1902 #ifdef SUPPORT_FB_DECODING
1903 static void init_stage_buf(struct VP9Decoder_s
*pbi
)
1906 for (i
= 0; i
< STAGE_MAX_BUFFERS
1907 && i
< stage_buf_num
; i
++) {
1908 pbi
->stage_bufs
[i
] =
1909 vmalloc(sizeof(struct stage_buf_s
));
1910 if (pbi
->stage_bufs
[i
] == NULL
) {
1912 0, "%s vmalloc fail\n", __func__
);
1915 pbi
->stage_bufs
[i
]->index
= i
;
1917 pbi
->used_stage_buf_num
= i
;
1922 pbi
->s1_mv_buf_index
= FRAME_BUFFERS
;
1923 pbi
->s1_mv_buf_index_pre
= FRAME_BUFFERS
;
1924 pbi
->s1_mv_buf_index_pre_pre
= FRAME_BUFFERS
;
1926 if (pbi
->used_stage_buf_num
> 0)
1928 0, "%s 2 stage decoding buf %d\n",
1930 pbi
->used_stage_buf_num
);
1933 static void uninit_stage_buf(struct VP9Decoder_s
*pbi
)
1936 for (i
= 0; i
< pbi
->used_stage_buf_num
; i
++) {
1937 if (pbi
->stage_bufs
[i
])
1938 vfree(pbi
->stage_bufs
[i
]);
1939 pbi
->stage_bufs
[i
] = NULL
;
1941 pbi
->used_stage_buf_num
= 0;
1948 static int get_s1_buf(
1949 struct VP9Decoder_s
*pbi
)
1951 struct stage_buf_s
*buf
= NULL
;
1953 int buf_page_num
= MAX_STAGE_PAGE_NUM
;
1954 int next_s1_pos
= pbi
->s1_pos
+ 1;
1956 if (next_s1_pos
>= pbi
->used_stage_buf_num
)
1958 if (next_s1_pos
== pbi
->s2_pos
) {
1963 buf
= pbi
->stage_bufs
[pbi
->s1_pos
];
1964 ret
= decoder_mmu_box_alloc_idx(
1968 pbi
->stage_mmu_map_addr
);
1971 "%s decoder_mmu_box_alloc fail for index %d (s1_pos %d s2_pos %d)\n",
1972 __func__
, buf
->index
,
1973 pbi
->s1_pos
, pbi
->s2_pos
);
1976 vp9_print(pbi
, VP9_DEBUG_2_STAGE
,
1977 "%s decoder_mmu_box_alloc %d page for index %d (s1_pos %d s2_pos %d)\n",
1978 __func__
, buf_page_num
, buf
->index
,
1979 pbi
->s1_pos
, pbi
->s2_pos
);
1985 static void inc_s1_pos(struct VP9Decoder_s
*pbi
)
1987 struct stage_buf_s
*buf
=
1988 pbi
->stage_bufs
[pbi
->s1_pos
];
1991 #ifdef FB_DECODING_TEST_SCHEDULE
1992 MAX_STAGE_PAGE_NUM
/2;
1994 (READ_VREG(HEVC_ASSIST_HED_FB_W_CTL
) >> 16);
1996 decoder_mmu_box_free_idx_tail(pbi
->mmu_box
,
1997 FRAME_BUFFERS
+ buf
->index
, used_page_num
);
2000 if (pbi
->s1_pos
>= pbi
->used_stage_buf_num
)
2003 vp9_print(pbi
, VP9_DEBUG_2_STAGE
,
2004 "%s (used_page_num %d) for index %d (s1_pos %d s2_pos %d)\n",
2005 __func__
, used_page_num
, buf
->index
,
2006 pbi
->s1_pos
, pbi
->s2_pos
);
2009 #define s2_buf_available(pbi) (pbi->s1_pos != pbi->s2_pos)
2011 static int get_s2_buf(
2012 struct VP9Decoder_s
*pbi
)
2015 struct stage_buf_s
*buf
= NULL
;
2016 if (s2_buf_available(pbi
)) {
2017 buf
= pbi
->stage_bufs
[pbi
->s2_pos
];
2018 vp9_print(pbi
, VP9_DEBUG_2_STAGE
,
2019 "%s for index %d (s1_pos %d s2_pos %d)\n",
2020 __func__
, buf
->index
,
2021 pbi
->s1_pos
, pbi
->s2_pos
);
2028 static void inc_s2_pos(struct VP9Decoder_s
*pbi
)
2030 struct stage_buf_s
*buf
=
2031 pbi
->stage_bufs
[pbi
->s2_pos
];
2032 decoder_mmu_box_free_idx(pbi
->mmu_box
,
2033 FRAME_BUFFERS
+ buf
->index
);
2035 if (pbi
->s2_pos
>= pbi
->used_stage_buf_num
)
2037 vp9_print(pbi
, VP9_DEBUG_2_STAGE
,
2038 "%s for index %d (s1_pos %d s2_pos %d)\n",
2039 __func__
, buf
->index
,
2040 pbi
->s1_pos
, pbi
->s2_pos
);
2043 static int get_free_stage_buf_num(struct VP9Decoder_s
*pbi
)
2046 if (pbi
->s1_pos
>= pbi
->s2_pos
)
2047 num
= pbi
->used_stage_buf_num
-
2048 (pbi
->s1_pos
- pbi
->s2_pos
) - 1;
2050 num
= (pbi
->s2_pos
- pbi
->s1_pos
) - 1;
2054 #ifndef FB_DECODING_TEST_SCHEDULE
2055 static DEFINE_SPINLOCK(fb_core_spin_lock
);
2057 static u8
is_s2_decoding_finished(struct VP9Decoder_s
*pbi
)
2059 /* to do: VLSI review
2060 completion of last LCU decoding in BACK
2065 static void start_s1_decoding(struct VP9Decoder_s
*pbi
)
2067 /* to do: VLSI review
2068 after parser, how to start LCU decoding in BACK
2072 static void fb_reset_core(struct vdec_s
*vdec
, u32 mask
)
2074 /* to do: VLSI review
2075 1. how to disconnect DMC for FRONT and BACK
2076 2. reset bit 13, 24, FRONT or BACK ??
2079 unsigned long flags
;
2081 if (mask
& HW_MASK_FRONT
)
2082 WRITE_VREG(HEVC_STREAM_CONTROL
, 0);
2083 spin_lock_irqsave(&fb_core_spin_lock
, flags
);
2084 codec_dmcbus_write(DMC_REQ_CTRL
,
2085 codec_dmcbus_read(DMC_REQ_CTRL
) & (~(1 << 4)));
2086 spin_unlock_irqrestore(&fb_core_spin_lock
, flags
);
2088 while (!(codec_dmcbus_read(DMC_CHAN_STS
)
2092 if ((mask
& HW_MASK_FRONT
) &&
2093 input_frame_based(vdec
))
2094 WRITE_VREG(HEVC_STREAM_CONTROL
, 0);
2111 if (mask
& HW_MASK_FRONT
) {
2113 (1<<3)|(1<<4)|(1<<11)|
2116 if (mask
& HW_MASK_BACK
) {
2118 (1<<8)|(1<<13)|(1<<14)|(1<<15)|
2119 (1<<17)|(1<<19)|(1<<24);
2121 WRITE_VREG(DOS_SW_RESET3
, reset_bits
);
2123 (1<<3)|(1<<4)|(1<<8)|(1<<11)|
2124 (1<<12)|(1<<13)|(1<<14)|(1<<15)|
2125 (1<<17)|(1<<18)|(1<<19)|(1<<24);
2127 WRITE_VREG(DOS_SW_RESET3
, 0);
2130 spin_lock_irqsave(&fb_core_spin_lock
, flags
);
2131 codec_dmcbus_write(DMC_REQ_CTRL
,
2132 codec_dmcbus_read(DMC_REQ_CTRL
) | (1 << 4));
2133 spin_unlock_irqrestore(&fb_core_spin_lock
, flags
);
2140 static void init_pic_list_hw(struct VP9Decoder_s
*pbi
);
2142 static int get_free_fb(struct VP9Decoder_s
*pbi
)
2144 struct VP9_Common_s
*const cm
= &pbi
->common
;
2145 struct RefCntBuffer_s
*const frame_bufs
= cm
->buffer_pool
->frame_bufs
;
2147 unsigned long flags
;
2149 lock_buffer_pool(cm
->buffer_pool
, flags
);
2150 if (debug
& VP9_DEBUG_BUFMGR_MORE
) {
2151 for (i
= 0; i
< pbi
->used_buf_num
; ++i
) {
2152 pr_info("%s:%d, ref_count %d vf_ref %d index %d\r\n",
2153 __func__
, i
, frame_bufs
[i
].ref_count
,
2154 frame_bufs
[i
].buf
.vf_ref
,
2155 frame_bufs
[i
].buf
.index
);
2158 for (i
= 0; i
< pbi
->used_buf_num
; ++i
) {
2159 if ((frame_bufs
[i
].ref_count
== 0) &&
2160 (frame_bufs
[i
].buf
.vf_ref
== 0) &&
2161 (frame_bufs
[i
].buf
.index
!= -1)
2165 if (i
!= pbi
->used_buf_num
) {
2166 frame_bufs
[i
].ref_count
= 1;
2167 /*pr_info("[MMU DEBUG 1] set ref_count[%d] : %d\r\n",
2168 i, frame_bufs[i].ref_count);*/
2170 /* Reset i to be INVALID_IDX to indicate
2171 no free buffer found*/
2175 unlock_buffer_pool(cm
->buffer_pool
, flags
);
2179 static int v4l_get_free_fb(struct VP9Decoder_s
*pbi
)
2181 struct VP9_Common_s
*const cm
= &pbi
->common
;
2182 struct RefCntBuffer_s
*const frame_bufs
= cm
->buffer_pool
->frame_bufs
;
2183 struct aml_vcodec_ctx
* v4l
= pbi
->v4l2_ctx
;
2184 struct v4l_buff_pool
*pool
= &v4l
->cap_pool
;
2185 struct PIC_BUFFER_CONFIG_s
*pic
= NULL
;
2186 int i
, idx
= INVALID_IDX
;
2189 lock_buffer_pool(cm
->buffer_pool
, flags
);
2191 for (i
= 0; i
< pool
->in
; ++i
) {
2192 u32 state
= (pool
->seq
[i
] >> 16);
2193 u32 index
= (pool
->seq
[i
] & 0xffff);
2196 case V4L_CAP_BUFF_IN_DEC
:
2197 pic
= &frame_bufs
[i
].buf
;
2198 if ((frame_bufs
[i
].ref_count
== 0) &&
2199 (pic
->vf_ref
== 0) &&
2200 (pic
->index
!= -1) &&
2201 pic
->cma_alloc_addr
) {
2205 case V4L_CAP_BUFF_IN_M2M
:
2206 pic
= &frame_bufs
[index
].buf
;
2207 pic
->y_crop_width
= pbi
->frame_width
;
2208 pic
->y_crop_height
= pbi
->frame_height
;
2209 if (!v4l_alloc_and_config_pic(pbi
, pic
)) {
2210 set_canvas(pbi
, pic
);
2211 init_pic_list_hw(pbi
);
2216 pr_err("v4l buffer state err %d.\n", state
);
2220 if (idx
!= INVALID_IDX
) {
2221 frame_bufs
[idx
].ref_count
= 1;
2226 unlock_buffer_pool(cm
->buffer_pool
, flags
);
2231 static int get_free_buf_count(struct VP9Decoder_s
*pbi
)
2233 struct VP9_Common_s
*const cm
= &pbi
->common
;
2234 struct RefCntBuffer_s
*const frame_bufs
= cm
->buffer_pool
->frame_bufs
;
2236 int free_buf_count
= 0;
2237 for (i
= 0; i
< pbi
->used_buf_num
; ++i
)
2238 if ((frame_bufs
[i
].ref_count
== 0) &&
2239 (frame_bufs
[i
].buf
.vf_ref
== 0) &&
2240 (frame_bufs
[i
].buf
.index
!= -1)
2243 return free_buf_count
;
2246 static void decrease_ref_count(int idx
, struct RefCntBuffer_s
*const frame_bufs
,
2247 struct BufferPool_s
*const pool
)
2250 --frame_bufs
[idx
].ref_count
;
2251 /*pr_info("[MMU DEBUG 7] dec ref_count[%d] : %d\r\n", idx,
2252 * frame_bufs[idx].ref_count);
2254 /*A worker may only get a free framebuffer index when
2255 *calling get_free_fb. But the private buffer is not set up
2256 *until finish decoding header. So any error happens during
2257 *decoding header, the frame_bufs will not have valid priv
2261 if (frame_bufs
[idx
].ref_count
== 0 &&
2262 frame_bufs
[idx
].raw_frame_buffer
.priv
)
2263 vp9_release_frame_buffer
2264 (&frame_bufs
[idx
].raw_frame_buffer
);
2268 static void generate_next_ref_frames(struct VP9Decoder_s
*pbi
)
2270 struct VP9_Common_s
*const cm
= &pbi
->common
;
2271 struct RefCntBuffer_s
*frame_bufs
= cm
->buffer_pool
->frame_bufs
;
2272 struct BufferPool_s
*const pool
= cm
->buffer_pool
;
2273 int mask
, ref_index
= 0;
2274 unsigned long flags
;
2276 /* Generate next_ref_frame_map.*/
2277 lock_buffer_pool(pool
, flags
);
2278 for (mask
= pbi
->refresh_frame_flags
; mask
; mask
>>= 1) {
2280 cm
->next_ref_frame_map
[ref_index
] = cm
->new_fb_idx
;
2281 ++frame_bufs
[cm
->new_fb_idx
].ref_count
;
2282 /*pr_info("[MMU DEBUG 4] inc ref_count[%d] : %d\r\n",
2283 *cm->new_fb_idx, frame_bufs[cm->new_fb_idx].ref_count);
2286 cm
->next_ref_frame_map
[ref_index
] =
2287 cm
->ref_frame_map
[ref_index
];
2288 /* Current thread holds the reference frame.*/
2289 if (cm
->ref_frame_map
[ref_index
] >= 0) {
2290 ++frame_bufs
[cm
->ref_frame_map
[ref_index
]].ref_count
;
2292 *("[MMU DEBUG 5] inc ref_count[%d] : %d\r\n",
2293 *cm->ref_frame_map[ref_index],
2294 *frame_bufs[cm->ref_frame_map[ref_index]].ref_count);
2300 for (; ref_index
< REF_FRAMES
; ++ref_index
) {
2301 cm
->next_ref_frame_map
[ref_index
] =
2302 cm
->ref_frame_map
[ref_index
];
2303 /* Current thread holds the reference frame.*/
2304 if (cm
->ref_frame_map
[ref_index
] >= 0) {
2305 ++frame_bufs
[cm
->ref_frame_map
[ref_index
]].ref_count
;
2306 /*pr_info("[MMU DEBUG 6] inc ref_count[%d] : %d\r\n",
2307 *cm->ref_frame_map[ref_index],
2308 *frame_bufs[cm->ref_frame_map[ref_index]].ref_count);
2312 unlock_buffer_pool(pool
, flags
);
2316 static void refresh_ref_frames(struct VP9Decoder_s
*pbi
)
2319 struct VP9_Common_s
*const cm
= &pbi
->common
;
2320 struct BufferPool_s
*pool
= cm
->buffer_pool
;
2321 struct RefCntBuffer_s
*frame_bufs
= cm
->buffer_pool
->frame_bufs
;
2322 int mask
, ref_index
= 0;
2323 unsigned long flags
;
2325 lock_buffer_pool(pool
, flags
);
2326 for (mask
= pbi
->refresh_frame_flags
; mask
; mask
>>= 1) {
2327 const int old_idx
= cm
->ref_frame_map
[ref_index
];
2328 /*Current thread releases the holding of reference frame.*/
2329 decrease_ref_count(old_idx
, frame_bufs
, pool
);
2331 /*Release the reference frame in reference map.*/
2332 if ((mask
& 1) && old_idx
>= 0)
2333 decrease_ref_count(old_idx
, frame_bufs
, pool
);
2334 cm
->ref_frame_map
[ref_index
] =
2335 cm
->next_ref_frame_map
[ref_index
];
2339 /*Current thread releases the holding of reference frame.*/
2340 for (; ref_index
< REF_FRAMES
&& !cm
->show_existing_frame
;
2342 const int old_idx
= cm
->ref_frame_map
[ref_index
];
2344 decrease_ref_count(old_idx
, frame_bufs
, pool
);
2345 cm
->ref_frame_map
[ref_index
] =
2346 cm
->next_ref_frame_map
[ref_index
];
2348 unlock_buffer_pool(pool
, flags
);
2352 int vp9_bufmgr_process(struct VP9Decoder_s
*pbi
, union param_u
*params
)
2354 struct VP9_Common_s
*const cm
= &pbi
->common
;
2355 struct BufferPool_s
*pool
= cm
->buffer_pool
;
2356 struct RefCntBuffer_s
*frame_bufs
= cm
->buffer_pool
->frame_bufs
;
2357 struct PIC_BUFFER_CONFIG_s
*pic
= NULL
;
2361 pbi
->ready_for_new_data
= 0;
2363 if (pbi
->has_keyframe
== 0 &&
2364 params
->p
.frame_type
!= KEY_FRAME
){
2365 on_no_keyframe_skiped
++;
2368 pbi
->has_keyframe
= 1;
2369 on_no_keyframe_skiped
= 0;
2371 if (pbi
->mmu_enable
) {
2372 if (!pbi
->m_ins_flag
)
2373 pbi
->used_4k_num
= (READ_VREG(HEVC_SAO_MMU_STATUS
) >> 16);
2374 if (cm
->prev_fb_idx
>= 0) {
2375 decoder_mmu_box_free_idx_tail(pbi
->mmu_box
,
2376 cm
->prev_fb_idx
, pbi
->used_4k_num
);
2380 if (cm
->new_fb_idx
>= 0
2381 && frame_bufs
[cm
->new_fb_idx
].ref_count
== 0){
2382 vp9_release_frame_buffer
2383 (&frame_bufs
[cm
->new_fb_idx
].raw_frame_buffer
);
2385 /*pr_info("Before get_free_fb, prev_fb_idx : %d, new_fb_idx : %d\r\n",
2386 cm->prev_fb_idx, cm->new_fb_idx);*/
2387 #ifndef MV_USE_FIXED_BUF
2388 put_un_used_mv_bufs(pbi
);
2389 if (debug
& VP9_DEBUG_BUFMGR_DETAIL
)
2392 cm
->new_fb_idx
= pbi
->is_used_v4l
?
2393 v4l_get_free_fb(pbi
) :
2395 if (cm
->new_fb_idx
== INVALID_IDX
) {
2396 pr_info("get_free_fb error\r\n");
2399 #ifndef MV_USE_FIXED_BUF
2400 #ifdef SUPPORT_FB_DECODING
2401 if (pbi
->used_stage_buf_num
== 0) {
2404 &pool
->frame_bufs
[cm
->new_fb_idx
].
2406 &pool
->frame_bufs
[cm
->new_fb_idx
].
2407 buf
.mpred_mv_wr_start_addr
2409 pr_info("get_mv_buf fail\r\n");
2412 if (debug
& VP9_DEBUG_BUFMGR_DETAIL
)
2414 #ifdef SUPPORT_FB_DECODING
2418 cm
->cur_frame
= &pool
->frame_bufs
[cm
->new_fb_idx
];
2419 /*if (debug & VP9_DEBUG_BUFMGR)
2420 pr_info("[VP9 DEBUG]%s(get_free_fb): %d\r\n", __func__,
2423 pbi
->cur_buf
= &frame_bufs
[cm
->new_fb_idx
];
2424 if (pbi
->mmu_enable
) {
2425 /* moved to after picture size ready
2426 *alloc_mmu(cm, params->p.width, params->p.height,
2427 *params->p.bit_depth, pbi->frame_mmu_map_addr);
2429 cm
->prev_fb_idx
= cm
->new_fb_idx
;
2431 /*read_uncompressed_header()*/
2432 cm
->last_frame_type
= cm
->frame_type
;
2433 cm
->last_intra_only
= cm
->intra_only
;
2434 cm
->profile
= params
->p
.profile
;
2435 if (cm
->profile
>= MAX_PROFILES
) {
2436 pr_err("Error: Unsupported profile %d\r\n", cm
->profile
);
2439 cm
->show_existing_frame
= params
->p
.show_existing_frame
;
2440 if (cm
->show_existing_frame
) {
2441 /* Show an existing frame directly.*/
2442 int frame_to_show_idx
= params
->p
.frame_to_show_idx
;
2444 unsigned long flags
;
2445 if (frame_to_show_idx
>= REF_FRAMES
) {
2446 pr_info("frame_to_show_idx %d exceed max index\r\n",
2451 frame_to_show
= cm
->ref_frame_map
[frame_to_show_idx
];
2452 /*pr_info("frame_to_show %d\r\n", frame_to_show);*/
2453 lock_buffer_pool(pool
, flags
);
2454 if (frame_to_show
< 0 ||
2455 frame_bufs
[frame_to_show
].ref_count
< 1) {
2456 unlock_buffer_pool(pool
, flags
);
2458 ("Error:Buffer %d does not contain a decoded frame",
2463 ref_cnt_fb(frame_bufs
, &cm
->new_fb_idx
, frame_to_show
);
2464 unlock_buffer_pool(pool
, flags
);
2465 pbi
->refresh_frame_flags
= 0;
2466 /*cm->lf.filter_level = 0;*/
2470 *if (pbi->frame_parallel_decode) {
2471 * for (i = 0; i < REF_FRAMES; ++i)
2472 * cm->next_ref_frame_map[i] =
2473 * cm->ref_frame_map[i];
2476 /* do not decode, search next start code */
2479 cm
->frame_type
= params
->p
.frame_type
;
2480 cm
->show_frame
= params
->p
.show_frame
;
2481 cm
->error_resilient_mode
= params
->p
.error_resilient_mode
;
2484 if (cm
->frame_type
== KEY_FRAME
) {
2485 pbi
->refresh_frame_flags
= (1 << REF_FRAMES
) - 1;
2487 for (i
= 0; i
< REFS_PER_FRAME
; ++i
) {
2488 cm
->frame_refs
[i
].idx
= INVALID_IDX
;
2489 cm
->frame_refs
[i
].buf
= NULL
;
2492 ret
= setup_frame_size(pbi
,
2493 cm
, params
, pbi
->frame_mmu_map_addr
,
2497 if (pbi
->need_resync
) {
2498 memset(&cm
->ref_frame_map
, -1,
2499 sizeof(cm
->ref_frame_map
));
2500 pbi
->need_resync
= 0;
2503 cm
->intra_only
= cm
->show_frame
? 0 : params
->p
.intra_only
;
2504 /*if (print_header_info) {
2505 * if (cm->show_frame)
2507 * ("intra_only set to 0 because of show_frame\n");
2510 * ("1-bit intra_only read: %d\n", cm->intra_only);
2515 cm
->reset_frame_context
= cm
->error_resilient_mode
?
2516 0 : params
->p
.reset_frame_context
;
2517 if (print_header_info
) {
2518 if (cm
->error_resilient_mode
)
2520 ("reset to 0 error_resilient_mode\n");
2523 (" * 2-bits reset_frame_context read : %d\n",
2524 cm
->reset_frame_context
);
2527 if (cm
->intra_only
) {
2528 if (cm
->profile
> PROFILE_0
) {
2529 /*read_bitdepth_colorspace_sampling(cm,
2530 * rb, print_header_info);
2533 /*NOTE: The intra-only frame header
2534 *does not include the specification
2535 *of either the color format or
2537 *in profile 0. VP9 specifies that the default
2538 *color format should be YUV 4:2:0 in this
2541 cm
->color_space
= VPX_CS_BT_601
;
2542 cm
->subsampling_y
= cm
->subsampling_x
= 1;
2543 cm
->bit_depth
= VPX_BITS_8
;
2544 cm
->use_highbitdepth
= 0;
2547 pbi
->refresh_frame_flags
=
2548 params
->p
.refresh_frame_flags
;
2549 /*if (print_header_info)
2550 * pr_info("*%d-bits refresh_frame read:0x%x\n",
2551 * REF_FRAMES, pbi->refresh_frame_flags);
2553 ret
= setup_frame_size(pbi
,
2556 pbi
->frame_mmu_map_addr
,
2560 if (pbi
->need_resync
) {
2561 memset(&cm
->ref_frame_map
, -1,
2562 sizeof(cm
->ref_frame_map
));
2563 pbi
->need_resync
= 0;
2565 } else if (pbi
->need_resync
!= 1) { /* Skip if need resync */
2566 pbi
->refresh_frame_flags
=
2567 params
->p
.refresh_frame_flags
;
2568 if (print_header_info
)
2570 ("*%d-bits refresh_frame read:0x%x\n",
2571 REF_FRAMES
, pbi
->refresh_frame_flags
);
2572 for (i
= 0; i
< REFS_PER_FRAME
; ++i
) {
2574 (params
->p
.ref_info
>>
2575 (((REFS_PER_FRAME
-i
-1)*4)+1))
2578 cm
->ref_frame_map
[ref
];
2579 struct RefBuffer_s
* const ref_frame
=
2581 if (print_header_info
)
2582 pr_info("*%d-bits ref[%d]read:%d\n",
2583 REF_FRAMES_LOG2
, i
, ref
);
2584 ref_frame
->idx
= idx
;
2585 ref_frame
->buf
= &frame_bufs
[idx
].buf
;
2586 cm
->ref_frame_sign_bias
[LAST_FRAME
+ i
]
2587 = (params
->p
.ref_info
>>
2588 ((REFS_PER_FRAME
-i
-1)*4)) & 0x1;
2589 if (print_header_info
)
2590 pr_info("1bit ref_frame_sign_bias");
2594 *cm->ref_frame_sign_bias
2598 *("[VP9 DEBUG]%s(get ref):%d\r\n",
2599 *__func__, ref_frame->idx);
2604 ret
= setup_frame_size_with_refs(
2608 pbi
->frame_mmu_map_addr
,
2612 for (i
= 0; i
< REFS_PER_FRAME
; ++i
) {
2613 /*struct RefBuffer_s *const ref_buf =
2614 *&cm->frame_refs[i];
2617 *vp9_setup_scale_factors_for_frame
2623 pic
= get_frame_new_buffer(cm
);
2627 pic
->bit_depth
= cm
->bit_depth
;
2628 pic
->color_space
= cm
->color_space
;
2629 pic
->slice_type
= cm
->frame_type
;
2631 if (pbi
->need_resync
) {
2633 ("Error: Keyframe/intra-only frame required to reset\r\n");
2636 generate_next_ref_frames(pbi
);
2637 pbi
->hold_ref_buf
= 1;
2640 if (frame_is_intra_only(cm
) || cm
->error_resilient_mode
)
2641 vp9_setup_past_independence(cm
);
2642 setup_loopfilter(&cm
->lf
, rb
, print_header_info
);
2643 setup_quantization(cm
, &pbi
->mb
, rb
, print_header_info
);
2644 setup_segmentation(&cm
->seg
, rb
, print_header_info
);
2645 setup_segmentation_dequant(cm
, print_header_info
);
2647 setup_tile_info(cm
, rb
, print_header_info
);
2648 sz
= vp9_rb_read_literal(rb
, 16);
2649 if (print_header_info
)
2650 pr_info(" * 16-bits size read : %d (0x%x)\n", sz
, sz
);
2653 vpx_internal_error(&cm
->error
, VPX_CODEC_CORRUPT_FRAME
,
2654 "Invalid header size");
2656 /*end read_uncompressed_header()*/
2657 cm
->use_prev_frame_mvs
= !cm
->error_resilient_mode
&&
2658 cm
->width
== cm
->last_width
&&
2659 cm
->height
== cm
->last_height
&&
2660 !cm
->last_intra_only
&&
2661 cm
->last_show_frame
&&
2662 (cm
->last_frame_type
!= KEY_FRAME
);
2665 *("set use_prev_frame_mvs to %d (last_width %d last_height %d",
2666 *cm->use_prev_frame_mvs, cm->last_width, cm->last_height);
2668 *(" last_intra_only %d last_show_frame %d last_frame_type %d)\n",
2669 *cm->last_intra_only, cm->last_show_frame, cm->last_frame_type);
2675 void swap_frame_buffers(struct VP9Decoder_s
*pbi
)
2678 struct VP9_Common_s
*const cm
= &pbi
->common
;
2679 struct BufferPool_s
*const pool
= cm
->buffer_pool
;
2680 struct RefCntBuffer_s
*const frame_bufs
= cm
->buffer_pool
->frame_bufs
;
2681 unsigned long flags
;
2682 refresh_ref_frames(pbi
);
2683 pbi
->hold_ref_buf
= 0;
2684 cm
->frame_to_show
= get_frame_new_buffer(cm
);
2686 if (cm
->frame_to_show
) {
2687 /*if (!pbi->frame_parallel_decode || !cm->show_frame) {*/
2688 lock_buffer_pool(pool
, flags
);
2689 --frame_bufs
[cm
->new_fb_idx
].ref_count
;
2690 /*pr_info("[MMU DEBUG 8] dec ref_count[%d] : %d\r\n", cm->new_fb_idx,
2691 * frame_bufs[cm->new_fb_idx].ref_count);
2693 unlock_buffer_pool(pool
, flags
);
2697 /*Invalidate these references until the next frame starts.*/
2698 for (ref_index
= 0; ref_index
< 3; ref_index
++)
2699 cm
->frame_refs
[ref_index
].idx
= -1;
2703 static void check_resync(vpx_codec_alg_priv_t
*const ctx
,
2704 const struct VP9Decoder_s
*const pbi
)
2706 /* Clear resync flag if worker got a key frame or intra only frame.*/
2707 if (ctx
->need_resync
== 1 && pbi
->need_resync
== 0 &&
2708 (pbi
->common
.intra_only
|| pbi
->common
.frame_type
== KEY_FRAME
))
2709 ctx
->need_resync
= 0;
2713 int vp9_get_raw_frame(struct VP9Decoder_s
*pbi
, struct PIC_BUFFER_CONFIG_s
*sd
)
2715 struct VP9_Common_s
*const cm
= &pbi
->common
;
2718 if (pbi
->ready_for_new_data
== 1)
2721 pbi
->ready_for_new_data
= 1;
2723 /* no raw frame to show!!! */
2724 if (!cm
->show_frame
)
2727 /* may not be get buff in v4l2 */
2728 if (!cm
->frame_to_show
)
2731 pbi
->ready_for_new_data
= 1;
2733 *sd
= *cm
->frame_to_show
;
2739 int vp9_bufmgr_init(struct VP9Decoder_s
*pbi
, struct BuffInfo_s
*buf_spec_i
,
2740 struct buff_s
*mc_buf_i
) {
2741 struct VP9_Common_s
*cm
= &pbi
->common
;
2743 /*memset(pbi, 0, sizeof(struct VP9Decoder_s));*/
2744 pbi
->frame_count
= 0;
2746 pbi
->pre_stream_offset
= 0;
2747 cm
->buffer_pool
= &pbi
->vp9_buffer_pool
;
2748 spin_lock_init(&cm
->buffer_pool
->lock
);
2749 cm
->prev_fb_idx
= INVALID_IDX
;
2750 cm
->new_fb_idx
= INVALID_IDX
;
2751 pbi
->used_4k_num
= -1;
2752 cm
->cur_fb_idx_mmu
= INVALID_IDX
;
2754 ("After vp9_bufmgr_init, prev_fb_idx : %d, new_fb_idx : %d\r\n",
2755 cm
->prev_fb_idx
, cm
->new_fb_idx
);
2756 pbi
->need_resync
= 1;
2757 /* Initialize the references to not point to any frame buffers.*/
2758 memset(&cm
->ref_frame_map
, -1, sizeof(cm
->ref_frame_map
));
2759 memset(&cm
->next_ref_frame_map
, -1, sizeof(cm
->next_ref_frame_map
));
2760 cm
->current_video_frame
= 0;
2761 pbi
->ready_for_new_data
= 1;
2764 pbi
->work_space_buf
= buf_spec_i
;
2765 if (!pbi
->mmu_enable
)
2766 pbi
->mc_buf
= mc_buf_i
;
2768 pbi
->rpm_addr
= NULL
;
2769 pbi
->lmem_addr
= NULL
;
2771 pbi
->use_cma_flag
= 0;
2772 pbi
->decode_idx
= 0;
2774 /*int m_uiMaxCUWidth = 1<<7;*/
2775 /*int m_uiMaxCUHeight = 1<<7;*/
2776 pbi
->has_keyframe
= 0;
2779 pbi
->error_flag
= 0;
2781 pbi
->pts_mode
= PTS_NORMAL
;
2783 pbi
->last_lookup_pts
= 0;
2784 pbi
->last_pts_us64
= 0;
2785 pbi
->last_lookup_pts_us64
= 0;
2786 pbi
->shift_byte_count
= 0;
2787 pbi
->shift_byte_count_lo
= 0;
2788 pbi
->shift_byte_count_hi
= 0;
2789 pbi
->pts_mode_switching_count
= 0;
2790 pbi
->pts_mode_recovery_count
= 0;
2798 int vp9_bufmgr_postproc(struct VP9Decoder_s
*pbi
)
2800 struct VP9_Common_s
*cm
= &pbi
->common
;
2801 struct PIC_BUFFER_CONFIG_s sd
;
2803 if (pbi
->postproc_done
)
2805 pbi
->postproc_done
= 1;
2806 swap_frame_buffers(pbi
);
2807 if (!cm
->show_existing_frame
) {
2808 cm
->last_show_frame
= cm
->show_frame
;
2809 cm
->prev_frame
= cm
->cur_frame
;
2811 if (cm
->seg
.enabled
&& !pbi
->frame_parallel_decode
)
2812 vp9_swap_current_and_last_seg_map(cm
);
2815 cm
->last_width
= cm
->width
;
2816 cm
->last_height
= cm
->height
;
2818 cm
->current_video_frame
++;
2820 if (vp9_get_raw_frame(pbi
, &sd
) == 0) {
2821 /*pr_info("Display frame index %d\r\n", sd.index);*/
2822 sd
.stream_offset
= pbi
->pre_stream_offset
;
2823 prepare_display_buf(pbi
, &sd
);
2824 pbi
->pre_stream_offset
= READ_VREG(HEVC_SHIFT_BYTE_COUNT
);
2829 * ("Not display this frame,ready_for_new_data%d show_frame%d\r\n",
2830 * pbi->ready_for_new_data, cm->show_frame);
2835 /*struct VP9Decoder_s vp9_decoder;*/
2836 union param_u vp9_param
;
2838 /**************************************************
2840 *VP9 buffer management end
2842 ***************************************************
2846 #define HEVC_CM_BODY_START_ADDR 0x3626
2847 #define HEVC_CM_BODY_LENGTH 0x3627
2848 #define HEVC_CM_HEADER_LENGTH 0x3629
2849 #define HEVC_CM_HEADER_OFFSET 0x362b
2851 #define LOSLESS_COMPRESS_MODE
2853 /*#define DECOMP_HEADR_SURGENT*/
2855 static u32 mem_map_mode
= 2 /* 0:linear 1:32x32 2:64x32*/
2857 static u32 mem_map_mode
; /* 0:linear 1:32x32 2:64x32 ; m8baby test1902 */
2859 static u32 enable_mem_saving
= 1;
2860 static u32 force_w_h
;
2862 static u32 force_fps
;
2865 const u32 vp9_version
= 201602101;
2869 static u32 pop_shorts
;
2871 static u32 dbg_skip_decode_index
;
2872 static u32 endian
= 0xff0;
2873 #ifdef ERROR_HANDLE_DEBUG
2874 static u32 dbg_nal_skip_flag
;
2875 /* bit[0], skip vps; bit[1], skip sps; bit[2], skip pps */
2876 static u32 dbg_nal_skip_count
;
2879 static u32 decode_pic_begin
;
2880 static uint slice_parse_begin
;
2882 #ifdef MIX_STREAM_SUPPORT
2883 static u32 buf_alloc_width
= 4096;
2884 static u32 buf_alloc_height
= 2304;
2885 static u32 vp9_max_pic_w
= 4096;
2886 static u32 vp9_max_pic_h
= 2304;
2888 static u32 dynamic_buf_num_margin
;
2890 static u32 buf_alloc_width
;
2891 static u32 buf_alloc_height
;
2892 static u32 dynamic_buf_num_margin
= 7;
2894 static u32 buf_alloc_depth
= 10;
2895 static u32 buf_alloc_size
;
2898 * bit[1]: 0, always release cma buffer when stop
2899 * bit[1]: 1, never release cma buffer when stop
2900 *bit[0]: 1, when stop, release cma buffer if blackout is 1;
2901 *do not release cma buffer is blackout is not 1
2903 *bit[2]: 0, when start decoding, check current displayed buffer
2904 * (only for buffer decoded by vp9) if blackout is 0
2905 * 1, do not check current displayed buffer
2907 *bit[3]: 1, if blackout is not 1, do not release current
2908 * displayed cma buffer always.
2910 /* set to 1 for fast play;
2911 * set to 8 for other case of "keep last frame"
2913 static u32 buffer_mode
= 1;
2914 /* buffer_mode_dbg: debug only*/
2915 static u32 buffer_mode_dbg
= 0xffff0000;
2919 *bit 0, 1: only display I picture;
2920 *bit 1, 1: only decode I picture;
2922 static u32 i_only_flag
;
2924 static u32 low_latency_flag
;
2928 static u32 max_decoding_time
;
2932 /*error_handle_policy:
2933 *bit 0: 0, auto skip error_skip_nal_count nals before error recovery;
2934 *1, skip error_skip_nal_count nals before error recovery;
2935 *bit 1 (valid only when bit0 == 1):
2936 *1, wait vps/sps/pps after error recovery;
2937 *bit 2 (valid only when bit0 == 0):
2938 *0, auto search after error recovery (vp9_recover() called);
2939 *1, manual search after error recovery
2940 *(change to auto search after get IDR: WRITE_VREG(NAL_SEARCH_CTL, 0x2))
2942 *bit 4: 0, set error_mark after reset/recover
2943 * 1, do not set error_mark after reset/recover
2944 *bit 5: 0, check total lcu for every picture
2945 * 1, do not check total lcu
2949 static u32 error_handle_policy
;
2950 /*static u32 parser_sei_enable = 1;*/
2951 #define MAX_BUF_NUM_NORMAL 12
2952 #define MAX_BUF_NUM_LESS 10
2953 static u32 max_buf_num
= MAX_BUF_NUM_NORMAL
;
2954 #define MAX_BUF_NUM_SAVE_BUF 8
2956 static u32 run_ready_min_buf_num
= 2;
2959 static DEFINE_MUTEX(vvp9_mutex
);
2960 #ifndef MULTI_INSTANCE_SUPPORT
2961 static struct device
*cma_dev
;
2964 #define HEVC_DEC_STATUS_REG HEVC_ASSIST_SCRATCH_0
2965 #define HEVC_RPM_BUFFER HEVC_ASSIST_SCRATCH_1
2966 #define HEVC_SHORT_TERM_RPS HEVC_ASSIST_SCRATCH_2
2967 #define VP9_ADAPT_PROB_REG HEVC_ASSIST_SCRATCH_3
2968 #define VP9_MMU_MAP_BUFFER HEVC_ASSIST_SCRATCH_4
2969 #define HEVC_PPS_BUFFER HEVC_ASSIST_SCRATCH_5
2970 #define HEVC_SAO_UP HEVC_ASSIST_SCRATCH_6
2971 #define HEVC_STREAM_SWAP_BUFFER HEVC_ASSIST_SCRATCH_7
2972 #define HEVC_STREAM_SWAP_BUFFER2 HEVC_ASSIST_SCRATCH_8
2973 #define VP9_PROB_SWAP_BUFFER HEVC_ASSIST_SCRATCH_9
2974 #define VP9_COUNT_SWAP_BUFFER HEVC_ASSIST_SCRATCH_A
2975 #define VP9_SEG_MAP_BUFFER HEVC_ASSIST_SCRATCH_B
2976 #define HEVC_SCALELUT HEVC_ASSIST_SCRATCH_D
2977 #define HEVC_WAIT_FLAG HEVC_ASSIST_SCRATCH_E
2978 #define RPM_CMD_REG HEVC_ASSIST_SCRATCH_F
2979 #define LMEM_DUMP_ADR HEVC_ASSIST_SCRATCH_F
2980 #define HEVC_STREAM_SWAP_TEST HEVC_ASSIST_SCRATCH_L
2981 #ifdef MULTI_INSTANCE_SUPPORT
2982 #define HEVC_DECODE_COUNT HEVC_ASSIST_SCRATCH_M
2983 #define HEVC_DECODE_SIZE HEVC_ASSIST_SCRATCH_N
2985 #define HEVC_DECODE_PIC_BEGIN_REG HEVC_ASSIST_SCRATCH_M
2986 #define HEVC_DECODE_PIC_NUM_REG HEVC_ASSIST_SCRATCH_N
2988 #define DEBUG_REG1 HEVC_ASSIST_SCRATCH_G
2989 #define DEBUG_REG2 HEVC_ASSIST_SCRATCH_H
2993 *ucode parser/search control
2994 *bit 0: 0, header auto parse; 1, header manual parse
2995 *bit 1: 0, auto skip for noneseamless stream; 1, no skip
2996 *bit [3:2]: valid when bit1==0;
2997 *0, auto skip nal before first vps/sps/pps/idr;
2998 *1, auto skip nal before first vps/sps/pps
2999 *2, auto skip nal before first vps/sps/pps,
3000 * and not decode until the first I slice (with slice address of 0)
3002 *3, auto skip before first I slice (nal_type >=16 && nal_type<=21)
3003 *bit [15:4] nal skip count (valid when bit0 == 1 (manual mode) )
3004 *bit [16]: for NAL_UNIT_EOS when bit0 is 0:
3005 * 0, send SEARCH_DONE to arm ; 1, do not send SEARCH_DONE to arm
3006 *bit [17]: for NAL_SEI when bit0 is 0:
3007 * 0, do not parse SEI in ucode; 1, parse SEI in ucode
3008 *bit [31:20]: used by ucode for debug purpose
3010 #define NAL_SEARCH_CTL HEVC_ASSIST_SCRATCH_I
3011 /*[31:24] chip feature
3012 31: 0, use MBOX1; 1, use MBOX0
3014 #define DECODE_MODE HEVC_ASSIST_SCRATCH_J
3015 #define DECODE_STOP_POS HEVC_ASSIST_SCRATCH_K
3017 #ifdef MULTI_INSTANCE_SUPPORT
3018 #define RPM_BUF_SIZE (0x400 * 2)
3020 #define RPM_BUF_SIZE (0x80*2)
3022 #define LMEM_BUF_SIZE (0x400 * 2)
3024 #define WORK_BUF_SPEC_NUM 3
3025 static struct BuffInfo_s amvvp9_workbuff_spec
[WORK_BUF_SPEC_NUM
] = {
3031 /* IPP work space calculation :
3032 * 4096 * (Y+CbCr+Flags) = 12k, round to 16k
3037 .buf_size
= 0x30000,
3040 .buf_size
= 0x30000,
3043 /* SHORT_TERM_RPS - Max 64 set, 16 entry every set,
3044 * total 64x16x2 = 2048 bytes (0x800)
3049 /* VPS STORE AREA - Max 16 VPS, each has 0x80 bytes,
3050 * total 0x0800 bytes
3055 /* SPS STORE AREA - Max 16 SPS, each has 0x80 bytes,
3056 * total 0x0800 bytes
3061 /* PPS STORE AREA - Max 64 PPS, each has 0x80 bytes,
3062 * total 0x2000 bytes
3067 /* SAO UP STORE AREA - Max 640(10240/16) LCU,
3068 * each has 16 bytes total 0x2800 bytes
3073 /* 256cyclex64bit = 2K bytes 0x800
3074 * (only 144 cycles valid)
3082 /* support up to 32 SCALELUT 1024x32 =
3088 /* DBLK -> Max 256(4096/16) LCU,
3089 *each para 1024bytes(total:0x40000),
3090 *data 1024bytes(total:0x40000)
3092 .buf_size
= 0x80000,
3095 .buf_size
= 0x80000,
3098 /*4096x2304/64/64 *24 = 0xd800 Bytes*/
3102 .buf_size
= 0x5000, /*2*16*(more than 2304)/4, 4K*/
3106 /*add one for keeper.*/
3107 .buf_size
= MMU_COMPRESS_HEADER_SIZE
*
3108 (FRAME_BUFFERS
+ 1),
3109 /* 0x44000 = ((1088*2*1024*4)/32/4)*(32/8) */
3113 .buf_size
= 0x10000, /* 2 * size of hevc*/
3115 #ifdef MV_USE_FIXED_BUF
3116 .mpred_mv
= {/* 1080p, 0x40000 per buffer */
3117 .buf_size
= 0x40000 * FRAME_BUFFERS
,
3121 .buf_size
= RPM_BUF_SIZE
,
3124 .buf_size
= 0x400 * 2,
3131 /* IPP work space calculation :
3132 * 4096 * (Y+CbCr+Flags) = 12k, round to 16k
3137 .buf_size
= 0x30000,
3140 .buf_size
= 0x30000,
3143 /* SHORT_TERM_RPS - Max 64 set, 16 entry every set,
3144 * total 64x16x2 = 2048 bytes (0x800)
3149 /* VPS STORE AREA - Max 16 VPS, each has 0x80 bytes,
3150 * total 0x0800 bytes
3155 /* SPS STORE AREA - Max 16 SPS, each has 0x80 bytes,
3156 * total 0x0800 bytes
3161 /* PPS STORE AREA - Max 64 PPS, each has 0x80 bytes,
3162 * total 0x2000 bytes
3167 /* SAO UP STORE AREA - Max 640(10240/16) LCU,
3168 * each has 16 bytes total 0x2800 bytes
3173 /* 256cyclex64bit = 2K bytes 0x800
3174 * (only 144 cycles valid)
3182 /* support up to 32 SCALELUT 1024x32 = 32Kbytes
3188 /* DBLK -> Max 256(4096/16) LCU,
3189 *each para 1024bytes(total:0x40000),
3190 *data 1024bytes(total:0x40000)
3192 .buf_size
= 0x80000,
3195 .buf_size
= 0x80000,
3198 /*4096x2304/64/64 *24 = 0xd800 Bytes*/
3202 .buf_size
= 0x5000,/*2*16*(more than 2304)/4, 4K*/
3206 /*add one for keeper.*/
3207 .buf_size
= MMU_COMPRESS_HEADER_SIZE
*
3208 (FRAME_BUFFERS
+ 1),
3209 /* 0x44000 = ((1088*2*1024*4)/32/4)*(32/8) */
3213 .buf_size
= 0x10000, /* 2 * size of hevc*/
3215 #ifdef MV_USE_FIXED_BUF
3217 /* .buf_size = 0x100000*16,
3218 * //4k2k , 0x100000 per buffer
3220 /* 4096x2304 , 0x120000 per buffer */
3221 .buf_size
= 0x120000 * FRAME_BUFFERS
,
3225 .buf_size
= RPM_BUF_SIZE
,
3228 .buf_size
= 0x400 * 2,
3232 .max_width
= 4096*2,
3233 .max_height
= 2304*2,
3235 // IPP work space calculation : 4096 * (Y+CbCr+Flags) = 12k, round to 16k
3236 .buf_size
= 0x4000*2,
3239 .buf_size
= 0x30000*2,
3242 .buf_size
= 0x30000*2,
3245 // SHORT_TERM_RPS - Max 64 set, 16 entry every set, total 64x16x2 = 2048 bytes (0x800)
3249 // VPS STORE AREA - Max 16 VPS, each has 0x80 bytes, total 0x0800 bytes
3253 // SPS STORE AREA - Max 16 SPS, each has 0x80 bytes, total 0x0800 bytes
3257 // PPS STORE AREA - Max 64 PPS, each has 0x80 bytes, total 0x2000 bytes
3261 // SAO UP STORE AREA - Max 640(10240/16) LCU, each has 16 bytes total 0x2800 bytes
3262 .buf_size
= 0x2800*2,
3265 // 256cyclex64bit = 2K bytes 0x800 (only 144 cycles valid)
3272 // support up to 32 SCALELUT 1024x32 = 32Kbytes (0x8000)
3273 .buf_size
= 0x8000*2,
3276 // DBLK -> Max 256(4096/16) LCU, each para 1024bytes(total:0x40000), data 1024bytes(total:0x40000)
3277 .buf_size
= 0x80000*2,
3280 .buf_size
= 0x80000*2,
3283 /*4096x2304/64/64 *24 = 0xd800 Bytes*/
3284 .buf_size
= 0xd800*4,
3287 .buf_size
= 0x5000*2, //2*16*(more than 2304)/4, 4K
3291 //.buf_size = MMU_COMPRESS_HEADER_SIZE*8, // 0x44000 = ((1088*2*1024*4)/32/4)*(32/8)
3292 .buf_size
= MMU_COMPRESS_HEADER_SIZE
*16, // 0x44000 = ((1088*2*1024*4)/32/4)*(32/8)
3296 .buf_size
= 0x10000*2, /* 2 * size of hevc*/
3298 #ifdef MV_USE_FIXED_BUF
3300 //4k2k , 0x100000 per buffer */
3301 /* 4096x2304 , 0x120000 per buffer */
3302 .buf_size
= 0x120000 * FRAME_BUFFERS
* 4,
3306 .buf_size
= RPM_BUF_SIZE
,
3309 .buf_size
= 0x400 * 2,
3315 /*Losless compression body buffer size 4K per 64x32 (jt)*/
3316 int compute_losless_comp_body_size(int width
, int height
,
3317 uint8_t is_bit_depth_10
)
3323 width_x64
= width
+ 63;
3325 height_x32
= height
+ 31;
3327 bsize
= (is_bit_depth_10
?4096:3200)*width_x64
*height_x32
;
3328 if (debug
& VP9_DEBUG_BUFMGR_MORE
)
3329 pr_info("%s(%d,%d,%d)=>%d\n",
3330 __func__
, width
, height
,
3331 is_bit_depth_10
, bsize
);
3336 /* Losless compression header buffer size 32bytes per 128x64 (jt)*/
3337 static int compute_losless_comp_header_size(int width
, int height
)
3343 width_x128
= width
+ 127;
3345 height_x64
= height
+ 63;
3348 hsize
= 32 * width_x128
* height_x64
;
3349 if (debug
& VP9_DEBUG_BUFMGR_MORE
)
3350 pr_info("%s(%d,%d)=>%d\n",
3351 __func__
, width
, height
,
3357 static void init_buff_spec(struct VP9Decoder_s
*pbi
,
3358 struct BuffInfo_s
*buf_spec
)
3360 void *mem_start_virt
;
3362 buf_spec
->ipp
.buf_start
= buf_spec
->start_adr
;
3363 buf_spec
->sao_abv
.buf_start
=
3364 buf_spec
->ipp
.buf_start
+ buf_spec
->ipp
.buf_size
;
3366 buf_spec
->sao_vb
.buf_start
=
3367 buf_spec
->sao_abv
.buf_start
+ buf_spec
->sao_abv
.buf_size
;
3368 buf_spec
->short_term_rps
.buf_start
=
3369 buf_spec
->sao_vb
.buf_start
+ buf_spec
->sao_vb
.buf_size
;
3370 buf_spec
->vps
.buf_start
=
3371 buf_spec
->short_term_rps
.buf_start
+
3372 buf_spec
->short_term_rps
.buf_size
;
3373 buf_spec
->sps
.buf_start
=
3374 buf_spec
->vps
.buf_start
+ buf_spec
->vps
.buf_size
;
3375 buf_spec
->pps
.buf_start
=
3376 buf_spec
->sps
.buf_start
+ buf_spec
->sps
.buf_size
;
3377 buf_spec
->sao_up
.buf_start
=
3378 buf_spec
->pps
.buf_start
+ buf_spec
->pps
.buf_size
;
3379 buf_spec
->swap_buf
.buf_start
=
3380 buf_spec
->sao_up
.buf_start
+ buf_spec
->sao_up
.buf_size
;
3381 buf_spec
->swap_buf2
.buf_start
=
3382 buf_spec
->swap_buf
.buf_start
+ buf_spec
->swap_buf
.buf_size
;
3383 buf_spec
->scalelut
.buf_start
=
3384 buf_spec
->swap_buf2
.buf_start
+ buf_spec
->swap_buf2
.buf_size
;
3385 buf_spec
->dblk_para
.buf_start
=
3386 buf_spec
->scalelut
.buf_start
+ buf_spec
->scalelut
.buf_size
;
3387 buf_spec
->dblk_data
.buf_start
=
3388 buf_spec
->dblk_para
.buf_start
+ buf_spec
->dblk_para
.buf_size
;
3389 buf_spec
->seg_map
.buf_start
=
3390 buf_spec
->dblk_data
.buf_start
+ buf_spec
->dblk_data
.buf_size
;
3391 if (pbi
== NULL
|| pbi
->mmu_enable
) {
3392 buf_spec
->mmu_vbh
.buf_start
=
3393 buf_spec
->seg_map
.buf_start
+
3394 buf_spec
->seg_map
.buf_size
;
3395 buf_spec
->mpred_above
.buf_start
=
3396 buf_spec
->mmu_vbh
.buf_start
+
3397 buf_spec
->mmu_vbh
.buf_size
;
3399 buf_spec
->mpred_above
.buf_start
=
3400 buf_spec
->seg_map
.buf_start
+ buf_spec
->seg_map
.buf_size
;
3402 #ifdef MV_USE_FIXED_BUF
3403 buf_spec
->mpred_mv
.buf_start
=
3404 buf_spec
->mpred_above
.buf_start
+
3405 buf_spec
->mpred_above
.buf_size
;
3407 buf_spec
->rpm
.buf_start
=
3408 buf_spec
->mpred_mv
.buf_start
+
3409 buf_spec
->mpred_mv
.buf_size
;
3411 buf_spec
->rpm
.buf_start
=
3412 buf_spec
->mpred_above
.buf_start
+
3413 buf_spec
->mpred_above
.buf_size
;
3416 buf_spec
->lmem
.buf_start
=
3417 buf_spec
->rpm
.buf_start
+
3418 buf_spec
->rpm
.buf_size
;
3420 buf_spec
->lmem
.buf_start
+
3421 buf_spec
->lmem
.buf_size
;
3426 if (!vdec_secure(hw_to_vdec(pbi
))) {
3428 codec_mm_phys_to_virt(buf_spec
->dblk_para
.buf_start
);
3429 if (mem_start_virt
) {
3430 memset(mem_start_virt
, 0,
3431 buf_spec
->dblk_para
.buf_size
);
3432 codec_mm_dma_flush(mem_start_virt
,
3433 buf_spec
->dblk_para
.buf_size
,
3436 mem_start_virt
= codec_mm_vmap(
3437 buf_spec
->dblk_para
.buf_start
,
3438 buf_spec
->dblk_para
.buf_size
);
3439 if (mem_start_virt
) {
3440 memset(mem_start_virt
, 0,
3441 buf_spec
->dblk_para
.buf_size
);
3442 codec_mm_dma_flush(mem_start_virt
,
3443 buf_spec
->dblk_para
.buf_size
,
3445 codec_mm_unmap_phyaddr(mem_start_virt
);
3447 /*not virt for tvp playing,
3448 may need clear on ucode.*/
3449 pr_err("mem_start_virt failed\n");
3455 pr_info("%s workspace (%x %x) size = %x\n", __func__
,
3456 buf_spec
->start_adr
, buf_spec
->end_adr
,
3457 buf_spec
->end_adr
- buf_spec
->start_adr
);
3461 pr_info("ipp.buf_start :%x\n",
3462 buf_spec
->ipp
.buf_start
);
3463 pr_info("sao_abv.buf_start :%x\n",
3464 buf_spec
->sao_abv
.buf_start
);
3465 pr_info("sao_vb.buf_start :%x\n",
3466 buf_spec
->sao_vb
.buf_start
);
3467 pr_info("short_term_rps.buf_start :%x\n",
3468 buf_spec
->short_term_rps
.buf_start
);
3469 pr_info("vps.buf_start :%x\n",
3470 buf_spec
->vps
.buf_start
);
3471 pr_info("sps.buf_start :%x\n",
3472 buf_spec
->sps
.buf_start
);
3473 pr_info("pps.buf_start :%x\n",
3474 buf_spec
->pps
.buf_start
);
3475 pr_info("sao_up.buf_start :%x\n",
3476 buf_spec
->sao_up
.buf_start
);
3477 pr_info("swap_buf.buf_start :%x\n",
3478 buf_spec
->swap_buf
.buf_start
);
3479 pr_info("swap_buf2.buf_start :%x\n",
3480 buf_spec
->swap_buf2
.buf_start
);
3481 pr_info("scalelut.buf_start :%x\n",
3482 buf_spec
->scalelut
.buf_start
);
3483 pr_info("dblk_para.buf_start :%x\n",
3484 buf_spec
->dblk_para
.buf_start
);
3485 pr_info("dblk_data.buf_start :%x\n",
3486 buf_spec
->dblk_data
.buf_start
);
3487 pr_info("seg_map.buf_start :%x\n",
3488 buf_spec
->seg_map
.buf_start
);
3489 if (pbi
->mmu_enable
) {
3490 pr_info("mmu_vbh.buf_start :%x\n",
3491 buf_spec
->mmu_vbh
.buf_start
);
3493 pr_info("mpred_above.buf_start :%x\n",
3494 buf_spec
->mpred_above
.buf_start
);
3495 #ifdef MV_USE_FIXED_BUF
3496 pr_info("mpred_mv.buf_start :%x\n",
3497 buf_spec
->mpred_mv
.buf_start
);
3499 if ((debug
& VP9_DEBUG_SEND_PARAM_WITH_REG
) == 0) {
3500 pr_info("rpm.buf_start :%x\n",
3501 buf_spec
->rpm
.buf_start
);
3507 #define THODIYIL_MCRCC_CANVAS_ALGX 4
3509 static u32 mcrcc_cache_alg_flag
= THODIYIL_MCRCC_CANVAS_ALGX
;
3511 static void mcrcc_perfcount_reset(void)
3513 if (debug
& VP9_DEBUG_CACHE
)
3514 pr_info("[cache_util.c] Entered mcrcc_perfcount_reset...\n");
3515 WRITE_VREG(HEVCD_MCRCC_PERFMON_CTL
, (unsigned int)0x1);
3516 WRITE_VREG(HEVCD_MCRCC_PERFMON_CTL
, (unsigned int)0x0);
/* Previous MCRCC perfmon snapshots, used by mcrcc_get_hitrate() to compute
 * per-interval deltas; reset when it is called with reset_pre != 0.
 */
static unsigned raw_mcr_cnt_total_prev;
static unsigned hit_mcr_0_cnt_total_prev;
static unsigned hit_mcr_1_cnt_total_prev;
static unsigned byp_mcr_cnt_nchcanv_total_prev;
static unsigned byp_mcr_cnt_nchoutwin_total_prev;
3526 static void mcrcc_get_hitrate(unsigned reset_pre
)
3528 unsigned delta_hit_mcr_0_cnt
;
3529 unsigned delta_hit_mcr_1_cnt
;
3530 unsigned delta_raw_mcr_cnt
;
3531 unsigned delta_mcr_cnt_nchcanv
;
3532 unsigned delta_mcr_cnt_nchoutwin
;
3535 unsigned raw_mcr_cnt
;
3536 unsigned hit_mcr_cnt
;
3537 unsigned byp_mcr_cnt_nchoutwin
;
3538 unsigned byp_mcr_cnt_nchcanv
;
3541 raw_mcr_cnt_total_prev
= 0;
3542 hit_mcr_0_cnt_total_prev
= 0;
3543 hit_mcr_1_cnt_total_prev
= 0;
3544 byp_mcr_cnt_nchcanv_total_prev
= 0;
3545 byp_mcr_cnt_nchoutwin_total_prev
= 0;
3547 if (debug
& VP9_DEBUG_CACHE
)
3548 pr_info("[cache_util.c] Entered mcrcc_get_hitrate...\n");
3549 WRITE_VREG(HEVCD_MCRCC_PERFMON_CTL
, (unsigned int)(0x0<<1));
3550 raw_mcr_cnt
= READ_VREG(HEVCD_MCRCC_PERFMON_DATA
);
3551 WRITE_VREG(HEVCD_MCRCC_PERFMON_CTL
, (unsigned int)(0x1<<1));
3552 hit_mcr_cnt
= READ_VREG(HEVCD_MCRCC_PERFMON_DATA
);
3553 WRITE_VREG(HEVCD_MCRCC_PERFMON_CTL
, (unsigned int)(0x2<<1));
3554 byp_mcr_cnt_nchoutwin
= READ_VREG(HEVCD_MCRCC_PERFMON_DATA
);
3555 WRITE_VREG(HEVCD_MCRCC_PERFMON_CTL
, (unsigned int)(0x3<<1));
3556 byp_mcr_cnt_nchcanv
= READ_VREG(HEVCD_MCRCC_PERFMON_DATA
);
3558 if (debug
& VP9_DEBUG_CACHE
)
3559 pr_info("raw_mcr_cnt_total: %d\n",
3561 if (debug
& VP9_DEBUG_CACHE
)
3562 pr_info("hit_mcr_cnt_total: %d\n",
3564 if (debug
& VP9_DEBUG_CACHE
)
3565 pr_info("byp_mcr_cnt_nchoutwin_total: %d\n",
3566 byp_mcr_cnt_nchoutwin
);
3567 if (debug
& VP9_DEBUG_CACHE
)
3568 pr_info("byp_mcr_cnt_nchcanv_total: %d\n",
3569 byp_mcr_cnt_nchcanv
);
3571 delta_raw_mcr_cnt
= raw_mcr_cnt
-
3572 raw_mcr_cnt_total_prev
;
3573 delta_mcr_cnt_nchcanv
= byp_mcr_cnt_nchcanv
-
3574 byp_mcr_cnt_nchcanv_total_prev
;
3575 delta_mcr_cnt_nchoutwin
= byp_mcr_cnt_nchoutwin
-
3576 byp_mcr_cnt_nchoutwin_total_prev
;
3577 raw_mcr_cnt_total_prev
= raw_mcr_cnt
;
3578 byp_mcr_cnt_nchcanv_total_prev
= byp_mcr_cnt_nchcanv
;
3579 byp_mcr_cnt_nchoutwin_total_prev
= byp_mcr_cnt_nchoutwin
;
3581 WRITE_VREG(HEVCD_MCRCC_PERFMON_CTL
, (unsigned int)(0x4<<1));
3582 tmp
= READ_VREG(HEVCD_MCRCC_PERFMON_DATA
);
3583 if (debug
& VP9_DEBUG_CACHE
)
3584 pr_info("miss_mcr_0_cnt_total: %d\n", tmp
);
3585 WRITE_VREG(HEVCD_MCRCC_PERFMON_CTL
, (unsigned int)(0x5<<1));
3586 tmp
= READ_VREG(HEVCD_MCRCC_PERFMON_DATA
);
3587 if (debug
& VP9_DEBUG_CACHE
)
3588 pr_info("miss_mcr_1_cnt_total: %d\n", tmp
);
3589 WRITE_VREG(HEVCD_MCRCC_PERFMON_CTL
, (unsigned int)(0x6<<1));
3590 tmp
= READ_VREG(HEVCD_MCRCC_PERFMON_DATA
);
3591 if (debug
& VP9_DEBUG_CACHE
)
3592 pr_info("hit_mcr_0_cnt_total: %d\n", tmp
);
3593 delta_hit_mcr_0_cnt
= tmp
- hit_mcr_0_cnt_total_prev
;
3594 hit_mcr_0_cnt_total_prev
= tmp
;
3595 WRITE_VREG(HEVCD_MCRCC_PERFMON_CTL
, (unsigned int)(0x7<<1));
3596 tmp
= READ_VREG(HEVCD_MCRCC_PERFMON_DATA
);
3597 if (debug
& VP9_DEBUG_CACHE
)
3598 pr_info("hit_mcr_1_cnt_total: %d\n", tmp
);
3599 delta_hit_mcr_1_cnt
= tmp
- hit_mcr_1_cnt_total_prev
;
3600 hit_mcr_1_cnt_total_prev
= tmp
;
3602 if (delta_raw_mcr_cnt
!= 0) {
3603 hitrate
= 100 * delta_hit_mcr_0_cnt
3604 / delta_raw_mcr_cnt
;
3605 if (debug
& VP9_DEBUG_CACHE
)
3606 pr_info("CANV0_HIT_RATE : %d\n", hitrate
);
3607 hitrate
= 100 * delta_hit_mcr_1_cnt
3608 / delta_raw_mcr_cnt
;
3609 if (debug
& VP9_DEBUG_CACHE
)
3610 pr_info("CANV1_HIT_RATE : %d\n", hitrate
);
3611 hitrate
= 100 * delta_mcr_cnt_nchcanv
3612 / delta_raw_mcr_cnt
;
3613 if (debug
& VP9_DEBUG_CACHE
)
3614 pr_info("NONCACH_CANV_BYP_RATE : %d\n", hitrate
);
3615 hitrate
= 100 * delta_mcr_cnt_nchoutwin
3616 / delta_raw_mcr_cnt
;
3617 if (debug
& VP9_DEBUG_CACHE
)
3618 pr_info("CACHE_OUTWIN_BYP_RATE : %d\n", hitrate
);
3622 if (raw_mcr_cnt
!= 0) {
3623 hitrate
= 100 * hit_mcr_cnt
/ raw_mcr_cnt
;
3624 if (debug
& VP9_DEBUG_CACHE
)
3625 pr_info("MCRCC_HIT_RATE : %d\n", hitrate
);
3626 hitrate
= 100 * (byp_mcr_cnt_nchoutwin
+ byp_mcr_cnt_nchcanv
)
3628 if (debug
& VP9_DEBUG_CACHE
)
3629 pr_info("MCRCC_BYP_RATE : %d\n", hitrate
);
3631 if (debug
& VP9_DEBUG_CACHE
)
3632 pr_info("MCRCC_HIT_RATE : na\n");
3633 if (debug
& VP9_DEBUG_CACHE
)
3634 pr_info("MCRCC_BYP_RATE : na\n");
3640 static void decomp_perfcount_reset(void)
3642 if (debug
& VP9_DEBUG_CACHE
)
3643 pr_info("[cache_util.c] Entered decomp_perfcount_reset...\n");
3644 WRITE_VREG(HEVCD_MPP_DECOMP_PERFMON_CTL
, (unsigned int)0x1);
3645 WRITE_VREG(HEVCD_MPP_DECOMP_PERFMON_CTL
, (unsigned int)0x0);
3649 static void decomp_get_hitrate(void)
3651 unsigned raw_mcr_cnt
;
3652 unsigned hit_mcr_cnt
;
3654 if (debug
& VP9_DEBUG_CACHE
)
3655 pr_info("[cache_util.c] Entered decomp_get_hitrate...\n");
3656 WRITE_VREG(HEVCD_MPP_DECOMP_PERFMON_CTL
, (unsigned int)(0x0<<1));
3657 raw_mcr_cnt
= READ_VREG(HEVCD_MPP_DECOMP_PERFMON_DATA
);
3658 WRITE_VREG(HEVCD_MPP_DECOMP_PERFMON_CTL
, (unsigned int)(0x1<<1));
3659 hit_mcr_cnt
= READ_VREG(HEVCD_MPP_DECOMP_PERFMON_DATA
);
3661 if (debug
& VP9_DEBUG_CACHE
)
3662 pr_info("hcache_raw_cnt_total: %d\n", raw_mcr_cnt
);
3663 if (debug
& VP9_DEBUG_CACHE
)
3664 pr_info("hcache_hit_cnt_total: %d\n", hit_mcr_cnt
);
3666 if (raw_mcr_cnt
!= 0) {
3667 hitrate
= hit_mcr_cnt
* 100 / raw_mcr_cnt
;
3668 if (debug
& VP9_DEBUG_CACHE
)
3669 pr_info("DECOMP_HCACHE_HIT_RATE : %d\n", hitrate
);
3671 if (debug
& VP9_DEBUG_CACHE
)
3672 pr_info("DECOMP_HCACHE_HIT_RATE : na\n");
3674 WRITE_VREG(HEVCD_MPP_DECOMP_PERFMON_CTL
, (unsigned int)(0x2<<1));
3675 raw_mcr_cnt
= READ_VREG(HEVCD_MPP_DECOMP_PERFMON_DATA
);
3676 WRITE_VREG(HEVCD_MPP_DECOMP_PERFMON_CTL
, (unsigned int)(0x3<<1));
3677 hit_mcr_cnt
= READ_VREG(HEVCD_MPP_DECOMP_PERFMON_DATA
);
3679 if (debug
& VP9_DEBUG_CACHE
)
3680 pr_info("dcache_raw_cnt_total: %d\n", raw_mcr_cnt
);
3681 if (debug
& VP9_DEBUG_CACHE
)
3682 pr_info("dcache_hit_cnt_total: %d\n", hit_mcr_cnt
);
3684 if (raw_mcr_cnt
!= 0) {
3685 hitrate
= hit_mcr_cnt
* 100 / raw_mcr_cnt
;
3686 if (debug
& VP9_DEBUG_CACHE
)
3687 pr_info("DECOMP_DCACHE_HIT_RATE : %d\n", hitrate
);
3689 if (debug
& VP9_DEBUG_CACHE
)
3690 pr_info("DECOMP_DCACHE_HIT_RATE : na\n");
3695 static void decomp_get_comprate(void)
3697 unsigned raw_ucomp_cnt
;
3698 unsigned fast_comp_cnt
;
3699 unsigned slow_comp_cnt
;
3702 if (debug
& VP9_DEBUG_CACHE
)
3703 pr_info("[cache_util.c] Entered decomp_get_comprate...\n");
3704 WRITE_VREG(HEVCD_MPP_DECOMP_PERFMON_CTL
, (unsigned int)(0x4<<1));
3705 fast_comp_cnt
= READ_VREG(HEVCD_MPP_DECOMP_PERFMON_DATA
);
3706 WRITE_VREG(HEVCD_MPP_DECOMP_PERFMON_CTL
, (unsigned int)(0x5<<1));
3707 slow_comp_cnt
= READ_VREG(HEVCD_MPP_DECOMP_PERFMON_DATA
);
3708 WRITE_VREG(HEVCD_MPP_DECOMP_PERFMON_CTL
, (unsigned int)(0x6<<1));
3709 raw_ucomp_cnt
= READ_VREG(HEVCD_MPP_DECOMP_PERFMON_DATA
);
3711 if (debug
& VP9_DEBUG_CACHE
)
3712 pr_info("decomp_fast_comp_total: %d\n", fast_comp_cnt
);
3713 if (debug
& VP9_DEBUG_CACHE
)
3714 pr_info("decomp_slow_comp_total: %d\n", slow_comp_cnt
);
3715 if (debug
& VP9_DEBUG_CACHE
)
3716 pr_info("decomp_raw_uncomp_total: %d\n", raw_ucomp_cnt
);
3718 if (raw_ucomp_cnt
!= 0) {
3719 comprate
= (fast_comp_cnt
+ slow_comp_cnt
)
3720 * 100 / raw_ucomp_cnt
;
3721 if (debug
& VP9_DEBUG_CACHE
)
3722 pr_info("DECOMP_COMP_RATIO : %d\n", comprate
);
3724 if (debug
& VP9_DEBUG_CACHE
)
3725 pr_info("DECOMP_COMP_RATIO : na\n");
3729 /* cache_util.c end */
3731 /*====================================================
3732 *========================================================================
3734 *========================================================================
3736 #define VP9_PARTITION_START 0
3737 #define VP9_PARTITION_SIZE_STEP (3 * 4)
3738 #define VP9_PARTITION_ONE_SIZE (4 * VP9_PARTITION_SIZE_STEP)
3739 #define VP9_PARTITION_KEY_START 0
3740 #define VP9_PARTITION_P_START VP9_PARTITION_ONE_SIZE
3741 #define VP9_PARTITION_SIZE (2 * VP9_PARTITION_ONE_SIZE)
3742 #define VP9_SKIP_START (VP9_PARTITION_START + VP9_PARTITION_SIZE)
3743 #define VP9_SKIP_SIZE 4 /* only use 3*/
3744 #define VP9_TX_MODE_START (VP9_SKIP_START+VP9_SKIP_SIZE)
3745 #define VP9_TX_MODE_8_0_OFFSET 0
3746 #define VP9_TX_MODE_8_1_OFFSET 1
3747 #define VP9_TX_MODE_16_0_OFFSET 2
3748 #define VP9_TX_MODE_16_1_OFFSET 4
3749 #define VP9_TX_MODE_32_0_OFFSET 6
3750 #define VP9_TX_MODE_32_1_OFFSET 9
3751 #define VP9_TX_MODE_SIZE 12
3752 #define VP9_COEF_START (VP9_TX_MODE_START+VP9_TX_MODE_SIZE)
3753 #define VP9_COEF_BAND_0_OFFSET 0
3754 #define VP9_COEF_BAND_1_OFFSET (VP9_COEF_BAND_0_OFFSET + 3 * 3 + 1)
3755 #define VP9_COEF_BAND_2_OFFSET (VP9_COEF_BAND_1_OFFSET + 6 * 3)
3756 #define VP9_COEF_BAND_3_OFFSET (VP9_COEF_BAND_2_OFFSET + 6 * 3)
3757 #define VP9_COEF_BAND_4_OFFSET (VP9_COEF_BAND_3_OFFSET + 6 * 3)
3758 #define VP9_COEF_BAND_5_OFFSET (VP9_COEF_BAND_4_OFFSET + 6 * 3)
3759 #define VP9_COEF_SIZE_ONE_SET 100 /* ((3 +5*6)*3 + 1 padding)*/
3760 #define VP9_COEF_4X4_START (VP9_COEF_START + 0 * VP9_COEF_SIZE_ONE_SET)
3761 #define VP9_COEF_8X8_START (VP9_COEF_START + 4 * VP9_COEF_SIZE_ONE_SET)
3762 #define VP9_COEF_16X16_START (VP9_COEF_START + 8 * VP9_COEF_SIZE_ONE_SET)
3763 #define VP9_COEF_32X32_START (VP9_COEF_START + 12 * VP9_COEF_SIZE_ONE_SET)
3764 #define VP9_COEF_SIZE_PLANE (2 * VP9_COEF_SIZE_ONE_SET)
3765 #define VP9_COEF_SIZE (4 * 2 * 2 * VP9_COEF_SIZE_ONE_SET)
3766 #define VP9_INTER_MODE_START (VP9_COEF_START+VP9_COEF_SIZE)
3767 #define VP9_INTER_MODE_SIZE 24 /* only use 21 ( #*7)*/
3768 #define VP9_INTERP_START (VP9_INTER_MODE_START+VP9_INTER_MODE_SIZE)
3769 #define VP9_INTERP_SIZE 8
3770 #define VP9_INTRA_INTER_START (VP9_INTERP_START+VP9_INTERP_SIZE)
3771 #define VP9_INTRA_INTER_SIZE 4
3772 #define VP9_INTERP_INTRA_INTER_START VP9_INTERP_START
3773 #define VP9_INTERP_INTRA_INTER_SIZE (VP9_INTERP_SIZE + VP9_INTRA_INTER_SIZE)
3774 #define VP9_COMP_INTER_START \
3775 (VP9_INTERP_INTRA_INTER_START+VP9_INTERP_INTRA_INTER_SIZE)
3776 #define VP9_COMP_INTER_SIZE 5
3777 #define VP9_COMP_REF_START (VP9_COMP_INTER_START+VP9_COMP_INTER_SIZE)
3778 #define VP9_COMP_REF_SIZE 5
3779 #define VP9_SINGLE_REF_START (VP9_COMP_REF_START+VP9_COMP_REF_SIZE)
3780 #define VP9_SINGLE_REF_SIZE 10
3781 #define VP9_REF_MODE_START VP9_COMP_INTER_START
3782 #define VP9_REF_MODE_SIZE \
3783 (VP9_COMP_INTER_SIZE+VP9_COMP_REF_SIZE+VP9_SINGLE_REF_SIZE)
3784 #define VP9_IF_Y_MODE_START (VP9_REF_MODE_START+VP9_REF_MODE_SIZE)
3785 #define VP9_IF_Y_MODE_SIZE 36
3786 #define VP9_IF_UV_MODE_START (VP9_IF_Y_MODE_START+VP9_IF_Y_MODE_SIZE)
3787 #define VP9_IF_UV_MODE_SIZE 92 /* only use 90*/
3788 #define VP9_MV_JOINTS_START (VP9_IF_UV_MODE_START+VP9_IF_UV_MODE_SIZE)
3789 #define VP9_MV_JOINTS_SIZE 3
3790 #define VP9_MV_SIGN_0_START (VP9_MV_JOINTS_START+VP9_MV_JOINTS_SIZE)
3791 #define VP9_MV_SIGN_0_SIZE 1
3792 #define VP9_MV_CLASSES_0_START (VP9_MV_SIGN_0_START+VP9_MV_SIGN_0_SIZE)
3793 #define VP9_MV_CLASSES_0_SIZE 10
3794 #define VP9_MV_CLASS0_0_START (VP9_MV_CLASSES_0_START+VP9_MV_CLASSES_0_SIZE)
3795 #define VP9_MV_CLASS0_0_SIZE 1
3796 #define VP9_MV_BITS_0_START (VP9_MV_CLASS0_0_START+VP9_MV_CLASS0_0_SIZE)
3797 #define VP9_MV_BITS_0_SIZE 10
3798 #define VP9_MV_SIGN_1_START (VP9_MV_BITS_0_START+VP9_MV_BITS_0_SIZE)
3799 #define VP9_MV_SIGN_1_SIZE 1
3800 #define VP9_MV_CLASSES_1_START \
3801 (VP9_MV_SIGN_1_START+VP9_MV_SIGN_1_SIZE)
3802 #define VP9_MV_CLASSES_1_SIZE 10
3803 #define VP9_MV_CLASS0_1_START \
3804 (VP9_MV_CLASSES_1_START+VP9_MV_CLASSES_1_SIZE)
3805 #define VP9_MV_CLASS0_1_SIZE 1
3806 #define VP9_MV_BITS_1_START \
3807 (VP9_MV_CLASS0_1_START+VP9_MV_CLASS0_1_SIZE)
3808 #define VP9_MV_BITS_1_SIZE 10
3809 #define VP9_MV_CLASS0_FP_0_START \
3810 (VP9_MV_BITS_1_START+VP9_MV_BITS_1_SIZE)
3811 #define VP9_MV_CLASS0_FP_0_SIZE 9
3812 #define VP9_MV_CLASS0_FP_1_START \
3813 (VP9_MV_CLASS0_FP_0_START+VP9_MV_CLASS0_FP_0_SIZE)
3814 #define VP9_MV_CLASS0_FP_1_SIZE 9
3815 #define VP9_MV_CLASS0_HP_0_START \
3816 (VP9_MV_CLASS0_FP_1_START+VP9_MV_CLASS0_FP_1_SIZE)
3817 #define VP9_MV_CLASS0_HP_0_SIZE 2
3818 #define VP9_MV_CLASS0_HP_1_START \
3819 (VP9_MV_CLASS0_HP_0_START+VP9_MV_CLASS0_HP_0_SIZE)
3820 #define VP9_MV_CLASS0_HP_1_SIZE 2
3821 #define VP9_MV_START VP9_MV_JOINTS_START
3822 #define VP9_MV_SIZE 72 /*only use 69*/
3824 #define VP9_TOTAL_SIZE (VP9_MV_START + VP9_MV_SIZE)
3827 /*========================================================================
3828 * vp9_count_mem define
3829 *========================================================================
3831 #define VP9_COEF_COUNT_START 0
3832 #define VP9_COEF_COUNT_BAND_0_OFFSET 0
3833 #define VP9_COEF_COUNT_BAND_1_OFFSET \
3834 (VP9_COEF_COUNT_BAND_0_OFFSET + 3*5)
3835 #define VP9_COEF_COUNT_BAND_2_OFFSET \
3836 (VP9_COEF_COUNT_BAND_1_OFFSET + 6*5)
3837 #define VP9_COEF_COUNT_BAND_3_OFFSET \
3838 (VP9_COEF_COUNT_BAND_2_OFFSET + 6*5)
3839 #define VP9_COEF_COUNT_BAND_4_OFFSET \
3840 (VP9_COEF_COUNT_BAND_3_OFFSET + 6*5)
3841 #define VP9_COEF_COUNT_BAND_5_OFFSET \
3842 (VP9_COEF_COUNT_BAND_4_OFFSET + 6*5)
3843 #define VP9_COEF_COUNT_SIZE_ONE_SET 165 /* ((3 +5*6)*5 */
3844 #define VP9_COEF_COUNT_4X4_START \
3845 (VP9_COEF_COUNT_START + 0*VP9_COEF_COUNT_SIZE_ONE_SET)
3846 #define VP9_COEF_COUNT_8X8_START \
3847 (VP9_COEF_COUNT_START + 4*VP9_COEF_COUNT_SIZE_ONE_SET)
3848 #define VP9_COEF_COUNT_16X16_START \
3849 (VP9_COEF_COUNT_START + 8*VP9_COEF_COUNT_SIZE_ONE_SET)
3850 #define VP9_COEF_COUNT_32X32_START \
3851 (VP9_COEF_COUNT_START + 12*VP9_COEF_COUNT_SIZE_ONE_SET)
3852 #define VP9_COEF_COUNT_SIZE_PLANE (2 * VP9_COEF_COUNT_SIZE_ONE_SET)
3853 #define VP9_COEF_COUNT_SIZE (4 * 2 * 2 * VP9_COEF_COUNT_SIZE_ONE_SET)
3855 #define VP9_INTRA_INTER_COUNT_START \
3856 (VP9_COEF_COUNT_START+VP9_COEF_COUNT_SIZE)
3857 #define VP9_INTRA_INTER_COUNT_SIZE (4*2)
3858 #define VP9_COMP_INTER_COUNT_START \
3859 (VP9_INTRA_INTER_COUNT_START+VP9_INTRA_INTER_COUNT_SIZE)
3860 #define VP9_COMP_INTER_COUNT_SIZE (5*2)
3861 #define VP9_COMP_REF_COUNT_START \
3862 (VP9_COMP_INTER_COUNT_START+VP9_COMP_INTER_COUNT_SIZE)
3863 #define VP9_COMP_REF_COUNT_SIZE (5*2)
3864 #define VP9_SINGLE_REF_COUNT_START \
3865 (VP9_COMP_REF_COUNT_START+VP9_COMP_REF_COUNT_SIZE)
3866 #define VP9_SINGLE_REF_COUNT_SIZE (10*2)
3867 #define VP9_TX_MODE_COUNT_START \
3868 (VP9_SINGLE_REF_COUNT_START+VP9_SINGLE_REF_COUNT_SIZE)
3869 #define VP9_TX_MODE_COUNT_SIZE (12*2)
3870 #define VP9_SKIP_COUNT_START \
3871 (VP9_TX_MODE_COUNT_START+VP9_TX_MODE_COUNT_SIZE)
3872 #define VP9_SKIP_COUNT_SIZE (3*2)
3873 #define VP9_MV_SIGN_0_COUNT_START \
3874 (VP9_SKIP_COUNT_START+VP9_SKIP_COUNT_SIZE)
3875 #define VP9_MV_SIGN_0_COUNT_SIZE (1*2)
3876 #define VP9_MV_SIGN_1_COUNT_START \
3877 (VP9_MV_SIGN_0_COUNT_START+VP9_MV_SIGN_0_COUNT_SIZE)
3878 #define VP9_MV_SIGN_1_COUNT_SIZE (1*2)
3879 #define VP9_MV_BITS_0_COUNT_START \
3880 (VP9_MV_SIGN_1_COUNT_START+VP9_MV_SIGN_1_COUNT_SIZE)
3881 #define VP9_MV_BITS_0_COUNT_SIZE (10*2)
3882 #define VP9_MV_BITS_1_COUNT_START \
3883 (VP9_MV_BITS_0_COUNT_START+VP9_MV_BITS_0_COUNT_SIZE)
3884 #define VP9_MV_BITS_1_COUNT_SIZE (10*2)
3885 #define VP9_MV_CLASS0_HP_0_COUNT_START \
3886 (VP9_MV_BITS_1_COUNT_START+VP9_MV_BITS_1_COUNT_SIZE)
3887 #define VP9_MV_CLASS0_HP_0_COUNT_SIZE (2*2)
3888 #define VP9_MV_CLASS0_HP_1_COUNT_START \
3889 (VP9_MV_CLASS0_HP_0_COUNT_START+VP9_MV_CLASS0_HP_0_COUNT_SIZE)
3890 #define VP9_MV_CLASS0_HP_1_COUNT_SIZE (2*2)
3891 /* Start merge_tree*/
3892 #define VP9_INTER_MODE_COUNT_START \
3893 (VP9_MV_CLASS0_HP_1_COUNT_START+VP9_MV_CLASS0_HP_1_COUNT_SIZE)
3894 #define VP9_INTER_MODE_COUNT_SIZE (7*4)
3895 #define VP9_IF_Y_MODE_COUNT_START \
3896 (VP9_INTER_MODE_COUNT_START+VP9_INTER_MODE_COUNT_SIZE)
3897 #define VP9_IF_Y_MODE_COUNT_SIZE (10*4)
3898 #define VP9_IF_UV_MODE_COUNT_START \
3899 (VP9_IF_Y_MODE_COUNT_START+VP9_IF_Y_MODE_COUNT_SIZE)
3900 #define VP9_IF_UV_MODE_COUNT_SIZE (10*10)
3901 #define VP9_PARTITION_P_COUNT_START \
3902 (VP9_IF_UV_MODE_COUNT_START+VP9_IF_UV_MODE_COUNT_SIZE)
3903 #define VP9_PARTITION_P_COUNT_SIZE (4*4*4)
3904 #define VP9_INTERP_COUNT_START \
3905 (VP9_PARTITION_P_COUNT_START+VP9_PARTITION_P_COUNT_SIZE)
3906 #define VP9_INTERP_COUNT_SIZE (4*3)
3907 #define VP9_MV_JOINTS_COUNT_START \
3908 (VP9_INTERP_COUNT_START+VP9_INTERP_COUNT_SIZE)
3909 #define VP9_MV_JOINTS_COUNT_SIZE (1 * 4)
3910 #define VP9_MV_CLASSES_0_COUNT_START \
3911 (VP9_MV_JOINTS_COUNT_START+VP9_MV_JOINTS_COUNT_SIZE)
3912 #define VP9_MV_CLASSES_0_COUNT_SIZE (1*11)
3913 #define VP9_MV_CLASS0_0_COUNT_START \
3914 (VP9_MV_CLASSES_0_COUNT_START+VP9_MV_CLASSES_0_COUNT_SIZE)
3915 #define VP9_MV_CLASS0_0_COUNT_SIZE (1*2)
3916 #define VP9_MV_CLASSES_1_COUNT_START \
3917 (VP9_MV_CLASS0_0_COUNT_START+VP9_MV_CLASS0_0_COUNT_SIZE)
3918 #define VP9_MV_CLASSES_1_COUNT_SIZE (1*11)
3919 #define VP9_MV_CLASS0_1_COUNT_START \
3920 (VP9_MV_CLASSES_1_COUNT_START+VP9_MV_CLASSES_1_COUNT_SIZE)
3921 #define VP9_MV_CLASS0_1_COUNT_SIZE (1*2)
3922 #define VP9_MV_CLASS0_FP_0_COUNT_START \
3923 (VP9_MV_CLASS0_1_COUNT_START+VP9_MV_CLASS0_1_COUNT_SIZE)
3924 #define VP9_MV_CLASS0_FP_0_COUNT_SIZE (3*4)
3925 #define VP9_MV_CLASS0_FP_1_COUNT_START \
3926 (VP9_MV_CLASS0_FP_0_COUNT_START+VP9_MV_CLASS0_FP_0_COUNT_SIZE)
3927 #define VP9_MV_CLASS0_FP_1_COUNT_SIZE (3*4)
3930 #define DC_PRED 0 /* Average of above and left pixels*/
3931 #define V_PRED 1 /* Vertical*/
3932 #define H_PRED 2 /* Horizontal*/
3933 #define D45_PRED 3 /*Directional 45 deg = round(arctan(1/1) * 180/pi)*/
3934 #define D135_PRED 4 /* Directional 135 deg = 180 - 45*/
3935 #define D117_PRED 5 /* Directional 117 deg = 180 - 63*/
3936 #define D153_PRED 6 /* Directional 153 deg = 180 - 27*/
3937 #define D207_PRED 7 /* Directional 207 deg = 180 + 27*/
3938 #define D63_PRED 8 /*Directional 63 deg = round(arctan(2/1) * 180/pi)*/
3939 #define TM_PRED 9 /*True-motion*/
/*
 * clip_prob() - clamp a computed probability into the legal VP9 range.
 *
 * VP9 node probabilities are 8-bit values in [1, 255]; 0 is reserved by
 * the bitstream, so any freshly estimated probability is clipped into
 * that interval before being packed back into the probability tables.
 */
int clip_prob(int p)
{
	if (p > 255)
		return 255;
	if (p < 1)
		return 1;
	return p;
}
3946 #define ROUND_POWER_OF_TWO(value, n) \
3947 (((value) + (1 << ((n) - 1))) >> (n))
3949 #define MODE_MV_COUNT_SAT 20
/*
 * count_to_update_factor[]: maps a saturated branch count
 * (0..MODE_MV_COUNT_SAT) to the blend weight used when merging the old
 * probability with the newly estimated one in vp9_tree_merge_probs();
 * more observed symbols -> larger factor -> faster adaptation.
 * NOTE(review): the closing "};" of this initializer is missing from this
 * extracted view; code lines kept byte-identical.
 */
3950 static const int count_to_update_factor
[MODE_MV_COUNT_SAT
+ 1] = {
3951 0, 6, 12, 19, 25, 32, 38, 44, 51, 57, 64,
3952 70, 76, 83, 89, 96, 102, 108, 115, 121, 128
3955 void vp9_tree_merge_probs(unsigned int *prev_prob
, unsigned int *cur_prob
,
3956 int coef_node_start
, int tree_left
, int tree_right
, int tree_i
,
3959 int prob_32
, prob_res
, prob_shift
;
3960 int pre_prob
, new_prob
;
3961 int den
, m_count
, get_prob
, factor
;
3963 prob_32
= prev_prob
[coef_node_start
/ 4 * 2];
3964 prob_res
= coef_node_start
& 3;
3965 prob_shift
= prob_res
* 8;
3966 pre_prob
= (prob_32
>> prob_shift
) & 0xff;
3968 den
= tree_left
+ tree_right
;
3971 new_prob
= pre_prob
;
3973 m_count
= (den
< MODE_MV_COUNT_SAT
) ?
3974 den
: MODE_MV_COUNT_SAT
;
3975 get_prob
= clip_prob(
3976 div_r32(((int64_t)tree_left
* 256 + (den
>> 1)),
3979 factor
= count_to_update_factor
[m_count
];
3980 new_prob
= ROUND_POWER_OF_TWO(pre_prob
* (256 - factor
)
3981 + get_prob
* factor
, 8);
3983 cur_prob
[coef_node_start
/ 4 * 2] = (cur_prob
[coef_node_start
/ 4 * 2]
3984 & (~(0xff << prob_shift
))) | (new_prob
<< prob_shift
);
3986 /*pr_info(" - [%d][%d] 0x%02X --> 0x%02X (0x%X 0x%X) (%X)\n",
3987 *tree_i, node, pre_prob, new_prob, tree_left, tree_right,
3988 *cur_prob[coef_node_start/4*2]);
3993 /*void adapt_coef_probs(void)*/
3994 void adapt_coef_probs(int pic_count
, int prev_kf
, int cur_kf
, int pre_fc
,
3995 unsigned int *prev_prob
, unsigned int *cur_prob
, unsigned int *count
)
3997 /* 80 * 64bits = 0xF00 ( use 0x1000 4K bytes)
3998 *unsigned int prev_prob[496*2];
3999 *unsigned int cur_prob[496*2];
4000 *0x300 * 128bits = 0x3000 (32K Bytes)
4001 *unsigned int count[0x300*4];
4004 int tx_size
, coef_tx_size_start
, coef_count_tx_size_start
;
4005 int plane
, coef_plane_start
, coef_count_plane_start
;
4006 int type
, coef_type_start
, coef_count_type_start
;
4007 int band
, coef_band_start
, coef_count_band_start
;
4009 int cxt
, coef_cxt_start
, coef_count_cxt_start
;
4010 int node
, coef_node_start
, coef_count_node_start
;
4012 int tree_i
, tree_left
, tree_right
;
4016 /*int update_factor = 112;*/ /*If COEF_MAX_UPDATE_FACTOR_AFTER_KEY,
4019 /* If COEF_MAX_UPDATE_FACTOR_AFTER_KEY, use 128*/
4020 /*int update_factor = (pic_count == 1) ? 128 : 112;*/
4021 int update_factor
= cur_kf
? 112 :
4022 prev_kf
? 128 : 112;
4036 if (debug
& VP9_DEBUG_MERGE
)
4038 ("\n ##adapt_coef_probs (pre_fc : %d ,prev_kf : %d,cur_kf : %d)##\n\n",
4039 pre_fc
, prev_kf
, cur_kf
);
4041 /*adapt_coef_probs*/
4042 for (tx_size
= 0; tx_size
< 4; tx_size
++) {
4043 coef_tx_size_start
= VP9_COEF_START
4044 + tx_size
* 4 * VP9_COEF_SIZE_ONE_SET
;
4045 coef_count_tx_size_start
= VP9_COEF_COUNT_START
4046 + tx_size
* 4 * VP9_COEF_COUNT_SIZE_ONE_SET
;
4047 coef_plane_start
= coef_tx_size_start
;
4048 coef_count_plane_start
= coef_count_tx_size_start
;
4049 for (plane
= 0; plane
< 2; plane
++) {
4050 coef_type_start
= coef_plane_start
;
4051 coef_count_type_start
= coef_count_plane_start
;
4052 for (type
= 0; type
< 2; type
++) {
4053 coef_band_start
= coef_type_start
;
4054 coef_count_band_start
= coef_count_type_start
;
4055 for (band
= 0; band
< 6; band
++) {
4060 coef_cxt_start
= coef_band_start
;
4061 coef_count_cxt_start
=
4062 coef_count_band_start
;
4063 for (cxt
= 0; cxt
< cxt_num
; cxt
++) {
4065 count
[coef_count_cxt_start
];
4067 count
[coef_count_cxt_start
+ 1];
4069 count
[coef_count_cxt_start
+ 2];
4071 count
[coef_count_cxt_start
+ 3];
4073 count
[coef_count_cxt_start
+ 4];
4083 (node
= 0; node
< 3; node
++) {
4089 coef_node_start
& 3;
4093 (prob_32
>> prob_shift
)
4100 branch_ct
[node
][0] +
4115 update_factor
* m_count
4121 get_prob
* factor
, 8);
4123 cur_prob
[coef_node_start
4127 / 4 * 2] & (~(0xff <<
4132 coef_node_start
+= 1;
4137 coef_count_cxt_start
=
4138 coef_count_cxt_start
4142 coef_band_start
+= 10;
4143 coef_count_band_start
+= 15;
4145 coef_band_start
+= 18;
4146 coef_count_band_start
+= 30;
4149 coef_type_start
+= VP9_COEF_SIZE_ONE_SET
;
4150 coef_count_type_start
+=
4151 VP9_COEF_COUNT_SIZE_ONE_SET
;
4153 coef_plane_start
+= 2 * VP9_COEF_SIZE_ONE_SET
;
4154 coef_count_plane_start
+=
4155 2 * VP9_COEF_COUNT_SIZE_ONE_SET
;
4160 /*mode_mv_merge_probs - merge_intra_inter_prob*/
4161 for (coef_count_node_start
= VP9_INTRA_INTER_COUNT_START
;
4162 coef_count_node_start
< (VP9_MV_CLASS0_HP_1_COUNT_START
+
4163 VP9_MV_CLASS0_HP_1_COUNT_SIZE
); coef_count_node_start
+= 2) {
4165 if (coef_count_node_start
==
4166 VP9_INTRA_INTER_COUNT_START
) {
4167 if (debug
& VP9_DEBUG_MERGE
)
4168 pr_info(" # merge_intra_inter_prob\n");
4169 coef_node_start
= VP9_INTRA_INTER_START
;
4170 } else if (coef_count_node_start
==
4171 VP9_COMP_INTER_COUNT_START
) {
4172 if (debug
& VP9_DEBUG_MERGE
)
4173 pr_info(" # merge_comp_inter_prob\n");
4174 coef_node_start
= VP9_COMP_INTER_START
;
4177 *else if (coef_count_node_start ==
4178 * VP9_COMP_REF_COUNT_START) {
4179 * pr_info(" # merge_comp_inter_prob\n");
4180 * coef_node_start = VP9_COMP_REF_START;
4182 *else if (coef_count_node_start ==
4183 * VP9_SINGLE_REF_COUNT_START) {
4184 * pr_info(" # merge_comp_inter_prob\n");
4185 * coef_node_start = VP9_SINGLE_REF_START;
4188 else if (coef_count_node_start
==
4189 VP9_TX_MODE_COUNT_START
) {
4190 if (debug
& VP9_DEBUG_MERGE
)
4191 pr_info(" # merge_tx_mode_probs\n");
4192 coef_node_start
= VP9_TX_MODE_START
;
4193 } else if (coef_count_node_start
==
4194 VP9_SKIP_COUNT_START
) {
4195 if (debug
& VP9_DEBUG_MERGE
)
4196 pr_info(" # merge_skip_probs\n");
4197 coef_node_start
= VP9_SKIP_START
;
4198 } else if (coef_count_node_start
==
4199 VP9_MV_SIGN_0_COUNT_START
) {
4200 if (debug
& VP9_DEBUG_MERGE
)
4201 pr_info(" # merge_sign_0\n");
4202 coef_node_start
= VP9_MV_SIGN_0_START
;
4203 } else if (coef_count_node_start
==
4204 VP9_MV_SIGN_1_COUNT_START
) {
4205 if (debug
& VP9_DEBUG_MERGE
)
4206 pr_info(" # merge_sign_1\n");
4207 coef_node_start
= VP9_MV_SIGN_1_START
;
4208 } else if (coef_count_node_start
==
4209 VP9_MV_BITS_0_COUNT_START
) {
4210 if (debug
& VP9_DEBUG_MERGE
)
4211 pr_info(" # merge_bits_0\n");
4212 coef_node_start
= VP9_MV_BITS_0_START
;
4213 } else if (coef_count_node_start
==
4214 VP9_MV_BITS_1_COUNT_START
) {
4215 if (debug
& VP9_DEBUG_MERGE
)
4216 pr_info(" # merge_bits_1\n");
4217 coef_node_start
= VP9_MV_BITS_1_START
;
4218 } else if (coef_count_node_start
==
4219 VP9_MV_CLASS0_HP_0_COUNT_START
) {
4220 if (debug
& VP9_DEBUG_MERGE
)
4221 pr_info(" # merge_class0_hp\n");
4222 coef_node_start
= VP9_MV_CLASS0_HP_0_START
;
4226 den
= count
[coef_count_node_start
] +
4227 count
[coef_count_node_start
+ 1];
4229 prob_32
= prev_prob
[coef_node_start
/ 4 * 2];
4230 prob_res
= coef_node_start
& 3;
4231 prob_shift
= prob_res
* 8;
4232 pre_prob
= (prob_32
>> prob_shift
) & 0xff;
4235 new_prob
= pre_prob
;
4237 m_count
= (den
< MODE_MV_COUNT_SAT
) ?
4238 den
: MODE_MV_COUNT_SAT
;
4241 div_r32(((int64_t)count
[coef_count_node_start
]
4242 * 256 + (den
>> 1)),
4245 factor
= count_to_update_factor
[m_count
];
4247 ROUND_POWER_OF_TWO(pre_prob
* (256 - factor
)
4248 + get_prob
* factor
, 8);
4250 cur_prob
[coef_node_start
/ 4 * 2] =
4251 (cur_prob
[coef_node_start
/ 4 * 2] &
4252 (~(0xff << prob_shift
)))
4253 | (new_prob
<< prob_shift
);
4255 coef_node_start
= coef_node_start
+ 1;
4257 if (debug
& VP9_DEBUG_MERGE
)
4258 pr_info(" # merge_vp9_inter_mode_tree\n");
4259 coef_node_start
= VP9_INTER_MODE_START
;
4260 coef_count_node_start
= VP9_INTER_MODE_COUNT_START
;
4261 for (tree_i
= 0; tree_i
< 7; tree_i
++) {
4262 for (node
= 0; node
< 3; node
++) {
4266 count
[coef_count_node_start
+ 1];
4268 count
[coef_count_node_start
+ 3];
4272 count
[coef_count_node_start
+ 0];
4274 count
[coef_count_node_start
+ 1]
4275 + count
[coef_count_node_start
+ 3];
4279 count
[coef_count_node_start
+ 2];
4281 count
[coef_count_node_start
+ 0]
4282 + count
[coef_count_node_start
+ 1]
4283 + count
[coef_count_node_start
+ 3];
4288 vp9_tree_merge_probs(prev_prob
, cur_prob
,
4289 coef_node_start
, tree_left
, tree_right
,
4292 coef_node_start
= coef_node_start
+ 1;
4294 coef_count_node_start
= coef_count_node_start
+ 4;
4296 if (debug
& VP9_DEBUG_MERGE
)
4297 pr_info(" # merge_vp9_intra_mode_tree\n");
4298 coef_node_start
= VP9_IF_Y_MODE_START
;
4299 coef_count_node_start
= VP9_IF_Y_MODE_COUNT_START
;
4300 for (tree_i
= 0; tree_i
< 14; tree_i
++) {
4301 for (node
= 0; node
< 9; node
++) {
4305 count
[coef_count_node_start
+D153_PRED
];
4307 count
[coef_count_node_start
+D207_PRED
];
4311 count
[coef_count_node_start
+D63_PRED
];
4313 count
[coef_count_node_start
+D207_PRED
] +
4314 count
[coef_count_node_start
+D153_PRED
];
4318 count
[coef_count_node_start
+ D45_PRED
];
4320 count
[coef_count_node_start
+D207_PRED
] +
4321 count
[coef_count_node_start
+D153_PRED
] +
4322 count
[coef_count_node_start
+D63_PRED
];
4326 count
[coef_count_node_start
+D135_PRED
];
4328 count
[coef_count_node_start
+D117_PRED
];
4332 count
[coef_count_node_start
+H_PRED
];
4334 count
[coef_count_node_start
+D117_PRED
] +
4335 count
[coef_count_node_start
+D135_PRED
];
4339 count
[coef_count_node_start
+H_PRED
] +
4340 count
[coef_count_node_start
+D117_PRED
] +
4341 count
[coef_count_node_start
+D135_PRED
];
4343 count
[coef_count_node_start
+D45_PRED
] +
4344 count
[coef_count_node_start
+D207_PRED
] +
4345 count
[coef_count_node_start
+D153_PRED
] +
4346 count
[coef_count_node_start
+D63_PRED
];
4350 count
[coef_count_node_start
+V_PRED
];
4352 count
[coef_count_node_start
+H_PRED
] +
4353 count
[coef_count_node_start
+D117_PRED
] +
4354 count
[coef_count_node_start
+D135_PRED
] +
4355 count
[coef_count_node_start
+D45_PRED
] +
4356 count
[coef_count_node_start
+D207_PRED
] +
4357 count
[coef_count_node_start
+D153_PRED
] +
4358 count
[coef_count_node_start
+D63_PRED
];
4362 count
[coef_count_node_start
+TM_PRED
];
4364 count
[coef_count_node_start
+V_PRED
] +
4365 count
[coef_count_node_start
+H_PRED
] +
4366 count
[coef_count_node_start
+D117_PRED
] +
4367 count
[coef_count_node_start
+D135_PRED
] +
4368 count
[coef_count_node_start
+D45_PRED
] +
4369 count
[coef_count_node_start
+D207_PRED
] +
4370 count
[coef_count_node_start
+D153_PRED
] +
4371 count
[coef_count_node_start
+D63_PRED
];
4375 count
[coef_count_node_start
+DC_PRED
];
4377 count
[coef_count_node_start
+TM_PRED
] +
4378 count
[coef_count_node_start
+V_PRED
] +
4379 count
[coef_count_node_start
+H_PRED
] +
4380 count
[coef_count_node_start
+D117_PRED
] +
4381 count
[coef_count_node_start
+D135_PRED
] +
4382 count
[coef_count_node_start
+D45_PRED
] +
4383 count
[coef_count_node_start
+D207_PRED
] +
4384 count
[coef_count_node_start
+D153_PRED
] +
4385 count
[coef_count_node_start
+D63_PRED
];
4390 vp9_tree_merge_probs(prev_prob
, cur_prob
,
4391 coef_node_start
, tree_left
, tree_right
,
4394 coef_node_start
= coef_node_start
+ 1;
4396 coef_count_node_start
= coef_count_node_start
+ 10;
4399 if (debug
& VP9_DEBUG_MERGE
)
4400 pr_info(" # merge_vp9_partition_tree\n");
4401 coef_node_start
= VP9_PARTITION_P_START
;
4402 coef_count_node_start
= VP9_PARTITION_P_COUNT_START
;
4403 for (tree_i
= 0; tree_i
< 16; tree_i
++) {
4404 for (node
= 0; node
< 3; node
++) {
4408 count
[coef_count_node_start
+ 2];
4410 count
[coef_count_node_start
+ 3];
4414 count
[coef_count_node_start
+ 1];
4416 count
[coef_count_node_start
+ 2] +
4417 count
[coef_count_node_start
+ 3];
4421 count
[coef_count_node_start
+ 0];
4423 count
[coef_count_node_start
+ 1] +
4424 count
[coef_count_node_start
+ 2] +
4425 count
[coef_count_node_start
+ 3];
4430 vp9_tree_merge_probs(prev_prob
, cur_prob
,
4432 tree_left
, tree_right
, tree_i
, node
);
4434 coef_node_start
= coef_node_start
+ 1;
4436 coef_count_node_start
= coef_count_node_start
+ 4;
4439 if (debug
& VP9_DEBUG_MERGE
)
4440 pr_info(" # merge_vp9_switchable_interp_tree\n");
4441 coef_node_start
= VP9_INTERP_START
;
4442 coef_count_node_start
= VP9_INTERP_COUNT_START
;
4443 for (tree_i
= 0; tree_i
< 4; tree_i
++) {
4444 for (node
= 0; node
< 2; node
++) {
4448 count
[coef_count_node_start
+ 1];
4450 count
[coef_count_node_start
+ 2];
4454 count
[coef_count_node_start
+ 0];
4456 count
[coef_count_node_start
+ 1] +
4457 count
[coef_count_node_start
+ 2];
4462 vp9_tree_merge_probs(prev_prob
, cur_prob
,
4464 tree_left
, tree_right
, tree_i
, node
);
4466 coef_node_start
= coef_node_start
+ 1;
4468 coef_count_node_start
= coef_count_node_start
+ 3;
4471 if (debug
& VP9_DEBUG_MERGE
)
4472 pr_info("# merge_vp9_mv_joint_tree\n");
4473 coef_node_start
= VP9_MV_JOINTS_START
;
4474 coef_count_node_start
= VP9_MV_JOINTS_COUNT_START
;
4475 for (tree_i
= 0; tree_i
< 1; tree_i
++) {
4476 for (node
= 0; node
< 3; node
++) {
4480 count
[coef_count_node_start
+ 2];
4482 count
[coef_count_node_start
+ 3];
4486 count
[coef_count_node_start
+ 1];
4488 count
[coef_count_node_start
+ 2] +
4489 count
[coef_count_node_start
+ 3];
4493 count
[coef_count_node_start
+ 0];
4495 count
[coef_count_node_start
+ 1] +
4496 count
[coef_count_node_start
+ 2] +
4497 count
[coef_count_node_start
+ 3];
4501 vp9_tree_merge_probs(prev_prob
, cur_prob
,
4503 tree_left
, tree_right
, tree_i
, node
);
4505 coef_node_start
= coef_node_start
+ 1;
4507 coef_count_node_start
= coef_count_node_start
+ 4;
4510 for (mvd_i
= 0; mvd_i
< 2; mvd_i
++) {
4511 if (debug
& VP9_DEBUG_MERGE
)
4512 pr_info(" # merge_vp9_mv_class_tree [%d] -\n", mvd_i
);
4514 mvd_i
? VP9_MV_CLASSES_1_START
: VP9_MV_CLASSES_0_START
;
4515 coef_count_node_start
=
4516 mvd_i
? VP9_MV_CLASSES_1_COUNT_START
4517 : VP9_MV_CLASSES_0_COUNT_START
;
4519 for (node
= 0; node
< 10; node
++) {
4523 count
[coef_count_node_start
+ 9];
4525 count
[coef_count_node_start
+ 10];
4529 count
[coef_count_node_start
+ 7];
4531 count
[coef_count_node_start
+ 8];
4535 count
[coef_count_node_start
+ 7] +
4536 count
[coef_count_node_start
+ 8];
4538 count
[coef_count_node_start
+ 9] +
4539 count
[coef_count_node_start
+ 10];
4543 count
[coef_count_node_start
+ 6];
4545 count
[coef_count_node_start
+ 7] +
4546 count
[coef_count_node_start
+ 8] +
4547 count
[coef_count_node_start
+ 9] +
4548 count
[coef_count_node_start
+ 10];
4552 count
[coef_count_node_start
+ 4];
4554 count
[coef_count_node_start
+ 5];
4558 count
[coef_count_node_start
+ 4] +
4559 count
[coef_count_node_start
+ 5];
4561 count
[coef_count_node_start
+ 6] +
4562 count
[coef_count_node_start
+ 7] +
4563 count
[coef_count_node_start
+ 8] +
4564 count
[coef_count_node_start
+ 9] +
4565 count
[coef_count_node_start
+ 10];
4569 count
[coef_count_node_start
+ 2];
4571 count
[coef_count_node_start
+ 3];
4575 count
[coef_count_node_start
+ 2] +
4576 count
[coef_count_node_start
+ 3];
4578 count
[coef_count_node_start
+ 4] +
4579 count
[coef_count_node_start
+ 5] +
4580 count
[coef_count_node_start
+ 6] +
4581 count
[coef_count_node_start
+ 7] +
4582 count
[coef_count_node_start
+ 8] +
4583 count
[coef_count_node_start
+ 9] +
4584 count
[coef_count_node_start
+ 10];
4588 count
[coef_count_node_start
+ 1];
4590 count
[coef_count_node_start
+ 2] +
4591 count
[coef_count_node_start
+ 3] +
4592 count
[coef_count_node_start
+ 4] +
4593 count
[coef_count_node_start
+ 5] +
4594 count
[coef_count_node_start
+ 6] +
4595 count
[coef_count_node_start
+ 7] +
4596 count
[coef_count_node_start
+ 8] +
4597 count
[coef_count_node_start
+ 9] +
4598 count
[coef_count_node_start
+ 10];
4602 count
[coef_count_node_start
+ 0];
4604 count
[coef_count_node_start
+ 1] +
4605 count
[coef_count_node_start
+ 2] +
4606 count
[coef_count_node_start
+ 3] +
4607 count
[coef_count_node_start
+ 4] +
4608 count
[coef_count_node_start
+ 5] +
4609 count
[coef_count_node_start
+ 6] +
4610 count
[coef_count_node_start
+ 7] +
4611 count
[coef_count_node_start
+ 8] +
4612 count
[coef_count_node_start
+ 9] +
4613 count
[coef_count_node_start
+ 10];
4618 vp9_tree_merge_probs(prev_prob
, cur_prob
,
4619 coef_node_start
, tree_left
, tree_right
,
4622 coef_node_start
= coef_node_start
+ 1;
4625 if (debug
& VP9_DEBUG_MERGE
)
4626 pr_info(" # merge_vp9_mv_class0_tree [%d] -\n", mvd_i
);
4628 mvd_i
? VP9_MV_CLASS0_1_START
: VP9_MV_CLASS0_0_START
;
4629 coef_count_node_start
=
4630 mvd_i
? VP9_MV_CLASS0_1_COUNT_START
:
4631 VP9_MV_CLASS0_0_COUNT_START
;
4634 tree_left
= count
[coef_count_node_start
+ 0];
4635 tree_right
= count
[coef_count_node_start
+ 1];
4637 vp9_tree_merge_probs(prev_prob
, cur_prob
, coef_node_start
,
4638 tree_left
, tree_right
, tree_i
, node
);
4639 if (debug
& VP9_DEBUG_MERGE
)
4640 pr_info(" # merge_vp9_mv_fp_tree_class0_fp [%d] -\n",
4643 mvd_i
? VP9_MV_CLASS0_FP_1_START
:
4644 VP9_MV_CLASS0_FP_0_START
;
4645 coef_count_node_start
=
4646 mvd_i
? VP9_MV_CLASS0_FP_1_COUNT_START
:
4647 VP9_MV_CLASS0_FP_0_COUNT_START
;
4648 for (tree_i
= 0; tree_i
< 3; tree_i
++) {
4649 for (node
= 0; node
< 3; node
++) {
4653 count
[coef_count_node_start
+ 2];
4655 count
[coef_count_node_start
+ 3];
4659 count
[coef_count_node_start
+ 1];
4661 count
[coef_count_node_start
+ 2]
4662 + count
[coef_count_node_start
+ 3];
4666 count
[coef_count_node_start
+ 0];
4668 count
[coef_count_node_start
+ 1]
4669 + count
[coef_count_node_start
+ 2]
4670 + count
[coef_count_node_start
+ 3];
4675 vp9_tree_merge_probs(prev_prob
, cur_prob
,
4676 coef_node_start
, tree_left
, tree_right
,
4679 coef_node_start
= coef_node_start
+ 1;
4681 coef_count_node_start
= coef_count_node_start
+ 4;
4684 } /* for mvd_i (mvd_y or mvd_x)*/
/*
 * v4l_is_there_vframe_bound() - scan all in-use frame buffers and report
 * whether any of them is still bound to a vframe handed out to the V4L
 * layer (frame_bufs[i].buf.vframe_bound set).
 * NOTE(review): this view is garbled by extraction and the loop body /
 * return statements are missing; code lines kept byte-identical.
 */
4689 static bool v4l_is_there_vframe_bound(struct VP9Decoder_s
*pbi
)
4692 struct VP9_Common_s
*const cm
= &pbi
->common
;
4693 struct RefCntBuffer_s
*frame_bufs
= cm
->buffer_pool
->frame_bufs
;
/* walk every buffer the decoder actually uses */
4695 for (i
= 0; i
< pbi
->used_buf_num
; ++i
) {
4696 if (frame_bufs
[i
].buf
.vframe_bound
)
/*
 * v4l_mmu_buffer_release() - free MMU/BMMU resources for buffers that are
 * NOT bound to a vframe. Buffers still referenced by userspace via a V4L
 * fd are left alone (see the original in-line comment: they are released
 * when the fd is closed).
 * NOTE(review): extraction dropped interior lines (e.g. the workspace
 * buffer index argument after "bmmu_box,"); code lines kept byte-identical.
 */
4703 static void v4l_mmu_buffer_release(struct VP9Decoder_s
*pbi
)
4705 struct VP9_Common_s
*const cm
= &pbi
->common
;
4706 struct RefCntBuffer_s
*frame_bufs
= cm
->buffer_pool
->frame_bufs
;
4709 /* release workspace */
4711 decoder_bmmu_box_free_idx(pbi
->bmmu_box
,
4714 * it's only when vframe get back to driver, right now we can be sure
4715 * that vframe and fd are related. if the playback exits, the capture
4716 * requires the upper app to release when the fd is closed, and others
4717 * buffers drivers are released by driver.
/* free header + body allocations only for unbound buffers */
4719 for (i
= 0; i
< pbi
->used_buf_num
; ++i
) {
4720 if (!frame_bufs
[i
].buf
.vframe_bound
) {
4722 decoder_bmmu_box_free_idx(pbi
->bmmu_box
,
4723 HEADER_BUFFER_IDX(i
));
4725 decoder_mmu_box_free_idx(pbi
->mmu_box
, i
);
4727 vp9_print(pbi
, PRINT_FLAG_V4L_DETAIL
,
4728 "%s free buffer[%d], bmmu_box: %p, mmu_box: %p\n",
4729 __func__
, i
, pbi
->bmmu_box
, pbi
->mmu_box
);
/*
 * uninit_mmu_buffers() - tear down the decoder's MMU and BMMU boxes.
 * When the V4L path is used and some buffer is still vframe-bound, the
 * per-buffer release helper is invoked first (unless double-write mode
 * 0x10, i.e. no compressed buffers). The boxes are then freed and their
 * pointers cleared.
 * NOTE(review): extraction dropped interior lines (closing braces and the
 * lines between original 4742 and 4748); code lines kept byte-identical.
 */
4734 static void uninit_mmu_buffers(struct VP9Decoder_s
*pbi
)
4736 #ifndef MV_USE_FIXED_BUF
4737 dealloc_mv_bufs(pbi
);
4739 if (pbi
->is_used_v4l
&&
4740 v4l_is_there_vframe_bound(pbi
)) {
4741 if (get_double_write_mode(pbi
) != 0x10) {
4742 v4l_mmu_buffer_release(pbi
);
4748 decoder_mmu_box_free(pbi
->mmu_box
);
4749 pbi
->mmu_box
= NULL
;
4752 decoder_bmmu_box_free(pbi
->bmmu_box
);
4753 pbi
->bmmu_box
= NULL
;
4756 static int calc_luc_quantity(u32 w
, u32 h
)
4758 int lcu_size
= 64; /*fixed 64*/
4759 int pic_width_64
= (w
+ 63) & (~0x3f);
4760 int pic_height_32
= (h
+ 31) & (~0x1f);
4761 int pic_width_lcu
= (pic_width_64
% lcu_size
) ?
4762 pic_width_64
/ lcu_size
+ 1 : pic_width_64
/ lcu_size
;
4763 int pic_height_lcu
= (pic_height_32
% lcu_size
) ?
4764 pic_height_32
/ lcu_size
+ 1 : pic_height_32
/ lcu_size
;
4766 return pic_width_lcu
* pic_height_lcu
;
4769 static int v4l_alloc_and_config_pic(struct VP9Decoder_s
*pbi
,
4770 struct PIC_BUFFER_CONFIG_s
*pic
)
4774 int dw_mode
= get_double_write_mode_init(pbi
);
4775 int lcu_total
= calc_luc_quantity(pbi
->frame_width
, pbi
->frame_height
);
4776 #ifdef MV_USE_FIXED_BUF
4777 u32 mpred_mv_end
= pbi
->work_space_buf
->mpred_mv
.buf_start
+
4778 pbi
->work_space_buf
->mpred_mv
.buf_size
;
4780 struct vdec_v4l2_buffer
*fb
= NULL
;
4782 ret
= vdec_v4l_get_buffer(pbi
->v4l2_ctx
, &fb
);
4784 vp9_print(pbi
, 0, "[%d] VP9 get buffer fail.\n",
4785 ((struct aml_vcodec_ctx
*) (pbi
->v4l2_ctx
))->id
);
4789 if (pbi
->mmu_enable
) {
4790 pbi
->m_BUF
[i
].header_addr
= decoder_bmmu_box_get_phy_addr(
4791 pbi
->bmmu_box
, HEADER_BUFFER_IDX(i
));
4792 if (debug
& VP9_DEBUG_BUFMGR_MORE
) {
4793 pr_info("MMU header_adr %d: %ld\n",
4794 i
, pbi
->m_BUF
[i
].header_addr
);
4798 #ifdef MV_USE_FIXED_BUF
4799 if ((pbi
->work_space_buf
->mpred_mv
.buf_start
+
4800 (((i
+ 1) * lcu_total
) * MV_MEM_UNIT
))
4803 pbi
->m_BUF
[i
].v4l_ref_buf_addr
= (ulong
)fb
;
4804 pic
->cma_alloc_addr
= fb
->m
.mem
[0].addr
;
4805 if (fb
->num_planes
== 1) {
4806 pbi
->m_BUF
[i
].start_adr
= fb
->m
.mem
[0].addr
;
4807 pbi
->m_BUF
[i
].size
= fb
->m
.mem
[0].size
;
4808 pbi
->m_BUF
[i
].luma_size
= fb
->m
.mem
[0].offset
;
4809 fb
->m
.mem
[0].bytes_used
= fb
->m
.mem
[0].size
;
4810 } else if (fb
->num_planes
== 2) {
4811 pbi
->m_BUF
[i
].start_adr
= fb
->m
.mem
[0].addr
;
4812 pbi
->m_BUF
[i
].size
= fb
->m
.mem
[0].size
+ fb
->m
.mem
[1].size
;
4813 pbi
->m_BUF
[i
].luma_size
= fb
->m
.mem
[0].size
;
4814 fb
->m
.mem
[0].bytes_used
= fb
->m
.mem
[0].size
;
4815 fb
->m
.mem
[1].bytes_used
= fb
->m
.mem
[1].size
;
4818 /* config frame buffer */
4819 if (pbi
->mmu_enable
)
4820 pic
->header_adr
= pbi
->m_BUF
[i
].header_addr
;
4823 pic
->lcu_total
= lcu_total
;
4824 pic
->mc_canvas_y
= pic
->index
;
4825 pic
->mc_canvas_u_v
= pic
->index
;
4827 if (dw_mode
& 0x10) {
4828 pic
->dw_y_adr
= pbi
->m_BUF
[i
].start_adr
;
4829 pic
->dw_u_v_adr
= pic
->dw_y_adr
+ pbi
->m_BUF
[i
].luma_size
;
4830 pic
->mc_canvas_y
= (pic
->index
<< 1);
4831 pic
->mc_canvas_u_v
= (pic
->index
<< 1) + 1;
4832 } else if (dw_mode
) {
4833 pic
->dw_y_adr
= pbi
->m_BUF
[i
].start_adr
;
4834 pic
->dw_u_v_adr
= pic
->dw_y_adr
+ pbi
->m_BUF
[i
].luma_size
;
4837 #ifdef MV_USE_FIXED_BUF
4838 pic
->mpred_mv_wr_start_addr
=
4839 pbi
->work_space_buf
->mpred_mv
.buf_start
+
4840 ((pic
->index
* lcu_total
) * MV_MEM_UNIT
);
4843 pr_info("%s index %d BUF_index %d ",
4844 __func__
, pic
->index
,
4846 pr_info("comp_body_size %x comp_buf_size %x ",
4847 pic
->comp_body_size
,
4849 pr_info("mpred_mv_wr_start_adr %ld\n",
4850 pic
->mpred_mv_wr_start_addr
);
4851 pr_info("dw_y_adr %d, pic_config->dw_u_v_adr =%d\n",
4855 #ifdef MV_USE_FIXED_BUF
4861 static int config_pic(struct VP9Decoder_s
*pbi
,
4862 struct PIC_BUFFER_CONFIG_s
*pic_config
)
4866 int pic_width
= pbi
->init_pic_w
;
4867 int pic_height
= pbi
->init_pic_h
;
4868 int lcu_size
= 64; /*fixed 64*/
4869 int pic_width_64
= (pic_width
+ 63) & (~0x3f);
4870 int pic_height_32
= (pic_height
+ 31) & (~0x1f);
4871 int pic_width_lcu
= (pic_width_64
% lcu_size
) ?
4872 pic_width_64
/ lcu_size
+ 1
4873 : pic_width_64
/ lcu_size
;
4874 int pic_height_lcu
= (pic_height_32
% lcu_size
) ?
4875 pic_height_32
/ lcu_size
+ 1
4876 : pic_height_32
/ lcu_size
;
4877 int lcu_total
= pic_width_lcu
* pic_height_lcu
;
4878 #ifdef MV_USE_FIXED_BUF
4879 u32 mpred_mv_end
= pbi
->work_space_buf
->mpred_mv
.buf_start
+
4880 pbi
->work_space_buf
->mpred_mv
.buf_size
;
4885 int losless_comp_header_size
=
4886 compute_losless_comp_header_size(pic_width
,
4888 int losless_comp_body_size
= compute_losless_comp_body_size(pic_width
,
4889 pic_height
, buf_alloc_depth
== 10);
4890 int mc_buffer_size
= losless_comp_header_size
+ losless_comp_body_size
;
4891 int mc_buffer_size_h
= (mc_buffer_size
+ 0xffff) >> 16;
4892 int mc_buffer_size_u_v
= 0;
4893 int mc_buffer_size_u_v_h
= 0;
4894 int dw_mode
= get_double_write_mode_init(pbi
);
4896 pbi
->lcu_total
= lcu_total
;
4899 int pic_width_dw
= pic_width
/
4900 get_double_write_ratio(pbi
, dw_mode
);
4901 int pic_height_dw
= pic_height
/
4902 get_double_write_ratio(pbi
, dw_mode
);
4904 int pic_width_64_dw
= (pic_width_dw
+ 63) & (~0x3f);
4905 int pic_height_32_dw
= (pic_height_dw
+ 31) & (~0x1f);
4906 int pic_width_lcu_dw
= (pic_width_64_dw
% lcu_size
) ?
4907 pic_width_64_dw
/ lcu_size
+ 1
4908 : pic_width_64_dw
/ lcu_size
;
4909 int pic_height_lcu_dw
= (pic_height_32_dw
% lcu_size
) ?
4910 pic_height_32_dw
/ lcu_size
+ 1
4911 : pic_height_32_dw
/ lcu_size
;
4912 int lcu_total_dw
= pic_width_lcu_dw
* pic_height_lcu_dw
;
4913 mc_buffer_size_u_v
= lcu_total_dw
* lcu_size
* lcu_size
/ 2;
4914 mc_buffer_size_u_v_h
= (mc_buffer_size_u_v
+ 0xffff) >> 16;
4916 buf_size
= ((mc_buffer_size_u_v_h
<< 16) * 3);
4917 buf_size
= ((buf_size
+ 0xffff) >> 16) << 16;
4920 if (mc_buffer_size
& 0xffff) /*64k alignment*/
4921 mc_buffer_size_h
+= 1;
4922 if ((!pbi
->mmu_enable
) && ((dw_mode
& 0x10) == 0))
4923 buf_size
+= (mc_buffer_size_h
<< 16);
4925 if (pbi
->mmu_enable
) {
4926 pic_config
->header_adr
= decoder_bmmu_box_get_phy_addr(
4927 pbi
->bmmu_box
, HEADER_BUFFER_IDX(pic_config
->index
));
4929 if (debug
& VP9_DEBUG_BUFMGR_MORE
) {
4930 pr_info("MMU header_adr %d: %ld\n",
4931 pic_config
->index
, pic_config
->header_adr
);
4935 i
= pic_config
->index
;
4936 #ifdef MV_USE_FIXED_BUF
4937 if ((pbi
->work_space_buf
->mpred_mv
.buf_start
+
4938 (((i
+ 1) * lcu_total
) * MV_MEM_UNIT
))
4943 ret
= decoder_bmmu_box_alloc_buf_phy(pbi
->bmmu_box
,
4945 buf_size
, DRIVER_NAME
,
4946 &pic_config
->cma_alloc_addr
);
4949 "decoder_bmmu_box_alloc_buf_phy idx %d size %d fail\n",
4956 if (pic_config
->cma_alloc_addr
)
4957 y_adr
= pic_config
->cma_alloc_addr
;
4960 "decoder_bmmu_box_alloc_buf_phy idx %d size %d return null\n",
/* ensure get_pic_by_POC() cannot
 * return a buffer that is not yet decoded */
4970 pic_config
->BUF_index
= i
;
4971 pic_config
->lcu_total
= lcu_total
;
4973 pic_config
->comp_body_size
= losless_comp_body_size
;
4974 pic_config
->buf_size
= buf_size
;
4976 pic_config
->mc_canvas_y
= pic_config
->index
;
4977 pic_config
->mc_canvas_u_v
= pic_config
->index
;
4978 if (dw_mode
& 0x10) {
4979 pic_config
->dw_y_adr
= y_adr
;
4980 pic_config
->dw_u_v_adr
= y_adr
+
4981 ((mc_buffer_size_u_v_h
<< 16) << 1);
4983 pic_config
->mc_canvas_y
=
4984 (pic_config
->index
<< 1);
4985 pic_config
->mc_canvas_u_v
=
4986 (pic_config
->index
<< 1) + 1;
4987 } else if (dw_mode
) {
4988 pic_config
->dw_y_adr
= y_adr
;
4989 pic_config
->dw_u_v_adr
= pic_config
->dw_y_adr
+
4990 ((mc_buffer_size_u_v_h
<< 16) << 1);
4992 #ifdef MV_USE_FIXED_BUF
4993 pic_config
->mpred_mv_wr_start_addr
=
4994 pbi
->work_space_buf
->mpred_mv
.buf_start
+
4995 ((pic_config
->index
* lcu_total
)
5000 ("%s index %d BUF_index %d ",
5001 __func__
, pic_config
->index
,
5002 pic_config
->BUF_index
);
5004 ("comp_body_size %x comp_buf_size %x ",
5005 pic_config
->comp_body_size
,
5006 pic_config
->buf_size
);
5008 ("mpred_mv_wr_start_adr %ld\n",
5009 pic_config
->mpred_mv_wr_start_addr
);
5010 pr_info("dw_y_adr %d, pic_config->dw_u_v_adr =%d\n",
5011 pic_config
->dw_y_adr
,
5012 pic_config
->dw_u_v_adr
);
5016 #ifdef MV_USE_FIXED_BUF
5022 static int vvp9_mmu_compress_header_size(struct VP9Decoder_s
*pbi
)
5024 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1
) &&
5025 IS_8K_SIZE(pbi
->max_pic_w
, pbi
->max_pic_h
))
5026 return (MMU_COMPRESS_8K_HEADER_SIZE
);
5028 return (MMU_COMPRESS_HEADER_SIZE
);
5031 /*#define FRAME_MMU_MAP_SIZE (MAX_FRAME_4K_NUM * 4)*/
5032 static int vvp9_frame_mmu_map_size(struct VP9Decoder_s
*pbi
)
5034 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1
) &&
5035 IS_8K_SIZE(pbi
->max_pic_w
, pbi
->max_pic_h
))
5036 return (MAX_FRAME_8K_NUM
* 4);
5038 return (MAX_FRAME_4K_NUM
* 4);
5041 static void init_pic_list(struct VP9Decoder_s
*pbi
)
5044 struct VP9_Common_s
*cm
= &pbi
->common
;
5045 struct PIC_BUFFER_CONFIG_s
*pic_config
;
5047 struct vdec_s
*vdec
= hw_to_vdec(pbi
);
5049 if (pbi
->mmu_enable
&& ((pbi
->double_write_mode
& 0x10) == 0)) {
5050 header_size
= vvp9_mmu_compress_header_size(pbi
);
5051 /*alloc VP9 compress header first*/
5052 for (i
= 0; i
< pbi
->used_buf_num
; i
++) {
5053 unsigned long buf_addr
;
5054 if (decoder_bmmu_box_alloc_buf_phy
5056 HEADER_BUFFER_IDX(i
), header_size
,
5059 pr_info("%s malloc compress header failed %d\n",
5060 DRIVER_HEADER_NAME
, i
);
5061 pbi
->fatal_error
|= DECODER_FATAL_ERROR_NO_MEM
;
5066 for (i
= 0; i
< pbi
->used_buf_num
; i
++) {
5067 pic_config
= &cm
->buffer_pool
->frame_bufs
[i
].buf
;
5068 pic_config
->index
= i
;
5069 pic_config
->BUF_index
= -1;
5070 pic_config
->mv_buf_index
= -1;
5071 if (vdec
->parallel_dec
== 1) {
5072 pic_config
->y_canvas_index
= -1;
5073 pic_config
->uv_canvas_index
= -1;
5075 pic_config
->y_crop_width
= pbi
->init_pic_w
;
5076 pic_config
->y_crop_height
= pbi
->init_pic_h
;
5077 pic_config
->double_write_mode
= get_double_write_mode(pbi
);
5079 if (!pbi
->is_used_v4l
) {
5080 if (config_pic(pbi
, pic_config
) < 0) {
5082 pr_info("Config_pic %d fail\n",
5084 pic_config
->index
= -1;
5088 if (pic_config
->double_write_mode
) {
5089 set_canvas(pbi
, pic_config
);
5093 for (; i
< pbi
->used_buf_num
; i
++) {
5094 pic_config
= &cm
->buffer_pool
->frame_bufs
[i
].buf
;
5095 pic_config
->index
= -1;
5096 pic_config
->BUF_index
= -1;
5097 pic_config
->mv_buf_index
= -1;
5098 if (vdec
->parallel_dec
== 1) {
5099 pic_config
->y_canvas_index
= -1;
5100 pic_config
->uv_canvas_index
= -1;
5103 pr_info("%s ok, used_buf_num = %d\n",
5104 __func__
, pbi
->used_buf_num
);
5107 static void init_pic_list_hw(struct VP9Decoder_s
*pbi
)
5110 struct VP9_Common_s
*cm
= &pbi
->common
;
5111 struct PIC_BUFFER_CONFIG_s
*pic_config
;
5112 /*WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CONF_ADDR, 0x0);*/
5113 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CONF_ADDR
,
5114 (0x1 << 1) | (0x1 << 2));
5117 for (i
= 0; i
< pbi
->used_buf_num
; i
++) {
5118 pic_config
= &cm
->buffer_pool
->frame_bufs
[i
].buf
;
5119 if (pic_config
->index
< 0)
5122 if (pbi
->mmu_enable
&& ((pic_config
->double_write_mode
& 0x10) == 0)) {
5124 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA
,
5125 pic_config
->header_adr
>> 5);
5127 /*WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CMD_ADDR,
5128 * pic_config->mc_y_adr
5129 * | (pic_config->mc_canvas_y << 8) | 0x1);
5131 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA
,
5132 pic_config
->dw_y_adr
>> 5);
5134 #ifndef LOSLESS_COMPRESS_MODE
5135 /*WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CMD_ADDR,
5136 * pic_config->mc_u_v_adr
5137 * | (pic_config->mc_canvas_u_v << 8)| 0x1);
5139 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA
,
5140 pic_config
->header_adr
>> 5);
5142 if (pic_config
->double_write_mode
& 0x10) {
5143 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_DATA
,
5144 pic_config
->dw_u_v_adr
>> 5);
5148 WRITE_VREG(HEVCD_MPP_ANC2AXI_TBL_CONF_ADDR
, 0x1);
5150 /*Zero out canvas registers in IPP -- avoid simulation X*/
5151 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR
,
5152 (0 << 8) | (0 << 1) | 1);
5153 for (i
= 0; i
< 32; i
++)
5154 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR
, 0);
/*
 * Debug helper: walks every entry of cm->buffer_pool->frame_bufs and
 * prints per-buffer state (index, mv_buf_index, ref_count, decode_idx,
 * slice_type, crop width/height, cma_alloc_addr) per the format string
 * below.
 *
 * NOTE(review): the source extraction dropped several original lines in
 * this function (5159, 5162, 5165, 5167-5168, 5171-5174, 5176 and
 * 5182-5186), so the print-call head and some of its arguments are
 * missing here. Code is left byte-identical; recover the dropped lines
 * from upstream vvp9.c before editing.
 */
5158 static void dump_pic_list(struct VP9Decoder_s
*pbi
)
5160 struct VP9_Common_s
*const cm
= &pbi
->common
;
5161 struct PIC_BUFFER_CONFIG_s
*pic_config
;
/* iterate over the full fixed-size frame buffer pool */
5163 for (i
= 0; i
< FRAME_BUFFERS
; i
++) {
5164 pic_config
= &cm
->buffer_pool
->frame_bufs
[i
].buf
;
5166 "Buf(%d) index %d mv_buf_index %d ref_count %d vf_ref %d dec_idx %d slice_type %d w/h %d/%d adr%ld\n",
/* mv_buf_index only exists when MV buffers are dynamically allocated */
5169 #ifndef MV_USE_FIXED_BUF
5170 pic_config
->mv_buf_index
,
5175 frame_bufs
[i
].ref_count
,
5177 pic_config
->decode_idx
,
5178 pic_config
->slice_type
,
5179 pic_config
->y_crop_width
,
5180 pic_config
->y_crop_height
,
5181 pic_config
->cma_alloc_addr
5187 static int config_pic_size(struct VP9Decoder_s
*pbi
, unsigned short bit_depth
)
5189 #ifdef LOSLESS_COMPRESS_MODE
5190 unsigned int data32
;
5192 int losless_comp_header_size
, losless_comp_body_size
;
5193 struct VP9_Common_s
*cm
= &pbi
->common
;
5194 struct PIC_BUFFER_CONFIG_s
*cur_pic_config
= &cm
->cur_frame
->buf
;
5196 frame_width
= cur_pic_config
->y_crop_width
;
5197 frame_height
= cur_pic_config
->y_crop_height
;
5198 cur_pic_config
->bit_depth
= bit_depth
;
5199 cur_pic_config
->double_write_mode
= get_double_write_mode(pbi
);
5200 losless_comp_header_size
=
5201 compute_losless_comp_header_size(cur_pic_config
->y_crop_width
,
5202 cur_pic_config
->y_crop_height
);
5203 losless_comp_body_size
=
5204 compute_losless_comp_body_size(cur_pic_config
->y_crop_width
,
5205 cur_pic_config
->y_crop_height
, (bit_depth
== VPX_BITS_10
));
5206 cur_pic_config
->comp_body_size
= losless_comp_body_size
;
5207 #ifdef LOSLESS_COMPRESS_MODE
5208 data32
= READ_VREG(HEVC_SAO_CTRL5
);
5209 if (bit_depth
== VPX_BITS_10
)
5210 data32
&= ~(1 << 9);
5214 WRITE_VREG(HEVC_SAO_CTRL5
, data32
);
5216 if (pbi
->mmu_enable
) {
5217 /*bit[4] : paged_mem_mode*/
5218 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1
, (0x1 << 4));
5220 /*bit[3] smem mdoe*/
5221 if (bit_depth
== VPX_BITS_10
)
5222 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1
, (0 << 3));
5224 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1
, (1 << 3));
5226 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_SM1
)
5227 WRITE_VREG(HEVCD_MPP_DECOMP_CTL2
, (losless_comp_body_size
>> 5));
5228 /*WRITE_VREG(HEVCD_MPP_DECOMP_CTL3,(0xff<<20) | (0xff<<10) | 0xff);*/
5229 WRITE_VREG(HEVC_CM_BODY_LENGTH
, losless_comp_body_size
);
5230 WRITE_VREG(HEVC_CM_HEADER_OFFSET
, losless_comp_body_size
);
5231 WRITE_VREG(HEVC_CM_HEADER_LENGTH
, losless_comp_header_size
);
5232 if (get_double_write_mode(pbi
) & 0x10)
5233 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1
, 0x1 << 31);
5235 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1
, 0x1 << 31);
5240 static int config_mc_buffer(struct VP9Decoder_s
*pbi
, unsigned short bit_depth
)
5243 struct VP9_Common_s
*cm
= &pbi
->common
;
5244 struct PIC_BUFFER_CONFIG_s
*cur_pic_config
= &cm
->cur_frame
->buf
;
5245 uint8_t scale_enable
= 0;
5247 if (debug
&VP9_DEBUG_BUFMGR_MORE
)
5248 pr_info("config_mc_buffer entered .....\n");
5250 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR
,
5251 (0 << 8) | (0 << 1) | 1);
5252 for (i
= 0; i
< REFS_PER_FRAME
; ++i
) {
5253 struct PIC_BUFFER_CONFIG_s
*pic_config
= cm
->frame_refs
[i
].buf
;
5256 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR
,
5257 (pic_config
->mc_canvas_u_v
<< 16)
5258 | (pic_config
->mc_canvas_u_v
<< 8)
5259 | pic_config
->mc_canvas_y
);
5260 if (debug
& VP9_DEBUG_BUFMGR_MORE
)
5261 pr_info("refid %x mc_canvas_u_v %x mc_canvas_y %x\n",
5262 i
, pic_config
->mc_canvas_u_v
,
5263 pic_config
->mc_canvas_y
);
5265 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR
,
5266 (16 << 8) | (0 << 1) | 1);
5267 for (i
= 0; i
< REFS_PER_FRAME
; ++i
) {
5268 struct PIC_BUFFER_CONFIG_s
*pic_config
= cm
->frame_refs
[i
].buf
;
5271 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR
,
5272 (pic_config
->mc_canvas_u_v
<< 16)
5273 | (pic_config
->mc_canvas_u_v
<< 8)
5274 | pic_config
->mc_canvas_y
);
5277 /*auto_inc start index:0 field:0*/
5278 WRITE_VREG(VP9D_MPP_REFINFO_TBL_ACCCONFIG
, 0x1 << 2);
5279 /*index 0:last 1:golden 2:altref*/
5280 for (i
= 0; i
< REFS_PER_FRAME
; i
++) {
5281 int ref_pic_body_size
;
5282 struct PIC_BUFFER_CONFIG_s
*pic_config
= cm
->frame_refs
[i
].buf
;
5285 WRITE_VREG(VP9D_MPP_REFINFO_DATA
, pic_config
->y_crop_width
);
5286 WRITE_VREG(VP9D_MPP_REFINFO_DATA
, pic_config
->y_crop_height
);
5288 if (pic_config
->y_crop_width
!= cur_pic_config
->y_crop_width
||
5289 pic_config
->y_crop_height
!= cur_pic_config
->y_crop_height
) {
5290 scale_enable
|= (1 << i
);
5293 compute_losless_comp_body_size(pic_config
->y_crop_width
,
5294 pic_config
->y_crop_height
, (bit_depth
== VPX_BITS_10
));
5295 WRITE_VREG(VP9D_MPP_REFINFO_DATA
,
5296 (pic_config
->y_crop_width
<< 14)
5297 / cur_pic_config
->y_crop_width
);
5298 WRITE_VREG(VP9D_MPP_REFINFO_DATA
,
5299 (pic_config
->y_crop_height
<< 14)
5300 / cur_pic_config
->y_crop_height
);
5301 if (pbi
->mmu_enable
)
5302 WRITE_VREG(VP9D_MPP_REFINFO_DATA
, 0);
5304 WRITE_VREG(VP9D_MPP_REFINFO_DATA
, ref_pic_body_size
>> 5);
5306 WRITE_VREG(VP9D_MPP_REF_SCALE_ENBL
, scale_enable
);
5310 static void clear_mpred_hw(struct VP9Decoder_s
*pbi
)
5312 unsigned int data32
;
5314 data32
= READ_VREG(HEVC_MPRED_CTRL4
);
5315 data32
&= (~(1 << 6));
5316 WRITE_VREG(HEVC_MPRED_CTRL4
, data32
);
5319 static void config_mpred_hw(struct VP9Decoder_s
*pbi
)
5321 struct VP9_Common_s
*cm
= &pbi
->common
;
5322 struct PIC_BUFFER_CONFIG_s
*cur_pic_config
= &cm
->cur_frame
->buf
;
5323 struct PIC_BUFFER_CONFIG_s
*last_frame_pic_config
=
5324 &cm
->prev_frame
->buf
;
5326 unsigned int data32
;
5327 int mpred_curr_lcu_x
;
5328 int mpred_curr_lcu_y
;
5329 int mpred_mv_rd_end_addr
;
5332 mpred_mv_rd_end_addr
= last_frame_pic_config
->mpred_mv_wr_start_addr
5333 + (last_frame_pic_config
->lcu_total
* MV_MEM_UNIT
);
5335 data32
= READ_VREG(HEVC_MPRED_CURR_LCU
);
5336 mpred_curr_lcu_x
= data32
& 0xffff;
5337 mpred_curr_lcu_y
= (data32
>> 16) & 0xffff;
5339 if (debug
& VP9_DEBUG_BUFMGR
)
5340 pr_info("cur pic_config index %d col pic_config index %d\n",
5341 cur_pic_config
->index
, last_frame_pic_config
->index
);
5342 WRITE_VREG(HEVC_MPRED_CTRL3
, 0x24122412);
5343 WRITE_VREG(HEVC_MPRED_ABV_START_ADDR
,
5344 pbi
->work_space_buf
->mpred_above
.buf_start
);
5346 data32
= READ_VREG(HEVC_MPRED_CTRL4
);
5348 data32
&= (~(1 << 6));
5349 data32
|= (cm
->use_prev_frame_mvs
<< 6);
5350 WRITE_VREG(HEVC_MPRED_CTRL4
, data32
);
5352 WRITE_VREG(HEVC_MPRED_MV_WR_START_ADDR
,
5353 cur_pic_config
->mpred_mv_wr_start_addr
);
5354 WRITE_VREG(HEVC_MPRED_MV_WPTR
, cur_pic_config
->mpred_mv_wr_start_addr
);
5356 WRITE_VREG(HEVC_MPRED_MV_RD_START_ADDR
,
5357 last_frame_pic_config
->mpred_mv_wr_start_addr
);
5358 WRITE_VREG(HEVC_MPRED_MV_RPTR
,
5359 last_frame_pic_config
->mpred_mv_wr_start_addr
);
5360 /*data32 = ((pbi->lcu_x_num - pbi->tile_width_lcu)*MV_MEM_UNIT);*/
5361 /*WRITE_VREG(HEVC_MPRED_MV_WR_ROW_JUMP,data32);*/
5362 /*WRITE_VREG(HEVC_MPRED_MV_RD_ROW_JUMP,data32);*/
5363 WRITE_VREG(HEVC_MPRED_MV_RD_END_ADDR
, mpred_mv_rd_end_addr
);
5367 static void config_sao_hw(struct VP9Decoder_s
*pbi
, union param_u
*params
)
5369 struct VP9_Common_s
*cm
= &pbi
->common
;
5370 struct PIC_BUFFER_CONFIG_s
*pic_config
= &cm
->cur_frame
->buf
;
5372 unsigned int data32
;
5374 int mc_buffer_size_u_v
=
5375 pic_config
->lcu_total
* lcu_size
*lcu_size
/2;
5376 int mc_buffer_size_u_v_h
=
5377 (mc_buffer_size_u_v
+ 0xffff) >> 16;/*64k alignment*/
5378 struct aml_vcodec_ctx
* v4l2_ctx
= pbi
->v4l2_ctx
;
5380 if (get_double_write_mode(pbi
)) {
5381 WRITE_VREG(HEVC_SAO_Y_START_ADDR
, pic_config
->dw_y_adr
);
5382 WRITE_VREG(HEVC_SAO_C_START_ADDR
, pic_config
->dw_u_v_adr
);
5383 WRITE_VREG(HEVC_SAO_Y_WPTR
, pic_config
->dw_y_adr
);
5384 WRITE_VREG(HEVC_SAO_C_WPTR
, pic_config
->dw_u_v_adr
);
5386 WRITE_VREG(HEVC_SAO_Y_START_ADDR
, 0xffffffff);
5387 WRITE_VREG(HEVC_SAO_C_START_ADDR
, 0xffffffff);
5389 if (pbi
->mmu_enable
)
5390 WRITE_VREG(HEVC_CM_HEADER_START_ADDR
, pic_config
->header_adr
);
5392 data32
= (mc_buffer_size_u_v_h
<< 16) << 1;
5393 /*pr_info("data32=%x,mc_buffer_size_u_v_h=%x,lcu_total=%x\n",
5394 * data32, mc_buffer_size_u_v_h, pic_config->lcu_total);
5396 WRITE_VREG(HEVC_SAO_Y_LENGTH
, data32
);
5398 data32
= (mc_buffer_size_u_v_h
<< 16);
5399 WRITE_VREG(HEVC_SAO_C_LENGTH
, data32
);
5403 data32
= READ_VREG(HEVC_SAO_CTRL1
);
5404 data32
&= (~0x3000);
5405 /*[13:12] axi_aformat, 0-Linear, 1-32x32, 2-64x32*/
5406 data32
|= (pbi
->mem_map_mode
<< 12);
5408 data32
|= 0x1; /* [1]:dw_disable [0]:cm_disable*/
5409 WRITE_VREG(HEVC_SAO_CTRL1
, data32
);
5410 /*[23:22] dw_v1_ctrl [21:20] dw_v0_ctrl [19:18] dw_h1_ctrl
5411 * [17:16] dw_h0_ctrl
5413 data32
= READ_VREG(HEVC_SAO_CTRL5
);
5414 /*set them all 0 for H265_NV21 (no down-scale)*/
5415 data32
&= ~(0xff << 16);
5416 WRITE_VREG(HEVC_SAO_CTRL5
, data32
);
5417 data32
= READ_VREG(HEVCD_IPP_AXIIF_CONFIG
);
5419 /*[5:4] address_format 00:linear 01:32x32 10:64x32*/
5420 data32
|= (pbi
->mem_map_mode
<< 4);
5421 WRITE_VREG(HEVCD_IPP_AXIIF_CONFIG
, data32
);
5424 data32
= READ_VREG(HEVC_SAO_CTRL1
);
5425 data32
&= (~0x3000);
5426 /*[13:12] axi_aformat, 0-Linear, 1-32x32, 2-64x32*/
5427 data32
|= (pbi
->mem_map_mode
<< 12);
5429 /*data32 |= 0x670;*/ /*Big-Endian per 64-bit*/
5430 data32
|= 0x880; /*.Big-Endian per 64-bit */
5432 data32
|= 0x1; /*[1]:dw_disable [0]:cm_disable*/
5433 WRITE_VREG(HEVC_SAO_CTRL1
, data32
);
5434 /* [23:22] dw_v1_ctrl [21:20] dw_v0_ctrl
5435 *[19:18] dw_h1_ctrl [17:16] dw_h0_ctrl
5437 data32
= READ_VREG(HEVC_SAO_CTRL5
);
5438 /* set them all 0 for H265_NV21 (no down-scale)*/
5439 data32
&= ~(0xff << 16);
5440 WRITE_VREG(HEVC_SAO_CTRL5
, data32
);
5442 data32
= READ_VREG(HEVCD_IPP_AXIIF_CONFIG
);
5444 /*[5:4] address_format 00:linear 01:32x32 10:64x32*/
5445 data32
|= (pbi
->mem_map_mode
<< 4);
5447 data32
|= 0x8; /*Big-Endian per 64-bit*/
5448 WRITE_VREG(HEVCD_IPP_AXIIF_CONFIG
, data32
);
5451 data32
= READ_VREG(HEVC_SAO_CTRL1
);
5452 data32
&= (~0x3000);
5453 data32
|= (pbi
->mem_map_mode
<<
5456 /* [13:12] axi_aformat, 0-Linear,
5460 /* data32 |= 0x670; // Big-Endian per 64-bit */
5461 data32
|= endian
; /* Big-Endian per 64-bit */
5462 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_G12A
) {
5463 data32
&= (~0x3); /*[1]:dw_disable [0]:cm_disable*/
5464 if (get_double_write_mode(pbi
) == 0)
5465 data32
|= 0x2; /*disable double write*/
5466 else if (get_double_write_mode(pbi
) & 0x10)
5467 data32
|= 0x1; /*disable cm*/
5468 } else { /* >= G12A dw write control */
5470 data
= READ_VREG(HEVC_DBLK_CFGB
);
5471 data
&= (~0x300); /*[8]:first write enable (compress) [9]:double write enable (uncompress)*/
5472 if (get_double_write_mode(pbi
) == 0)
5473 data
|= (0x1 << 8); /*enable first write*/
5474 else if (get_double_write_mode(pbi
) & 0x10)
5475 data
|= (0x1 << 9); /*double write only*/
5477 data
|= ((0x1 << 8) |(0x1 << 9));
5478 WRITE_VREG(HEVC_DBLK_CFGB
, data
);
5482 if (pbi
->is_used_v4l
) {
5483 if ((v4l2_ctx
->q_data
[AML_Q_DATA_DST
].fmt
->fourcc
== V4L2_PIX_FMT_NV21
) ||
5484 (v4l2_ctx
->q_data
[AML_Q_DATA_DST
].fmt
->fourcc
== V4L2_PIX_FMT_NV21M
))
5485 data32
&= ~(1 << 8); /* NV21 */
5487 data32
|= (1 << 8); /* NV12 */
5491 * [31:24] ar_fifo1_axi_thred
5492 * [23:16] ar_fifo0_axi_thred
5493 * [15:14] axi_linealign, 0-16bytes, 1-32bytes, 2-64bytes
5494 * [13:12] axi_aformat, 0-Linear, 1-32x32, 2-64x32
5495 * [11:08] axi_lendian_C
5496 * [07:04] axi_lendian_Y
5499 * [1] dw_disable:disable double write output
5500 * [0] cm_disable:disable compress output
5502 WRITE_VREG(HEVC_SAO_CTRL1
, data32
);
5504 if (get_double_write_mode(pbi
) & 0x10) {
5505 /* [23:22] dw_v1_ctrl
5510 data32
= READ_VREG(HEVC_SAO_CTRL5
);
5511 /*set them all 0 for H265_NV21 (no down-scale)*/
5512 data32
&= ~(0xff << 16);
5513 WRITE_VREG(HEVC_SAO_CTRL5
, data32
);
5515 data32
= READ_VREG(HEVC_SAO_CTRL5
);
5516 data32
&= (~(0xff << 16));
5517 if (get_double_write_mode(pbi
) == 2 ||
5518 get_double_write_mode(pbi
) == 3)
5519 data32
|= (0xff<<16);
5520 else if (get_double_write_mode(pbi
) == 4)
5521 data32
|= (0x33<<16);
5522 WRITE_VREG(HEVC_SAO_CTRL5
, data32
);
5525 data32
= READ_VREG(HEVCD_IPP_AXIIF_CONFIG
);
5527 /* [5:4] -- address_format 00:linear 01:32x32 10:64x32 */
5528 data32
|= (pbi
->mem_map_mode
<<
5531 data32
|= 0xf; /* valid only when double write only */
5532 /*data32 |= 0x8;*/ /* Big-Endian per 64-bit */
5535 if (pbi
->is_used_v4l
) {
5536 if ((v4l2_ctx
->q_data
[AML_Q_DATA_DST
].fmt
->fourcc
== V4L2_PIX_FMT_NV21
) ||
5537 (v4l2_ctx
->q_data
[AML_Q_DATA_DST
].fmt
->fourcc
== V4L2_PIX_FMT_NV21M
))
5538 data32
|= (1 << 12); /* NV21 */
5540 data32
&= ~(1 << 12); /* NV12 */
5544 * [3:0] little_endian
5545 * [5:4] address_format 00:linear 01:32x32 10:64x32
5547 * [9:8] Linear_LineAlignment 00:16byte 01:32byte 10:64byte
5549 * [12] CbCr_byte_swap
5552 WRITE_VREG(HEVCD_IPP_AXIIF_CONFIG
, data32
);
5556 static void vp9_config_work_space_hw(struct VP9Decoder_s
*pbi
, u32 mask
)
5558 struct BuffInfo_s
*buf_spec
= pbi
->work_space_buf
;
5559 unsigned int data32
;
5561 if (debug
&& pbi
->init_flag
== 0)
5562 pr_info("%s %x %x %x %x %x %x %x %x %x %x %x %x\n",
5564 buf_spec
->ipp
.buf_start
,
5565 buf_spec
->start_adr
,
5566 buf_spec
->short_term_rps
.buf_start
,
5567 buf_spec
->vps
.buf_start
,
5568 buf_spec
->sps
.buf_start
,
5569 buf_spec
->pps
.buf_start
,
5570 buf_spec
->sao_up
.buf_start
,
5571 buf_spec
->swap_buf
.buf_start
,
5572 buf_spec
->swap_buf2
.buf_start
,
5573 buf_spec
->scalelut
.buf_start
,
5574 buf_spec
->dblk_para
.buf_start
,
5575 buf_spec
->dblk_data
.buf_start
);
5577 if (mask
& HW_MASK_FRONT
) {
5578 if ((debug
& VP9_DEBUG_SEND_PARAM_WITH_REG
) == 0)
5579 WRITE_VREG(HEVC_RPM_BUFFER
, (u32
)pbi
->rpm_phy_addr
);
5581 WRITE_VREG(HEVC_SHORT_TERM_RPS
,
5582 buf_spec
->short_term_rps
.buf_start
);
5583 /*WRITE_VREG(HEVC_VPS_BUFFER, buf_spec->vps.buf_start);*/
5584 /*WRITE_VREG(HEVC_SPS_BUFFER, buf_spec->sps.buf_start);*/
5585 WRITE_VREG(HEVC_PPS_BUFFER
, buf_spec
->pps
.buf_start
);
5586 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER
,
5587 buf_spec
->swap_buf
.buf_start
);
5588 WRITE_VREG(HEVC_STREAM_SWAP_BUFFER2
,
5589 buf_spec
->swap_buf2
.buf_start
);
5590 WRITE_VREG(LMEM_DUMP_ADR
, (u32
)pbi
->lmem_phy_addr
);
5594 if (mask
& HW_MASK_BACK
) {
5595 #ifdef LOSLESS_COMPRESS_MODE
5596 int losless_comp_header_size
=
5597 compute_losless_comp_header_size(pbi
->init_pic_w
,
5599 int losless_comp_body_size
=
5600 compute_losless_comp_body_size(pbi
->init_pic_w
,
5601 pbi
->init_pic_h
, buf_alloc_depth
== 10);
5603 WRITE_VREG(HEVCD_IPP_LINEBUFF_BASE
,
5604 buf_spec
->ipp
.buf_start
);
5605 WRITE_VREG(HEVC_SAO_UP
, buf_spec
->sao_up
.buf_start
);
5606 WRITE_VREG(HEVC_SCALELUT
, buf_spec
->scalelut
.buf_start
);
5607 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A
) {
5609 WRITE_VREG(HEVC_DBLK_CFGE
, buf_spec
->dblk_para
.buf_start
);
5610 if (debug
& VP9_DEBUG_BUFMGR_MORE
)
5611 pr_info("Write HEVC_DBLK_CFGE\n");
5614 WRITE_VREG(HEVC_DBLK_CFG4
, buf_spec
->dblk_para
.buf_start
);
5616 WRITE_VREG(HEVC_DBLK_CFG5
, buf_spec
->dblk_data
.buf_start
);
5618 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1
) {
5620 * data32 = (READ_VREG(P_HEVC_DBLK_CFG3)>>8) & 0xff; // xio left offset, default is 0x40
5621 * data32 = data32 * 2;
5622 * data32 = (READ_VREG(P_HEVC_DBLK_CFG3)>>16) & 0xff; // adp left offset, default is 0x040
5623 * data32 = data32 * 2;
5625 WRITE_VREG(HEVC_DBLK_CFG3
, 0x808010); // make left storage 2 x 4k]
5627 #ifdef LOSLESS_COMPRESS_MODE
5628 if (pbi
->mmu_enable
) {
5629 /*bit[4] : paged_mem_mode*/
5630 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1
, (0x1 << 4));
5631 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_SM1
)
5632 WRITE_VREG(HEVCD_MPP_DECOMP_CTL2
, 0);
5634 /*if(cur_pic_config->bit_depth == VPX_BITS_10)
5635 * WRITE_VREG(P_HEVCD_MPP_DECOMP_CTL1, (0<<3));
5637 /*bit[3] smem mdoe*/
5638 /*else WRITE_VREG(P_HEVCD_MPP_DECOMP_CTL1, (1<<3));*/
5639 /*bit[3] smem mdoe*/
5640 WRITE_VREG(HEVCD_MPP_DECOMP_CTL2
,
5641 (losless_comp_body_size
>> 5));
5643 /*WRITE_VREG(HEVCD_MPP_DECOMP_CTL2,
5644 (losless_comp_body_size >> 5));*/
5645 /*WRITE_VREG(HEVCD_MPP_DECOMP_CTL3,
5646 (0xff<<20) | (0xff<<10) | 0xff);*/
5648 WRITE_VREG(HEVC_CM_BODY_LENGTH
, losless_comp_body_size
);
5649 WRITE_VREG(HEVC_CM_HEADER_OFFSET
, losless_comp_body_size
);
5650 WRITE_VREG(HEVC_CM_HEADER_LENGTH
, losless_comp_header_size
);
5651 if (get_double_write_mode(pbi
) & 0x10)
5652 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1
, 0x1 << 31);
5654 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1
, 0x1 << 31);
5657 if (pbi
->mmu_enable
) {
5658 WRITE_VREG(HEVC_SAO_MMU_VH0_ADDR
, buf_spec
->mmu_vbh
.buf_start
);
5659 WRITE_VREG(HEVC_SAO_MMU_VH1_ADDR
, buf_spec
->mmu_vbh
.buf_start
5660 + buf_spec
->mmu_vbh
.buf_size
/2);
5661 /*data32 = READ_VREG(P_HEVC_SAO_CTRL9);*/
5663 /*WRITE_VREG(P_HEVC_SAO_CTRL9, data32);*/
5665 /* use HEVC_CM_HEADER_START_ADDR */
5666 data32
= READ_VREG(HEVC_SAO_CTRL5
);
5668 WRITE_VREG(HEVC_SAO_CTRL5
, data32
);
5671 WRITE_VREG(VP9_SEG_MAP_BUFFER
, buf_spec
->seg_map
.buf_start
);
5673 WRITE_VREG(LMEM_DUMP_ADR
, (u32
)pbi
->lmem_phy_addr
);
5675 WRITE_VREG(VP9_PROB_SWAP_BUFFER
, pbi
->prob_buffer_phy_addr
);
5676 WRITE_VREG(VP9_COUNT_SWAP_BUFFER
, pbi
->count_buffer_phy_addr
);
5677 if (pbi
->mmu_enable
) {
5678 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A
)
5679 WRITE_VREG(HEVC_ASSIST_MMU_MAP_ADDR
, pbi
->frame_mmu_map_phy_addr
);
5681 WRITE_VREG(VP9_MMU_MAP_BUFFER
, pbi
->frame_mmu_map_phy_addr
);
5687 #ifdef VP9_LPF_LVL_UPDATE
5689 * Defines, declarations, sub-functions for vp9 de-block loop
5690 filter Thr/Lvl table update
5691 * - struct segmentation is for loop filter only (removed something)
5692 * - function "vp9_loop_filter_init" and "vp9_loop_filter_frame_init" will
5693 be instantiated in C_Entry
5694 * - vp9_loop_filter_init run once before decoding start
5695 * - vp9_loop_filter_frame_init run before every frame decoding start
5696 * - set video format to VP9 is in vp9_loop_filter_init
5698 #define MAX_LOOP_FILTER 63
5699 #define MAX_REF_LF_DELTAS 4
5700 #define MAX_MODE_LF_DELTAS 2
5701 /*#define INTRA_FRAME 0*/
5702 /*#define LAST_FRAME 1*/
5703 /*#define MAX_REF_FRAMES 4*/
5704 #define SEGMENT_DELTADATA 0
5705 #define SEGMENT_ABSDATA 1
5706 #define MAX_SEGMENTS 8
5707 /*.#define SEG_TREE_PROBS (MAX_SEGMENTS-1)*/
5708 /*no use for loop filter, if this struct for common use, pls add it back*/
5709 /*#define PREDICTION_PROBS 3*/
5710 /* no use for loop filter, if this struct for common use, pls add it back*/
5712 enum SEG_LVL_FEATURES
{
5713 SEG_LVL_ALT_Q
= 0, /*Use alternate Quantizer ....*/
5714 SEG_LVL_ALT_LF
= 1, /*Use alternate loop filter value...*/
5715 SEG_LVL_REF_FRAME
= 2, /*Optional Segment reference frame*/
5716 SEG_LVL_SKIP
= 3, /*Optional Segment (0,0) + skip mode*/
5717 SEG_LVL_MAX
= 4 /*Number of features supported*/
5720 struct segmentation
{
5723 uint8_t update_data
;
5725 uint8_t temporal_update
;
5727 /*no use for loop filter, if this struct
5728 *for common use, pls add it back
5730 /*vp9_prob tree_probs[SEG_TREE_PROBS]; */
5731 /* no use for loop filter, if this struct
5732 * for common use, pls add it back
5734 /*vp9_prob pred_probs[PREDICTION_PROBS];*/
5736 int16_t feature_data
[MAX_SEGMENTS
][SEG_LVL_MAX
];
5737 unsigned int feature_mask
[MAX_SEGMENTS
];
5740 struct loop_filter_thresh
{
5746 struct loop_filter_info_n
{
5747 struct loop_filter_thresh lfthr
[MAX_LOOP_FILTER
+ 1];
5748 uint8_t lvl
[MAX_SEGMENTS
][MAX_REF_FRAMES
][MAX_MODE_LF_DELTAS
];
5754 int sharpness_level
;
5755 int last_sharpness_level
;
5757 uint8_t mode_ref_delta_enabled
;
5758 uint8_t mode_ref_delta_update
;
5760 /*0 = Intra, Last, GF, ARF*/
5761 signed char ref_deltas
[MAX_REF_LF_DELTAS
];
5762 signed char last_ref_deltas
[MAX_REF_LF_DELTAS
];
5765 signed char mode_deltas
[MAX_MODE_LF_DELTAS
];
5766 signed char last_mode_deltas
[MAX_MODE_LF_DELTAS
];
/* Clamp @value into the inclusive range [@low, @high]. */
static int vp9_clamp(int value, int low, int high)
{
	if (value < low)
		return low;
	if (value > high)
		return high;
	return value;
}
5774 int segfeature_active(struct segmentation
*seg
,
5776 enum SEG_LVL_FEATURES feature_id
) {
5777 return seg
->enabled
&&
5778 (seg
->feature_mask
[segment_id
] & (1 << feature_id
));
5781 int get_segdata(struct segmentation
*seg
, int segment_id
,
5782 enum SEG_LVL_FEATURES feature_id
) {
5783 return seg
->feature_data
[segment_id
][feature_id
];
5786 static void vp9_update_sharpness(struct loop_filter_info_n
*lfi
,
5790 /*For each possible value for the loop filter fill out limits*/
5791 for (lvl
= 0; lvl
<= MAX_LOOP_FILTER
; lvl
++) {
5792 /*Set loop filter parameters that control sharpness.*/
5793 int block_inside_limit
= lvl
>> ((sharpness_lvl
> 0) +
5794 (sharpness_lvl
> 4));
5796 if (sharpness_lvl
> 0) {
5797 if (block_inside_limit
> (9 - sharpness_lvl
))
5798 block_inside_limit
= (9 - sharpness_lvl
);
5801 if (block_inside_limit
< 1)
5802 block_inside_limit
= 1;
5804 lfi
->lfthr
[lvl
].lim
= (uint8_t)block_inside_limit
;
5805 lfi
->lfthr
[lvl
].mblim
= (uint8_t)(2 * (lvl
+ 2) +
5806 block_inside_limit
);
5810 /*instantiate this function once when decode is started*/
5811 void vp9_loop_filter_init(struct VP9Decoder_s
*pbi
)
5813 struct loop_filter_info_n
*lfi
= pbi
->lfi
;
5814 struct loopfilter
*lf
= pbi
->lf
;
5815 struct segmentation
*seg_4lf
= pbi
->seg_4lf
;
5817 unsigned int data32
;
5819 memset(lfi
, 0, sizeof(struct loop_filter_info_n
));
5820 memset(lf
, 0, sizeof(struct loopfilter
));
5821 memset(seg_4lf
, 0, sizeof(struct segmentation
));
5822 lf
->sharpness_level
= 0; /*init to 0 */
5823 /*init limits for given sharpness*/
5824 vp9_update_sharpness(lfi
, lf
->sharpness_level
);
5825 lf
->last_sharpness_level
= lf
->sharpness_level
;
5826 /*init hev threshold const vectors (actually no use)
5827 *for (i = 0; i <= MAX_LOOP_FILTER; i++)
5828 * lfi->lfthr[i].hev_thr = (uint8_t)(i >> 4);
5831 /*Write to register*/
5832 for (i
= 0; i
< 32; i
++) {
5835 thr
= ((lfi
->lfthr
[i
* 2 + 1].lim
& 0x3f)<<8) |
5836 (lfi
->lfthr
[i
* 2 + 1].mblim
& 0xff);
5837 thr
= (thr
<<16) | ((lfi
->lfthr
[i
*2].lim
& 0x3f)<<8) |
5838 (lfi
->lfthr
[i
* 2].mblim
& 0xff);
5839 WRITE_VREG(HEVC_DBLK_CFG9
, thr
);
5842 /*video format is VP9*/
5843 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1
) {
5844 data32
= (0x3 << 14) | // (dw fifo thres r and b)
5845 (0x3 << 12) | // (dw fifo thres r or b)
5846 (0x3 << 10) | // (dw fifo thres not r/b)
5847 (0x3 << 8) | // 1st/2nd write both enable
5848 (0x1 << 0); // vp9 video format
5849 } else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A
) {
5850 data32
= (0x57 << 8) | /*1st/2nd write both enable*/
5851 (0x1 << 0); /*vp9 video format*/
5853 data32
= 0x40400001;
5855 WRITE_VREG(HEVC_DBLK_CFGB
, data32
);
5856 if (debug
& VP9_DEBUG_BUFMGR_MORE
)
5857 pr_info("[DBLK DEBUG] CFGB : 0x%x\n", data32
);
5859 /* perform this function per frame*/
5860 void vp9_loop_filter_frame_init(struct segmentation
*seg
,
5861 struct loop_filter_info_n
*lfi
, struct loopfilter
*lf
,
5862 int default_filt_lvl
) {
5865 /*n_shift is the multiplier for lf_deltas
5866 *the multiplier is 1 for when filter_lvl is between 0 and 31;
5867 *2 when filter_lvl is between 32 and 63
5869 const int scale
= 1 << (default_filt_lvl
>> 5);
5871 /*update limits if sharpness has changed*/
5872 if (lf
->last_sharpness_level
!= lf
->sharpness_level
) {
5873 vp9_update_sharpness(lfi
, lf
->sharpness_level
);
5874 lf
->last_sharpness_level
= lf
->sharpness_level
;
5876 /*Write to register*/
5877 for (i
= 0; i
< 32; i
++) {
5880 thr
= ((lfi
->lfthr
[i
* 2 + 1].lim
& 0x3f) << 8)
5881 | (lfi
->lfthr
[i
* 2 + 1].mblim
& 0xff);
5882 thr
= (thr
<< 16) | ((lfi
->lfthr
[i
* 2].lim
& 0x3f) << 8)
5883 | (lfi
->lfthr
[i
* 2].mblim
& 0xff);
5884 WRITE_VREG(HEVC_DBLK_CFG9
, thr
);
5888 for (seg_id
= 0; seg_id
< MAX_SEGMENTS
; seg_id
++) {/*MAX_SEGMENTS = 8*/
5889 int lvl_seg
= default_filt_lvl
;
5891 if (segfeature_active(seg
, seg_id
, SEG_LVL_ALT_LF
)) {
5892 const int data
= get_segdata(seg
, seg_id
,
5894 lvl_seg
= vp9_clamp(seg
->abs_delta
== SEGMENT_ABSDATA
?
5895 data
: default_filt_lvl
+ data
,
5896 0, MAX_LOOP_FILTER
);
5898 pr_info("segfeature_active!!!seg_id=%d,lvl_seg=%d\n", seg_id
, lvl_seg
);
5902 if (!lf
->mode_ref_delta_enabled
) {
5903 /*we could get rid of this if we assume that deltas are set to
5904 *zero when not in use; encoder always uses deltas
5906 memset(lfi
->lvl
[seg_id
], lvl_seg
, sizeof(lfi
->lvl
[seg_id
]));
5909 const int intra_lvl
= lvl_seg
+ lf
->ref_deltas
[INTRA_FRAME
]
5912 pr_info("LF_PRINT:vp9_loop_filter_frame_init,seg_id=%d\n", seg_id
);
5913 pr_info("ref_deltas[INTRA_FRAME]=%d\n", lf
->ref_deltas
[INTRA_FRAME
]);
5915 lfi
->lvl
[seg_id
][INTRA_FRAME
][0] =
5916 vp9_clamp(intra_lvl
, 0, MAX_LOOP_FILTER
);
5918 for (ref
= LAST_FRAME
; ref
< MAX_REF_FRAMES
; ++ref
) {
5919 /* LAST_FRAME = 1, MAX_REF_FRAMES = 4*/
5920 for (mode
= 0; mode
< MAX_MODE_LF_DELTAS
; ++mode
) {
5921 /*MAX_MODE_LF_DELTAS = 2*/
5922 const int inter_lvl
=
5923 lvl_seg
+ lf
->ref_deltas
[ref
] * scale
5924 + lf
->mode_deltas
[mode
] * scale
;
5927 lfi
->lvl
[seg_id
][ref
][mode
] =
5928 vp9_clamp(inter_lvl
, 0,
5936 /*print out thr/lvl table per frame*/
5937 for (i
= 0; i
<= MAX_LOOP_FILTER
; i
++) {
5938 pr_info("LF_PRINT:(%d)thr=%d,blim=%d,lim=%d\n",
5939 i
, lfi
->lfthr
[i
].hev_thr
, lfi
->lfthr
[i
].mblim
,
5942 for (seg_id
= 0; seg_id
< MAX_SEGMENTS
; seg_id
++) {
5943 pr_info("LF_PRINT:lvl(seg_id=%d)(mode=0,%d,%d,%d,%d)\n",
5944 seg_id
, lfi
->lvl
[seg_id
][0][0],
5945 lfi
->lvl
[seg_id
][1][0], lfi
->lvl
[seg_id
][2][0],
5946 lfi
->lvl
[seg_id
][3][0]);
5947 pr_info("i(mode=1,%d,%d,%d,%d)\n", lfi
->lvl
[seg_id
][0][1],
5948 lfi
->lvl
[seg_id
][1][1], lfi
->lvl
[seg_id
][2][1],
5949 lfi
->lvl
[seg_id
][3][1]);
5953 /*Write to register */
5954 for (i
= 0; i
< 16; i
++) {
5957 level
= ((lfi
->lvl
[i
>> 1][3][i
& 1] & 0x3f) << 24) |
5958 ((lfi
->lvl
[i
>> 1][2][i
& 1] & 0x3f) << 16) |
5959 ((lfi
->lvl
[i
>> 1][1][i
& 1] & 0x3f) << 8) |
5960 (lfi
->lvl
[i
>> 1][0][i
& 1] & 0x3f);
5961 if (!default_filt_lvl
)
5963 WRITE_VREG(HEVC_DBLK_CFGA
, level
);
5966 /* VP9_LPF_LVL_UPDATE */
5969 static void vp9_init_decoder_hw(struct VP9Decoder_s
*pbi
, u32 mask
)
5971 unsigned int data32
;
5973 const unsigned short parser_cmd
[PARSER_CMD_NUMBER
] = {
5974 0x0401, 0x8401, 0x0800, 0x0402, 0x9002, 0x1423,
5975 0x8CC3, 0x1423, 0x8804, 0x9825, 0x0800, 0x04FE,
5976 0x8406, 0x8411, 0x1800, 0x8408, 0x8409, 0x8C2A,
5977 0x9C2B, 0x1C00, 0x840F, 0x8407, 0x8000, 0x8408,
5978 0x2000, 0xA800, 0x8410, 0x04DE, 0x840C, 0x840D,
5979 0xAC00, 0xA000, 0x08C0, 0x08E0, 0xA40E, 0xFC00,
5983 if (get_cpu_major_id() >= MESON_CPU_MAJOR_ID_G12A
) {
5984 /* Set MCR fetch priorities*/
5985 data32
= 0x1 | (0x1 << 2) | (0x1 <<3) |
5986 (24 << 4) | (32 << 11) | (24 << 18) | (32 << 25);
5987 WRITE_VREG(HEVCD_MPP_DECOMP_AXIURG_CTL
, data32
);
5990 /*if (debug & VP9_DEBUG_BUFMGR_MORE)
5991 pr_info("%s\n", __func__);*/
5992 if (mask
& HW_MASK_FRONT
) {
5993 data32
= READ_VREG(HEVC_PARSER_INT_CONTROL
);
5995 /* set bit 31~29 to 3 if HEVC_STREAM_FIFO_CTL[29] is 1 */
5996 data32
&= ~(7 << 29);
5997 data32
|= (3 << 29);
6000 (1 << 24) |/*stream_buffer_empty_int_amrisc_enable*/
6001 (1 << 22) |/*stream_fifo_empty_int_amrisc_enable*/
6002 (1 << 7) |/*dec_done_int_cpu_enable*/
6003 (1 << 4) |/*startcode_found_int_cpu_enable*/
6004 (0 << 3) |/*startcode_found_int_amrisc_enable*/
6005 (1 << 0) /*parser_int_enable*/
6007 #ifdef SUPPORT_FB_DECODING
6008 #ifndef FB_DECODING_TEST_SCHEDULE
6009 /*fed_fb_slice_done_int_cpu_enable*/
6010 if (pbi
->used_stage_buf_num
> 0)
6011 data32
|= (1 << 10);
6014 WRITE_VREG(HEVC_PARSER_INT_CONTROL
, data32
);
6016 data32
= READ_VREG(HEVC_SHIFT_STATUS
);
6018 (0 << 1) |/*emulation_check_off VP9
6019 do not have emulation*/
6020 (1 << 0)/*startcode_check_on*/
6022 WRITE_VREG(HEVC_SHIFT_STATUS
, data32
);
6023 WRITE_VREG(HEVC_SHIFT_CONTROL
,
6024 (0 << 14) | /*disable_start_code_protect*/
6025 (1 << 10) | /*length_zero_startcode_en for VP9*/
6026 (1 << 9) | /*length_valid_startcode_en for VP9*/
6027 (3 << 6) | /*sft_valid_wr_position*/
6028 (2 << 4) | /*emulate_code_length_sub_1*/
6029 (3 << 1) | /*start_code_length_sub_1
6030 VP9 use 0x00000001 as startcode (4 Bytes)*/
6031 (1 << 0) /*stream_shift_enable*/
6034 WRITE_VREG(HEVC_CABAC_CONTROL
,
6035 (1 << 0)/*cabac_enable*/
6038 WRITE_VREG(HEVC_PARSER_CORE_CONTROL
,
6039 (1 << 0)/* hevc_parser_core_clk_en*/
6043 WRITE_VREG(HEVC_DEC_STATUS_REG
, 0);
6047 if (mask
& HW_MASK_BACK
) {
6048 /*Initial IQIT_SCALELUT memory
6049 -- just to avoid X in simulation*/
6051 WRITE_VREG(HEVC_IQIT_SCALELUT_WR_ADDR
, 0);/*cfg_p_addr*/
6052 for (i
= 0; i
< 1024; i
++)
6053 WRITE_VREG(HEVC_IQIT_SCALELUT_DATA
, 0);
6056 if (mask
& HW_MASK_FRONT
) {
6058 #ifdef ENABLE_SWAP_TEST
6059 WRITE_VREG(HEVC_STREAM_SWAP_TEST
, 100);
6061 WRITE_VREG(HEVC_STREAM_SWAP_TEST
, 0);
6063 #ifdef MULTI_INSTANCE_SUPPORT
6064 if (!pbi
->m_ins_flag
) {
6065 if (pbi
->low_latency_flag
)
6066 decode_mode
= DECODE_MODE_SINGLE_LOW_LATENCY
;
6068 decode_mode
= DECODE_MODE_SINGLE
;
6069 } else if (vdec_frame_based(hw_to_vdec(pbi
)))
6070 decode_mode
= pbi
->no_head
?
6071 DECODE_MODE_MULTI_FRAMEBASE_NOHEAD
:
6072 DECODE_MODE_MULTI_FRAMEBASE
;
6074 decode_mode
= DECODE_MODE_MULTI_STREAMBASE
;
6075 #ifdef SUPPORT_FB_DECODING
6076 #ifndef FB_DECODING_TEST_SCHEDULE
6077 if (pbi
->used_stage_buf_num
> 0)
6078 decode_mode
|= (0x01 << 24);
6081 WRITE_VREG(DECODE_MODE
, decode_mode
);
6082 WRITE_VREG(HEVC_DECODE_SIZE
, 0);
6083 WRITE_VREG(HEVC_DECODE_COUNT
, 0);
6085 WRITE_VREG(DECODE_MODE
, DECODE_MODE_SINGLE
);
6086 WRITE_VREG(HEVC_DECODE_PIC_BEGIN_REG
, 0);
6087 WRITE_VREG(HEVC_DECODE_PIC_NUM_REG
, 0x7fffffff); /*to remove*/
6090 WRITE_VREG(HEVC_PARSER_CMD_WRITE
, (1 << 16) | (0 << 0));
6091 for (i
= 0; i
< PARSER_CMD_NUMBER
; i
++)
6092 WRITE_VREG(HEVC_PARSER_CMD_WRITE
, parser_cmd
[i
]);
6093 WRITE_VREG(HEVC_PARSER_CMD_SKIP_0
, PARSER_CMD_SKIP_CFG_0
);
6094 WRITE_VREG(HEVC_PARSER_CMD_SKIP_1
, PARSER_CMD_SKIP_CFG_1
);
6095 WRITE_VREG(HEVC_PARSER_CMD_SKIP_2
, PARSER_CMD_SKIP_CFG_2
);
6098 WRITE_VREG(HEVC_PARSER_IF_CONTROL
,
6099 /* (1 << 8) |*/ /*sao_sw_pred_enable*/
6100 (1 << 5) | /*parser_sao_if_en*/
6101 (1 << 2) | /*parser_mpred_if_en*/
6102 (1 << 0) /*parser_scaler_if_en*/
6106 if (mask
& HW_MASK_BACK
) {
6107 /*Changed to Start MPRED in microcode*/
6109 pr_info("[test.c] Start MPRED\n");
6110 WRITE_VREG(HEVC_MPRED_INT_STATUS,
6114 WRITE_VREG(HEVCD_IPP_TOP_CNTL
,
6115 (0 << 1) | /*enable ipp*/
6116 (1 << 0) /*software reset ipp and mpp*/
6118 WRITE_VREG(HEVCD_IPP_TOP_CNTL
,
6119 (1 << 1) | /*enable ipp*/
6120 (0 << 0) /*software reset ipp and mpp*/
6122 if (get_double_write_mode(pbi
) & 0x10) {
6123 /*Enable NV21 reference read mode for MC*/
6124 WRITE_VREG(HEVCD_MPP_DECOMP_CTL1
, 0x1 << 31);
6127 /*Initialize mcrcc and decomp perf counters*/
6128 if (mcrcc_cache_alg_flag
&&
6129 pbi
->init_flag
== 0) {
6130 mcrcc_perfcount_reset();
6131 decomp_perfcount_reset();
6138 #ifdef CONFIG_HEVC_CLK_FORCED_ON
6139 static void config_vp9_clk_forced_on(void)
6141 unsigned int rdata32
;
6143 rdata32
= READ_VREG(HEVC_IQIT_CLK_RST_CTRL
);
6144 WRITE_VREG(HEVC_IQIT_CLK_RST_CTRL
, rdata32
| (0x1 << 2));
6147 rdata32
= READ_VREG(HEVC_DBLK_CFG0
);
6148 WRITE_VREG(HEVC_DBLK_CFG0
, rdata32
| (0x1 << 2));
6151 rdata32
= READ_VREG(HEVC_SAO_CTRL1
);
6152 WRITE_VREG(HEVC_SAO_CTRL1
, rdata32
| (0x1 << 2));
6155 rdata32
= READ_VREG(HEVC_MPRED_CTRL1
);
6156 WRITE_VREG(HEVC_MPRED_CTRL1
, rdata32
| (0x1 << 24));
6159 rdata32
= READ_VREG(HEVC_STREAM_CONTROL
);
6160 WRITE_VREG(HEVC_STREAM_CONTROL
, rdata32
| (0x1 << 15));
6161 rdata32
= READ_VREG(HEVC_SHIFT_CONTROL
);
6162 WRITE_VREG(HEVC_SHIFT_CONTROL
, rdata32
| (0x1 << 15));
6163 rdata32
= READ_VREG(HEVC_CABAC_CONTROL
);
6164 WRITE_VREG(HEVC_CABAC_CONTROL
, rdata32
| (0x1 << 13));
6165 rdata32
= READ_VREG(HEVC_PARSER_CORE_CONTROL
);
6166 WRITE_VREG(HEVC_PARSER_CORE_CONTROL
, rdata32
| (0x1 << 15));
6167 rdata32
= READ_VREG(HEVC_PARSER_INT_CONTROL
);
6168 WRITE_VREG(HEVC_PARSER_INT_CONTROL
, rdata32
| (0x1 << 15));
6169 rdata32
= READ_VREG(HEVC_PARSER_IF_CONTROL
);
6170 WRITE_VREG(HEVC_PARSER_IF_CONTROL
,
6171 rdata32
| (0x1 << 6) | (0x1 << 3) | (0x1 << 1));
6174 rdata32
= READ_VREG(HEVCD_IPP_DYNCLKGATE_CONFIG
);
6175 WRITE_VREG(HEVCD_IPP_DYNCLKGATE_CONFIG
, rdata32
| 0xffffffff);
6178 rdata32
= READ_VREG(HEVCD_MCRCC_CTL1
);
6179 WRITE_VREG(HEVCD_MCRCC_CTL1
, rdata32
| (0x1 << 3));
6185 static void dump_hit_rate(struct VP9Decoder_s
*pbi
)
6187 if (debug
& VP9_DEBUG_CACHE_HIT_RATE
) {
6188 mcrcc_get_hitrate(pbi
->m_ins_flag
);
6189 decomp_get_hitrate();
6190 decomp_get_comprate();
6194 static void config_mcrcc_axi_hw(struct VP9Decoder_s
*pbi
)
6196 unsigned int rdata32
;
6197 unsigned short is_inter
;
6198 /*pr_info("Entered config_mcrcc_axi_hw...\n");*/
6199 WRITE_VREG(HEVCD_MCRCC_CTL1
, 0x2);/* reset mcrcc*/
6200 is_inter
= ((pbi
->common
.frame_type
!= KEY_FRAME
) &&
6201 (!pbi
->common
.intra_only
)) ? 1 : 0;
6202 if (!is_inter
) { /* I-PIC*/
6203 /*remove reset -- disables clock*/
6204 WRITE_VREG(HEVCD_MCRCC_CTL1
, 0x0);
6208 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1
) {
6209 mcrcc_get_hitrate(pbi
->m_ins_flag
);
6210 decomp_get_hitrate();
6211 decomp_get_comprate();
6214 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR
,
6215 (0 << 8) | (1 << 1) | 0);
6216 rdata32
= READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR
);
6217 rdata32
= rdata32
& 0xffff;
6218 rdata32
= rdata32
| (rdata32
<< 16);
6219 WRITE_VREG(HEVCD_MCRCC_CTL2
, rdata32
);
6220 /*Programme canvas1 */
6221 rdata32
= READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR
);
6222 rdata32
= rdata32
& 0xffff;
6223 rdata32
= rdata32
| (rdata32
<< 16);
6224 WRITE_VREG(HEVCD_MCRCC_CTL3
, rdata32
);
6225 /*enable mcrcc progressive-mode*/
6226 WRITE_VREG(HEVCD_MCRCC_CTL1
, 0xff0);
6229 static void config_mcrcc_axi_hw_new(struct VP9Decoder_s
*pbi
)
6231 u32 curr_picnum
= -1;
6232 u32 lastref_picnum
= -1;
6233 u32 goldenref_picnum
= -1;
6234 u32 altref_picnum
= -1;
6236 u32 lastref_delta_picnum
;
6237 u32 goldenref_delta_picnum
;
6238 u32 altref_delta_picnum
;
6248 u16 goldenref_inref
;
6251 u32 refcanvas_array
[3], utmp
;
6252 int deltapicnum_array
[3], tmp
;
6254 struct VP9_Common_s
*cm
= &pbi
->common
;
6255 struct PIC_BUFFER_CONFIG_s
*cur_pic_config
6256 = &cm
->cur_frame
->buf
;
6257 curr_picnum
= cur_pic_config
->decode_idx
;
6258 if (cm
->frame_refs
[0].buf
)
6259 lastref_picnum
= cm
->frame_refs
[0].buf
->decode_idx
;
6260 if (cm
->frame_refs
[1].buf
)
6261 goldenref_picnum
= cm
->frame_refs
[1].buf
->decode_idx
;
6262 if (cm
->frame_refs
[2].buf
)
6263 altref_picnum
= cm
->frame_refs
[2].buf
->decode_idx
;
6265 lastref_delta_picnum
= (lastref_picnum
>= curr_picnum
) ?
6266 (lastref_picnum
- curr_picnum
) : (curr_picnum
- lastref_picnum
);
6267 goldenref_delta_picnum
= (goldenref_picnum
>= curr_picnum
) ?
6268 (goldenref_picnum
- curr_picnum
) :
6269 (curr_picnum
- goldenref_picnum
);
6270 altref_delta_picnum
=
6271 (altref_picnum
>= curr_picnum
) ?
6272 (altref_picnum
- curr_picnum
) : (curr_picnum
- altref_picnum
);
6274 lastref_inref
= (cm
->frame_refs
[0].idx
!= INVALID_IDX
) ? 1 : 0;
6275 goldenref_inref
= (cm
->frame_refs
[1].idx
!= INVALID_IDX
) ? 1 : 0;
6276 altref_inref
= (cm
->frame_refs
[2].idx
!= INVALID_IDX
) ? 1 : 0;
6278 if (debug
& VP9_DEBUG_CACHE
)
6279 pr_info("%s--0--lastref_inref:%d goldenref_inref:%d altref_inref:%d\n",
6280 __func__
, lastref_inref
, goldenref_inref
, altref_inref
);
6282 WRITE_VREG(HEVCD_MCRCC_CTL1
, 0x2); /* reset mcrcc */
6284 is_inter
= ((pbi
->common
.frame_type
!= KEY_FRAME
)
6285 && (!pbi
->common
.intra_only
)) ? 1 : 0;
6287 if (!is_inter
) { /* I-PIC */
6288 /* remove reset -- disables clock */
6289 WRITE_VREG(HEVCD_MCRCC_CTL1
, 0x0);
6293 if (!pbi
->m_ins_flag
)
6296 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR
, (0 << 8) | (1<<1) | 0);
6297 lastcanvas
= READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR
);
6298 goldencanvas
= READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR
);
6299 altrefcanvas
= READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR
);
6301 if (debug
& VP9_DEBUG_CACHE
)
6302 pr_info("[test.c] lastref_canv:%x goldenref_canv:%x altref_canv:%x\n",
6303 lastcanvas
, goldencanvas
, altrefcanvas
);
6305 altref_inref
= ((altref_inref
== 1) &&
6306 (altrefcanvas
!= (goldenref_inref
6307 ? goldencanvas
: 0xffffffff)) &&
6308 (altrefcanvas
!= (lastref_inref
?
6309 lastcanvas
: 0xffffffff))) ? 1 : 0;
6310 goldenref_inref
= ((goldenref_inref
== 1) &&
6311 (goldencanvas
!= (lastref_inref
?
6312 lastcanvas
: 0xffffffff))) ? 1 : 0;
6313 if (debug
& VP9_DEBUG_CACHE
)
6314 pr_info("[test.c]--1--lastref_inref:%d goldenref_inref:%d altref_inref:%d\n",
6315 lastref_inref
, goldenref_inref
, altref_inref
);
6317 altref_delta_picnum
= altref_inref
? altref_delta_picnum
: 0x7fffffff;
6318 goldenref_delta_picnum
= goldenref_inref
?
6319 goldenref_delta_picnum
: 0x7fffffff;
6320 lastref_delta_picnum
= lastref_inref
?
6321 lastref_delta_picnum
: 0x7fffffff;
6322 if (debug
& VP9_DEBUG_CACHE
)
6323 pr_info("[test.c]--1--lastref_delta_picnum:%d goldenref_delta_picnum:%d altref_delta_picnum:%d\n",
6324 lastref_delta_picnum
, goldenref_delta_picnum
,
6325 altref_delta_picnum
);
6326 /*ARRAY SORT HERE DELTA/CANVAS ARRAY SORT -- use DELTA*/
6328 refcanvas_array
[0] = lastcanvas
;
6329 refcanvas_array
[1] = goldencanvas
;
6330 refcanvas_array
[2] = altrefcanvas
;
6332 deltapicnum_array
[0] = lastref_delta_picnum
;
6333 deltapicnum_array
[1] = goldenref_delta_picnum
;
6334 deltapicnum_array
[2] = altref_delta_picnum
;
6336 /* sort0 : 2-to-1 */
6337 if (deltapicnum_array
[2] < deltapicnum_array
[1]) {
6338 utmp
= refcanvas_array
[2];
6339 refcanvas_array
[2] = refcanvas_array
[1];
6340 refcanvas_array
[1] = utmp
;
6341 tmp
= deltapicnum_array
[2];
6342 deltapicnum_array
[2] = deltapicnum_array
[1];
6343 deltapicnum_array
[1] = tmp
;
6345 /* sort1 : 1-to-0 */
6346 if (deltapicnum_array
[1] < deltapicnum_array
[0]) {
6347 utmp
= refcanvas_array
[1];
6348 refcanvas_array
[1] = refcanvas_array
[0];
6349 refcanvas_array
[0] = utmp
;
6350 tmp
= deltapicnum_array
[1];
6351 deltapicnum_array
[1] = deltapicnum_array
[0];
6352 deltapicnum_array
[0] = tmp
;
6354 /* sort2 : 2-to-1 */
6355 if (deltapicnum_array
[2] < deltapicnum_array
[1]) {
6356 utmp
= refcanvas_array
[2]; refcanvas_array
[2] =
6357 refcanvas_array
[1]; refcanvas_array
[1] = utmp
;
6358 tmp
= deltapicnum_array
[2]; deltapicnum_array
[2] =
6359 deltapicnum_array
[1]; deltapicnum_array
[1] = tmp
;
6361 if (mcrcc_cache_alg_flag
==
6362 THODIYIL_MCRCC_CANVAS_ALGX
) { /*09/15/2017*/
6363 /* lowest delta_picnum */
6364 rdata32
= refcanvas_array
[0];
6365 rdata32
= rdata32
& 0xffff;
6366 rdata32
= rdata32
| (rdata32
<< 16);
6367 WRITE_VREG(HEVCD_MCRCC_CTL2
, rdata32
);
6369 /* 2nd-lowest delta_picnum */
6370 rdata32
= refcanvas_array
[1];
6371 rdata32
= rdata32
& 0xffff;
6372 rdata32
= rdata32
| (rdata32
<< 16);
6373 WRITE_VREG(HEVCD_MCRCC_CTL3
, rdata32
);
6375 /* previous version -- LAST/GOLDEN ALWAYS -- before 09/13/2017*/
6376 WRITE_VREG(HEVCD_MPP_ANC_CANVAS_ACCCONFIG_ADDR
,
6377 (0 << 8) | (1<<1) | 0);
6378 rdata32
= READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR
);
6379 rdata32
= rdata32
& 0xffff;
6380 rdata32
= rdata32
| (rdata32
<< 16);
6381 WRITE_VREG(HEVCD_MCRCC_CTL2
, rdata32
);
6383 /* Programme canvas1 */
6384 rdata32
= READ_VREG(HEVCD_MPP_ANC_CANVAS_DATA_ADDR
);
6385 rdata32
= rdata32
& 0xffff;
6386 rdata32
= rdata32
| (rdata32
<< 16);
6387 WRITE_VREG(HEVCD_MCRCC_CTL3
, rdata32
);
6390 WRITE_VREG(HEVCD_MCRCC_CTL1
, 0xff0); /* enable mcrcc progressive-mode */
6397 static void free_lf_buf(struct VP9Decoder_s
*pbi
)
6404 vfree(pbi
->seg_4lf
);
6407 pbi
->seg_4lf
= NULL
;
6410 static int alloc_lf_buf(struct VP9Decoder_s
*pbi
)
6412 pbi
->lfi
= vmalloc(sizeof(struct loop_filter_info_n
));
6413 pbi
->lf
= vmalloc(sizeof(struct loopfilter
));
6414 pbi
->seg_4lf
= vmalloc(sizeof(struct segmentation
));
6415 if (pbi
->lfi
== NULL
|| pbi
->lf
== NULL
|| pbi
->seg_4lf
== NULL
) {
6417 pr_err("[test.c] vp9_loop_filter init malloc error!!!\n");
6423 static void vp9_local_uninit(struct VP9Decoder_s
*pbi
)
6425 pbi
->rpm_ptr
= NULL
;
6426 pbi
->lmem_ptr
= NULL
;
6427 if (pbi
->rpm_addr
) {
6428 dma_free_coherent(amports_get_dma_device(),
6432 pbi
->rpm_addr
= NULL
;
6434 if (pbi
->lmem_addr
) {
6435 if (pbi
->lmem_phy_addr
)
6436 dma_free_coherent(amports_get_dma_device(),
6437 LMEM_BUF_SIZE
, pbi
->lmem_addr
,
6438 pbi
->lmem_phy_addr
);
6439 pbi
->lmem_addr
= NULL
;
6441 if (pbi
->prob_buffer_addr
) {
6442 if (pbi
->prob_buffer_phy_addr
)
6443 dma_free_coherent(amports_get_dma_device(),
6444 PROB_BUF_SIZE
, pbi
->prob_buffer_addr
,
6445 pbi
->prob_buffer_phy_addr
);
6447 pbi
->prob_buffer_addr
= NULL
;
6449 if (pbi
->count_buffer_addr
) {
6450 if (pbi
->count_buffer_phy_addr
)
6451 dma_free_coherent(amports_get_dma_device(),
6452 COUNT_BUF_SIZE
, pbi
->count_buffer_addr
,
6453 pbi
->count_buffer_phy_addr
);
6455 pbi
->count_buffer_addr
= NULL
;
6457 if (pbi
->mmu_enable
) {
6458 u32 mmu_map_size
= vvp9_frame_mmu_map_size(pbi
);
6459 if (pbi
->frame_mmu_map_addr
) {
6460 if (pbi
->frame_mmu_map_phy_addr
)
6461 dma_free_coherent(amports_get_dma_device(),
6463 pbi
->frame_mmu_map_addr
,
6464 pbi
->frame_mmu_map_phy_addr
);
6465 pbi
->frame_mmu_map_addr
= NULL
;
6468 #ifdef SUPPORT_FB_DECODING
6469 if (pbi
->stage_mmu_map_addr
) {
6470 if (pbi
->stage_mmu_map_phy_addr
)
6471 dma_free_coherent(amports_get_dma_device(),
6472 STAGE_MMU_MAP_SIZE
* STAGE_MAX_BUFFERS
,
6473 pbi
->stage_mmu_map_addr
,
6474 pbi
->stage_mmu_map_phy_addr
);
6475 pbi
->stage_mmu_map_addr
= NULL
;
6478 uninit_stage_buf(pbi
);
6481 #ifdef VP9_LPF_LVL_UPDATE
6489 static int vp9_local_init(struct VP9Decoder_s
*pbi
)
6492 /*int losless_comp_header_size, losless_comp_body_size;*/
6494 struct BuffInfo_s
*cur_buf_info
= NULL
;
6496 memset(&pbi
->param
, 0, sizeof(union param_u
));
6497 memset(&pbi
->common
, 0, sizeof(struct VP9_Common_s
));
6498 #ifdef MULTI_INSTANCE_SUPPORT
6499 cur_buf_info
= &pbi
->work_space_buf_store
;
6501 if (vdec_is_support_4k()) {
6502 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1
) {
6503 memcpy(cur_buf_info
, &amvvp9_workbuff_spec
[2], /* 8k */
6504 sizeof(struct BuffInfo_s
));
6506 memcpy(cur_buf_info
, &amvvp9_workbuff_spec
[1], /* 4k */
6507 sizeof(struct BuffInfo_s
));
6509 memcpy(cur_buf_info
, &amvvp9_workbuff_spec
[0],/* 1080p */
6510 sizeof(struct BuffInfo_s
));
6512 cur_buf_info
->start_adr
= pbi
->buf_start
;
6513 if (!pbi
->mmu_enable
)
6514 pbi
->mc_buf_spec
.buf_end
= pbi
->buf_start
+ pbi
->buf_size
;
6517 /*! MULTI_INSTANCE_SUPPORT*/
6518 if (vdec_is_support_4k()) {
6519 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1
)
6520 cur_buf_info
= &amvvp9_workbuff_spec
[2];/* 8k work space */
6522 cur_buf_info
= &amvvp9_workbuff_spec
[1];/* 4k2k work space */
6524 cur_buf_info
= &amvvp9_workbuff_spec
[0];/* 1080p work space */
6528 init_buff_spec(pbi
, cur_buf_info
);
6529 vp9_bufmgr_init(pbi
, cur_buf_info
, NULL
);
6531 if (!vdec_is_support_4k()
6532 && (buf_alloc_width
> 1920 && buf_alloc_height
> 1088)) {
6533 buf_alloc_width
= 1920;
6534 buf_alloc_height
= 1088;
6535 if (pbi
->max_pic_w
> 1920 && pbi
->max_pic_h
> 1088) {
6536 pbi
->max_pic_w
= 1920;
6537 pbi
->max_pic_h
= 1088;
6539 } else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1
) {
6540 buf_alloc_width
= 8192;
6541 buf_alloc_height
= 4608;
6543 pbi
->init_pic_w
= pbi
->max_pic_w
? pbi
->max_pic_w
:
6544 (buf_alloc_width
? buf_alloc_width
:
6545 (pbi
->vvp9_amstream_dec_info
.width
?
6546 pbi
->vvp9_amstream_dec_info
.width
:
6547 pbi
->work_space_buf
->max_width
));
6548 pbi
->init_pic_h
= pbi
->max_pic_h
? pbi
->max_pic_h
:
6549 (buf_alloc_height
? buf_alloc_height
:
6550 (pbi
->vvp9_amstream_dec_info
.height
?
6551 pbi
->vvp9_amstream_dec_info
.height
:
6552 pbi
->work_space_buf
->max_height
));
6554 /* video is not support unaligned with 64 in tl1
6555 ** vdec canvas mode will be linear when dump yuv is set
6557 if ((get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A
) &&
6558 (pbi
->double_write_mode
!= 0) &&
6559 (((pbi
->max_pic_w
% 64) != 0) ||
6560 (pbi
->vvp9_amstream_dec_info
.width
% 64) != 0)) {
6561 if (hw_to_vdec(pbi
)->canvas_mode
!=
6562 CANVAS_BLKMODE_LINEAR
)
6563 pbi
->mem_map_mode
= 2;
6565 pbi
->mem_map_mode
= 0;
6566 pr_info("vdec blkmod linear, force mem_map_mode 0\n");
6570 #ifndef MV_USE_FIXED_BUF
6571 if (init_mv_buf_list(pbi
) < 0) {
6572 pr_err("%s: init_mv_buf_list fail\n", __func__
);
6576 if (pbi
->save_buffer_mode
)
6577 pbi
->used_buf_num
= MAX_BUF_NUM_SAVE_BUF
;
6579 if (pbi
->is_used_v4l
)
6580 pbi
->used_buf_num
= 5 + pbi
->dynamic_buf_num_margin
;
6582 pbi
->used_buf_num
= max_buf_num
;
6585 if (pbi
->used_buf_num
> MAX_BUF_NUM
)
6586 pbi
->used_buf_num
= MAX_BUF_NUM
;
6587 if (pbi
->used_buf_num
> FRAME_BUFFERS
)
6588 pbi
->used_buf_num
= FRAME_BUFFERS
;
6590 pbi
->pts_unstable
= ((unsigned long)(pbi
->vvp9_amstream_dec_info
.param
)
6593 if ((debug
& VP9_DEBUG_SEND_PARAM_WITH_REG
) == 0) {
6594 pbi
->rpm_addr
= dma_alloc_coherent(amports_get_dma_device(),
6596 &pbi
->rpm_phy_addr
, GFP_KERNEL
);
6597 if (pbi
->rpm_addr
== NULL
) {
6598 pr_err("%s: failed to alloc rpm buffer\n", __func__
);
6602 pbi
->rpm_ptr
= pbi
->rpm_addr
;
6605 pbi
->lmem_addr
= dma_alloc_coherent(amports_get_dma_device(),
6607 &pbi
->lmem_phy_addr
, GFP_KERNEL
);
6608 if (pbi
->lmem_addr
== NULL
) {
6609 pr_err("%s: failed to alloc lmem buffer\n", __func__
);
6612 pbi
->lmem_ptr
= pbi
->lmem_addr
;
6614 pbi
->prob_buffer_addr
= dma_alloc_coherent(amports_get_dma_device(),
6616 &pbi
->prob_buffer_phy_addr
, GFP_KERNEL
);
6617 if (pbi
->prob_buffer_addr
== NULL
) {
6618 pr_err("%s: failed to alloc prob_buffer\n", __func__
);
6621 memset(pbi
->prob_buffer_addr
, 0, PROB_BUF_SIZE
);
6622 pbi
->count_buffer_addr
= dma_alloc_coherent(amports_get_dma_device(),
6624 &pbi
->count_buffer_phy_addr
, GFP_KERNEL
);
6625 if (pbi
->count_buffer_addr
== NULL
) {
6626 pr_err("%s: failed to alloc count_buffer\n", __func__
);
6629 memset(pbi
->count_buffer_addr
, 0, COUNT_BUF_SIZE
);
6631 if (pbi
->mmu_enable
) {
6632 u32 mmu_map_size
= vvp9_frame_mmu_map_size(pbi
);
6633 pbi
->frame_mmu_map_addr
=
6634 dma_alloc_coherent(amports_get_dma_device(),
6636 &pbi
->frame_mmu_map_phy_addr
, GFP_KERNEL
);
6637 if (pbi
->frame_mmu_map_addr
== NULL
) {
6638 pr_err("%s: failed to alloc count_buffer\n", __func__
);
6641 memset(pbi
->frame_mmu_map_addr
, 0, COUNT_BUF_SIZE
);
6643 #ifdef SUPPORT_FB_DECODING
6644 if (pbi
->m_ins_flag
&& stage_buf_num
> 0) {
6645 pbi
->stage_mmu_map_addr
=
6646 dma_alloc_coherent(amports_get_dma_device(),
6647 STAGE_MMU_MAP_SIZE
* STAGE_MAX_BUFFERS
,
6648 &pbi
->stage_mmu_map_phy_addr
, GFP_KERNEL
);
6649 if (pbi
->stage_mmu_map_addr
== NULL
) {
6650 pr_err("%s: failed to alloc count_buffer\n", __func__
);
6653 memset(pbi
->stage_mmu_map_addr
,
6654 0, STAGE_MMU_MAP_SIZE
* STAGE_MAX_BUFFERS
);
6656 init_stage_buf(pbi
);
6664 /********************************************
6666 ********************************************/
6667 #define CMD_FINISHED 0
6668 #define CMD_ALLOC_VIEW 1
6669 #define CMD_FRAME_DISPLAY 3
6670 #define CMD_DEBUG 10
6673 #define DECODE_BUFFER_NUM_MAX 32
6674 #define DISPLAY_BUFFER_NUM 6
6676 #define video_domain_addr(adr) (adr&0x7fffffff)
6677 #define DECODER_WORK_SPACE_SIZE 0x800000
6679 #define spec2canvas(x) \
6680 (((x)->uv_canvas_index << 16) | \
6681 ((x)->uv_canvas_index << 8) | \
6682 ((x)->y_canvas_index << 0))
6685 static void set_canvas(struct VP9Decoder_s
*pbi
,
6686 struct PIC_BUFFER_CONFIG_s
*pic_config
)
6688 struct vdec_s
*vdec
= hw_to_vdec(pbi
);
6689 int canvas_w
= ALIGN(pic_config
->y_crop_width
, 64)/4;
6690 int canvas_h
= ALIGN(pic_config
->y_crop_height
, 32)/4;
6691 int blkmode
= pbi
->mem_map_mode
;
6692 /*CANVAS_BLKMODE_64X32*/
6693 if (pic_config
->double_write_mode
) {
6694 canvas_w
= pic_config
->y_crop_width
/
6695 get_double_write_ratio(pbi
,
6696 pic_config
->double_write_mode
);
6697 canvas_h
= pic_config
->y_crop_height
/
6698 get_double_write_ratio(pbi
,
6699 pic_config
->double_write_mode
);
6701 if (pbi
->mem_map_mode
== 0)
6702 canvas_w
= ALIGN(canvas_w
, 32);
6704 canvas_w
= ALIGN(canvas_w
, 64);
6705 canvas_h
= ALIGN(canvas_h
, 32);
6707 if (vdec
->parallel_dec
== 1) {
6708 if (pic_config
->y_canvas_index
== -1)
6709 pic_config
->y_canvas_index
=
6710 vdec
->get_canvas_ex(CORE_MASK_HEVC
, vdec
->id
);
6711 if (pic_config
->uv_canvas_index
== -1)
6712 pic_config
->uv_canvas_index
=
6713 vdec
->get_canvas_ex(CORE_MASK_HEVC
, vdec
->id
);
6715 pic_config
->y_canvas_index
= 128 + pic_config
->index
* 2;
6716 pic_config
->uv_canvas_index
= 128 + pic_config
->index
* 2 + 1;
6719 canvas_config_ex(pic_config
->y_canvas_index
,
6720 pic_config
->dw_y_adr
, canvas_w
, canvas_h
,
6721 CANVAS_ADDR_NOWRAP
, blkmode
, pbi
->is_used_v4l
? 0 : 7);
6722 canvas_config_ex(pic_config
->uv_canvas_index
,
6723 pic_config
->dw_u_v_adr
, canvas_w
, canvas_h
,
6724 CANVAS_ADDR_NOWRAP
, blkmode
, pbi
->is_used_v4l
? 0 : 7);
6726 #ifdef MULTI_INSTANCE_SUPPORT
6727 pic_config
->canvas_config
[0].phy_addr
=
6728 pic_config
->dw_y_adr
;
6729 pic_config
->canvas_config
[0].width
=
6731 pic_config
->canvas_config
[0].height
=
6733 pic_config
->canvas_config
[0].block_mode
=
6735 pic_config
->canvas_config
[0].endian
= pbi
->is_used_v4l
? 0 : 7;
6737 pic_config
->canvas_config
[1].phy_addr
=
6738 pic_config
->dw_u_v_adr
;
6739 pic_config
->canvas_config
[1].width
=
6741 pic_config
->canvas_config
[1].height
=
6743 pic_config
->canvas_config
[1].block_mode
=
6745 pic_config
->canvas_config
[1].endian
= pbi
->is_used_v4l
? 0 : 7;
6751 static void set_frame_info(struct VP9Decoder_s
*pbi
, struct vframe_s
*vf
)
6754 vf
->duration
= pbi
->frame_dur
;
6755 vf
->duration_pulldown
= 0;
6757 vf
->prop
.master_display_colour
= pbi
->vf_dp
;
6758 vf
->signal_type
= pbi
->video_signal_type
;
6759 if (vf
->compWidth
&& vf
->compHeight
)
6760 pbi
->frame_ar
= vf
->compHeight
* 0x100 / vf
->compWidth
;
6761 ar
= min_t(u32
, pbi
->frame_ar
, DISP_RATIO_ASPECT_RATIO_MAX
);
6762 vf
->ratio_control
= (ar
<< DISP_RATIO_ASPECT_RATIO_BIT
);
6764 if (pbi
->is_used_v4l
&& pbi
->vf_dp
.present_flag
) {
6765 struct aml_vdec_hdr_infos hdr
;
6766 struct aml_vcodec_ctx
*ctx
=
6767 (struct aml_vcodec_ctx
*)(pbi
->v4l2_ctx
);
6769 memset(&hdr
, 0, sizeof(hdr
));
6770 hdr
.signal_type
= vf
->signal_type
;
6771 hdr
.color_parms
= pbi
->vf_dp
;
6772 vdec_v4l_set_hdr_infos(ctx
, &hdr
);
6776 static int vvp9_vf_states(struct vframe_states
*states
, void *op_arg
)
6778 struct VP9Decoder_s
*pbi
= (struct VP9Decoder_s
*)op_arg
;
6780 states
->vf_pool_size
= VF_POOL_SIZE
;
6781 states
->buf_free_num
= kfifo_len(&pbi
->newframe_q
);
6782 states
->buf_avail_num
= kfifo_len(&pbi
->display_q
);
6785 states
->buf_avail_num
= 0;
6789 static struct vframe_s
*vvp9_vf_peek(void *op_arg
)
6791 struct vframe_s
*vf
[2] = {0, 0};
6792 struct VP9Decoder_s
*pbi
= (struct VP9Decoder_s
*)op_arg
;
6797 if (kfifo_out_peek(&pbi
->display_q
, (void *)&vf
, 2)) {
6799 vf
[0]->next_vf_pts_valid
= true;
6800 vf
[0]->next_vf_pts
= vf
[1]->pts
;
6802 vf
[0]->next_vf_pts_valid
= false;
6809 static struct vframe_s
*vvp9_vf_get(void *op_arg
)
6811 struct vframe_s
*vf
;
6812 struct VP9Decoder_s
*pbi
= (struct VP9Decoder_s
*)op_arg
;
6819 if (kfifo_get(&pbi
->display_q
, &vf
)) {
6820 struct vframe_s
*next_vf
;
6821 uint8_t index
= vf
->index
& 0xff;
6822 if (index
< pbi
->used_buf_num
||
6823 (vf
->type
& VIDTYPE_V4L_EOS
)) {
6824 vf
->index_disp
= pbi
->vf_get_count
;
6825 pbi
->vf_get_count
++;
6826 if (debug
& VP9_DEBUG_BUFMGR
)
6827 pr_info("%s type 0x%x w/h %d/%d, pts %d, %lld\n",
6829 vf
->width
, vf
->height
,
6833 if (kfifo_peek(&pbi
->display_q
, &next_vf
)) {
6834 vf
->next_vf_pts_valid
= true;
6835 vf
->next_vf_pts
= next_vf
->pts
;
6837 vf
->next_vf_pts_valid
= false;
6845 static void vvp9_vf_put(struct vframe_s
*vf
, void *op_arg
)
6847 struct VP9Decoder_s
*pbi
= (struct VP9Decoder_s
*)op_arg
;
6848 uint8_t index
= vf
->index
& 0xff;
6850 if (vf
== (&pbi
->vframe_dummy
))
6853 kfifo_put(&pbi
->newframe_q
, (const struct vframe_s
*)vf
);
6854 pbi
->vf_put_count
++;
6855 if (index
< pbi
->used_buf_num
) {
6856 struct VP9_Common_s
*cm
= &pbi
->common
;
6857 struct BufferPool_s
*pool
= cm
->buffer_pool
;
6858 unsigned long flags
;
6860 lock_buffer_pool(pool
, flags
);
6861 if (pool
->frame_bufs
[index
].buf
.vf_ref
> 0)
6862 pool
->frame_bufs
[index
].buf
.vf_ref
--;
6864 if (pbi
->is_used_v4l
)
6865 pool
->frame_bufs
[index
].buf
.vframe_bound
= true;
6868 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG
,
6870 pbi
->last_put_idx
= index
;
6871 pbi
->new_frame_displayed
++;
6872 unlock_buffer_pool(pool
, flags
);
6873 #ifdef SUPPORT_FB_DECODING
6874 if (pbi
->used_stage_buf_num
> 0 &&
6875 pbi
->back_not_run_ready
)
6876 trigger_schedule(pbi
);
6882 static int vvp9_event_cb(int type
, void *data
, void *private_data
)
6884 if (type
& VFRAME_EVENT_RECEIVER_RESET
) {
6886 unsigned long flags
;
6889 #ifndef CONFIG_AMLOGIC_POST_PROCESS_MANAGER
6890 vf_light_unreg_provider(&vvp9_vf_prov
);
6892 spin_lock_irqsave(&pbi
->lock
, flags
);
6895 spin_unlock_irqrestore(&pbi
->lock
, flags
);
6896 #ifndef CONFIG_AMLOGIC_POST_PROCESS_MANAGER
6897 vf_reg_provider(&vvp9_vf_prov
);
6906 void inc_vf_ref(struct VP9Decoder_s
*pbi
, int index
)
6908 struct VP9_Common_s
*cm
= &pbi
->common
;
6910 cm
->buffer_pool
->frame_bufs
[index
].buf
.vf_ref
++;
6912 if (debug
& VP9_DEBUG_BUFMGR_MORE
)
6913 pr_info("%s index = %d new vf_ref = %d\r\n",
6915 cm
->buffer_pool
->frame_bufs
[index
].buf
.vf_ref
);
6918 static int frame_duration_adapt(struct VP9Decoder_s
*pbi
, struct vframe_s
*vf
, u32 valid
)
6920 u32 old_duration
, pts_duration
= 0;
6923 if (pbi
->get_frame_dur
== true)
6926 pbi
->frame_cnt_window
++;
6927 if (!(pbi
->vp9_first_pts_ready
== 1)) {
6930 pbi
->frame_cnt_window
= 0;
6931 pbi
->duration_from_pts_done
= 0;
6932 pbi
->vp9_first_pts_ready
= 1;
6937 if (pts
< pbi
->pts1
) {
6938 if (pbi
->frame_cnt_window
> FRAME_CNT_WINDOW_SIZE
) {
6940 pbi
->frame_cnt_window
= 0;
6944 if (valid
&& (pbi
->frame_cnt_window
> FRAME_CNT_WINDOW_SIZE
) &&
6945 (pts
> pbi
->pts1
) && (pbi
->duration_from_pts_done
== 0)) {
6946 old_duration
= pbi
->frame_dur
;
6948 pts_duration
= (((pbi
->pts2
- pbi
->pts1
) * 16) /
6949 (pbi
->frame_cnt_window
* 15));
6951 if (close_to(pts_duration
, old_duration
, 2000)) {
6952 pbi
->frame_dur
= pts_duration
;
6953 if ((debug
& VP9_DEBUG_OUT_PTS
) != 0)
6954 pr_info("use calc duration %d\n", pts_duration
);
6957 if (pbi
->duration_from_pts_done
== 0) {
6958 if (close_to(pts_duration
, old_duration
, RATE_CORRECTION_THRESHOLD
)) {
6959 pbi
->duration_from_pts_done
= 1;
6961 if (!close_to(pts_duration
,
6962 old_duration
, 1000) &&
6963 !close_to(pts_duration
,
6964 pbi
->frame_dur
, 1000) &&
6965 close_to(pts_duration
,
6966 pbi
->last_duration
, 200)) {
6970 pbi
->frame_dur
= pts_duration
;
6972 pbi
->pts1
= pbi
->pts2
;
6973 pbi
->frame_cnt_window
= 0;
6974 pbi
->duration_from_pts_done
= 0;
6977 pbi
->last_duration
= pts_duration
;
6983 static void update_vf_memhandle(struct VP9Decoder_s
*pbi
,
6984 struct vframe_s
*vf
, struct PIC_BUFFER_CONFIG_s
*pic
)
6986 if (pic
->index
< 0) {
6987 vf
->mem_handle
= NULL
;
6988 vf
->mem_head_handle
= NULL
;
6989 } else if (vf
->type
& VIDTYPE_SCATTER
) {
6991 decoder_mmu_box_get_mem_handle(
6992 pbi
->mmu_box
, pic
->index
);
6993 vf
->mem_head_handle
=
6994 decoder_bmmu_box_get_mem_handle(
6996 HEADER_BUFFER_IDX(pic
->BUF_index
));
6999 decoder_bmmu_box_get_mem_handle(
7000 pbi
->bmmu_box
, VF_BUFFER_IDX(pic
->BUF_index
));
7001 vf
->mem_head_handle
= NULL
;
7002 /*vf->mem_head_handle =
7003 *decoder_bmmu_box_get_mem_handle(
7004 *hevc->bmmu_box, VF_BUFFER_IDX(BUF_index));
7009 static int prepare_display_buf(struct VP9Decoder_s
*pbi
,
7010 struct PIC_BUFFER_CONFIG_s
*pic_config
)
7012 struct vframe_s
*vf
= NULL
;
7013 int stream_offset
= pic_config
->stream_offset
;
7014 unsigned short slice_type
= pic_config
->slice_type
;
7015 u32 pts_valid
= 0, pts_us64_valid
= 0;
7020 if (debug
& VP9_DEBUG_BUFMGR
)
7021 pr_info("%s index = %d\r\n", __func__
, pic_config
->index
);
7022 if (kfifo_get(&pbi
->newframe_q
, &vf
) == 0) {
7023 pr_info("fatal error, no available buffer slot.");
7027 if (pic_config
->double_write_mode
)
7028 set_canvas(pbi
, pic_config
);
7030 display_frame_count
[pbi
->index
]++;
7032 if (pbi
->is_used_v4l
) {
7034 = pbi
->m_BUF
[pic_config
->BUF_index
].v4l_ref_buf_addr
;
7035 if (pbi
->mmu_enable
) {
7036 vf
->mm_box
.bmmu_box
= pbi
->bmmu_box
;
7037 vf
->mm_box
.bmmu_idx
= HEADER_BUFFER_IDX(pic_config
->BUF_index
);
7038 vf
->mm_box
.mmu_box
= pbi
->mmu_box
;
7039 vf
->mm_box
.mmu_idx
= pic_config
->index
;
7043 #ifdef MULTI_INSTANCE_SUPPORT
7044 if (vdec_frame_based(hw_to_vdec(pbi
))) {
7045 vf
->pts
= pic_config
->pts
;
7046 vf
->pts_us64
= pic_config
->pts64
;
7047 vf
->timestamp
= pic_config
->timestamp
;
7048 if (vf
->pts
!= 0 || vf
->pts_us64
!= 0) {
7057 /* if (pts_lookup_offset(PTS_TYPE_VIDEO,
7058 * stream_offset, &vf->pts, 0) != 0) {
7060 if (pts_lookup_offset_us64
7061 (PTS_TYPE_VIDEO
, stream_offset
, &vf
->pts
,
7063 &vf
->pts_us64
) != 0) {
7079 fill_frame_info(pbi
, pic_config
, frame_size
, vf
->pts
);
7082 pts_us64_save
= vf
->pts_us64
;
7083 if (pbi
->pts_unstable
) {
7084 frame_duration_adapt(pbi
, vf
, pts_valid
);
7085 if (pbi
->duration_from_pts_done
) {
7086 pbi
->pts_mode
= PTS_NONE_REF_USE_DURATION
;
7088 if (pts_valid
|| pts_us64_valid
)
7089 pbi
->pts_mode
= PTS_NORMAL
;
7093 if ((pbi
->pts_mode
== PTS_NORMAL
) && (vf
->pts
!= 0)
7094 && pbi
->get_frame_dur
) {
7095 int pts_diff
= (int)vf
->pts
- pbi
->last_lookup_pts
;
7098 pbi
->pts_mode_switching_count
++;
7099 pbi
->pts_mode_recovery_count
= 0;
7101 if (pbi
->pts_mode_switching_count
>=
7102 PTS_MODE_SWITCHING_THRESHOLD
) {
7104 PTS_NONE_REF_USE_DURATION
;
7106 ("HEVC: switch to n_d mode.\n");
7110 int p
= PTS_MODE_SWITCHING_RECOVERY_THREASHOLD
;
7112 pbi
->pts_mode_recovery_count
++;
7113 if (pbi
->pts_mode_recovery_count
> p
) {
7114 pbi
->pts_mode_switching_count
= 0;
7115 pbi
->pts_mode_recovery_count
= 0;
7121 pbi
->last_lookup_pts
= vf
->pts
;
7123 if ((pbi
->pts_mode
== PTS_NONE_REF_USE_DURATION
)
7124 && (slice_type
!= KEY_FRAME
))
7125 vf
->pts
= pbi
->last_pts
+ DUR2PTS(pbi
->frame_dur
);
7126 pbi
->last_pts
= vf
->pts
;
7128 if (vf
->pts_us64
!= 0)
7129 pbi
->last_lookup_pts_us64
= vf
->pts_us64
;
7131 if ((pbi
->pts_mode
== PTS_NONE_REF_USE_DURATION
)
7132 && (slice_type
!= KEY_FRAME
)) {
7134 pbi
->last_pts_us64
+
7135 (DUR2PTS(pbi
->frame_dur
) * 100 / 9);
7137 pbi
->last_pts_us64
= vf
->pts_us64
;
7138 if ((debug
& VP9_DEBUG_OUT_PTS
) != 0) {
7140 ("VP9 dec out pts: pts_mode=%d,dur=%d,pts(%d,%lld)(%d,%lld)\n",
7141 pbi
->pts_mode
, pbi
->frame_dur
, vf
->pts
,
7142 vf
->pts_us64
, pts_save
, pts_us64_save
);
7145 if (pbi
->pts_mode
== PTS_NONE_REF_USE_DURATION
) {
7146 vf
->disp_pts
= vf
->pts
;
7147 vf
->disp_pts_us64
= vf
->pts_us64
;
7149 vf
->pts_us64
= pts_us64_save
;
7152 vf
->disp_pts_us64
= 0;
7155 vf
->index
= 0xff00 | pic_config
->index
;
7157 if (pic_config
->double_write_mode
& 0x10) {
7158 /* double write only */
7159 vf
->compBodyAddr
= 0;
7160 vf
->compHeadAddr
= 0;
7162 if (pbi
->mmu_enable
) {
7163 vf
->compBodyAddr
= 0;
7164 vf
->compHeadAddr
= pic_config
->header_adr
;
7166 /*vf->compBodyAddr = pic_config->mc_y_adr;
7167 *vf->compHeadAddr = pic_config->mc_y_adr +
7168 *pic_config->comp_body_size; */
7171 vf
->canvas0Addr
= vf
->canvas1Addr
= 0;
7173 if (pic_config
->double_write_mode
) {
7174 vf
->type
= VIDTYPE_PROGRESSIVE
|
7176 vf
->type
|= VIDTYPE_VIU_NV21
;
7177 if ((pic_config
->double_write_mode
== 3) &&
7178 (!IS_8K_SIZE(pic_config
->y_crop_width
,
7179 pic_config
->y_crop_height
))) {
7180 vf
->type
|= VIDTYPE_COMPRESS
;
7181 if (pbi
->mmu_enable
)
7182 vf
->type
|= VIDTYPE_SCATTER
;
7184 #ifdef MULTI_INSTANCE_SUPPORT
7185 if (pbi
->m_ins_flag
) {
7186 vf
->canvas0Addr
= vf
->canvas1Addr
= -1;
7188 vf
->canvas0_config
[0] =
7189 pic_config
->canvas_config
[0];
7190 vf
->canvas0_config
[1] =
7191 pic_config
->canvas_config
[1];
7192 vf
->canvas1_config
[0] =
7193 pic_config
->canvas_config
[0];
7194 vf
->canvas1_config
[1] =
7195 pic_config
->canvas_config
[1];
7199 vf
->canvas0Addr
= vf
->canvas1Addr
=
7200 spec2canvas(pic_config
);
7202 vf
->canvas0Addr
= vf
->canvas1Addr
= 0;
7203 vf
->type
= VIDTYPE_COMPRESS
| VIDTYPE_VIU_FIELD
;
7204 if (pbi
->mmu_enable
)
7205 vf
->type
|= VIDTYPE_SCATTER
;
7208 switch (pic_config
->bit_depth
) {
7210 vf
->bitdepth
= BITDEPTH_Y8
|
7211 BITDEPTH_U8
| BITDEPTH_V8
;
7215 vf
->bitdepth
= BITDEPTH_Y10
|
7216 BITDEPTH_U10
| BITDEPTH_V10
;
7219 vf
->bitdepth
= BITDEPTH_Y10
|
7220 BITDEPTH_U10
| BITDEPTH_V10
;
7223 if ((vf
->type
& VIDTYPE_COMPRESS
) == 0)
7225 BITDEPTH_Y8
| BITDEPTH_U8
| BITDEPTH_V8
;
7226 if (pic_config
->bit_depth
== VPX_BITS_8
)
7227 vf
->bitdepth
|= BITDEPTH_SAVING_MODE
;
7229 /* if((vf->width!=pic_config->width)|
7230 * (vf->height!=pic_config->height))
7232 /* pr_info("aaa: %d/%d, %d/%d\n",
7233 vf->width,vf->height, pic_config->width,
7234 pic_config->height); */
7235 vf
->width
= pic_config
->y_crop_width
/
7236 get_double_write_ratio(pbi
,
7237 pic_config
->double_write_mode
);
7238 vf
->height
= pic_config
->y_crop_height
/
7239 get_double_write_ratio(pbi
,
7240 pic_config
->double_write_mode
);
7241 if (force_w_h
!= 0) {
7242 vf
->width
= (force_w_h
>> 16) & 0xffff;
7243 vf
->height
= force_w_h
& 0xffff;
7245 vf
->compWidth
= pic_config
->y_crop_width
;
7246 vf
->compHeight
= pic_config
->y_crop_height
;
7247 set_frame_info(pbi
, vf
);
7248 if (force_fps
& 0x100) {
7249 u32 rate
= force_fps
& 0xff;
7252 vf
->duration
= 96000/rate
;
7256 update_vf_memhandle(pbi
, vf
, pic_config
);
7257 if (!(pic_config
->y_crop_width
== 196
7258 && pic_config
->y_crop_height
== 196
7259 && (debug
& VP9_DEBUG_NO_TRIGGER_FRAME
) == 0
7260 && (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_TXLX
))) {
7261 inc_vf_ref(pbi
, pic_config
->index
);
7262 decoder_do_frame_check(hw_to_vdec(pbi
), vf
);
7263 kfifo_put(&pbi
->display_q
, (const struct vframe_s
*)vf
);
7264 ATRACE_COUNTER(MODULE_NAME
, vf
->pts
);
7265 pbi
->vf_pre_count
++;
7266 #ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
7268 gvs
->frame_dur
= pbi
->frame_dur
;
7269 vdec_count_info(gvs
, 0, stream_offset
);
7271 hw_to_vdec(pbi
)->vdec_fps_detec(hw_to_vdec(pbi
)->id
);
7272 if (without_display_mode
== 0) {
7273 vf_notify_receiver(pbi
->provider_name
,
7274 VFRAME_EVENT_PROVIDER_VFRAME_READY
, NULL
);
7276 vvp9_vf_put(vvp9_vf_get(pbi
), pbi
);
7278 pbi
->stat
|= VP9_TRIGGER_FRAME_DONE
;
7279 hevc_source_changed(VFORMAT_VP9
, 196, 196, 30);
7280 pr_debug("[%s %d] drop trigger frame width %d height %d state 0x%x\n",
7281 __func__
, __LINE__
, vf
->width
,
7282 vf
->height
, pbi
->stat
);
/*
 * NOTE(review): this region is a line-mangled extraction of the original
 * source; code text is left byte-identical and only comments are added.
 *
 * notify_v4l_eos() - push a dummy end-of-stream vframe to the V4L2 receiver.
 * Visible logic: when the decoder is in v4l mode and has hit EOS, it polls
 * (with a 2-second jiffies timeout) for a free frame buffer, falls back to
 * vdec_v4l_get_buffer() when none is found, tags hw->vframe_dummy with
 * VIDTYPE_V4L_EOS / VFRAME_FLAG_EMPTY_FRAME_V4L, queues it on display_q and
 * notifies the receiver.  Interior lines (braces, returns) were dropped by
 * the extraction — do not assume the visible fragments are the whole body.
 */
7289 static int notify_v4l_eos(struct vdec_s
*vdec
)
7291 struct VP9Decoder_s
*hw
= (struct VP9Decoder_s
*)vdec
->private;
7292 struct aml_vcodec_ctx
*ctx
= (struct aml_vcodec_ctx
*)(hw
->v4l2_ctx
);
/* vframe_dummy is reused as the carrier for the EOS marker frame */
7293 struct vframe_s
*vf
= &hw
->vframe_dummy
;
7294 struct vdec_v4l2_buffer
*fb
= NULL
;
7295 int index
= INVALID_IDX
;
7298 if (hw
->is_used_v4l
&& hw
->eos
) {
/* poll up to ~2000 ms for a free frame buffer index */
7299 expires
= jiffies
+ msecs_to_jiffies(2000);
7300 while (INVALID_IDX
== (index
= v4l_get_free_fb(hw
))) {
7301 if (time_after(jiffies
, expires
) ||
7302 v4l2_m2m_num_dst_bufs_ready(ctx
->m2m_ctx
))
/* fallback: ask the v4l2 layer directly for a capture buffer */
7306 if (index
== INVALID_IDX
) {
7307 if (vdec_v4l_get_buffer(hw
->v4l2_ctx
, &fb
) < 0) {
7308 pr_err("[%d] EOS get free buff fail.\n", ctx
->id
);
/* mark the dummy frame as an empty EOS frame for the v4l2 sink */
7313 vf
->type
|= VIDTYPE_V4L_EOS
;
7314 vf
->timestamp
= ULONG_MAX
;
7315 vf
->flag
= VFRAME_FLAG_EMPTY_FRAME_V4L
;
/* attach whichever buffer handle was obtained above */
7316 vf
->v4l_mem_handle
= (index
== INVALID_IDX
) ? (ulong
)fb
:
7317 hw
->m_BUF
[index
].v4l_ref_buf_addr
;
/* queue the EOS frame and wake the downstream receiver */
7319 kfifo_put(&hw
->display_q
, (const struct vframe_s
*)vf
);
7320 vf_notify_receiver(vdec
->vf_provider_name
,
7321 VFRAME_EVENT_PROVIDER_VFRAME_READY
, NULL
);
7323 pr_info("[%d] VP9 EOS notify.\n", ctx
->id
);
/*
 * NOTE(review): line-mangled extraction; code left byte-identical,
 * comments only.
 *
 * get_rpm_param() - read 128 16-bit parameter words from the decoder
 * firmware via the RPM_CMD_REG mailbox into params->l.data[].
 * Each word is busy-waited on bit 16 (ready flag), then the register is
 * written back to 0 to acknowledge.  The surrounding do/while and loop
 * braces were partially dropped by the extraction.
 */
7329 static void get_rpm_param(union param_u
*params
)
7332 unsigned int data32
;
7334 if (debug
& VP9_DEBUG_BUFMGR
)
7335 pr_info("enter %s\r\n", __func__
);
7336 for (i
= 0; i
< 128; i
++) {
/* busy-wait until bit 16 signals a valid word */
7338 data32
= READ_VREG(RPM_CMD_REG
);
7339 /*pr_info("%x\n", data32);*/
7340 } while ((data32
& 0x10000) == 0);
/* low 16 bits carry the payload */
7341 params
->l
.data
[i
] = data32
&0xffff;
7342 /*pr_info("%x\n", data32);*/
/* acknowledge so firmware can post the next word */
7343 WRITE_VREG(RPM_CMD_REG
, 0);
7345 if (debug
& VP9_DEBUG_BUFMGR
)
7346 pr_info("leave %s\r\n", __func__
);
/*
 * NOTE(review): line-mangled extraction; code left byte-identical,
 * comments only.
 *
 * debug_buffer_mgr_more() - dump the parsed vp9_param block to the kernel
 * log.  Gated on VP9_DEBUG_BUFMGR_MORE; the raw RPM words are printed as
 * a hex table, followed by the decoded uncompressed-header fields.  The
 * loop-filter section at the end is further gated on VP9_DEBUG_DBG_LF_PRINT.
 * Pure diagnostics — no state is modified.
 */
7348 static void debug_buffer_mgr_more(struct VP9Decoder_s
*pbi
)
7352 if (!(debug
& VP9_DEBUG_BUFMGR_MORE
))
7354 pr_info("vp9_param: (%d)\n", pbi
->slice_idx
);
/* raw dump of all RPM parameter words, 16 per printed row */
7355 for (i
= 0; i
< (RPM_END
-RPM_BEGIN
); i
++) {
7356 pr_info("%04x ", vp9_param
.l
.data
[i
]);
7357 if (((i
+ 1) & 0xf) == 0)
/* decoded uncompressed-header fields follow */
7360 pr_info("=============param==========\r\n");
7361 pr_info("profile %x\r\n", vp9_param
.p
.profile
);
7362 pr_info("show_existing_frame %x\r\n",
7363 vp9_param
.p
.show_existing_frame
);
7364 pr_info("frame_to_show_idx %x\r\n",
7365 vp9_param
.p
.frame_to_show_idx
);
7366 pr_info("frame_type %x\r\n", vp9_param
.p
.frame_type
);
7367 pr_info("show_frame %x\r\n", vp9_param
.p
.show_frame
);
7368 pr_info("e.r.r.o.r_resilient_mode %x\r\n",
7369 vp9_param
.p
.error_resilient_mode
);
7370 pr_info("intra_only %x\r\n", vp9_param
.p
.intra_only
);
7371 pr_info("display_size_present %x\r\n",
7372 vp9_param
.p
.display_size_present
);
7373 pr_info("reset_frame_context %x\r\n",
7374 vp9_param
.p
.reset_frame_context
);
7375 pr_info("refresh_frame_flags %x\r\n",
7376 vp9_param
.p
.refresh_frame_flags
);
7377 pr_info("bit_depth %x\r\n", vp9_param
.p
.bit_depth
);
7378 pr_info("width %x\r\n", vp9_param
.p
.width
);
7379 pr_info("height %x\r\n", vp9_param
.p
.height
);
7380 pr_info("display_width %x\r\n", vp9_param
.p
.display_width
);
7381 pr_info("display_height %x\r\n", vp9_param
.p
.display_height
);
7382 pr_info("ref_info %x\r\n", vp9_param
.p
.ref_info
);
7383 pr_info("same_frame_size %x\r\n", vp9_param
.p
.same_frame_size
);
/* loop-filter details only when VP9_DEBUG_DBG_LF_PRINT is also set */
7384 if (!(debug
& VP9_DEBUG_DBG_LF_PRINT
))
7386 pr_info("mode_ref_delta_enabled: 0x%x\r\n",
7387 vp9_param
.p
.mode_ref_delta_enabled
);
7388 pr_info("sharpness_level: 0x%x\r\n",
7389 vp9_param
.p
.sharpness_level
);
7390 pr_info("ref_deltas: 0x%x, 0x%x, 0x%x, 0x%x\r\n",
7391 vp9_param
.p
.ref_deltas
[0], vp9_param
.p
.ref_deltas
[1],
7392 vp9_param
.p
.ref_deltas
[2], vp9_param
.p
.ref_deltas
[3]);
7393 pr_info("mode_deltas: 0x%x, 0x%x\r\n", vp9_param
.p
.mode_deltas
[0],
7394 vp9_param
.p
.mode_deltas
[1]);
7395 pr_info("filter_level: 0x%x\r\n", vp9_param
.p
.filter_level
);
7396 pr_info("seg_enabled: 0x%x\r\n", vp9_param
.p
.seg_enabled
);
7397 pr_info("seg_abs_delta: 0x%x\r\n", vp9_param
.p
.seg_abs_delta
);
/* seg_lf_info[i]: bit 15 = feature enabled, low bits = signed data */
7398 pr_info("seg_lf_feature_enabled: 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x\r\n",
7399 (vp9_param
.p
.seg_lf_info
[0]>>15 & 1),
7400 (vp9_param
.p
.seg_lf_info
[1]>>15 & 1),
7401 (vp9_param
.p
.seg_lf_info
[2]>>15 & 1),
7402 (vp9_param
.p
.seg_lf_info
[3]>>15 & 1),
7403 (vp9_param
.p
.seg_lf_info
[4]>>15 & 1),
7404 (vp9_param
.p
.seg_lf_info
[5]>>15 & 1),
7405 (vp9_param
.p
.seg_lf_info
[6]>>15 & 1),
7406 (vp9_param
.p
.seg_lf_info
[7]>>15 & 1));
7407 pr_info("seg_lf_feature_data: 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x 0x%x\r\n",
7408 (vp9_param
.p
.seg_lf_info
[0] & 0x13f),
7409 (vp9_param
.p
.seg_lf_info
[1] & 0x13f),
7410 (vp9_param
.p
.seg_lf_info
[2] & 0x13f),
7411 (vp9_param
.p
.seg_lf_info
[3] & 0x13f),
7412 (vp9_param
.p
.seg_lf_info
[4] & 0x13f),
7413 (vp9_param
.p
.seg_lf_info
[5] & 0x13f),
7414 (vp9_param
.p
.seg_lf_info
[6] & 0x13f),
7415 (vp9_param
.p
.seg_lf_info
[7] & 0x13f));
/*
 * NOTE(review): line-mangled extraction; code left byte-identical,
 * comments only.
 *
 * vp9_recycle_mmu_buf_tail() - release the unused tail of the MMU page
 * mapping for the current frame buffer.  Skipped entirely for
 * double-write-only mode (dw & 0x10).  If used_4k_num was not yet read,
 * it is taken from HEVC_SAO_MMU_STATUS bits [31:16]; the tail pages past
 * used_4k_num are then freed via decoder_mmu_box_free_idx_tail() and the
 * bookkeeping (cur_fb_idx_mmu / used_4k_num) is reset.
 */
7420 static void vp9_recycle_mmu_buf_tail(struct VP9Decoder_s
*pbi
)
7422 struct VP9_Common_s
*const cm
= &pbi
->common
;
/* nothing to recycle when running in double-write-only mode */
7423 if (pbi
->double_write_mode
& 0x10)
7425 if (cm
->cur_fb_idx_mmu
!= INVALID_IDX
) {
/* lazily fetch the number of 4K pages actually used by the HW */
7426 if (pbi
->used_4k_num
== -1) {
7428 (READ_VREG(HEVC_SAO_MMU_STATUS
) >> 16);
7429 if (pbi
->m_ins_flag
)
7430 hevc_mmu_dma_check(hw_to_vdec(pbi
));
/* free only the tail pages beyond used_4k_num */
7432 decoder_mmu_box_free_idx_tail(pbi
->mmu_box
,
7433 cm
->cur_fb_idx_mmu
, pbi
->used_4k_num
);
7434 cm
->cur_fb_idx_mmu
= INVALID_IDX
;
7435 pbi
->used_4k_num
= -1;
7439 #ifdef MULTI_INSTANCE_SUPPORT
/*
 * NOTE(review): line-mangled extraction; code left byte-identical,
 * comments only.
 *
 * vp9_recycle_mmu_buf() - free the ENTIRE MMU page mapping of the current
 * frame buffer (contrast with vp9_recycle_mmu_buf_tail(), which frees only
 * the unused tail).  Used on the decode-again / discard paths.  No-op in
 * double-write-only mode or when no mapping is held.
 */
7440 static void vp9_recycle_mmu_buf(struct VP9Decoder_s
*pbi
)
7442 struct VP9_Common_s
*const cm
= &pbi
->common
;
/* double-write-only mode holds no compressed-frame MMU pages */
7443 if (pbi
->double_write_mode
& 0x10)
7445 if (cm
->cur_fb_idx_mmu
!= INVALID_IDX
) {
7446 decoder_mmu_box_free_idx(pbi
->mmu_box
,
7447 cm
->cur_fb_idx_mmu
);
/* reset bookkeeping so the index is not freed twice */
7449 cm
->cur_fb_idx_mmu
= INVALID_IDX
;
7450 pbi
->used_4k_num
= -1;
/*
 * NOTE(review): line-mangled extraction; code left byte-identical,
 * comments only.
 *
 * dec_again_process() - arrange for the current input to be re-submitted.
 * Sets dec_result to DEC_RESULT_AGAIN; if the HW was mid-slice
 * (PROC_STATE_DECODESLICE) the state moves to PROC_STATE_SENDAGAIN and any
 * MMU pages mapped for the in-flight frame are recycled.  The watchdog
 * timer is cleared and the work item scheduled to finish the hand-back.
 */
7456 static void dec_again_process(struct VP9Decoder_s
*pbi
)
7459 pbi
->dec_result
= DEC_RESULT_AGAIN
;
7460 if (pbi
->process_state
==
7461 PROC_STATE_DECODESLICE
) {
7462 pbi
->process_state
=
7463 PROC_STATE_SENDAGAIN
;
/* drop MMU pages of the aborted frame so retry can re-allocate */
7464 if (pbi
->mmu_enable
)
7465 vp9_recycle_mmu_buf(pbi
);
7467 reset_process_time(pbi
);
7468 vdec_schedule_work(&pbi
->work
);
7471 int continue_decoding(struct VP9Decoder_s
*pbi
)
7475 struct VP9_Common_s
*const cm
= &pbi
->common
;
7476 debug_buffer_mgr_more(pbi
);
7478 bit_depth_luma
= vp9_param
.p
.bit_depth
;
7479 bit_depth_chroma
= vp9_param
.p
.bit_depth
;
7481 if ((vp9_param
.p
.bit_depth
>= VPX_BITS_10
) &&
7482 (get_double_write_mode(pbi
) == 0x10)) {
7483 pbi
->fatal_error
|= DECODER_FATAL_ERROR_SIZE_OVERFLOW
;
7484 pr_err("fatal err, bit_depth %d, unsupport dw 0x10\n",
7485 vp9_param
.p
.bit_depth
);
7489 if (pbi
->process_state
!= PROC_STATE_SENDAGAIN
) {
7490 ret
= vp9_bufmgr_process(pbi
, &vp9_param
);
7491 if (!pbi
->m_ins_flag
)
7494 union param_u
*params
= &vp9_param
;
7495 if (pbi
->mmu_enable
&& ((pbi
->double_write_mode
& 0x10) == 0)) {
7496 ret
= vp9_alloc_mmu(pbi
,
7500 params
->p
.bit_depth
,
7501 pbi
->frame_mmu_map_addr
);
7503 cm
->cur_fb_idx_mmu
= cm
->new_fb_idx
;
7505 pr_err("can't alloc need mmu1,idx %d ret =%d\n",
7511 WRITE_VREG(HEVC_PARSER_PICTURE_SIZE
,
7512 (params
->p
.height
<< 16) | params
->p
.width
);
7515 pr_info("vp9_bufmgr_process=> %d, VP9_10B_DISCARD_NAL\r\n",
7517 WRITE_VREG(HEVC_DEC_STATUS_REG
, VP9_10B_DISCARD_NAL
);
7519 if (pbi
->mmu_enable
)
7520 vp9_recycle_mmu_buf(pbi
);
7521 #ifdef MULTI_INSTANCE_SUPPORT
7522 if (pbi
->m_ins_flag
) {
7523 pbi
->dec_result
= DEC_RESULT_DONE
;
7524 #ifdef SUPPORT_FB_DECODING
7525 if (pbi
->used_stage_buf_num
== 0)
7528 vdec_schedule_work(&pbi
->work
);
7532 } else if (ret
== 0) {
7533 struct PIC_BUFFER_CONFIG_s
*cur_pic_config
7534 = &cm
->cur_frame
->buf
;
7535 cur_pic_config
->decode_idx
= pbi
->frame_count
;
7537 if (pbi
->process_state
!= PROC_STATE_SENDAGAIN
) {
7538 if (!pbi
->m_ins_flag
) {
7540 decode_frame_count
[pbi
->index
]
7543 #ifdef MULTI_INSTANCE_SUPPORT
7545 cur_pic_config
->pts
= pbi
->chunk
->pts
;
7546 cur_pic_config
->pts64
= pbi
->chunk
->pts64
;
7547 cur_pic_config
->timestamp
= pbi
->chunk
->timestamp
;
7551 /*pr_info("Decode Frame Data %d\n", pbi->frame_count);*/
7552 config_pic_size(pbi
, vp9_param
.p
.bit_depth
);
7554 if ((pbi
->common
.frame_type
!= KEY_FRAME
)
7555 && (!pbi
->common
.intra_only
)) {
7556 config_mc_buffer(pbi
, vp9_param
.p
.bit_depth
);
7557 #ifdef SUPPORT_FB_DECODING
7558 if (pbi
->used_stage_buf_num
== 0)
7560 config_mpred_hw(pbi
);
7562 #ifdef SUPPORT_FB_DECODING
7563 if (pbi
->used_stage_buf_num
== 0)
7565 clear_mpred_hw(pbi
);
7568 if (mcrcc_cache_alg_flag
)
7569 config_mcrcc_axi_hw_new(pbi
);
7571 config_mcrcc_axi_hw(pbi
);
7573 config_sao_hw(pbi
, &vp9_param
);
7575 #ifdef VP9_LPF_LVL_UPDATE
7577 * Get loop filter related picture level parameters from Parser
7579 pbi
->lf
->mode_ref_delta_enabled
= vp9_param
.p
.mode_ref_delta_enabled
;
7580 pbi
->lf
->sharpness_level
= vp9_param
.p
.sharpness_level
;
7581 for (i
= 0; i
< 4; i
++)
7582 pbi
->lf
->ref_deltas
[i
] = vp9_param
.p
.ref_deltas
[i
];
7583 for (i
= 0; i
< 2; i
++)
7584 pbi
->lf
->mode_deltas
[i
] = vp9_param
.p
.mode_deltas
[i
];
7585 pbi
->default_filt_lvl
= vp9_param
.p
.filter_level
;
7586 pbi
->seg_4lf
->enabled
= vp9_param
.p
.seg_enabled
;
7587 pbi
->seg_4lf
->abs_delta
= vp9_param
.p
.seg_abs_delta
;
7588 for (i
= 0; i
< MAX_SEGMENTS
; i
++)
7589 pbi
->seg_4lf
->feature_mask
[i
] = (vp9_param
.p
.seg_lf_info
[i
] &
7590 0x8000) ? (1 << SEG_LVL_ALT_LF
) : 0;
7591 for (i
= 0; i
< MAX_SEGMENTS
; i
++)
7592 pbi
->seg_4lf
->feature_data
[i
][SEG_LVL_ALT_LF
]
7593 = (vp9_param
.p
.seg_lf_info
[i
]
7594 & 0x100) ? -(vp9_param
.p
.seg_lf_info
[i
]
7595 & 0x3f) : (vp9_param
.p
.seg_lf_info
[i
] & 0x3f);
7596 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A
) {
7597 /*Set pipeline mode*/
7598 uint32_t lpf_data32
= READ_VREG(HEVC_DBLK_CFGB
);
7599 /*dblk pipeline mode=1 for performance*/
7600 if (vp9_param
.p
.width
>= 1280)
7601 lpf_data32
|= (0x1 << 4);
7603 lpf_data32
&= ~(0x3 << 4);
7604 WRITE_VREG(HEVC_DBLK_CFGB
, lpf_data32
);
7607 * Update loop filter Thr/Lvl table for every frame
7610 ("vp9_loop_filter (run before every frame decoding start)\n");*/
7611 vp9_loop_filter_frame_init(pbi
->seg_4lf
,
7612 pbi
->lfi
, pbi
->lf
, pbi
->default_filt_lvl
);
7614 /*pr_info("HEVC_DEC_STATUS_REG <= VP9_10B_DECODE_SLICE\n");*/
7615 WRITE_VREG(HEVC_DEC_STATUS_REG
, VP9_10B_DECODE_SLICE
);
7617 pr_info("Skip search next start code\n");
7618 cm
->prev_fb_idx
= INVALID_IDX
;
7619 /*skip, search next start code*/
7620 WRITE_VREG(HEVC_DEC_STATUS_REG
, VP9_10B_DECODE_SLICE
);
7622 pbi
->process_state
= PROC_STATE_DECODESLICE
;
7623 if (pbi
->mmu_enable
&& ((pbi
->double_write_mode
& 0x10) == 0)) {
7624 if (pbi
->last_put_idx
< pbi
->used_buf_num
) {
7625 struct RefCntBuffer_s
*frame_bufs
=
7626 cm
->buffer_pool
->frame_bufs
;
7627 int i
= pbi
->last_put_idx
;
7628 /*free not used buffers.*/
7629 if ((frame_bufs
[i
].ref_count
== 0) &&
7630 (frame_bufs
[i
].buf
.vf_ref
== 0) &&
7631 (frame_bufs
[i
].buf
.index
!= -1)) {
7632 decoder_mmu_box_free_idx(pbi
->mmu_box
, i
);
7634 pbi
->last_put_idx
= -1;
/*
 * NOTE(review): line-mangled extraction; code left byte-identical,
 * comments only.
 *
 * fill_frame_info() - copy per-frame QoS statistics (mv/qp/skip min, avg
 * and max, plus frame size and pts) from the picture config into
 * pbi->vframe_qos and report them via vdec_fill_frame_info() when
 * frameinfo reporting is enabled.  vframe_qos->type encodes the slice
 * type: 1 = key frame, 2 = inter frame.  SHOW_QOS_INFO is defined locally
 * to also trace the values through vp9_print().
 */
7640 static void fill_frame_info(struct VP9Decoder_s
*pbi
,
7641 struct PIC_BUFFER_CONFIG_s
*frame
,
7642 unsigned int framesize
,
7645 struct vframe_qos_s
*vframe_qos
= &pbi
->vframe_qos
;
/* map slice type to the QoS type code (1 = key, 2 = inter) */
7647 if (frame
->slice_type
== KEY_FRAME
)
7648 vframe_qos
->type
= 1;
7649 else if (frame
->slice_type
== INTER_FRAME
)
7650 vframe_qos
->type
= 2;
7652 #define SHOW_QOS_INFO
7654 vframe_qos
->size
= framesize
;
7655 vframe_qos
->pts
= pts
;
7656 #ifdef SHOW_QOS_INFO
7657 vp9_print(pbi
, 0, "slice:%d\n", frame
->slice_type
);
/* motion-vector statistics gathered by get_picture_qos_info() */
7659 vframe_qos
->max_mv
= frame
->max_mv
;
7660 vframe_qos
->avg_mv
= frame
->avg_mv
;
7661 vframe_qos
->min_mv
= frame
->min_mv
;
7662 #ifdef SHOW_QOS_INFO
7663 vp9_print(pbi
, 0, "mv: max:%d, avg:%d, min:%d\n",
7666 vframe_qos
->min_mv
);
/* quantizer statistics */
7668 vframe_qos
->max_qp
= frame
->max_qp
;
7669 vframe_qos
->avg_qp
= frame
->avg_qp
;
7670 vframe_qos
->min_qp
= frame
->min_qp
;
7671 #ifdef SHOW_QOS_INFO
7672 vp9_print(pbi
, 0, "qp: max:%d, avg:%d, min:%d\n",
7675 vframe_qos
->min_qp
);
/* skip-block statistics */
7677 vframe_qos
->max_skip
= frame
->max_skip
;
7678 vframe_qos
->avg_skip
= frame
->avg_skip
;
7679 vframe_qos
->min_skip
= frame
->min_skip
;
7680 #ifdef SHOW_QOS_INFO
7681 vp9_print(pbi
, 0, "skip: max:%d, avg:%d, min:%d\n",
7682 vframe_qos
->max_skip
,
7683 vframe_qos
->avg_skip
,
7684 vframe_qos
->min_skip
);
/* hand the record to the frame-info reporting layer if enabled */
7688 if (pbi
->frameinfo_enable
)
7689 vdec_fill_frame_info(vframe_qos
, 1);
7692 /* only when we decoded one field or one frame,
7693 we can call this function to get qos info*/
7694 static void get_picture_qos_info(struct VP9Decoder_s
*pbi
)
7696 struct PIC_BUFFER_CONFIG_s
*frame
= &pbi
->cur_buf
->buf
;
7701 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_G12A
) {
7703 unsigned char i
, j
, t
;
7706 data
= READ_VREG(HEVC_MV_INFO
);
7707 if (frame
->slice_type
== KEY_FRAME
)
7710 a
[1] = (data
>> 8) & 0xff;
7711 a
[2] = (data
>> 16) & 0xff;
7713 for (i
= 0; i
< 3; i
++) {
7714 for (j
= i
+1; j
< 3; j
++) {
7719 } else if (a
[j
] == a
[i
]) {
7727 frame
->max_mv
= a
[2];
7728 frame
->avg_mv
= a
[1];
7729 frame
->min_mv
= a
[0];
7731 vp9_print(pbi
, VP9_DEBUG_QOS_INFO
,
7732 "mv data %x a[0]= %x a[1]= %x a[2]= %x\n",
7733 data
, a
[0], a
[1], a
[2]);
7735 data
= READ_VREG(HEVC_QP_INFO
);
7737 a
[1] = (data
>> 8) & 0x3f;
7738 a
[2] = (data
>> 16) & 0x7f;
7740 for (i
= 0; i
< 3; i
++) {
7741 for (j
= i
+1; j
< 3; j
++) {
7746 } else if (a
[j
] == a
[i
]) {
7754 frame
->max_qp
= a
[2];
7755 frame
->avg_qp
= a
[1];
7756 frame
->min_qp
= a
[0];
7758 vp9_print(pbi
, VP9_DEBUG_QOS_INFO
,
7759 "qp data %x a[0]= %x a[1]= %x a[2]= %x\n",
7760 data
, a
[0], a
[1], a
[2]);
7762 data
= READ_VREG(HEVC_SKIP_INFO
);
7764 a
[1] = (data
>> 8) & 0x3f;
7765 a
[2] = (data
>> 16) & 0x7f;
7767 for (i
= 0; i
< 3; i
++) {
7768 for (j
= i
+1; j
< 3; j
++) {
7773 } else if (a
[j
] == a
[i
]) {
7781 frame
->max_skip
= a
[2];
7782 frame
->avg_skip
= a
[1];
7783 frame
->min_skip
= a
[0];
7785 vp9_print(pbi
, VP9_DEBUG_QOS_INFO
,
7786 "skip data %x a[0]= %x a[1]= %x a[2]= %x\n",
7787 data
, a
[0], a
[1], a
[2]);
7789 uint32_t blk88_y_count
;
7790 uint32_t blk88_c_count
;
7791 uint32_t blk22_mv_count
;
7801 uint64_t temp_value
;
7802 int pic_number
= frame
->decode_idx
;
7808 frame
->max_skip
= 0;
7809 frame
->avg_skip
= 0;
7810 frame
->min_skip
= 0;
7816 vp9_print(pbi
, VP9_DEBUG_QOS_INFO
, "slice_type:%d, poc:%d\n",
7820 /* set rd_idx to 0 */
7821 WRITE_VREG(HEVC_PIC_QUALITY_CTRL
, 0);
7823 blk88_y_count
= READ_VREG(HEVC_PIC_QUALITY_DATA
);
7824 if (blk88_y_count
== 0) {
7826 vp9_print(pbi
, VP9_DEBUG_QOS_INFO
,
7827 "[Picture %d Quality] NO Data yet.\n",
7830 /* reset all counts */
7831 WRITE_VREG(HEVC_PIC_QUALITY_CTRL
, (1<<8));
7835 rdata32
= READ_VREG(HEVC_PIC_QUALITY_DATA
);
7837 vp9_print(pbi
, VP9_DEBUG_QOS_INFO
,
7838 "[Picture %d Quality] Y QP AVG : %d (%d/%d)\n",
7839 pic_number
, rdata32
/blk88_y_count
,
7840 rdata32
, blk88_y_count
);
7842 frame
->avg_qp
= rdata32
/blk88_y_count
;
7844 rdata32
= READ_VREG(HEVC_PIC_QUALITY_DATA
);
7846 vp9_print(pbi
, VP9_DEBUG_QOS_INFO
,
7847 "[Picture %d Quality] Y intra rate : %d%c (%d)\n",
7848 pic_number
, rdata32
*100/blk88_y_count
,
7851 /* skipped_y_count */
7852 rdata32
= READ_VREG(HEVC_PIC_QUALITY_DATA
);
7854 vp9_print(pbi
, VP9_DEBUG_QOS_INFO
,
7855 "[Picture %d Quality] Y skipped rate : %d%c (%d)\n",
7856 pic_number
, rdata32
*100/blk88_y_count
,
7859 frame
->avg_skip
= rdata32
*100/blk88_y_count
;
7860 /* coeff_non_zero_y_count */
7861 rdata32
= READ_VREG(HEVC_PIC_QUALITY_DATA
);
7863 vp9_print(pbi
, VP9_DEBUG_QOS_INFO
,
7864 "[Picture %d Quality] Y ZERO_Coeff rate : %d%c (%d)\n",
7865 pic_number
, (100 - rdata32
*100/(blk88_y_count
*1)),
7869 blk88_c_count
= READ_VREG(HEVC_PIC_QUALITY_DATA
);
7870 if (blk88_c_count
== 0) {
7871 vp9_print(pbi
, VP9_DEBUG_QOS_INFO
,
7872 "[Picture %d Quality] NO Data yet.\n",
7874 /* reset all counts */
7875 WRITE_VREG(HEVC_PIC_QUALITY_CTRL
, (1<<8));
7879 rdata32
= READ_VREG(HEVC_PIC_QUALITY_DATA
);
7881 vp9_print(pbi
, VP9_DEBUG_QOS_INFO
,
7882 "[Picture %d Quality] C QP AVG : %d (%d/%d)\n",
7883 pic_number
, rdata32
/blk88_c_count
,
7884 rdata32
, blk88_c_count
);
7887 rdata32
= READ_VREG(HEVC_PIC_QUALITY_DATA
);
7889 vp9_print(pbi
, VP9_DEBUG_QOS_INFO
,
7890 "[Picture %d Quality] C intra rate : %d%c (%d)\n",
7891 pic_number
, rdata32
*100/blk88_c_count
,
7894 /* skipped_cu_c_count */
7895 rdata32
= READ_VREG(HEVC_PIC_QUALITY_DATA
);
7897 vp9_print(pbi
, VP9_DEBUG_QOS_INFO
,
7898 "[Picture %d Quality] C skipped rate : %d%c (%d)\n",
7899 pic_number
, rdata32
*100/blk88_c_count
,
7902 /* coeff_non_zero_c_count */
7903 rdata32
= READ_VREG(HEVC_PIC_QUALITY_DATA
);
7905 vp9_print(pbi
, VP9_DEBUG_QOS_INFO
,
7906 "[Picture %d Quality] C ZERO_Coeff rate : %d%c (%d)\n",
7907 pic_number
, (100 - rdata32
*100/(blk88_c_count
*1)),
7910 /* 1'h0, qp_c_max[6:0], 1'h0, qp_c_min[6:0],
7911 1'h0, qp_y_max[6:0], 1'h0, qp_y_min[6:0] */
7912 rdata32
= READ_VREG(HEVC_PIC_QUALITY_DATA
);
7914 vp9_print(pbi
, VP9_DEBUG_QOS_INFO
,
7915 "[Picture %d Quality] Y QP min : %d\n",
7916 pic_number
, (rdata32
>>0)&0xff);
7918 frame
->min_qp
= (rdata32
>>0)&0xff;
7920 vp9_print(pbi
, VP9_DEBUG_QOS_INFO
,
7921 "[Picture %d Quality] Y QP max : %d\n",
7922 pic_number
, (rdata32
>>8)&0xff);
7924 frame
->max_qp
= (rdata32
>>8)&0xff;
7926 vp9_print(pbi
, VP9_DEBUG_QOS_INFO
,
7927 "[Picture %d Quality] C QP min : %d\n",
7928 pic_number
, (rdata32
>>16)&0xff);
7929 vp9_print(pbi
, VP9_DEBUG_QOS_INFO
,
7930 "[Picture %d Quality] C QP max : %d\n",
7931 pic_number
, (rdata32
>>24)&0xff);
7933 /* blk22_mv_count */
7934 blk22_mv_count
= READ_VREG(HEVC_PIC_QUALITY_DATA
);
7935 if (blk22_mv_count
== 0) {
7936 vp9_print(pbi
, VP9_DEBUG_QOS_INFO
,
7937 "[Picture %d Quality] NO MV Data yet.\n",
7939 /* reset all counts */
7940 WRITE_VREG(HEVC_PIC_QUALITY_CTRL
, (1<<8));
7943 /* mvy_L1_count[39:32], mvx_L1_count[39:32],
7944 mvy_L0_count[39:32], mvx_L0_count[39:32] */
7945 rdata32
= READ_VREG(HEVC_PIC_QUALITY_DATA
);
7946 /* should all be 0x00 or 0xff */
7947 vp9_print(pbi
, VP9_DEBUG_QOS_INFO
,
7948 "[Picture %d Quality] MV AVG High Bits: 0x%X\n",
7949 pic_number
, rdata32
);
7951 mvx_L0_hi
= ((rdata32
>>0)&0xff);
7952 mvy_L0_hi
= ((rdata32
>>8)&0xff);
7953 mvx_L1_hi
= ((rdata32
>>16)&0xff);
7954 mvy_L1_hi
= ((rdata32
>>24)&0xff);
7956 /* mvx_L0_count[31:0] */
7957 rdata32_l
= READ_VREG(HEVC_PIC_QUALITY_DATA
);
7958 temp_value
= mvx_L0_hi
;
7959 temp_value
= (temp_value
<< 32) | rdata32_l
;
7961 if (mvx_L0_hi
& 0x80)
7962 value
= 0xFFFFFFF000000000 | temp_value
;
7966 value
= div_s64(value
, blk22_mv_count
);
7968 vp9_print(pbi
, VP9_DEBUG_QOS_INFO
,
7969 "[Picture %d Quality] MVX_L0 AVG : %d (%lld/%d)\n",
7970 pic_number
, (int)value
,
7971 value
, blk22_mv_count
);
7973 frame
->avg_mv
= value
;
7975 /* mvy_L0_count[31:0] */
7976 rdata32_l
= READ_VREG(HEVC_PIC_QUALITY_DATA
);
7977 temp_value
= mvy_L0_hi
;
7978 temp_value
= (temp_value
<< 32) | rdata32_l
;
7980 if (mvy_L0_hi
& 0x80)
7981 value
= 0xFFFFFFF000000000 | temp_value
;
7985 vp9_print(pbi
, VP9_DEBUG_QOS_INFO
,
7986 "[Picture %d Quality] MVY_L0 AVG : %d (%lld/%d)\n",
7987 pic_number
, rdata32_l
/blk22_mv_count
,
7988 value
, blk22_mv_count
);
7990 /* mvx_L1_count[31:0] */
7991 rdata32_l
= READ_VREG(HEVC_PIC_QUALITY_DATA
);
7992 temp_value
= mvx_L1_hi
;
7993 temp_value
= (temp_value
<< 32) | rdata32_l
;
7994 if (mvx_L1_hi
& 0x80)
7995 value
= 0xFFFFFFF000000000 | temp_value
;
7999 vp9_print(pbi
, VP9_DEBUG_QOS_INFO
,
8000 "[Picture %d Quality] MVX_L1 AVG : %d (%lld/%d)\n",
8001 pic_number
, rdata32_l
/blk22_mv_count
,
8002 value
, blk22_mv_count
);
8004 /* mvy_L1_count[31:0] */
8005 rdata32_l
= READ_VREG(HEVC_PIC_QUALITY_DATA
);
8006 temp_value
= mvy_L1_hi
;
8007 temp_value
= (temp_value
<< 32) | rdata32_l
;
8008 if (mvy_L1_hi
& 0x80)
8009 value
= 0xFFFFFFF000000000 | temp_value
;
8013 vp9_print(pbi
, VP9_DEBUG_QOS_INFO
,
8014 "[Picture %d Quality] MVY_L1 AVG : %d (%lld/%d)\n",
8015 pic_number
, rdata32_l
/blk22_mv_count
,
8016 value
, blk22_mv_count
);
8018 /* {mvx_L0_max, mvx_L0_min} // format : {sign, abs[14:0]} */
8019 rdata32
= READ_VREG(HEVC_PIC_QUALITY_DATA
);
8020 mv_hi
= (rdata32
>>16)&0xffff;
8022 mv_hi
= 0x8000 - mv_hi
;
8024 vp9_print(pbi
, VP9_DEBUG_QOS_INFO
,
8025 "[Picture %d Quality] MVX_L0 MAX : %d\n",
8028 frame
->max_mv
= mv_hi
;
8030 mv_lo
= (rdata32
>>0)&0xffff;
8032 mv_lo
= 0x8000 - mv_lo
;
8034 vp9_print(pbi
, VP9_DEBUG_QOS_INFO
,
8035 "[Picture %d Quality] MVX_L0 MIN : %d\n",
8038 frame
->min_mv
= mv_lo
;
8040 /* {mvy_L0_max, mvy_L0_min} */
8041 rdata32
= READ_VREG(HEVC_PIC_QUALITY_DATA
);
8042 mv_hi
= (rdata32
>>16)&0xffff;
8044 mv_hi
= 0x8000 - mv_hi
;
8046 vp9_print(pbi
, VP9_DEBUG_QOS_INFO
,
8047 "[Picture %d Quality] MVY_L0 MAX : %d\n",
8050 mv_lo
= (rdata32
>>0)&0xffff;
8052 mv_lo
= 0x8000 - mv_lo
;
8054 vp9_print(pbi
, VP9_DEBUG_QOS_INFO
,
8055 "[Picture %d Quality] MVY_L0 MIN : %d\n",
8058 /* {mvx_L1_max, mvx_L1_min} */
8059 rdata32
= READ_VREG(HEVC_PIC_QUALITY_DATA
);
8060 mv_hi
= (rdata32
>>16)&0xffff;
8062 mv_hi
= 0x8000 - mv_hi
;
8064 vp9_print(pbi
, VP9_DEBUG_QOS_INFO
,
8065 "[Picture %d Quality] MVX_L1 MAX : %d\n",
8068 mv_lo
= (rdata32
>>0)&0xffff;
8070 mv_lo
= 0x8000 - mv_lo
;
8072 vp9_print(pbi
, VP9_DEBUG_QOS_INFO
,
8073 "[Picture %d Quality] MVX_L1 MIN : %d\n",
8076 /* {mvy_L1_max, mvy_L1_min} */
8077 rdata32
= READ_VREG(HEVC_PIC_QUALITY_DATA
);
8078 mv_hi
= (rdata32
>>16)&0xffff;
8080 mv_hi
= 0x8000 - mv_hi
;
8082 vp9_print(pbi
, VP9_DEBUG_QOS_INFO
,
8083 "[Picture %d Quality] MVY_L1 MAX : %d\n",
8086 mv_lo
= (rdata32
>>0)&0xffff;
8088 mv_lo
= 0x8000 - mv_lo
;
8090 vp9_print(pbi
, VP9_DEBUG_QOS_INFO
,
8091 "[Picture %d Quality] MVY_L1 MIN : %d\n",
8094 rdata32
= READ_VREG(HEVC_PIC_QUALITY_CTRL
);
8096 vp9_print(pbi
, VP9_DEBUG_QOS_INFO
,
8097 "[Picture %d Quality] After Read : VDEC_PIC_QUALITY_CTRL : 0x%x\n",
8098 pic_number
, rdata32
);
8100 /* reset all counts */
8101 WRITE_VREG(HEVC_PIC_QUALITY_CTRL
, (1<<8));
8105 static irqreturn_t
vvp9_isr_thread_fn(int irq
, void *data
)
8107 struct VP9Decoder_s
*pbi
= (struct VP9Decoder_s
*)data
;
8108 unsigned int dec_status
= pbi
->dec_status
;
8111 /*if (pbi->wait_buf)
8112 * pr_info("set wait_buf to 0\r\n");
8117 #ifdef MULTI_INSTANCE_SUPPORT
8118 #ifdef SUPPORT_FB_DECODING
8119 #ifdef FB_DECODING_TEST_SCHEDULE
8120 if (pbi
->s1_test_cmd
== TEST_SET_PIC_DONE
)
8121 dec_status
= HEVC_DECPIC_DATA_DONE
;
8122 else if (pbi
->s1_test_cmd
== TEST_SET_S2_DONE
8123 && dec_status
== HEVC_DECPIC_DATA_DONE
)
8124 dec_status
= HEVC_S2_DECODING_DONE
;
8125 pbi
->s1_test_cmd
= TEST_SET_NONE
;
8127 /*if (irq != VDEC_IRQ_0)
8128 dec_status = HEVC_S2_DECODING_DONE;*/
8130 if (dec_status
== HEVC_S2_DECODING_DONE
) {
8131 pbi
->dec_result
= DEC_RESULT_DONE
;
8132 vdec_schedule_work(&pbi
->work
);
8133 #ifdef FB_DECODING_TEST_SCHEDULE
8135 pbi
->dec_s1_result
= DEC_S1_RESULT_DONE
;
8136 vdec_schedule_work(&pbi
->s1_work
);
8140 if ((dec_status
== HEVC_NAL_DECODE_DONE
) ||
8141 (dec_status
== HEVC_SEARCH_BUFEMPTY
) ||
8142 (dec_status
== HEVC_DECODE_BUFEMPTY
)
8144 if (pbi
->m_ins_flag
) {
8145 reset_process_time(pbi
);
8146 if (!vdec_frame_based(hw_to_vdec(pbi
)))
8147 dec_again_process(pbi
);
8149 pbi
->dec_result
= DEC_RESULT_GET_DATA
;
8150 vdec_schedule_work(&pbi
->work
);
8153 pbi
->process_busy
= 0;
8155 } else if (dec_status
== HEVC_DECPIC_DATA_DONE
) {
8156 if (pbi
->m_ins_flag
) {
8157 get_picture_qos_info(pbi
);
8158 #ifdef SUPPORT_FB_DECODING
8159 if (pbi
->used_stage_buf_num
> 0) {
8160 reset_process_time(pbi
);
8162 trigger_schedule(pbi
);
8163 #ifdef FB_DECODING_TEST_SCHEDULE
8164 pbi
->s1_test_cmd
= TEST_SET_S2_DONE
;
8167 pbi
->dec_s1_result
= DEC_S1_RESULT_DONE
;
8168 vdec_schedule_work(&pbi
->s1_work
);
8173 reset_process_time(pbi
);
8174 if (pbi
->vf_pre_count
== 0 || pbi
->low_latency_flag
)
8175 vp9_bufmgr_postproc(pbi
);
8177 pbi
->dec_result
= DEC_RESULT_DONE
;
8179 if (mcrcc_cache_alg_flag
)
8181 vdec_schedule_work(&pbi
->work
);
8184 if (pbi
->low_latency_flag
) {
8185 vp9_bufmgr_postproc(pbi
);
8186 WRITE_VREG(HEVC_DEC_STATUS_REG
, HEVC_ACTION_DONE
);
8187 #ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
8188 vdec_profile(hw_to_vdec(pbi
), VDEC_PROFILE_EVENT_CB
);
8189 if (debug
& PRINT_FLAG_VDEC_DETAIL
)
8190 pr_info("%s VP9 frame done \n", __func__
);
8195 pbi
->process_busy
= 0;
8200 if (dec_status
== VP9_EOS
) {
8201 #ifdef MULTI_INSTANCE_SUPPORT
8202 if (pbi
->m_ins_flag
)
8203 reset_process_time(pbi
);
8206 pr_info("VP9_EOS, flush buffer\r\n");
8208 vp9_bufmgr_postproc(pbi
);
8210 pr_info("send VP9_10B_DISCARD_NAL\r\n");
8211 WRITE_VREG(HEVC_DEC_STATUS_REG
, VP9_10B_DISCARD_NAL
);
8212 pbi
->process_busy
= 0;
8213 #ifdef MULTI_INSTANCE_SUPPORT
8214 if (pbi
->m_ins_flag
) {
8215 pbi
->dec_result
= DEC_RESULT_DONE
;
8217 vdec_schedule_work(&pbi
->work
);
8221 } else if (dec_status
== HEVC_DECODE_OVER_SIZE
) {
8222 pr_info("vp9 decode oversize !!\n");
8223 debug
|= (VP9_DEBUG_DIS_LOC_ERROR_PROC
|
8224 VP9_DEBUG_DIS_SYS_ERROR_PROC
);
8225 pbi
->fatal_error
|= DECODER_FATAL_ERROR_SIZE_OVERFLOW
;
8226 #ifdef MULTI_INSTANCE_SUPPORT
8227 if (pbi
->m_ins_flag
)
8228 reset_process_time(pbi
);
8233 if (dec_status
!= VP9_HEAD_PARSER_DONE
) {
8234 pbi
->process_busy
= 0;
8239 #ifdef MULTI_INSTANCE_SUPPORT
8240 #ifdef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
8241 if (pbi
->m_ins_flag
==0 && pbi
->low_latency_flag
) {
8242 vdec_profile(hw_to_vdec(pbi
), VDEC_PROFILE_EVENT_RUN
);
8243 if (debug
& PRINT_FLAG_VDEC_DETAIL
)
8244 pr_info("%s VP9 frame header found \n", __func__
);
8247 if (pbi
->m_ins_flag
)
8248 reset_process_time(pbi
);
8250 if (pbi
->process_state
!= PROC_STATE_SENDAGAIN
8251 #ifdef SUPPORT_FB_DECODING
8252 && pbi
->used_stage_buf_num
== 0
8255 if (pbi
->mmu_enable
)
8256 vp9_recycle_mmu_buf_tail(pbi
);
8259 if (pbi
->frame_count
> 0)
8260 vp9_bufmgr_postproc(pbi
);
8263 if (debug
& VP9_DEBUG_SEND_PARAM_WITH_REG
) {
8264 get_rpm_param(&vp9_param
);
8266 #ifdef SUPPORT_FB_DECODING
8267 if (pbi
->used_stage_buf_num
> 0) {
8268 reset_process_time(pbi
);
8272 &pbi
->s1_mv_buf_index
,
8273 &pbi
->s1_mpred_mv_wr_start_addr
8276 "%s: Error get_mv_buf fail\n",
8280 if (pbi
->s1_buf
== NULL
) {
8282 "%s: Error get_s1_buf fail\n",
8284 pbi
->process_busy
= 0;
8288 for (i
= 0; i
< (RPM_END
- RPM_BEGIN
); i
+= 4) {
8290 for (ii
= 0; ii
< 4; ii
++) {
8291 pbi
->s1_buf
->rpm
[i
+ 3 - ii
] =
8292 pbi
->rpm_ptr
[i
+ 3 - ii
];
8293 pbi
->s1_param
.l
.data
[i
+ ii
] =
8294 pbi
->rpm_ptr
[i
+ 3 - ii
];
8299 #ifdef FB_DECODING_TEST_SCHEDULE
8300 pbi
->dec_s1_result
=
8301 DEC_S1_RESULT_TEST_TRIGGER_DONE
;
8302 vdec_schedule_work(&pbi
->s1_work
);
8304 WRITE_VREG(HEVC_ASSIST_FB_MMU_MAP_ADDR
,
8305 pbi
->stage_mmu_map_phy_addr
+
8306 pbi
->s1_buf
->index
* STAGE_MMU_MAP_SIZE
);
8308 start_s1_decoding(pbi
);
8310 start_process_time(pbi
);
8311 pbi
->process_busy
= 0;
8316 for (i
= 0; i
< (RPM_END
- RPM_BEGIN
); i
+= 4) {
8318 for (ii
= 0; ii
< 4; ii
++)
8319 vp9_param
.l
.data
[i
+ ii
] =
8320 pbi
->rpm_ptr
[i
+ 3 - ii
];
8325 if (pbi
->is_used_v4l
) {
8326 struct aml_vcodec_ctx
*ctx
=
8327 (struct aml_vcodec_ctx
*)(pbi
->v4l2_ctx
);
8329 pbi
->frame_width
= vp9_param
.p
.width
;
8330 pbi
->frame_height
= vp9_param
.p
.height
;
8331 if (ctx
->param_sets_from_ucode
&& !pbi
->v4l_params_parsed
) {
8332 struct aml_vdec_ps_infos ps
;
8334 ps
.visible_width
= pbi
->frame_width
;
8335 ps
.visible_height
= pbi
->frame_height
;
8336 ps
.coded_width
= ALIGN(pbi
->frame_width
, 32);
8337 ps
.coded_height
= ALIGN(pbi
->frame_height
, 32);
8338 ps
.dpb_size
= pbi
->used_buf_num
;
8339 pbi
->v4l_params_parsed
= true;
8340 vdec_v4l_set_ps_infos(ctx
, &ps
);
8344 if (pbi
->is_used_v4l
) {
8345 pbi
->dec_result
= DEC_V4L2_CONTINUE_DECODING
;
8346 vdec_schedule_work(&pbi
->work
);
8348 continue_decoding(pbi
);
8349 pbi
->postproc_done
= 0;
8350 pbi
->process_busy
= 0;
8353 #ifdef MULTI_INSTANCE_SUPPORT
8354 if (pbi
->m_ins_flag
)
8355 start_process_time(pbi
);
8361 static irqreturn_t
vvp9_isr(int irq
, void *data
)
8364 unsigned int dec_status
;
8365 struct VP9Decoder_s
*pbi
= (struct VP9Decoder_s
*)data
;
8366 unsigned int adapt_prob_status
;
8367 struct VP9_Common_s
*const cm
= &pbi
->common
;
8370 WRITE_VREG(HEVC_ASSIST_MBOX0_CLR_REG
, 1);
8372 dec_status
= READ_VREG(HEVC_DEC_STATUS_REG
);
8373 adapt_prob_status
= READ_VREG(VP9_ADAPT_PROB_REG
);
8376 if (pbi
->init_flag
== 0)
8378 if (pbi
->process_busy
)/*on process.*/
8380 pbi
->dec_status
= dec_status
;
8381 pbi
->process_busy
= 1;
8382 if (debug
& VP9_DEBUG_BUFMGR
)
8383 pr_info("vp9 isr (%d) dec status = 0x%x, lcu 0x%x shiftbyte 0x%x (%x %x lev %x, wr %x, rd %x)\n",
8385 dec_status
, READ_VREG(HEVC_PARSER_LCU_START
),
8386 READ_VREG(HEVC_SHIFT_BYTE_COUNT
),
8387 READ_VREG(HEVC_STREAM_START_ADDR
),
8388 READ_VREG(HEVC_STREAM_END_ADDR
),
8389 READ_VREG(HEVC_STREAM_LEVEL
),
8390 READ_VREG(HEVC_STREAM_WR_PTR
),
8391 READ_VREG(HEVC_STREAM_RD_PTR
)
8393 #ifdef SUPPORT_FB_DECODING
8394 /*if (irq != VDEC_IRQ_0)
8395 return IRQ_WAKE_THREAD;*/
8398 debug_tag
= READ_HREG(DEBUG_REG1
);
8399 if (debug_tag
& 0x10000) {
8400 pr_info("LMEM<tag %x>:\n", READ_HREG(DEBUG_REG1
));
8401 for (i
= 0; i
< 0x400; i
+= 4) {
8404 pr_info("%03x: ", i
);
8405 for (ii
= 0; ii
< 4; ii
++) {
8407 pbi
->lmem_ptr
[i
+ 3 - ii
]);
8409 if (((i
+ ii
) & 0xf) == 0)
8413 if ((udebug_pause_pos
== (debug_tag
& 0xffff)) &&
8414 (udebug_pause_decode_idx
== 0 ||
8415 udebug_pause_decode_idx
== pbi
->slice_idx
) &&
8416 (udebug_pause_val
== 0 ||
8417 udebug_pause_val
== READ_HREG(DEBUG_REG2
)))
8418 pbi
->ucode_pause_pos
= udebug_pause_pos
;
8419 else if (debug_tag
& 0x20000)
8420 pbi
->ucode_pause_pos
= 0xffffffff;
8421 if (pbi
->ucode_pause_pos
)
8422 reset_process_time(pbi
);
8424 WRITE_HREG(DEBUG_REG1
, 0);
8425 } else if (debug_tag
!= 0) {
8427 "dbg%x: %x lcu %x\n", READ_HREG(DEBUG_REG1
),
8428 READ_HREG(DEBUG_REG2
),
8429 READ_VREG(HEVC_PARSER_LCU_START
));
8430 if ((udebug_pause_pos
== (debug_tag
& 0xffff)) &&
8431 (udebug_pause_decode_idx
== 0 ||
8432 udebug_pause_decode_idx
== pbi
->slice_idx
) &&
8433 (udebug_pause_val
== 0 ||
8434 udebug_pause_val
== READ_HREG(DEBUG_REG2
)))
8435 pbi
->ucode_pause_pos
= udebug_pause_pos
;
8436 if (pbi
->ucode_pause_pos
)
8437 reset_process_time(pbi
);
8439 WRITE_HREG(DEBUG_REG1
, 0);
8440 pbi
->process_busy
= 0;
8444 #ifdef MULTI_INSTANCE_SUPPORT
8445 if (!pbi
->m_ins_flag
) {
8447 if (pbi
->error_flag
== 1) {
8448 pbi
->error_flag
= 2;
8449 pbi
->process_busy
= 0;
8451 } else if (pbi
->error_flag
== 3) {
8452 pbi
->process_busy
= 0;
8456 if (get_free_buf_count(pbi
) <= 0) {
8458 if (pbi->wait_buf == 0)
8459 pr_info("set wait_buf to 1\r\n");
8462 pbi
->process_busy
= 0;
8465 #ifdef MULTI_INSTANCE_SUPPORT
8468 if ((adapt_prob_status
& 0xff) == 0xfd) {
8469 /*VP9_REQ_ADAPT_PROB*/
8470 int pre_fc
= (cm
->frame_type
== KEY_FRAME
) ? 1 : 0;
8471 uint8_t *prev_prob_b
=
8472 ((uint8_t *)pbi
->prob_buffer_addr
) +
8473 ((adapt_prob_status
>> 8) * 0x1000);
8474 uint8_t *cur_prob_b
=
8475 ((uint8_t *)pbi
->prob_buffer_addr
) + 0x4000;
8476 uint8_t *count_b
= (uint8_t *)pbi
->count_buffer_addr
;
8477 #ifdef MULTI_INSTANCE_SUPPORT
8478 if (pbi
->m_ins_flag
)
8479 reset_process_time(pbi
);
8481 adapt_coef_probs(pbi
->pic_count
,
8482 (cm
->last_frame_type
== KEY_FRAME
),
8483 pre_fc
, (adapt_prob_status
>> 8),
8484 (unsigned int *)prev_prob_b
,
8485 (unsigned int *)cur_prob_b
, (unsigned int *)count_b
);
8487 memcpy(prev_prob_b
, cur_prob_b
, PROB_SIZE
);
8488 WRITE_VREG(VP9_ADAPT_PROB_REG
, 0);
8489 pbi
->pic_count
+= 1;
8490 #ifdef MULTI_INSTANCE_SUPPORT
8491 if (pbi
->m_ins_flag
)
8492 start_process_time(pbi
);
8495 /*return IRQ_HANDLED;*/
8497 return IRQ_WAKE_THREAD
;
8500 static void vp9_set_clk(struct work_struct
*work
)
8502 struct VP9Decoder_s
*pbi
= container_of(work
,
8503 struct VP9Decoder_s
, set_clk_work
);
8504 int fps
= 96000 / pbi
->frame_dur
;
8506 if (hevc_source_changed(VFORMAT_VP9
,
8507 frame_width
, frame_height
, fps
) > 0)
8508 pbi
->saved_resolution
= frame_width
*
8512 static void vvp9_put_timer_func(unsigned long arg
)
8514 struct VP9Decoder_s
*pbi
= (struct VP9Decoder_s
*)arg
;
8515 struct timer_list
*timer
= &pbi
->timer
;
8517 unsigned int buf_level
;
8519 enum receviver_start_e state
= RECEIVER_INACTIVE
;
8521 if (pbi
->m_ins_flag
) {
8522 if (hw_to_vdec(pbi
)->next_status
8523 == VDEC_STATUS_DISCONNECTED
) {
8524 #ifdef SUPPORT_FB_DECODING
8528 pbi
->dec_s1_result
= DEC_S1_RESULT_FORCE_EXIT
;
8529 vdec_schedule_work(&pbi
->s1_work
);
8531 pbi
->dec_result
= DEC_RESULT_FORCE_EXIT
;
8532 vdec_schedule_work(&pbi
->work
);
8534 "vdec requested to be disconnected\n");
8538 if (pbi
->init_flag
== 0) {
8539 if (pbi
->stat
& STAT_TIMER_ARM
) {
8540 timer
->expires
= jiffies
+ PUT_INTERVAL
;
8541 add_timer(&pbi
->timer
);
8545 if (pbi
->m_ins_flag
== 0) {
8546 if (vf_get_receiver(pbi
->provider_name
)) {
8548 vf_notify_receiver(pbi
->provider_name
,
8549 VFRAME_EVENT_PROVIDER_QUREY_STATE
,
8551 if ((state
== RECEIVER_STATE_NULL
)
8552 || (state
== RECEIVER_STATE_NONE
))
8553 state
= RECEIVER_INACTIVE
;
8555 state
= RECEIVER_INACTIVE
;
8557 empty_flag
= (READ_VREG(HEVC_PARSER_INT_STATUS
) >> 6) & 0x1;
8558 /* error watchdog */
8559 if (empty_flag
== 0) {
8560 /* decoder has input */
8561 if ((debug
& VP9_DEBUG_DIS_LOC_ERROR_PROC
) == 0) {
8563 buf_level
= READ_VREG(HEVC_STREAM_LEVEL
);
8564 /* receiver has no buffer to recycle */
8565 if ((state
== RECEIVER_INACTIVE
) &&
8566 (kfifo_is_empty(&pbi
->display_q
) &&
8570 (HEVC_ASSIST_MBOX0_IRQ_REG
,
8575 if ((debug
& VP9_DEBUG_DIS_SYS_ERROR_PROC
) == 0) {
8576 /* receiver has no buffer to recycle */
8577 /*if ((state == RECEIVER_INACTIVE) &&
8578 * (kfifo_is_empty(&pbi->display_q))) {
8579 *pr_info("vp9 something error,need reset\n");
8585 #ifdef MULTI_INSTANCE_SUPPORT
8588 (decode_timeout_val
> 0) &&
8589 (pbi
->start_process_time
> 0) &&
8590 ((1000 * (jiffies
- pbi
->start_process_time
) / HZ
)
8591 > decode_timeout_val
)
8593 int current_lcu_idx
=
8594 READ_VREG(HEVC_PARSER_LCU_START
)
8596 if (pbi
->last_lcu_idx
== current_lcu_idx
) {
8597 if (pbi
->decode_timeout_count
> 0)
8598 pbi
->decode_timeout_count
--;
8599 if (pbi
->decode_timeout_count
== 0) {
8600 if (input_frame_based(
8602 (READ_VREG(HEVC_STREAM_LEVEL
) > 0x200))
8603 timeout_process(pbi
);
8606 "timeout & empty, again\n");
8607 dec_again_process(pbi
);
8611 start_process_time(pbi
);
8612 pbi
->last_lcu_idx
= current_lcu_idx
;
8618 if ((pbi
->ucode_pause_pos
!= 0) &&
8619 (pbi
->ucode_pause_pos
!= 0xffffffff) &&
8620 udebug_pause_pos
!= pbi
->ucode_pause_pos
) {
8621 pbi
->ucode_pause_pos
= 0;
8622 WRITE_HREG(DEBUG_REG1
, 0);
8624 #ifdef MULTI_INSTANCE_SUPPORT
8625 if (debug
& VP9_DEBUG_FORCE_SEND_AGAIN
) {
8627 "Force Send Again\r\n");
8628 debug
&= ~VP9_DEBUG_FORCE_SEND_AGAIN
;
8629 reset_process_time(pbi
);
8630 pbi
->dec_result
= DEC_RESULT_AGAIN
;
8631 if (pbi
->process_state
==
8632 PROC_STATE_DECODESLICE
) {
8633 if (pbi
->mmu_enable
)
8634 vp9_recycle_mmu_buf(pbi
);
8635 pbi
->process_state
=
8636 PROC_STATE_SENDAGAIN
;
8640 vdec_schedule_work(&pbi
->work
);
8643 if (debug
& VP9_DEBUG_DUMP_DATA
) {
8644 debug
&= ~VP9_DEBUG_DUMP_DATA
;
8646 "%s: chunk size 0x%x off 0x%x sum 0x%x\n",
8650 get_data_check_sum(pbi
, pbi
->chunk
->size
)
8652 dump_data(pbi
, pbi
->chunk
->size
);
8655 if (debug
& VP9_DEBUG_DUMP_PIC_LIST
) {
8657 debug
&= ~VP9_DEBUG_DUMP_PIC_LIST
;
8659 if (debug
& VP9_DEBUG_TRIG_SLICE_SEGMENT_PROC
) {
8660 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG
, 0x1);
8661 debug
&= ~VP9_DEBUG_TRIG_SLICE_SEGMENT_PROC
;
8663 /*if (debug & VP9_DEBUG_HW_RESET) {
8668 WRITE_VREG(radr
, rval
);
8669 pr_info("WRITE_VREG(%x,%x)\n", radr
, rval
);
8671 pr_info("READ_VREG(%x)=%x\n", radr
, READ_VREG(radr
));
8675 if (pop_shorts
!= 0) {
8679 pr_info("pop stream 0x%x shorts\r\n", pop_shorts
);
8680 for (i
= 0; i
< pop_shorts
; i
++) {
8682 (READ_HREG(HEVC_SHIFTED_DATA
) >> 16);
8683 WRITE_HREG(HEVC_SHIFT_COMMAND
,
8686 pr_info("%04x:", i
);
8687 pr_info("%04x ", data
);
8688 if (((i
+ 1) & 0xf) == 0)
8692 pr_info("\r\nsum = %x\r\n", sum
);
8699 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXBB
&&
8700 get_double_write_mode(pbi
) == 0) {
8702 READ_VCBUS_REG(AFBC_BODY_BADDR
) << 4;
8704 struct canvas_s cur_canvas
;
8706 canvas_read((READ_VCBUS_REG(VD1_IF0_CANVAS0
)
8707 & 0xff), &cur_canvas
);
8708 disp_laddr
= cur_canvas
.addr
;
8710 pr_info("current displayed buffer address %x\r\n",
8715 /*don't changed at start.*/
8716 if (pbi
->get_frame_dur
&& pbi
->show_frame_num
> 60 &&
8717 pbi
->frame_dur
> 0 && pbi
->saved_resolution
!=
8718 frame_width
* frame_height
*
8719 (96000 / pbi
->frame_dur
))
8720 vdec_schedule_work(&pbi
->set_clk_work
);
8722 timer
->expires
= jiffies
+ PUT_INTERVAL
;
8727 int vvp9_dec_status(struct vdec_s
*vdec
, struct vdec_info
*vstatus
)
8729 struct VP9Decoder_s
*vp9
=
8730 (struct VP9Decoder_s
*)vdec
->private;
8735 vstatus
->frame_width
= frame_width
;
8736 vstatus
->frame_height
= frame_height
;
8737 if (vp9
->frame_dur
!= 0)
8738 vstatus
->frame_rate
= 96000 / vp9
->frame_dur
;
8740 vstatus
->frame_rate
= -1;
8741 vstatus
->error_count
= 0;
8742 vstatus
->status
= vp9
->stat
| vp9
->fatal_error
;
8743 vstatus
->frame_dur
= vp9
->frame_dur
;
8744 #ifndef CONFIG_AMLOGIC_MEDIA_MULTI_DEC
8745 vstatus
->bit_rate
= gvs
->bit_rate
;
8746 vstatus
->frame_data
= gvs
->frame_data
;
8747 vstatus
->total_data
= gvs
->total_data
;
8748 vstatus
->frame_count
= gvs
->frame_count
;
8749 vstatus
->error_frame_count
= gvs
->error_frame_count
;
8750 vstatus
->drop_frame_count
= gvs
->drop_frame_count
;
8751 vstatus
->total_data
= gvs
->total_data
;
8752 vstatus
->samp_cnt
= gvs
->samp_cnt
;
8753 vstatus
->offset
= gvs
->offset
;
8754 snprintf(vstatus
->vdec_name
, sizeof(vstatus
->vdec_name
),
8760 int vvp9_set_isreset(struct vdec_s
*vdec
, int isreset
)
8767 static void VP9_DECODE_INIT(void)
8769 /* enable vp9 clocks */
8770 WRITE_VREG(DOS_GCLK_EN3
, 0xffffffff);
8771 /* *************************************************************** */
8773 /* *************************************************************** */
8775 WRITE_VREG(AO_RTI_GEN_PWR_SLEEP0
,
8776 READ_VREG(AO_RTI_GEN_PWR_SLEEP0
) & (~(0x3 << 6)));
8777 WRITE_VREG(DOS_MEM_PD_HEVC
, 0x0);
8778 WRITE_VREG(DOS_SW_RESET3
, READ_VREG(DOS_SW_RESET3
) | (0x3ffff << 2));
8779 WRITE_VREG(DOS_SW_RESET3
, READ_VREG(DOS_SW_RESET3
) & (~(0x3ffff << 2)));
8780 /* remove isolations */
8781 WRITE_VREG(AO_RTI_GEN_PWR_ISO0
,
8782 READ_VREG(AO_RTI_GEN_PWR_ISO0
) & (~(0x3 << 10)));
8787 static void vvp9_prot_init(struct VP9Decoder_s
*pbi
, u32 mask
)
8789 unsigned int data32
;
8790 /* VP9_DECODE_INIT(); */
8791 vp9_config_work_space_hw(pbi
, mask
);
8792 if (mask
& HW_MASK_BACK
)
8793 init_pic_list_hw(pbi
);
8795 vp9_init_decoder_hw(pbi
, mask
);
8797 #ifdef VP9_LPF_LVL_UPDATE
8798 if (mask
& HW_MASK_BACK
)
8799 vp9_loop_filter_init(pbi
);
8802 if ((mask
& HW_MASK_FRONT
) == 0)
8805 if (debug
& VP9_DEBUG_BUFMGR_MORE
)
8806 pr_info("%s\n", __func__
);
8807 data32
= READ_VREG(HEVC_STREAM_CONTROL
);
8809 (1 << 0)/*stream_fetch_enable*/
8811 WRITE_VREG(HEVC_STREAM_CONTROL
, data32
);
8813 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A
) {
8814 if (debug
& VP9_DEBUG_BUFMGR
)
8815 pr_info("[test.c] Config STREAM_FIFO_CTL\n");
8816 data32
= READ_VREG(HEVC_STREAM_FIFO_CTL
);
8818 (1 << 29) // stream_fifo_hole
8820 WRITE_VREG(HEVC_STREAM_FIFO_CTL
, data32
);
8823 data32
= READ_VREG(HEVC_SHIFT_STARTCODE
);
8824 if (data32
!= 0x00000100) {
8825 pr_info("vp9 prot init error %d\n", __LINE__
);
8828 data32
= READ_VREG(HEVC_SHIFT_EMULATECODE
);
8829 if (data32
!= 0x00000300) {
8830 pr_info("vp9 prot init error %d\n", __LINE__
);
8833 WRITE_VREG(HEVC_SHIFT_STARTCODE
, 0x12345678);
8834 WRITE_VREG(HEVC_SHIFT_EMULATECODE
, 0x9abcdef0);
8835 data32
= READ_VREG(HEVC_SHIFT_STARTCODE
);
8836 if (data32
!= 0x12345678) {
8837 pr_info("vp9 prot init error %d\n", __LINE__
);
8840 data32
= READ_VREG(HEVC_SHIFT_EMULATECODE
);
8841 if (data32
!= 0x9abcdef0) {
8842 pr_info("vp9 prot init error %d\n", __LINE__
);
8846 WRITE_VREG(HEVC_SHIFT_STARTCODE
, 0x000000001);
8847 WRITE_VREG(HEVC_SHIFT_EMULATECODE
, 0x00000300);
8852 WRITE_VREG(HEVC_WAIT_FLAG
, 1);
8854 /* WRITE_VREG(HEVC_MPSR, 1); */
8856 /* clear mailbox interrupt */
8857 WRITE_VREG(HEVC_ASSIST_MBOX0_CLR_REG
, 1);
8859 /* enable mailbox interrupt */
8860 WRITE_VREG(HEVC_ASSIST_MBOX0_MASK
, 1);
8862 /* disable PSCALE for hardware sharing */
8863 WRITE_VREG(HEVC_PSCALE_CTRL
, 0);
8865 WRITE_VREG(DEBUG_REG1
, 0x0);
8866 /*check vps/sps/pps/i-slice in ucode*/
8867 WRITE_VREG(NAL_SEARCH_CTL
, 0x8);
8869 WRITE_VREG(DECODE_STOP_POS
, udebug_flag
);
8870 #ifdef SUPPORT_FB_DECODING
8871 #ifndef FB_DECODING_TEST_SCHEDULE
8872 if (pbi
->used_stage_buf_num
> 0) {
8873 if (mask
& HW_MASK_FRONT
) {
8875 HEVC_ASSIST_HED_FB_W_CTL
);
8877 (1 << 0) /*hed_fb_wr_en*/
8879 WRITE_VREG(HEVC_ASSIST_HED_FB_W_CTL
,
8882 if (mask
& HW_MASK_BACK
) {
8884 HEVC_ASSIST_HED_FB_R_CTL
);
8885 while (data32
& (1 << 7)) {
8888 HEVC_ASSIST_HED_FB_R_CTL
);
8890 data32
&= (~(0x1 << 0));
8891 /*hed_fb_rd_addr_auto_rd*/
8892 data32
&= (~(0x1 << 1));
8893 /*rd_id = 0, hed_rd_map_auto_halt_num,
8894 after wr 2 ready, then start reading*/
8895 data32
|= (0x2 << 16);
8896 WRITE_VREG(HEVC_ASSIST_HED_FB_R_CTL
,
8899 data32
|= (0x1 << 11); /*hed_rd_map_auto_halt_en*/
8900 data32
|= (0x1 << 1); /*hed_fb_rd_addr_auto_rd*/
8901 data32
|= (0x1 << 0); /*hed_fb_rd_en*/
8902 WRITE_VREG(HEVC_ASSIST_HED_FB_R_CTL
,
8911 static int vvp9_local_init(struct VP9Decoder_s
*pbi
)
8916 if (alloc_lf_buf(pbi
) < 0)
8919 pbi
->gvs
= vzalloc(sizeof(struct vdec_info
));
8920 if (NULL
== pbi
->gvs
) {
8921 pr_info("the struct of vdec status malloc failed.\n");
8925 pbi
->pts_missed
= 0;
8928 pbi
->new_frame_displayed
= 0;
8929 pbi
->last_put_idx
= -1;
8930 pbi
->saved_resolution
= 0;
8931 pbi
->get_frame_dur
= false;
8932 on_no_keyframe_skiped
= 0;
8933 pbi
->duration_from_pts_done
= 0;
8934 pbi
->vp9_first_pts_ready
= 0;
8935 pbi
->frame_cnt_window
= 0;
8936 width
= pbi
->vvp9_amstream_dec_info
.width
;
8937 height
= pbi
->vvp9_amstream_dec_info
.height
;
8939 (pbi
->vvp9_amstream_dec_info
.rate
==
8940 0) ? 3200 : pbi
->vvp9_amstream_dec_info
.rate
;
8941 if (width
&& height
)
8942 pbi
->frame_ar
= height
* 0x100 / width
;
8946 pr_info("vp9: ver (%d,%d) decinfo: %dx%d rate=%d\n", vp9_version
,
8947 0, width
, height
, pbi
->frame_dur
);
8949 if (pbi
->frame_dur
== 0)
8950 pbi
->frame_dur
= 96000 / 24;
8952 INIT_KFIFO(pbi
->display_q
);
8953 INIT_KFIFO(pbi
->newframe_q
);
8956 for (i
= 0; i
< VF_POOL_SIZE
; i
++) {
8957 const struct vframe_s
*vf
= &pbi
->vfpool
[i
];
8959 pbi
->vfpool
[i
].index
= -1;
8960 kfifo_put(&pbi
->newframe_q
, vf
);
8964 ret
= vp9_local_init(pbi
);
8966 if (!pbi
->pts_unstable
) {
8968 (pbi
->vvp9_amstream_dec_info
.rate
== 0)?1:0;
8969 pr_info("set pts unstable\n");
8976 #ifdef MULTI_INSTANCE_SUPPORT
8977 static s32
vvp9_init(struct vdec_s
*vdec
)
8979 struct VP9Decoder_s
*pbi
= (struct VP9Decoder_s
*)vdec
->private;
8981 static s32
vvp9_init(struct VP9Decoder_s
*pbi
)
8985 int fw_size
= 0x1000 * 16;
8986 struct firmware_s
*fw
= NULL
;
8988 pbi
->stat
|= STAT_TIMER_INIT
;
8990 if (vvp9_local_init(pbi
) < 0)
8993 fw
= vmalloc(sizeof(struct firmware_s
) + fw_size
);
8994 if (IS_ERR_OR_NULL(fw
))
8997 if (get_firmware_data(VIDEO_DEC_VP9_MMU
, fw
->data
) < 0) {
8998 pr_err("get firmware fail.\n");
9005 INIT_WORK(&pbi
->set_clk_work
, vp9_set_clk
);
9006 init_timer(&pbi
->timer
);
9008 #ifdef MULTI_INSTANCE_SUPPORT
9009 if (pbi
->m_ins_flag
) {
9010 pbi
->timer
.data
= (ulong
) pbi
;
9011 pbi
->timer
.function
= vvp9_put_timer_func
;
9012 pbi
->timer
.expires
= jiffies
+ PUT_INTERVAL
;
9014 /*add_timer(&pbi->timer);
9016 pbi->stat |= STAT_TIMER_ARM;
9017 pbi->stat |= STAT_ISR_REG;*/
9019 INIT_WORK(&pbi
->work
, vp9_work
);
9020 #ifdef SUPPORT_FB_DECODING
9021 if (pbi
->used_stage_buf_num
> 0)
9022 INIT_WORK(&pbi
->s1_work
, vp9_s1_work
);
9026 /* picture list init.*/
9027 pbi
->dec_result
= DEC_INIT_PICLIST
;
9028 vdec_schedule_work(&pbi
->work
);
9037 ret
= amhevc_loadmc_ex(VFORMAT_VP9
, NULL
, fw
->data
);
9041 pr_err("VP9: the %s fw loading failed, err: %x\n",
9042 tee_enabled() ? "TEE" : "local", ret
);
9048 pbi
->stat
|= STAT_MC_LOAD
;
9050 /* enable AMRISC side protocol */
9051 vvp9_prot_init(pbi
, HW_MASK_FRONT
| HW_MASK_BACK
);
9053 if (vdec_request_threaded_irq(VDEC_IRQ_0
,
9056 IRQF_ONESHOT
,/*run thread on this irq disabled*/
9057 "vvp9-irq", (void *)pbi
)) {
9058 pr_info("vvp9 irq register error.\n");
9063 pbi
->stat
|= STAT_ISR_REG
;
9065 pbi
->provider_name
= PROVIDER_NAME
;
9066 #ifdef MULTI_INSTANCE_SUPPORT
9067 vf_provider_init(&vvp9_vf_prov
, PROVIDER_NAME
,
9068 &vvp9_vf_provider
, pbi
);
9069 vf_reg_provider(&vvp9_vf_prov
);
9070 vf_notify_receiver(PROVIDER_NAME
, VFRAME_EVENT_PROVIDER_START
, NULL
);
9071 if (pbi
->frame_dur
!= 0) {
9073 vf_notify_receiver(pbi
->provider_name
,
9074 VFRAME_EVENT_PROVIDER_FR_HINT
,
9076 ((unsigned long)pbi
->frame_dur
));
9079 vf_provider_init(&vvp9_vf_prov
, PROVIDER_NAME
, &vvp9_vf_provider
,
9081 vf_reg_provider(&vvp9_vf_prov
);
9082 vf_notify_receiver(PROVIDER_NAME
, VFRAME_EVENT_PROVIDER_START
, NULL
);
9084 vf_notify_receiver(PROVIDER_NAME
, VFRAME_EVENT_PROVIDER_FR_HINT
,
9085 (void *)((unsigned long)pbi
->frame_dur
));
9087 pbi
->stat
|= STAT_VF_HOOK
;
9089 pbi
->timer
.data
= (ulong
)pbi
;
9090 pbi
->timer
.function
= vvp9_put_timer_func
;
9091 pbi
->timer
.expires
= jiffies
+ PUT_INTERVAL
;
9093 pbi
->stat
|= STAT_VDEC_RUN
;
9095 add_timer(&pbi
->timer
);
9097 pbi
->stat
|= STAT_TIMER_ARM
;
9102 pbi
->process_busy
= 0;
9103 pr_info("%d, vvp9_init, RP=0x%x\n",
9104 __LINE__
, READ_VREG(HEVC_STREAM_RD_PTR
));
9108 static int vmvp9_stop(struct VP9Decoder_s
*pbi
)
9112 if (pbi
->stat
& STAT_VDEC_RUN
) {
9114 pbi
->stat
&= ~STAT_VDEC_RUN
;
9116 if (pbi
->stat
& STAT_ISR_REG
) {
9117 vdec_free_irq(VDEC_IRQ_0
, (void *)pbi
);
9118 pbi
->stat
&= ~STAT_ISR_REG
;
9120 if (pbi
->stat
& STAT_TIMER_ARM
) {
9121 del_timer_sync(&pbi
->timer
);
9122 pbi
->stat
&= ~STAT_TIMER_ARM
;
9125 if (pbi
->stat
& STAT_VF_HOOK
) {
9127 vf_notify_receiver(pbi
->provider_name
,
9128 VFRAME_EVENT_PROVIDER_FR_END_HINT
,
9131 vf_unreg_provider(&vvp9_vf_prov
);
9132 pbi
->stat
&= ~STAT_VF_HOOK
;
9134 vp9_local_uninit(pbi
);
9135 reset_process_time(pbi
);
9136 cancel_work_sync(&pbi
->work
);
9137 #ifdef SUPPORT_FB_DECODING
9138 if (pbi
->used_stage_buf_num
> 0)
9139 cancel_work_sync(&pbi
->s1_work
);
9141 cancel_work_sync(&pbi
->set_clk_work
);
9142 uninit_mmu_buffers(pbi
);
9149 static int vvp9_stop(struct VP9Decoder_s
*pbi
)
9153 pbi
->first_sc_checked
= 0;
9154 if (pbi
->stat
& STAT_VDEC_RUN
) {
9156 pbi
->stat
&= ~STAT_VDEC_RUN
;
9159 if (pbi
->stat
& STAT_ISR_REG
) {
9160 #ifdef MULTI_INSTANCE_SUPPORT
9161 if (!pbi
->m_ins_flag
)
9163 WRITE_VREG(HEVC_ASSIST_MBOX0_MASK
, 0);
9164 vdec_free_irq(VDEC_IRQ_0
, (void *)pbi
);
9165 pbi
->stat
&= ~STAT_ISR_REG
;
9168 if (pbi
->stat
& STAT_TIMER_ARM
) {
9169 del_timer_sync(&pbi
->timer
);
9170 pbi
->stat
&= ~STAT_TIMER_ARM
;
9173 if (pbi
->stat
& STAT_VF_HOOK
) {
9175 vf_notify_receiver(pbi
->provider_name
,
9176 VFRAME_EVENT_PROVIDER_FR_END_HINT
,
9179 vf_unreg_provider(&vvp9_vf_prov
);
9180 pbi
->stat
&= ~STAT_VF_HOOK
;
9182 vp9_local_uninit(pbi
);
9184 cancel_work_sync(&pbi
->set_clk_work
);
9185 #ifdef MULTI_INSTANCE_SUPPORT
9186 if (pbi
->m_ins_flag
) {
9187 #ifdef SUPPORT_FB_DECODING
9188 if (pbi
->used_stage_buf_num
> 0)
9189 cancel_work_sync(&pbi
->s1_work
);
9191 cancel_work_sync(&pbi
->work
);
9197 uninit_mmu_buffers(pbi
);
9203 static int amvdec_vp9_mmu_init(struct VP9Decoder_s
*pbi
)
9205 int tvp_flag
= vdec_secure(hw_to_vdec(pbi
)) ?
9206 CODEC_MM_FLAGS_TVP
: 0;
9209 if ((pbi
->max_pic_w
* pbi
->max_pic_h
> 1280*736) &&
9210 (pbi
->max_pic_w
* pbi
->max_pic_h
<= 1920*1088)) {
9212 } else if ((pbi
->max_pic_w
* pbi
->max_pic_h
> 0) &&
9213 (pbi
->max_pic_w
* pbi
->max_pic_h
<= 1280*736)) {
9216 pbi
->need_cache_size
= buf_size
* SZ_1M
;
9217 pbi
->sc_start_time
= get_jiffies_64();
9218 if (pbi
->mmu_enable
&& ((pbi
->double_write_mode
& 0x10) == 0)) {
9219 pbi
->mmu_box
= decoder_mmu_box_alloc_box(DRIVER_NAME
,
9220 pbi
->index
, FRAME_BUFFERS
,
9221 pbi
->need_cache_size
,
9224 if (!pbi
->mmu_box
) {
9225 pr_err("vp9 alloc mmu box failed!!\n");
9229 pbi
->bmmu_box
= decoder_bmmu_box_alloc_box(
9232 MAX_BMMU_BUFFER_NUM
,
9234 CODEC_MM_FLAGS_CMA_CLEAR
|
9235 CODEC_MM_FLAGS_FOR_VDECODER
|
9237 if (!pbi
->bmmu_box
) {
9238 pr_err("vp9 alloc bmmu box failed!!\n");
9244 static struct VP9Decoder_s
*gHevc
;
9246 static int amvdec_vp9_probe(struct platform_device
*pdev
)
9248 struct vdec_s
*pdata
= *(struct vdec_s
**)pdev
->dev
.platform_data
;
9249 struct BUF_s BUF
[MAX_BUF_NUM
];
9250 struct VP9Decoder_s
*pbi
;
9252 #ifndef MULTI_INSTANCE_SUPPORT
9255 pr_debug("%s\n", __func__
);
9257 mutex_lock(&vvp9_mutex
);
9258 pbi
= vmalloc(sizeof(struct VP9Decoder_s
));
9260 pr_info("\namvdec_vp9 device data allocation failed\n");
9261 mutex_unlock(&vvp9_mutex
);
9266 memcpy(&BUF
[0], &pbi
->m_BUF
[0], sizeof(struct BUF_s
) * MAX_BUF_NUM
);
9267 memset(pbi
, 0, sizeof(struct VP9Decoder_s
));
9268 memcpy(&pbi
->m_BUF
[0], &BUF
[0], sizeof(struct BUF_s
) * MAX_BUF_NUM
);
9271 pbi
->first_sc_checked
= 0;
9272 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1
) {
9273 vp9_max_pic_w
= 8192;
9274 vp9_max_pic_h
= 4608;
9276 pbi
->max_pic_w
= vp9_max_pic_w
;
9277 pbi
->max_pic_h
= vp9_max_pic_h
;
9279 #ifdef MULTI_INSTANCE_SUPPORT
9281 pbi
->start_process_time
= 0;
9282 pbi
->timeout_num
= 0;
9284 pbi
->fatal_error
= 0;
9285 pbi
->show_frame_num
= 0;
9286 if (pdata
== NULL
) {
9287 pr_info("\namvdec_vp9 memory resource undefined.\n");
9289 mutex_unlock(&vvp9_mutex
);
9292 pbi
->m_ins_flag
= 0;
9293 #ifdef MULTI_INSTANCE_SUPPORT
9294 pbi
->platform_dev
= pdev
;
9295 platform_set_drvdata(pdev
, pdata
);
9297 pbi
->double_write_mode
= double_write_mode
;
9298 pbi
->mmu_enable
= 1;
9299 if (amvdec_vp9_mmu_init(pbi
) < 0) {
9301 mutex_unlock(&vvp9_mutex
);
9302 pr_err("vp9 alloc bmmu box failed!!\n");
9306 ret
= decoder_bmmu_box_alloc_buf_phy(pbi
->bmmu_box
, WORK_SPACE_BUF_ID
,
9307 work_buf_size
, DRIVER_NAME
, &pdata
->mem_start
);
9309 uninit_mmu_buffers(pbi
);
9311 mutex_unlock(&vvp9_mutex
);
9314 pbi
->buf_size
= work_buf_size
;
9316 #ifdef MULTI_INSTANCE_SUPPORT
9317 pbi
->buf_start
= pdata
->mem_start
;
9319 if (!pbi
->mmu_enable
)
9320 pbi
->mc_buf_spec
.buf_end
= pdata
->mem_start
+ pbi
->buf_size
;
9322 for (i
= 0; i
< WORK_BUF_SPEC_NUM
; i
++)
9323 amvvp9_workbuff_spec
[i
].start_adr
= pdata
->mem_start
;
9328 pr_info("===VP9 decoder mem resource 0x%lx size 0x%x\n",
9329 pdata
->mem_start
, pbi
->buf_size
);
9332 if (pdata
->sys_info
)
9333 pbi
->vvp9_amstream_dec_info
= *pdata
->sys_info
;
9335 pbi
->vvp9_amstream_dec_info
.width
= 0;
9336 pbi
->vvp9_amstream_dec_info
.height
= 0;
9337 pbi
->vvp9_amstream_dec_info
.rate
= 30;
9339 pbi
->no_head
= no_head
;
9340 #ifdef MULTI_INSTANCE_SUPPORT
9341 pbi
->cma_dev
= pdata
->cma_dev
;
9343 cma_dev
= pdata
->cma_dev
;
9346 #ifdef MULTI_INSTANCE_SUPPORT
9347 pdata
->private = pbi
;
9348 pdata
->dec_status
= vvp9_dec_status
;
9349 pdata
->set_isreset
= vvp9_set_isreset
;
9351 if (vvp9_init(pdata
) < 0) {
9353 if (vvp9_init(pbi
) < 0) {
9355 pr_info("\namvdec_vp9 init failed.\n");
9356 vp9_local_uninit(pbi
);
9357 uninit_mmu_buffers(pbi
);
9359 pdata
->dec_status
= NULL
;
9360 mutex_unlock(&vvp9_mutex
);
9363 /*set the max clk for smooth playing...*/
9364 hevc_source_changed(VFORMAT_VP9
,
9366 mutex_unlock(&vvp9_mutex
);
9371 static int amvdec_vp9_remove(struct platform_device
*pdev
)
9373 struct VP9Decoder_s
*pbi
= gHevc
;
9374 struct vdec_s
*vdec
= hw_to_vdec(pbi
);
9378 pr_info("amvdec_vp9_remove\n");
9380 mutex_lock(&vvp9_mutex
);
9384 hevc_source_changed(VFORMAT_VP9
, 0, 0, 0);
9386 if (vdec
->parallel_dec
== 1) {
9387 for (i
= 0; i
< FRAME_BUFFERS
; i
++) {
9388 vdec
->free_canvas_ex(pbi
->common
.buffer_pool
->
9389 frame_bufs
[i
].buf
.y_canvas_index
, vdec
->id
);
9390 vdec
->free_canvas_ex(pbi
->common
.buffer_pool
->
9391 frame_bufs
[i
].buf
.uv_canvas_index
, vdec
->id
);
9396 pr_info("pts missed %ld, pts hit %ld, duration %d\n",
9397 pbi
->pts_missed
, pbi
->pts_hit
, pbi
->frame_dur
);
9402 mutex_unlock(&vvp9_mutex
);
9407 /****************************************/
9409 static int vp9_suspend(struct device
*dev
)
9411 amhevc_suspend(to_platform_device(dev
), dev
->power
.power_state
);
9415 static int vp9_resume(struct device
*dev
)
9417 amhevc_resume(to_platform_device(dev
));
9421 static const struct dev_pm_ops vp9_pm_ops
= {
9422 SET_SYSTEM_SLEEP_PM_OPS(vp9_suspend
, vp9_resume
)
9426 static struct platform_driver amvdec_vp9_driver
= {
9427 .probe
= amvdec_vp9_probe
,
9428 .remove
= amvdec_vp9_remove
,
9430 .name
= DRIVER_NAME
,
9437 static struct codec_profile_t amvdec_vp9_profile
= {
9442 static struct codec_profile_t amvdec_vp9_profile_mult
;
9444 static unsigned char get_data_check_sum
9445 (struct VP9Decoder_s
*pbi
, int size
)
9451 if (!pbi
->chunk
->block
->is_mapped
)
9452 data
= codec_mm_vmap(pbi
->chunk
->block
->start
+
9453 pbi
->chunk
->offset
, size
);
9455 data
= ((u8
*)pbi
->chunk
->block
->start_virt
) +
9458 for (jj
= 0; jj
< size
; jj
++)
9461 if (!pbi
->chunk
->block
->is_mapped
)
9462 codec_mm_unmap_phyaddr(data
);
9466 static void dump_data(struct VP9Decoder_s
*pbi
, int size
)
9470 int padding_size
= pbi
->chunk
->offset
&
9471 (VDEC_FIFO_ALIGN
- 1);
9473 if (!pbi
->chunk
->block
->is_mapped
)
9474 data
= codec_mm_vmap(pbi
->chunk
->block
->start
+
9475 pbi
->chunk
->offset
, size
);
9477 data
= ((u8
*)pbi
->chunk
->block
->start_virt
) +
9480 vp9_print(pbi
, 0, "padding: ");
9481 for (jj
= padding_size
; jj
> 0; jj
--)
9484 "%02x ", *(data
- jj
));
9485 vp9_print_cont(pbi
, 0, "data adr %p\n",
9488 for (jj
= 0; jj
< size
; jj
++) {
9489 if ((jj
& 0xf) == 0)
9496 if (((jj
+ 1) & 0xf) == 0)
9505 if (!pbi
->chunk
->block
->is_mapped
)
9506 codec_mm_unmap_phyaddr(data
);
9509 static void vp9_work(struct work_struct
*work
)
9511 struct VP9Decoder_s
*pbi
= container_of(work
,
9512 struct VP9Decoder_s
, work
);
9513 struct vdec_s
*vdec
= hw_to_vdec(pbi
);
9514 /* finished decoding one frame or error,
9515 * notify vdec core to switch context
9517 vp9_print(pbi
, PRINT_FLAG_VDEC_DETAIL
,
9518 "%s dec_result %d %x %x %x\n",
9521 READ_VREG(HEVC_STREAM_LEVEL
),
9522 READ_VREG(HEVC_STREAM_WR_PTR
),
9523 READ_VREG(HEVC_STREAM_RD_PTR
));
9525 if (pbi
->dec_result
== DEC_INIT_PICLIST
) {
9527 pbi
->pic_list_init_done
= true;
9531 if (pbi
->dec_result
== DEC_V4L2_CONTINUE_DECODING
) {
9532 struct aml_vcodec_ctx
*ctx
=
9533 (struct aml_vcodec_ctx
*)(pbi
->v4l2_ctx
);
9535 if (ctx
->param_sets_from_ucode
) {
9536 reset_process_time(pbi
);
9537 if (wait_event_interruptible_timeout(ctx
->wq
,
9538 ctx
->v4l_codec_ready
,
9539 msecs_to_jiffies(500)) < 0)
9543 continue_decoding(pbi
);
9544 pbi
->postproc_done
= 0;
9545 pbi
->process_busy
= 0;
9550 if (((pbi
->dec_result
== DEC_RESULT_GET_DATA
) ||
9551 (pbi
->dec_result
== DEC_RESULT_GET_DATA_RETRY
))
9552 && (hw_to_vdec(pbi
)->next_status
!=
9553 VDEC_STATUS_DISCONNECTED
)) {
9554 if (!vdec_has_more_input(vdec
)) {
9555 pbi
->dec_result
= DEC_RESULT_EOS
;
9556 vdec_schedule_work(&pbi
->work
);
9560 if (pbi
->dec_result
== DEC_RESULT_GET_DATA
) {
9561 vp9_print(pbi
, PRINT_FLAG_VDEC_STATUS
,
9562 "%s DEC_RESULT_GET_DATA %x %x %x\n",
9564 READ_VREG(HEVC_STREAM_LEVEL
),
9565 READ_VREG(HEVC_STREAM_WR_PTR
),
9566 READ_VREG(HEVC_STREAM_RD_PTR
));
9567 vdec_vframe_dirty(vdec
, pbi
->chunk
);
9568 vdec_clean_input(vdec
);
9571 if (get_free_buf_count(pbi
) >=
9572 run_ready_min_buf_num
) {
9575 r
= vdec_prepare_input(vdec
, &pbi
->chunk
);
9577 pbi
->dec_result
= DEC_RESULT_GET_DATA_RETRY
;
9580 PRINT_FLAG_VDEC_DETAIL
,
9581 "amvdec_vh265: Insufficient data\n");
9583 vdec_schedule_work(&pbi
->work
);
9586 pbi
->dec_result
= DEC_RESULT_NONE
;
9587 vp9_print(pbi
, PRINT_FLAG_VDEC_STATUS
,
9588 "%s: chunk size 0x%x sum 0x%x\n",
9590 (debug
& PRINT_FLAG_VDEC_STATUS
) ?
9591 get_data_check_sum(pbi
, r
) : 0
9594 if (debug
& PRINT_FLAG_VDEC_DATA
)
9595 dump_data(pbi
, pbi
->chunk
->size
);
9597 decode_size
= pbi
->chunk
->size
+
9598 (pbi
->chunk
->offset
& (VDEC_FIFO_ALIGN
- 1));
9600 WRITE_VREG(HEVC_DECODE_SIZE
,
9601 READ_VREG(HEVC_DECODE_SIZE
) + decode_size
);
9603 vdec_enable_input(vdec
);
9605 WRITE_VREG(HEVC_DEC_STATUS_REG
, HEVC_ACTION_DONE
);
9607 start_process_time(pbi
);
9610 pbi
->dec_result
= DEC_RESULT_GET_DATA_RETRY
;
9612 vp9_print(pbi
, PRINT_FLAG_VDEC_DETAIL
,
9613 "amvdec_vh265: Insufficient data\n");
9615 vdec_schedule_work(&pbi
->work
);
9618 } else if (pbi
->dec_result
== DEC_RESULT_DONE
) {
9619 #ifdef SUPPORT_FB_DECODING
9620 if (pbi
->used_stage_buf_num
> 0) {
9621 #ifndef FB_DECODING_TEST_SCHEDULE
9622 if (!is_s2_decoding_finished(pbi
)) {
9623 vp9_print(pbi
, PRINT_FLAG_VDEC_DETAIL
,
9624 "s2 decoding not done, check again later\n");
9625 vdec_schedule_work(&pbi
->work
);
9629 if (mcrcc_cache_alg_flag
)
9633 /* if (!pbi->ctx_valid)
9634 pbi->ctx_valid = 1; */
9637 pbi
->process_state
= PROC_STATE_INIT
;
9638 decode_frame_count
[pbi
->index
] = pbi
->frame_count
;
9640 if (pbi
->mmu_enable
)
9642 (READ_VREG(HEVC_SAO_MMU_STATUS
) >> 16);
9643 vp9_print(pbi
, PRINT_FLAG_VDEC_STATUS
,
9644 "%s (===> %d) dec_result %d %x %x %x shiftbytes 0x%x decbytes 0x%x\n",
9648 READ_VREG(HEVC_STREAM_LEVEL
),
9649 READ_VREG(HEVC_STREAM_WR_PTR
),
9650 READ_VREG(HEVC_STREAM_RD_PTR
),
9651 READ_VREG(HEVC_SHIFT_BYTE_COUNT
),
9652 READ_VREG(HEVC_SHIFT_BYTE_COUNT
) -
9653 pbi
->start_shift_bytes
9655 vdec_vframe_dirty(hw_to_vdec(pbi
), pbi
->chunk
);
9656 } else if (pbi
->dec_result
== DEC_RESULT_AGAIN
) {
9658 stream base: stream buf empty or timeout
9659 frame base: vdec_prepare_input fail
9661 if (!vdec_has_more_input(vdec
)) {
9662 pbi
->dec_result
= DEC_RESULT_EOS
;
9663 vdec_schedule_work(&pbi
->work
);
9666 } else if (pbi
->dec_result
== DEC_RESULT_EOS
) {
9667 vp9_print(pbi
, PRINT_FLAG_VDEC_STATUS
,
9668 "%s: end of stream\n",
9671 vp9_bufmgr_postproc(pbi
);
9673 if (pbi
->is_used_v4l
)
9674 notify_v4l_eos(hw_to_vdec(pbi
));
9676 vdec_vframe_dirty(hw_to_vdec(pbi
), pbi
->chunk
);
9677 } else if (pbi
->dec_result
== DEC_RESULT_FORCE_EXIT
) {
9678 vp9_print(pbi
, PRINT_FLAG_VDEC_STATUS
,
9681 if (pbi
->stat
& STAT_VDEC_RUN
) {
9683 pbi
->stat
&= ~STAT_VDEC_RUN
;
9686 if (pbi
->stat
& STAT_ISR_REG
) {
9687 #ifdef MULTI_INSTANCE_SUPPORT
9688 if (!pbi
->m_ins_flag
)
9690 WRITE_VREG(HEVC_ASSIST_MBOX0_MASK
, 0);
9691 vdec_free_irq(VDEC_IRQ_0
, (void *)pbi
);
9692 pbi
->stat
&= ~STAT_ISR_REG
;
9695 if (pbi
->stat
& STAT_VDEC_RUN
) {
9697 pbi
->stat
&= ~STAT_VDEC_RUN
;
9700 if (pbi
->stat
& STAT_TIMER_ARM
) {
9701 del_timer_sync(&pbi
->timer
);
9702 pbi
->stat
&= ~STAT_TIMER_ARM
;
9704 /* mark itself has all HW resource released and input released */
9705 #ifdef SUPPORT_FB_DECODING
9706 if (pbi
->used_stage_buf_num
> 0)
9707 vdec_core_finish_run(hw_to_vdec(pbi
), CORE_MASK_HEVC_BACK
);
9709 vdec_core_finish_run(hw_to_vdec(pbi
), CORE_MASK_VDEC_1
9711 | CORE_MASK_HEVC_FRONT
9712 | CORE_MASK_HEVC_BACK
9715 if (vdec
->parallel_dec
== 1)
9716 vdec_core_finish_run(vdec
, CORE_MASK_HEVC
);
9718 vdec_core_finish_run(hw_to_vdec(pbi
), CORE_MASK_VDEC_1
9721 trigger_schedule(pbi
);
9724 static int vp9_hw_ctx_restore(struct VP9Decoder_s
*pbi
)
9727 #if (!defined SUPPORT_FB_DECODING)
9728 vvp9_prot_init(pbi
, HW_MASK_FRONT
| HW_MASK_BACK
);
9729 #elif (defined FB_DECODING_TEST_SCHEDULE)
9730 vvp9_prot_init(pbi
, HW_MASK_FRONT
| HW_MASK_BACK
);
9732 if (pbi
->used_stage_buf_num
> 0)
9733 vvp9_prot_init(pbi
, HW_MASK_FRONT
);
9735 vvp9_prot_init(pbi
, HW_MASK_FRONT
| HW_MASK_BACK
);
9739 static unsigned long run_ready(struct vdec_s
*vdec
, unsigned long mask
)
9741 struct VP9Decoder_s
*pbi
=
9742 (struct VP9Decoder_s
*)vdec
->private;
9743 int tvp
= vdec_secure(hw_to_vdec(pbi
)) ?
9744 CODEC_MM_FLAGS_TVP
: 0;
9745 unsigned long ret
= 0;
9747 if (!(pbi
->pic_list_init_done
&& pbi
->pic_list_init_done2
) || pbi
->eos
)
9749 if (!pbi
->first_sc_checked
&& pbi
->mmu_enable
) {
9750 int size
= decoder_mmu_box_sc_check(pbi
->mmu_box
, tvp
);
9751 pbi
->first_sc_checked
= 1;
9752 vp9_print(pbi
, 0, "vp9 cached=%d need_size=%d speed= %d ms\n",
9753 size
, (pbi
->need_cache_size
>> PAGE_SHIFT
),
9754 (int)(get_jiffies_64() - pbi
->sc_start_time
) * 1000/HZ
);
9757 #ifdef SUPPORT_FB_DECODING
9758 if (pbi
->used_stage_buf_num
> 0) {
9759 if (mask
& CORE_MASK_HEVC_FRONT
) {
9760 if (get_free_stage_buf_num(pbi
) > 0
9761 && mv_buf_available(pbi
))
9762 ret
|= CORE_MASK_HEVC_FRONT
;
9764 if (mask
& CORE_MASK_HEVC_BACK
) {
9765 if (s2_buf_available(pbi
) &&
9766 (get_free_buf_count(pbi
) >=
9767 run_ready_min_buf_num
)) {
9768 ret
|= CORE_MASK_HEVC_BACK
;
9769 pbi
->back_not_run_ready
= 0;
9771 pbi
->back_not_run_ready
= 1;
9773 if (get_free_buf_count(pbi
) <
9774 run_ready_min_buf_num
)
9778 } else if (get_free_buf_count(pbi
) >=
9779 run_ready_min_buf_num
)
9780 ret
= CORE_MASK_VDEC_1
| CORE_MASK_HEVC
9781 | CORE_MASK_HEVC_FRONT
9782 | CORE_MASK_HEVC_BACK
;
9784 if (ret
& CORE_MASK_HEVC_FRONT
)
9785 not_run_ready
[pbi
->index
] = 0;
9787 not_run_ready
[pbi
->index
]++;
9789 if (ret
& CORE_MASK_HEVC_BACK
)
9790 not_run2_ready
[pbi
->index
] = 0;
9792 not_run2_ready
[pbi
->index
]++;
9795 PRINT_FLAG_VDEC_DETAIL
, "%s mask %lx=>%lx (%d %d %d %d)\r\n",
9796 __func__
, mask
, ret
,
9797 get_free_stage_buf_num(pbi
),
9798 mv_buf_available(pbi
),
9799 s2_buf_available(pbi
),
9800 get_free_buf_count(pbi
)
9806 if (get_free_buf_count(pbi
) >=
9807 run_ready_min_buf_num
) {
9808 if (vdec
->parallel_dec
== 1)
9809 ret
= CORE_MASK_HEVC
;
9811 ret
= CORE_MASK_VDEC_1
| CORE_MASK_HEVC
;
9814 if (pbi
->is_used_v4l
) {
9815 struct aml_vcodec_ctx
*ctx
=
9816 (struct aml_vcodec_ctx
*)(pbi
->v4l2_ctx
);
9818 if (ctx
->param_sets_from_ucode
&&
9819 !ctx
->v4l_codec_ready
&&
9820 pbi
->v4l_params_parsed
) {
9821 ret
= 0; /*the params has parsed.*/
9822 } else if (!ctx
->v4l_codec_dpb_ready
) {
9823 if (v4l2_m2m_num_dst_bufs_ready(ctx
->m2m_ctx
) <
9824 run_ready_min_buf_num
)
9830 not_run_ready
[pbi
->index
] = 0;
9832 not_run_ready
[pbi
->index
]++;
9835 PRINT_FLAG_VDEC_DETAIL
, "%s mask %lx=>%lx\r\n",
9836 __func__
, mask
, ret
);
9841 static void run_front(struct vdec_s
*vdec
)
9843 struct VP9Decoder_s
*pbi
=
9844 (struct VP9Decoder_s
*)vdec
->private;
9847 run_count
[pbi
->index
]++;
9848 /* pbi->chunk = vdec_prepare_input(vdec); */
9849 #if (!defined SUPPORT_FB_DECODING)
9850 hevc_reset_core(vdec
);
9851 #elif (defined FB_DECODING_TEST_SCHEDULE)
9852 hevc_reset_core(vdec
);
9854 if (pbi
->used_stage_buf_num
> 0)
9855 fb_reset_core(vdec
, HW_MASK_FRONT
);
9857 hevc_reset_core(vdec
);
9860 size
= vdec_prepare_input(vdec
, &pbi
->chunk
);
9862 input_empty
[pbi
->index
]++;
9864 pbi
->dec_result
= DEC_RESULT_AGAIN
;
9866 vp9_print(pbi
, PRINT_FLAG_VDEC_DETAIL
,
9867 "ammvdec_vh265: Insufficient data\n");
9869 vdec_schedule_work(&pbi
->work
);
9873 input_empty
[pbi
->index
] = 0;
9874 pbi
->dec_result
= DEC_RESULT_NONE
;
9875 pbi
->start_shift_bytes
= READ_VREG(HEVC_SHIFT_BYTE_COUNT
);
9877 if (debug
& PRINT_FLAG_VDEC_STATUS
) {
9880 "%s (%d): size 0x%x (0x%x 0x%x) sum 0x%x (%x %x %x %x %x) bytes 0x%x",
9882 pbi
->frame_count
, size
,
9883 pbi
->chunk
? pbi
->chunk
->size
: 0,
9884 pbi
->chunk
? pbi
->chunk
->offset
: 0,
9885 pbi
->chunk
? ((vdec_frame_based(vdec
) &&
9886 (debug
& PRINT_FLAG_VDEC_STATUS
)) ?
9887 get_data_check_sum(pbi
, size
) : 0) : 0,
9888 READ_VREG(HEVC_STREAM_START_ADDR
),
9889 READ_VREG(HEVC_STREAM_END_ADDR
),
9890 READ_VREG(HEVC_STREAM_LEVEL
),
9891 READ_VREG(HEVC_STREAM_WR_PTR
),
9892 READ_VREG(HEVC_STREAM_RD_PTR
),
9893 pbi
->start_shift_bytes
);
9894 if (vdec_frame_based(vdec
) && pbi
->chunk
) {
9897 if (!pbi
->chunk
->block
->is_mapped
)
9898 data
= codec_mm_vmap(pbi
->chunk
->block
->start
+
9899 pbi
->chunk
->offset
, 8);
9901 data
= ((u8
*)pbi
->chunk
->block
->start_virt
) +
9904 vp9_print_cont(pbi
, 0, "data adr %p:",
9906 for (ii
= 0; ii
< 8; ii
++)
9907 vp9_print_cont(pbi
, 0, "%02x ",
9910 if (!pbi
->chunk
->block
->is_mapped
)
9911 codec_mm_unmap_phyaddr(data
);
9913 vp9_print_cont(pbi
, 0, "\r\n");
9915 if (vdec
->mc_loaded
) {
9916 /*firmware have load before,
9917 and not changes to another.
9921 ret
= amhevc_loadmc_ex(VFORMAT_VP9
, NULL
, pbi
->fw
->data
);
9924 vp9_print(pbi
, PRINT_FLAG_ERROR
,
9925 "VP9: the %s fw loading failed, err: %x\n",
9926 tee_enabled() ? "TEE" : "local", ret
);
9927 pbi
->dec_result
= DEC_RESULT_FORCE_EXIT
;
9928 vdec_schedule_work(&pbi
->work
);
9931 vdec
->mc_loaded
= 1;
9932 vdec
->mc_type
= VFORMAT_VP9
;
9935 if (vp9_hw_ctx_restore(pbi
) < 0) {
9936 vdec_schedule_work(&pbi
->work
);
9940 vdec_enable_input(vdec
);
9942 WRITE_VREG(HEVC_DEC_STATUS_REG
, HEVC_ACTION_DONE
);
9944 if (vdec_frame_based(vdec
)) {
9945 if (debug
& PRINT_FLAG_VDEC_DATA
)
9946 dump_data(pbi
, pbi
->chunk
->size
);
9948 WRITE_VREG(HEVC_SHIFT_BYTE_COUNT
, 0);
9949 size
= pbi
->chunk
->size
+
9950 (pbi
->chunk
->offset
& (VDEC_FIFO_ALIGN
- 1));
9952 WRITE_VREG(HEVC_DECODE_SIZE
, size
);
9953 WRITE_VREG(HEVC_DECODE_COUNT
, pbi
->slice_idx
);
9956 vp9_print(pbi
, PRINT_FLAG_VDEC_DETAIL
,
9957 "%s: start hevc (%x %x %x)\n",
9959 READ_VREG(HEVC_DEC_STATUS_REG
),
9960 READ_VREG(HEVC_MPC_E
),
9961 READ_VREG(HEVC_MPSR
));
9963 start_process_time(pbi
);
9964 mod_timer(&pbi
->timer
, jiffies
);
9965 pbi
->stat
|= STAT_TIMER_ARM
;
9966 pbi
->stat
|= STAT_ISR_REG
;
9968 pbi
->stat
|= STAT_VDEC_RUN
;
9971 #ifdef SUPPORT_FB_DECODING
9972 static void mpred_process(struct VP9Decoder_s
*pbi
)
9974 union param_u
*params
= &pbi
->s1_param
;
9975 unsigned char use_prev_frame_mvs
=
9976 !params
->p
.error_resilient_mode
&&
9977 params
->p
.width
== pbi
->s1_width
&&
9978 params
->p
.height
== pbi
->s1_height
&&
9979 !pbi
->s1_intra_only
&&
9980 pbi
->s1_last_show_frame
&&
9981 (pbi
->s1_frame_type
!= KEY_FRAME
);
9982 pbi
->s1_width
= params
->p
.width
;
9983 pbi
->s1_height
= params
->p
.height
;
9984 pbi
->s1_frame_type
= params
->p
.frame_type
;
9985 pbi
->s1_intra_only
=
9986 (params
->p
.show_frame
||
9987 params
->p
.show_existing_frame
)
9988 ? 0 : params
->p
.intra_only
;
9989 if ((pbi
->s1_frame_type
!= KEY_FRAME
)
9990 && (!pbi
->s1_intra_only
)) {
9991 unsigned int data32
;
9992 int mpred_mv_rd_end_addr
;
9994 mpred_mv_rd_end_addr
=
9995 pbi
->s1_mpred_mv_wr_start_addr_pre
9996 + (pbi
->lcu_total
* MV_MEM_UNIT
);
9998 WRITE_VREG(HEVC_MPRED_CTRL3
, 0x24122412);
9999 WRITE_VREG(HEVC_MPRED_ABV_START_ADDR
,
10000 pbi
->work_space_buf
->
10001 mpred_above
.buf_start
);
10003 data32
= READ_VREG(HEVC_MPRED_CTRL4
);
10005 data32
&= (~(1 << 6));
10006 data32
|= (use_prev_frame_mvs
<< 6);
10007 WRITE_VREG(HEVC_MPRED_CTRL4
, data32
);
10009 WRITE_VREG(HEVC_MPRED_MV_WR_START_ADDR
,
10010 pbi
->s1_mpred_mv_wr_start_addr
);
10011 WRITE_VREG(HEVC_MPRED_MV_WPTR
,
10012 pbi
->s1_mpred_mv_wr_start_addr
);
10014 WRITE_VREG(HEVC_MPRED_MV_RD_START_ADDR
,
10015 pbi
->s1_mpred_mv_wr_start_addr_pre
);
10016 WRITE_VREG(HEVC_MPRED_MV_RPTR
,
10017 pbi
->s1_mpred_mv_wr_start_addr_pre
);
10019 WRITE_VREG(HEVC_MPRED_MV_RD_END_ADDR
,
10020 mpred_mv_rd_end_addr
);
10023 clear_mpred_hw(pbi
);
10025 if (!params
->p
.show_existing_frame
) {
10026 pbi
->s1_mpred_mv_wr_start_addr_pre
=
10027 pbi
->s1_mpred_mv_wr_start_addr
;
10028 pbi
->s1_last_show_frame
=
10029 params
->p
.show_frame
;
10030 if (pbi
->s1_mv_buf_index_pre_pre
!= MV_BUFFER_NUM
)
10031 put_mv_buf(pbi
, &pbi
->s1_mv_buf_index_pre_pre
);
10032 pbi
->s1_mv_buf_index_pre_pre
=
10033 pbi
->s1_mv_buf_index_pre
;
10034 pbi
->s1_mv_buf_index_pre
= pbi
->s1_mv_buf_index
;
10036 put_mv_buf(pbi
, &pbi
->s1_mv_buf_index
);
10039 static void vp9_s1_work(struct work_struct
*s1_work
)
10041 struct VP9Decoder_s
*pbi
= container_of(s1_work
,
10042 struct VP9Decoder_s
, s1_work
);
10043 vp9_print(pbi
, PRINT_FLAG_VDEC_DETAIL
,
10044 "%s dec_s1_result %d\n",
10046 pbi
->dec_s1_result
);
10048 #ifdef FB_DECODING_TEST_SCHEDULE
10049 if (pbi
->dec_s1_result
==
10050 DEC_S1_RESULT_TEST_TRIGGER_DONE
) {
10051 pbi
->s1_test_cmd
= TEST_SET_PIC_DONE
;
10052 WRITE_VREG(HEVC_ASSIST_MBOX0_IRQ_REG
, 0x1);
10055 if (pbi
->dec_s1_result
== DEC_S1_RESULT_DONE
||
10056 pbi
->dec_s1_result
== DEC_S1_RESULT_FORCE_EXIT
) {
10058 vdec_core_finish_run(hw_to_vdec(pbi
),
10059 CORE_MASK_HEVC_FRONT
);
10061 trigger_schedule(pbi
);
10062 /*pbi->dec_s1_result = DEC_S1_RESULT_NONE;*/
10067 static void run_back(struct vdec_s
*vdec
)
10069 struct VP9Decoder_s
*pbi
=
10070 (struct VP9Decoder_s
*)vdec
->private;
10072 run2_count
[pbi
->index
]++;
10073 if (debug
& PRINT_FLAG_VDEC_STATUS
) {
10077 pbi
->run2_busy
= 1;
10078 #ifndef FB_DECODING_TEST_SCHEDULE
10079 fb_reset_core(vdec
, HW_MASK_BACK
);
10081 vvp9_prot_init(pbi
, HW_MASK_BACK
);
10083 vp9_recycle_mmu_buf_tail(pbi
);
10085 if (pbi
->frame_count
> 0)
10086 vp9_bufmgr_postproc(pbi
);
10088 if (get_s2_buf(pbi
) >= 0) {
10089 for (i
= 0; i
< (RPM_END
- RPM_BEGIN
); i
+= 4) {
10091 for (ii
= 0; ii
< 4; ii
++)
10092 vp9_param
.l
.data
[i
+ ii
] =
10093 pbi
->s2_buf
->rpm
[i
+ 3 - ii
];
10095 #ifndef FB_DECODING_TEST_SCHEDULE
10096 WRITE_VREG(HEVC_ASSIST_FBD_MMU_MAP_ADDR
,
10097 pbi
->stage_mmu_map_phy_addr
+
10098 pbi
->s2_buf
->index
* STAGE_MMU_MAP_SIZE
);
10100 continue_decoding(pbi
);
10102 pbi
->run2_busy
= 0;
10106 static void run(struct vdec_s
*vdec
, unsigned long mask
,
10107 void (*callback
)(struct vdec_s
*, void *), void *arg
)
10109 struct VP9Decoder_s
*pbi
=
10110 (struct VP9Decoder_s
*)vdec
->private;
10113 PRINT_FLAG_VDEC_DETAIL
, "%s mask %lx\r\n",
10116 run_count
[pbi
->index
]++;
10117 pbi
->vdec_cb_arg
= arg
;
10118 pbi
->vdec_cb
= callback
;
10119 #ifdef SUPPORT_FB_DECODING
10120 if ((mask
& CORE_MASK_HEVC
) ||
10121 (mask
& CORE_MASK_HEVC_FRONT
))
10124 if ((pbi
->used_stage_buf_num
> 0)
10125 && (mask
& CORE_MASK_HEVC_BACK
))
10132 static void init_frame_bufs(struct VP9Decoder_s
*pbi
)
10134 struct vdec_s
*vdec
= hw_to_vdec(pbi
);
10135 struct VP9_Common_s
*const cm
= &pbi
->common
;
10136 struct RefCntBuffer_s
*const frame_bufs
= cm
->buffer_pool
->frame_bufs
;
10139 for (i
= 0; i
< pbi
->used_buf_num
; ++i
) {
10140 frame_bufs
[i
].ref_count
= 0;
10141 frame_bufs
[i
].buf
.vf_ref
= 0;
10142 frame_bufs
[i
].buf
.decode_idx
= 0;
10143 frame_bufs
[i
].buf
.cma_alloc_addr
= 0;
10144 frame_bufs
[i
].buf
.index
= i
;
10147 if (vdec
->parallel_dec
== 1) {
10148 for (i
= 0; i
< FRAME_BUFFERS
; i
++) {
10149 vdec
->free_canvas_ex
10150 (pbi
->common
.buffer_pool
->frame_bufs
[i
].buf
.y_canvas_index
,
10152 vdec
->free_canvas_ex
10153 (pbi
->common
.buffer_pool
->frame_bufs
[i
].buf
.uv_canvas_index
,
10159 static void reset(struct vdec_s
*vdec
)
10161 struct VP9Decoder_s
*pbi
=
10162 (struct VP9Decoder_s
*)vdec
->private;
10164 cancel_work_sync(&pbi
->work
);
10165 if (pbi
->stat
& STAT_VDEC_RUN
) {
10167 pbi
->stat
&= ~STAT_VDEC_RUN
;
10170 if (pbi
->stat
& STAT_TIMER_ARM
) {
10171 del_timer_sync(&pbi
->timer
);
10172 pbi
->stat
&= ~STAT_TIMER_ARM
;
10174 pbi
->dec_result
= DEC_RESULT_NONE
;
10175 reset_process_time(pbi
);
10176 vp9_local_uninit(pbi
);
10177 if (vvp9_local_init(pbi
) < 0)
10178 vp9_print(pbi
, 0, "%s local_init failed \r\n", __func__
);
10179 init_frame_bufs(pbi
);
10183 vp9_print(pbi
, PRINT_FLAG_VDEC_DETAIL
, "%s\r\n", __func__
);
10186 static irqreturn_t
vp9_irq_cb(struct vdec_s
*vdec
, int irq
)
10188 struct VP9Decoder_s
*pbi
=
10189 (struct VP9Decoder_s
*)vdec
->private;
10190 return vvp9_isr(0, pbi
);
10193 static irqreturn_t
vp9_threaded_irq_cb(struct vdec_s
*vdec
, int irq
)
10195 struct VP9Decoder_s
*pbi
=
10196 (struct VP9Decoder_s
*)vdec
->private;
10197 return vvp9_isr_thread_fn(0, pbi
);
10200 static void vp9_dump_state(struct vdec_s
*vdec
)
10202 struct VP9Decoder_s
*pbi
=
10203 (struct VP9Decoder_s
*)vdec
->private;
10204 struct VP9_Common_s
*const cm
= &pbi
->common
;
10206 vp9_print(pbi
, 0, "====== %s\n", __func__
);
10209 "width/height (%d/%d), used_buf_num %d\n",
10216 "is_framebase(%d), eos %d, dec_result 0x%x dec_frm %d disp_frm %d run %d not_run_ready %d input_empty %d low_latency %d no_head %d \n",
10217 input_frame_based(vdec
),
10220 decode_frame_count
[pbi
->index
],
10221 display_frame_count
[pbi
->index
],
10222 run_count
[pbi
->index
],
10223 not_run_ready
[pbi
->index
],
10224 input_empty
[pbi
->index
],
10225 pbi
->low_latency_flag
,
10229 if (vf_get_receiver(vdec
->vf_provider_name
)) {
10230 enum receviver_start_e state
=
10231 vf_notify_receiver(vdec
->vf_provider_name
,
10232 VFRAME_EVENT_PROVIDER_QUREY_STATE
,
10235 "\nreceiver(%s) state %d\n",
10236 vdec
->vf_provider_name
,
10241 "%s, newq(%d/%d), dispq(%d/%d), vf prepare/get/put (%d/%d/%d), free_buf_count %d (min %d for run_ready)\n",
10243 kfifo_len(&pbi
->newframe_q
),
10245 kfifo_len(&pbi
->display_q
),
10250 get_free_buf_count(pbi
),
10251 run_ready_min_buf_num
10254 dump_pic_list(pbi
);
10256 for (i
= 0; i
< MAX_BUF_NUM
; i
++) {
10258 "mv_Buf(%d) start_adr 0x%x size 0x%x used %d\n",
10260 pbi
->m_mv_BUF
[i
].start_adr
,
10261 pbi
->m_mv_BUF
[i
].size
,
10262 pbi
->m_mv_BUF
[i
].used_flag
);
10266 "HEVC_DEC_STATUS_REG=0x%x\n",
10267 READ_VREG(HEVC_DEC_STATUS_REG
));
10269 "HEVC_MPC_E=0x%x\n",
10270 READ_VREG(HEVC_MPC_E
));
10272 "DECODE_MODE=0x%x\n",
10273 READ_VREG(DECODE_MODE
));
10275 "NAL_SEARCH_CTL=0x%x\n",
10276 READ_VREG(NAL_SEARCH_CTL
));
10278 "HEVC_PARSER_LCU_START=0x%x\n",
10279 READ_VREG(HEVC_PARSER_LCU_START
));
10281 "HEVC_DECODE_SIZE=0x%x\n",
10282 READ_VREG(HEVC_DECODE_SIZE
));
10284 "HEVC_SHIFT_BYTE_COUNT=0x%x\n",
10285 READ_VREG(HEVC_SHIFT_BYTE_COUNT
));
10287 "HEVC_STREAM_START_ADDR=0x%x\n",
10288 READ_VREG(HEVC_STREAM_START_ADDR
));
10290 "HEVC_STREAM_END_ADDR=0x%x\n",
10291 READ_VREG(HEVC_STREAM_END_ADDR
));
10293 "HEVC_STREAM_LEVEL=0x%x\n",
10294 READ_VREG(HEVC_STREAM_LEVEL
));
10296 "HEVC_STREAM_WR_PTR=0x%x\n",
10297 READ_VREG(HEVC_STREAM_WR_PTR
));
10299 "HEVC_STREAM_RD_PTR=0x%x\n",
10300 READ_VREG(HEVC_STREAM_RD_PTR
));
10302 "PARSER_VIDEO_RP=0x%x\n",
10303 READ_PARSER_REG(PARSER_VIDEO_RP
));
10305 "PARSER_VIDEO_WP=0x%x\n",
10306 READ_PARSER_REG(PARSER_VIDEO_WP
));
10308 if (input_frame_based(vdec
) &&
10309 (debug
& PRINT_FLAG_VDEC_DATA
)
10312 if (pbi
->chunk
&& pbi
->chunk
->block
&&
10313 pbi
->chunk
->size
> 0) {
10316 if (!pbi
->chunk
->block
->is_mapped
)
10317 data
= codec_mm_vmap(
10318 pbi
->chunk
->block
->start
+
10319 pbi
->chunk
->offset
,
10322 data
= ((u8
*)pbi
->chunk
->block
->start_virt
)
10323 + pbi
->chunk
->offset
;
10325 "frame data size 0x%x\n",
10327 for (jj
= 0; jj
< pbi
->chunk
->size
; jj
++) {
10328 if ((jj
& 0xf) == 0)
10331 vp9_print_cont(pbi
, 0,
10332 "%02x ", data
[jj
]);
10333 if (((jj
+ 1) & 0xf) == 0)
10334 vp9_print_cont(pbi
, 0,
10338 if (!pbi
->chunk
->block
->is_mapped
)
10339 codec_mm_unmap_phyaddr(data
);
10345 static int ammvdec_vp9_probe(struct platform_device
*pdev
)
10347 struct vdec_s
*pdata
= *(struct vdec_s
**)pdev
->dev
.platform_data
;
10350 struct vframe_content_light_level_s content_light_level
;
10351 struct vframe_master_display_colour_s vf_dp
;
10353 struct BUF_s BUF
[MAX_BUF_NUM
];
10354 struct VP9Decoder_s
*pbi
= NULL
;
10355 pr_debug("%s\n", __func__
);
10357 if (pdata
== NULL
) {
10358 pr_info("\nammvdec_vp9 memory resource undefined.\n");
10361 /*pbi = (struct VP9Decoder_s *)devm_kzalloc(&pdev->dev,
10362 sizeof(struct VP9Decoder_s), GFP_KERNEL);*/
10363 memset(&vf_dp
, 0, sizeof(struct vframe_master_display_colour_s
));
10364 pbi
= vmalloc(sizeof(struct VP9Decoder_s
));
10366 pr_info("\nammvdec_vp9 device data allocation failed\n");
10369 memset(pbi
, 0, sizeof(struct VP9Decoder_s
));
10371 /* the ctx from v4l2 driver. */
10372 pbi
->v4l2_ctx
= pdata
->private;
10374 pdata
->private = pbi
;
10375 pdata
->dec_status
= vvp9_dec_status
;
10376 /* pdata->set_trickmode = set_trickmode; */
10377 pdata
->run_ready
= run_ready
;
10379 pdata
->reset
= reset
;
10380 pdata
->irq_handler
= vp9_irq_cb
;
10381 pdata
->threaded_irq_handler
= vp9_threaded_irq_cb
;
10382 pdata
->dump_state
= vp9_dump_state
;
10384 memcpy(&BUF
[0], &pbi
->m_BUF
[0], sizeof(struct BUF_s
) * MAX_BUF_NUM
);
10385 memcpy(&pbi
->m_BUF
[0], &BUF
[0], sizeof(struct BUF_s
) * MAX_BUF_NUM
);
10387 pbi
->index
= pdev
->id
;
10389 if (pdata
->use_vfm_path
)
10390 snprintf(pdata
->vf_provider_name
, VDEC_PROVIDER_NAME_SIZE
,
10391 VFM_DEC_PROVIDER_NAME
);
10393 snprintf(pdata
->vf_provider_name
, VDEC_PROVIDER_NAME_SIZE
,
10394 MULTI_INSTANCE_PROVIDER_NAME
".%02x", pdev
->id
& 0xff);
10396 vf_provider_init(&pdata
->vframe_provider
, pdata
->vf_provider_name
,
10397 &vvp9_vf_provider
, pbi
);
10399 pbi
->provider_name
= pdata
->vf_provider_name
;
10400 platform_set_drvdata(pdev
, pdata
);
10402 pbi
->platform_dev
= pdev
;
10403 pbi
->video_signal_type
= 0;
10404 pbi
->m_ins_flag
= 1;
10405 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_TXLX
)
10406 pbi
->stat
|= VP9_TRIGGER_FRAME_ENABLE
;
10408 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1
) {
10409 pbi
->max_pic_w
= 8192;
10410 pbi
->max_pic_h
= 4608;
10412 pbi
->max_pic_w
= 4096;
10413 pbi
->max_pic_h
= 2304;
10416 if ((debug
& IGNORE_PARAM_FROM_CONFIG
) == 0 &&
10417 pdata
->config_len
) {
10418 #ifdef MULTI_INSTANCE_SUPPORT
10419 int vp9_buf_width
= 0;
10420 int vp9_buf_height
= 0;
10421 /*use ptr config for doubel_write_mode, etc*/
10422 vp9_print(pbi
, 0, "pdata->config=%s\n", pdata
->config
);
10423 if (get_config_int(pdata
->config
, "vp9_double_write_mode",
10425 pbi
->double_write_mode
= config_val
;
10427 pbi
->double_write_mode
= double_write_mode
;
10429 if (get_config_int(pdata
->config
, "save_buffer_mode",
10431 pbi
->save_buffer_mode
= config_val
;
10433 pbi
->save_buffer_mode
= 0;
10434 if (get_config_int(pdata
->config
, "vp9_buf_width",
10435 &config_val
) == 0) {
10436 vp9_buf_width
= config_val
;
10438 if (get_config_int(pdata
->config
, "vp9_buf_height",
10439 &config_val
) == 0) {
10440 vp9_buf_height
= config_val
;
10443 if (get_config_int(pdata
->config
, "no_head",
10445 pbi
->no_head
= config_val
;
10447 pbi
->no_head
= no_head
;
10449 /*use ptr config for max_pic_w, etc*/
10450 if (get_config_int(pdata
->config
, "vp9_max_pic_w",
10451 &config_val
) == 0) {
10452 pbi
->max_pic_w
= config_val
;
10454 if (get_config_int(pdata
->config
, "vp9_max_pic_h",
10455 &config_val
) == 0) {
10456 pbi
->max_pic_h
= config_val
;
10458 if ((pbi
->max_pic_w
* pbi
->max_pic_h
)
10459 < (vp9_buf_width
* vp9_buf_height
)) {
10460 pbi
->max_pic_w
= vp9_buf_width
;
10461 pbi
->max_pic_h
= vp9_buf_height
;
10462 vp9_print(pbi
, 0, "use buf resolution\n");
10465 if (get_config_int(pdata
->config
,
10466 "parm_v4l_codec_enable",
10468 pbi
->is_used_v4l
= config_val
;
10470 if (get_config_int(pdata
->config
,
10471 "parm_v4l_buffer_margin",
10473 pbi
->dynamic_buf_num_margin
= config_val
;
10475 if (get_config_int(pdata
->config
,
10476 "parm_v4l_canvas_mem_mode",
10478 pbi
->mem_map_mode
= config_val
;
10480 if (get_config_int(pdata
->config
, "HDRStaticInfo",
10481 &vf_dp
.present_flag
) == 0
10482 && vf_dp
.present_flag
== 1) {
10483 get_config_int(pdata
->config
, "mG.x",
10484 &vf_dp
.primaries
[0][0]);
10485 get_config_int(pdata
->config
, "mG.y",
10486 &vf_dp
.primaries
[0][1]);
10487 get_config_int(pdata
->config
, "mB.x",
10488 &vf_dp
.primaries
[1][0]);
10489 get_config_int(pdata
->config
, "mB.y",
10490 &vf_dp
.primaries
[1][1]);
10491 get_config_int(pdata
->config
, "mR.x",
10492 &vf_dp
.primaries
[2][0]);
10493 get_config_int(pdata
->config
, "mR.y",
10494 &vf_dp
.primaries
[2][1]);
10495 get_config_int(pdata
->config
, "mW.x",
10496 &vf_dp
.white_point
[0]);
10497 get_config_int(pdata
->config
, "mW.y",
10498 &vf_dp
.white_point
[1]);
10499 get_config_int(pdata
->config
, "mMaxDL",
10500 &vf_dp
.luminance
[0]);
10501 get_config_int(pdata
->config
, "mMinDL",
10502 &vf_dp
.luminance
[1]);
10503 vf_dp
.content_light_level
.present_flag
= 1;
10504 get_config_int(pdata
->config
, "mMaxCLL",
10505 &content_light_level
.max_content
);
10506 get_config_int(pdata
->config
, "mMaxFALL",
10507 &content_light_level
.max_pic_average
);
10508 vf_dp
.content_light_level
= content_light_level
;
10509 pbi
->video_signal_type
= (1 << 29)
10510 | (5 << 26) /* unspecified */
10511 | (0 << 25) /* limit */
10512 | (1 << 24) /* color available */
10513 | (9 << 16) /* 2020 */
10514 | (16 << 8) /* 2084 */
10515 | (9 << 0); /* 2020 */
10517 pbi
->vf_dp
= vf_dp
;
10521 /*pbi->vvp9_amstream_dec_info.width = 0;
10522 pbi->vvp9_amstream_dec_info.height = 0;
10523 pbi->vvp9_amstream_dec_info.rate = 30;*/
10524 pbi
->double_write_mode
= double_write_mode
;
10527 if (!pbi
->is_used_v4l
) {
10528 pbi
->mem_map_mode
= mem_map_mode
;
10531 if (is_oversize(pbi
->max_pic_w
, pbi
->max_pic_h
)) {
10532 pr_err("over size: %dx%d, probe failed\n",
10533 pbi
->max_pic_w
, pbi
->max_pic_h
);
10536 pbi
->mmu_enable
= 1;
10537 video_signal_type
= pbi
->video_signal_type
;
10539 if (pdata
->sys_info
) {
10540 pbi
->vvp9_amstream_dec_info
= *pdata
->sys_info
;
10542 pbi
->vvp9_amstream_dec_info
.width
= 0;
10543 pbi
->vvp9_amstream_dec_info
.height
= 0;
10544 pbi
->vvp9_amstream_dec_info
.rate
= 30;
10546 pbi
->low_latency_flag
= 1;
10549 "no_head %d low_latency %d\n",
10550 pbi
->no_head
, pbi
->low_latency_flag
);
10552 pbi
->buf_start
= pdata
->mem_start
;
10553 pbi
->buf_size
= pdata
->mem_end
- pdata
->mem_start
+ 1;
10555 if (amvdec_vp9_mmu_init(pbi
) < 0) {
10556 pr_err("vp9 alloc bmmu box failed!!\n");
10557 /* devm_kfree(&pdev->dev, (void *)pbi); */
10558 vfree((void *)pbi
);
10559 pdata
->dec_status
= NULL
;
10563 pbi
->cma_alloc_count
= PAGE_ALIGN(work_buf_size
) / PAGE_SIZE
;
10564 ret
= decoder_bmmu_box_alloc_buf_phy(pbi
->bmmu_box
, WORK_SPACE_BUF_ID
,
10565 pbi
->cma_alloc_count
* PAGE_SIZE
, DRIVER_NAME
,
10566 &pbi
->cma_alloc_addr
);
10568 uninit_mmu_buffers(pbi
);
10569 /* devm_kfree(&pdev->dev, (void *)pbi); */
10570 vfree((void *)pbi
);
10571 pdata
->dec_status
= NULL
;
10574 pbi
->buf_start
= pbi
->cma_alloc_addr
;
10575 pbi
->buf_size
= work_buf_size
;
10578 pbi
->init_flag
= 0;
10579 pbi
->first_sc_checked
= 0;
10580 pbi
->fatal_error
= 0;
10581 pbi
->show_frame_num
= 0;
10584 pr_info("===VP9 decoder mem resource 0x%lx size 0x%x\n",
10589 pbi
->cma_dev
= pdata
->cma_dev
;
10590 if (vvp9_init(pdata
) < 0) {
10591 pr_info("\namvdec_vp9 init failed.\n");
10592 vp9_local_uninit(pbi
);
10593 uninit_mmu_buffers(pbi
);
10594 /* devm_kfree(&pdev->dev, (void *)pbi); */
10595 vfree((void *)pbi
);
10596 pdata
->dec_status
= NULL
;
10599 vdec_set_prepare_level(pdata
, start_decode_buf_level
);
10600 hevc_source_changed(VFORMAT_VP9
,
10602 #ifdef SUPPORT_FB_DECODING
10603 if (pbi
->used_stage_buf_num
> 0)
10604 vdec_core_request(pdata
,
10605 CORE_MASK_HEVC_FRONT
| CORE_MASK_HEVC_BACK
);
10607 vdec_core_request(pdata
, CORE_MASK_VDEC_1
| CORE_MASK_HEVC
10608 | CORE_MASK_HEVC_FRONT
| CORE_MASK_HEVC_BACK
10609 | CORE_MASK_COMBINE
);
10611 if (pdata
->parallel_dec
== 1)
10612 vdec_core_request(pdata
, CORE_MASK_HEVC
);
10614 vdec_core_request(pdata
, CORE_MASK_VDEC_1
| CORE_MASK_HEVC
10615 | CORE_MASK_COMBINE
);
10617 pbi
->pic_list_init_done2
= true;
10621 static int ammvdec_vp9_remove(struct platform_device
*pdev
)
10623 struct VP9Decoder_s
*pbi
= (struct VP9Decoder_s
*)
10624 (((struct vdec_s
*)(platform_get_drvdata(pdev
)))->private);
10625 struct vdec_s
*vdec
= hw_to_vdec(pbi
);
10628 pr_info("amvdec_vp9_remove\n");
10632 #ifdef SUPPORT_FB_DECODING
10633 vdec_core_release(hw_to_vdec(pbi
), CORE_MASK_VDEC_1
| CORE_MASK_HEVC
10634 | CORE_MASK_HEVC_FRONT
| CORE_MASK_HEVC_BACK
10637 if (vdec
->parallel_dec
== 1)
10638 vdec_core_release(hw_to_vdec(pbi
), CORE_MASK_HEVC
);
10640 vdec_core_release(hw_to_vdec(pbi
), CORE_MASK_VDEC_1
| CORE_MASK_HEVC
);
10642 vdec_set_status(hw_to_vdec(pbi
), VDEC_STATUS_DISCONNECTED
);
10644 if (vdec
->parallel_dec
== 1) {
10645 for (i
= 0; i
< FRAME_BUFFERS
; i
++) {
10646 vdec
->free_canvas_ex
10647 (pbi
->common
.buffer_pool
->frame_bufs
[i
].buf
.y_canvas_index
,
10649 vdec
->free_canvas_ex
10650 (pbi
->common
.buffer_pool
->frame_bufs
[i
].buf
.uv_canvas_index
,
10657 pr_info("pts missed %ld, pts hit %ld, duration %d\n",
10658 pbi
->pts_missed
, pbi
->pts_hit
, pbi
->frame_dur
);
10662 /* devm_kfree(&pdev->dev, (void *)pbi); */
10663 vfree((void *)pbi
);
10667 static struct platform_driver ammvdec_vp9_driver
= {
10668 .probe
= ammvdec_vp9_probe
,
10669 .remove
= ammvdec_vp9_remove
,
10671 .name
= MULTI_DRIVER_NAME
,
10678 static struct mconfig vp9_configs
[] = {
10679 MC_PU32("bit_depth_luma", &bit_depth_luma
),
10680 MC_PU32("bit_depth_chroma", &bit_depth_chroma
),
10681 MC_PU32("frame_width", &frame_width
),
10682 MC_PU32("frame_height", &frame_height
),
10683 MC_PU32("debug", &debug
),
10684 MC_PU32("radr", &radr
),
10685 MC_PU32("rval", &rval
),
10686 MC_PU32("pop_shorts", &pop_shorts
),
10687 MC_PU32("dbg_cmd", &dbg_cmd
),
10688 MC_PU32("dbg_skip_decode_index", &dbg_skip_decode_index
),
10689 MC_PU32("endian", &endian
),
10690 MC_PU32("step", &step
),
10691 MC_PU32("udebug_flag", &udebug_flag
),
10692 MC_PU32("decode_pic_begin", &decode_pic_begin
),
10693 MC_PU32("slice_parse_begin", &slice_parse_begin
),
10694 MC_PU32("i_only_flag", &i_only_flag
),
10695 MC_PU32("error_handle_policy", &error_handle_policy
),
10696 MC_PU32("buf_alloc_width", &buf_alloc_width
),
10697 MC_PU32("buf_alloc_height", &buf_alloc_height
),
10698 MC_PU32("buf_alloc_depth", &buf_alloc_depth
),
10699 MC_PU32("buf_alloc_size", &buf_alloc_size
),
10700 MC_PU32("buffer_mode", &buffer_mode
),
10701 MC_PU32("buffer_mode_dbg", &buffer_mode_dbg
),
10702 MC_PU32("max_buf_num", &max_buf_num
),
10703 MC_PU32("dynamic_buf_num_margin", &dynamic_buf_num_margin
),
10704 MC_PU32("mem_map_mode", &mem_map_mode
),
10705 MC_PU32("double_write_mode", &double_write_mode
),
10706 MC_PU32("enable_mem_saving", &enable_mem_saving
),
10707 MC_PU32("force_w_h", &force_w_h
),
10708 MC_PU32("force_fps", &force_fps
),
10709 MC_PU32("max_decoding_time", &max_decoding_time
),
10710 MC_PU32("on_no_keyframe_skiped", &on_no_keyframe_skiped
),
10711 MC_PU32("start_decode_buf_level", &start_decode_buf_level
),
10712 MC_PU32("decode_timeout_val", &decode_timeout_val
),
10713 MC_PU32("vp9_max_pic_w", &vp9_max_pic_w
),
10714 MC_PU32("vp9_max_pic_h", &vp9_max_pic_h
),
10716 static struct mconfig_node vp9_node
;
10718 static int __init
amvdec_vp9_driver_init_module(void)
10721 struct BuffInfo_s
*p_buf_info
;
10723 if (vdec_is_support_4k()) {
10724 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1
)
10725 p_buf_info
= &amvvp9_workbuff_spec
[2];
10727 p_buf_info
= &amvvp9_workbuff_spec
[1];
10729 p_buf_info
= &amvvp9_workbuff_spec
[0];
10731 init_buff_spec(NULL
, p_buf_info
);
10733 (p_buf_info
->end_adr
- p_buf_info
->start_adr
10734 + 0xffff) & (~0xffff);
10736 pr_debug("amvdec_vp9 module init\n");
10738 error_handle_policy
= 0;
10740 #ifdef ERROR_HANDLE_DEBUG
10741 dbg_nal_skip_flag
= 0;
10742 dbg_nal_skip_count
= 0;
10745 decode_pic_begin
= 0;
10746 slice_parse_begin
= 0;
10748 buf_alloc_size
= 0;
10749 #ifdef MULTI_INSTANCE_SUPPORT
10750 if (platform_driver_register(&ammvdec_vp9_driver
))
10751 pr_err("failed to register ammvdec_vp9 driver\n");
10754 if (platform_driver_register(&amvdec_vp9_driver
)) {
10755 pr_err("failed to register amvdec_vp9 driver\n");
10759 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_SM1
) {
10760 amvdec_vp9_profile
.profile
=
10761 "8k, 10bit, dwrite, compressed, no_head";
10762 } else if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_GXL
10763 /*&& get_cpu_major_id() != MESON_CPU_MAJOR_ID_GXLX*/
10764 && get_cpu_major_id() != AM_MESON_CPU_MAJOR_ID_TXL
) {
10765 if (get_cpu_major_id() < AM_MESON_CPU_MAJOR_ID_TXLX
) {
10766 if (vdec_is_support_4k())
10767 amvdec_vp9_profile
.profile
=
10768 "4k, 10bit, dwrite, compressed";
10770 amvdec_vp9_profile
.profile
=
10771 "10bit, dwrite, compressed";
10773 if (vdec_is_support_4k())
10774 amvdec_vp9_profile
.profile
=
10775 "4k, 10bit, dwrite, compressed, no_head";
10777 amvdec_vp9_profile
.profile
=
10778 "10bit, dwrite, compressed, no_head";
10782 amvdec_vp9_profile
.name
= "vp9_unsupport";
10785 if (get_cpu_major_id() >= AM_MESON_CPU_MAJOR_ID_G12A
)
10786 max_buf_num
= MAX_BUF_NUM_LESS
;
10788 vcodec_profile_register(&amvdec_vp9_profile
);
10789 amvdec_vp9_profile_mult
= amvdec_vp9_profile
;
10790 amvdec_vp9_profile_mult
.name
= "mvp9";
10791 vcodec_profile_register(&amvdec_vp9_profile_mult
);
10792 INIT_REG_NODE_CONFIGS("media.decoder", &vp9_node
,
10793 "vp9", vp9_configs
, CONFIG_FOR_RW
);
10798 static void __exit
amvdec_vp9_driver_remove_module(void)
10800 pr_debug("amvdec_vp9 module remove.\n");
10801 #ifdef MULTI_INSTANCE_SUPPORT
10802 platform_driver_unregister(&ammvdec_vp9_driver
);
10804 platform_driver_unregister(&amvdec_vp9_driver
);
10807 /****************************************/
10809 module_param(bit_depth_luma
, uint
, 0664);
10810 MODULE_PARM_DESC(bit_depth_luma
, "\n amvdec_vp9 bit_depth_luma\n");
10812 module_param(bit_depth_chroma
, uint
, 0664);
10813 MODULE_PARM_DESC(bit_depth_chroma
, "\n amvdec_vp9 bit_depth_chroma\n");
10815 module_param(frame_width
, uint
, 0664);
10816 MODULE_PARM_DESC(frame_width
, "\n amvdec_vp9 frame_width\n");
10818 module_param(frame_height
, uint
, 0664);
10819 MODULE_PARM_DESC(frame_height
, "\n amvdec_vp9 frame_height\n");
10821 module_param(debug
, uint
, 0664);
10822 MODULE_PARM_DESC(debug
, "\n amvdec_vp9 debug\n");
10824 module_param(radr
, uint
, 0664);
10825 MODULE_PARM_DESC(radr
, "\n radr\n");
10827 module_param(rval
, uint
, 0664);
10828 MODULE_PARM_DESC(rval
, "\n rval\n");
10830 module_param(pop_shorts
, uint
, 0664);
10831 MODULE_PARM_DESC(pop_shorts
, "\n rval\n");
10833 module_param(dbg_cmd
, uint
, 0664);
10834 MODULE_PARM_DESC(dbg_cmd
, "\n dbg_cmd\n");
10836 module_param(dbg_skip_decode_index
, uint
, 0664);
10837 MODULE_PARM_DESC(dbg_skip_decode_index
, "\n dbg_skip_decode_index\n");
10839 module_param(endian
, uint
, 0664);
10840 MODULE_PARM_DESC(endian
, "\n rval\n");
10842 module_param(step
, uint
, 0664);
10843 MODULE_PARM_DESC(step
, "\n amvdec_vp9 step\n");
10845 module_param(decode_pic_begin
, uint
, 0664);
10846 MODULE_PARM_DESC(decode_pic_begin
, "\n amvdec_vp9 decode_pic_begin\n");
10848 module_param(slice_parse_begin
, uint
, 0664);
10849 MODULE_PARM_DESC(slice_parse_begin
, "\n amvdec_vp9 slice_parse_begin\n");
10851 module_param(i_only_flag
, uint
, 0664);
10852 MODULE_PARM_DESC(i_only_flag
, "\n amvdec_vp9 i_only_flag\n");
10854 module_param(low_latency_flag
, uint
, 0664);
10855 MODULE_PARM_DESC(low_latency_flag
, "\n amvdec_vp9 low_latency_flag\n");
10857 module_param(no_head
, uint
, 0664);
10858 MODULE_PARM_DESC(no_head
, "\n amvdec_vp9 no_head\n");
10860 module_param(error_handle_policy
, uint
, 0664);
10861 MODULE_PARM_DESC(error_handle_policy
, "\n amvdec_vp9 error_handle_policy\n");
10863 module_param(buf_alloc_width
, uint
, 0664);
10864 MODULE_PARM_DESC(buf_alloc_width
, "\n buf_alloc_width\n");
10866 module_param(buf_alloc_height
, uint
, 0664);
10867 MODULE_PARM_DESC(buf_alloc_height
, "\n buf_alloc_height\n");
10869 module_param(buf_alloc_depth
, uint
, 0664);
10870 MODULE_PARM_DESC(buf_alloc_depth
, "\n buf_alloc_depth\n");
10872 module_param(buf_alloc_size
, uint
, 0664);
10873 MODULE_PARM_DESC(buf_alloc_size
, "\n buf_alloc_size\n");
10875 module_param(buffer_mode
, uint
, 0664);
10876 MODULE_PARM_DESC(buffer_mode
, "\n buffer_mode\n");
10878 module_param(buffer_mode_dbg
, uint
, 0664);
10879 MODULE_PARM_DESC(buffer_mode_dbg
, "\n buffer_mode_dbg\n");
10881 module_param(max_buf_num
, uint
, 0664);
10882 MODULE_PARM_DESC(max_buf_num
, "\n max_buf_num\n");
10884 module_param(dynamic_buf_num_margin
, uint
, 0664);
10885 MODULE_PARM_DESC(dynamic_buf_num_margin
, "\n dynamic_buf_num_margin\n");
10887 module_param(mv_buf_margin
, uint
, 0664);
10888 MODULE_PARM_DESC(mv_buf_margin
, "\n mv_buf_margin\n");
10890 module_param(run_ready_min_buf_num
, uint
, 0664);
10891 MODULE_PARM_DESC(run_ready_min_buf_num
, "\n run_ready_min_buf_num\n");
10895 module_param(mem_map_mode
, uint
, 0664);
10896 MODULE_PARM_DESC(mem_map_mode
, "\n mem_map_mode\n");
10898 #ifdef SUPPORT_10BIT
10899 module_param(double_write_mode
, uint
, 0664);
10900 MODULE_PARM_DESC(double_write_mode
, "\n double_write_mode\n");
10902 module_param(enable_mem_saving
, uint
, 0664);
10903 MODULE_PARM_DESC(enable_mem_saving
, "\n enable_mem_saving\n");
10905 module_param(force_w_h
, uint
, 0664);
10906 MODULE_PARM_DESC(force_w_h
, "\n force_w_h\n");
10909 module_param(force_fps
, uint
, 0664);
10910 MODULE_PARM_DESC(force_fps
, "\n force_fps\n");
10912 module_param(max_decoding_time
, uint
, 0664);
10913 MODULE_PARM_DESC(max_decoding_time
, "\n max_decoding_time\n");
10915 module_param(on_no_keyframe_skiped
, uint
, 0664);
10916 MODULE_PARM_DESC(on_no_keyframe_skiped
, "\n on_no_keyframe_skiped\n");
10918 module_param(mcrcc_cache_alg_flag
, uint
, 0664);
10919 MODULE_PARM_DESC(mcrcc_cache_alg_flag
, "\n mcrcc_cache_alg_flag\n");
10921 #ifdef MULTI_INSTANCE_SUPPORT
10922 module_param(start_decode_buf_level
, int, 0664);
10923 MODULE_PARM_DESC(start_decode_buf_level
,
10924 "\n vp9 start_decode_buf_level\n");
10926 module_param(decode_timeout_val
, uint
, 0664);
10927 MODULE_PARM_DESC(decode_timeout_val
,
10928 "\n vp9 decode_timeout_val\n");
10930 module_param(vp9_max_pic_w
, uint
, 0664);
10931 MODULE_PARM_DESC(vp9_max_pic_w
, "\n vp9_max_pic_w\n");
10933 module_param(vp9_max_pic_h
, uint
, 0664);
10934 MODULE_PARM_DESC(vp9_max_pic_h
, "\n vp9_max_pic_h\n");
10936 module_param_array(decode_frame_count
, uint
,
10937 &max_decode_instance_num
, 0664);
10939 module_param_array(display_frame_count
, uint
,
10940 &max_decode_instance_num
, 0664);
10942 module_param_array(max_process_time
, uint
,
10943 &max_decode_instance_num
, 0664);
10945 module_param_array(run_count
, uint
,
10946 &max_decode_instance_num
, 0664);
10948 module_param_array(input_empty
, uint
,
10949 &max_decode_instance_num
, 0664);
10951 module_param_array(not_run_ready
, uint
,
10952 &max_decode_instance_num
, 0664);
10955 #ifdef SUPPORT_FB_DECODING
10956 module_param_array(not_run2_ready
, uint
,
10957 &max_decode_instance_num
, 0664);
10959 module_param_array(run2_count
, uint
,
10960 &max_decode_instance_num
, 0664);
10962 module_param(stage_buf_num
, uint
, 0664);
10963 MODULE_PARM_DESC(stage_buf_num
, "\n amvdec_h265 stage_buf_num\n");
10966 module_param(udebug_flag
, uint
, 0664);
10967 MODULE_PARM_DESC(udebug_flag
, "\n amvdec_h265 udebug_flag\n");
10969 module_param(udebug_pause_pos
, uint
, 0664);
10970 MODULE_PARM_DESC(udebug_pause_pos
, "\n udebug_pause_pos\n");
10972 module_param(udebug_pause_val
, uint
, 0664);
10973 MODULE_PARM_DESC(udebug_pause_val
, "\n udebug_pause_val\n");
10975 module_param(udebug_pause_decode_idx
, uint
, 0664);
10976 MODULE_PARM_DESC(udebug_pause_decode_idx
, "\n udebug_pause_decode_idx\n");
10978 module_param(without_display_mode
, uint
, 0664);
10979 MODULE_PARM_DESC(without_display_mode
, "\n without_display_mode\n");
10981 module_init(amvdec_vp9_driver_init_module
);
10982 module_exit(amvdec_vp9_driver_remove_module
);
10984 MODULE_DESCRIPTION("AMLOGIC vp9 Video Decoder Driver");
10985 MODULE_LICENSE("GPL");