--- /dev/null
+From 28ce6b9bf98afd40d242520c43d8c48e149c5d25 Mon Sep 17 00:00:00 2001
+From: adi766 <adityakumarteli7846@gmail.com>
+Date: Sat, 23 Sep 2017 16:10:43 +0000
+Subject: [PATCH] Add MediaTek color format support
+
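+Guarded by MTK_HARDWARE (BOARD_HAS_MTK_HARDWARE): import the MediaTek
+dpframework headers (DpBlitStream.h/DpDataType.h), declare the
+ACodec::setHalWindowColorFormat() and ColorConverter::convertYUVToRGBHW()
+hooks, return OMX_MTK_COLOR_FormatYV12 from CameraSource for YUV420P,
+add media/libstagefright/OMXCodec.cpp, and link libstagefright against
+libdpframework.
+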
+Signed-off-by: teja98 <tejachaitu96@gmail.com>
+---
+ include/media/stagefright/ACodec.h | 3 +
+ include/media/stagefright/ColorConverter.h | 4 +
+ .../media/stagefright/dpframework/DpBlitStream.h | 243 ++
+ include/media/stagefright/dpframework/DpDataType.h | 450 ++
+ media/libstagefright/Android.mk | 11 +
+ media/libstagefright/CameraSource.cpp | 8 +
+ media/libstagefright/OMXCodec.cpp | 4549 ++++++++++++++++++++
+ media/libstagefright/colorconversion/Android.mk | 7 +
+ .../colorconversion/ColorConverter.cpp | 101 +
+ 9 files changed, 5376 insertions(+)
+ create mode 100644 include/media/stagefright/dpframework/DpBlitStream.h
+ create mode 100644 include/media/stagefright/dpframework/DpDataType.h
+ create mode 100644 media/libstagefright/OMXCodec.cpp
+
+diff --git a/include/media/stagefright/ACodec.h b/include/media/stagefright/ACodec.h
+index d68263272..10a3905ef 100644
+--- a/include/media/stagefright/ACodec.h
++++ b/include/media/stagefright/ACodec.h
+@@ -351,6 +351,9 @@ protected:
+ #ifdef USE_SAMSUNG_COLORFORMAT
+ void setNativeWindowColorFormat(OMX_COLOR_FORMATTYPE &eNativeColorFormat);
+ #endif
++#ifdef MTK_HARDWARE
++ void setHalWindowColorFormat(OMX_COLOR_FORMATTYPE &eHalColorFormat);
++#endif
+ status_t cancelBufferToNativeWindow(BufferInfo *info);
+ status_t freeOutputBuffersNotOwnedByComponent();
+ BufferInfo *dequeueBufferFromNativeWindow();
+diff --git a/include/media/stagefright/ColorConverter.h b/include/media/stagefright/ColorConverter.h
+index 270c80966..cf0cda5af 100644
+--- a/include/media/stagefright/ColorConverter.h
++++ b/include/media/stagefright/ColorConverter.h
+@@ -82,6 +82,10 @@ private:
+ status_t convertTIYUV420PackedSemiPlanar(
+ const BitmapParams &src, const BitmapParams &dst);
+
++#ifdef MTK_HARDWARE
++ status_t convertYUVToRGBHW(const BitmapParams &src, const BitmapParams &dst);
++#endif
++
+ ColorConverter(const ColorConverter &);
+ ColorConverter &operator=(const ColorConverter &);
+ };
+diff --git a/include/media/stagefright/dpframework/DpBlitStream.h b/include/media/stagefright/dpframework/DpBlitStream.h
+new file mode 100644
+index 000000000..57fd482d6
+--- /dev/null
++++ b/include/media/stagefright/dpframework/DpBlitStream.h
+@@ -0,0 +1,243 @@
++#ifndef __DP_BLIT_STREAM_H__
++#define __DP_BLIT_STREAM_H__
++
++#include "DpDataType.h"
++
++enum
++{
++ DP_BLIT_GENERAL_USER = 0,
++ DP_BLIT_HWC0 = 0,
++ DP_BLIT_GPU = 1,
++ DP_BLIT_HWC1 = 1,
++ DP_BLIT_HWC2 = 2,
++ DP_BLIT_HWC3 = 3
++};
++
++
++class DpBlitStream
++{
++public:
++ static bool queryHWSupport(uint32_t srcWidth,
++ uint32_t srcHeight,
++ uint32_t dstWidth,
++ uint32_t dstHeight,
++ int32_t Orientation = 0);
++
++ DpBlitStream();
++
++ ~DpBlitStream();
++
++ enum DpOrientation
++ {
++ ROT_0 = 0x00000000,
++ FLIP_H = 0x00000001,
++ FLIP_V = 0x00000002,
++ ROT_90 = 0x00000004,
++ ROT_180 = FLIP_H|FLIP_V,
++ ROT_270 = ROT_180|ROT_90,
++ ROT_INVALID = 0x80
++ };
++
++ DP_STATUS_ENUM setSrcBuffer(void *pVABase,
++ uint32_t size);
++
++ DP_STATUS_ENUM setSrcBuffer(void **pVAList,
++ uint32_t *pSizeList,
++ uint32_t planeNumber);
++
++ // VA + MVA address interface
++ DP_STATUS_ENUM setSrcBuffer(void** pVAddrList,
++ void** pMVAddrList,
++ uint32_t *pSizeList,
++ uint32_t planeNumber);
++
++ // for ION file descriptor
++ DP_STATUS_ENUM setSrcBuffer(int32_t fileDesc,
++ uint32_t *sizeList,
++ uint32_t planeNumber);
++
++ DP_STATUS_ENUM setSrcConfig(int32_t width,
++ int32_t height,
++ DpColorFormat format,
++ DpInterlaceFormat field = eInterlace_None,
++ DpRect *pROI = 0);
++
++ DP_STATUS_ENUM setSrcConfig(int32_t width,
++ int32_t height,
++ int32_t yPitch,
++ int32_t uvPitch,
++ DpColorFormat format,
++ DP_PROFILE_ENUM profile = DP_PROFILE_BT601,
++ DpInterlaceFormat field = eInterlace_None,
++ DpRect *pROI = 0,
++ DpSecure secure = DP_SECURE_NONE,
++ bool doFlush = true);
++
++ DP_STATUS_ENUM setDstBuffer(void *pVABase,
++ uint32_t size);
++
++ DP_STATUS_ENUM setDstBuffer(void **pVABaseList,
++ uint32_t *pSizeList,
++ uint32_t planeNumber);
++
++ // VA + MVA address interface
++ DP_STATUS_ENUM setDstBuffer(void** pVABaseList,
++ void** pMVABaseList,
++ uint32_t *pSizeList,
++ uint32_t planeNumber);
++
++ // for ION file descriptor
++ DP_STATUS_ENUM setDstBuffer(int32_t fileDesc,
++ uint32_t *pSizeList,
++ uint32_t planeNumber);
++
++ DP_STATUS_ENUM setDstConfig(int32_t width,
++ int32_t height,
++ DpColorFormat format,
++ DpInterlaceFormat field = eInterlace_None,
++ DpRect *pROI = 0);
++
++ DP_STATUS_ENUM setDstConfig(int32_t width,
++ int32_t height,
++ int32_t yPitch,
++ int32_t uvPitch,
++ DpColorFormat format,
++ DP_PROFILE_ENUM profile = DP_PROFILE_BT601,
++ DpInterlaceFormat field = eInterlace_None,
++ DpRect *pROI = 0,
++ DpSecure secure = DP_SECURE_NONE,
++ bool doFlush = true);
++
++ DP_STATUS_ENUM setRotate(int32_t rotation)
++ {
++ if ((m_rotation != rotation) ||
++ (mRotate != rotation))
++ {
++ m_rotation = rotation;
++ mRotate = rotation;
++ m_frameChange = true;
++ }
++
++ return DP_STATUS_RETURN_SUCCESS;
++ }
++
++ //Compatible to 89
++ DP_STATUS_ENUM setFlip(int flip)
++ {
++ if (mFlip != flip)
++ {
++ mFlip = flip;
++ m_flipStatus = ((flip!= 0)? true: false);
++ m_frameChange = true;
++ }
++
++ return DP_STATUS_RETURN_SUCCESS;
++ }
++
++ DP_STATUS_ENUM setOrientation(uint32_t transform);
++
++ DP_STATUS_ENUM setTdshp(int gain)
++ {
++ if (mTdshp != gain)
++ {
++ mTdshp = gain;
++ m_frameChange = true;
++ }
++
++ return DP_STATUS_RETURN_SUCCESS;
++ }
++
++ uint32_t getPqID();
++
++ DP_STATUS_ENUM setPQParameter(const DpPqParam &pParam);
++
++ DP_STATUS_ENUM setDither(bool enDither)
++ {
++ if (m_ditherStatus != enDither)
++ {
++ m_ditherStatus = enDither;
++ m_frameChange = true;
++ }
++
++ return DP_STATUS_RETURN_SUCCESS;
++ }
++
++ DP_STATUS_ENUM setAdaptiveLuma(bool enADL)
++ {
++ m_adaptiveLuma = enADL;
++
++ return DP_STATUS_RETURN_SUCCESS;
++ }
++
++ DP_STATUS_ENUM setUser(uint32_t eID = 0);
++
++
++ DP_STATUS_ENUM invalidate();
++
++ DP_STATUS_ENUM pq_process();
++
++ // for dump register
++ void enableDumpReg(unsigned int flags){mDumpRegFlags = flags;}
++
++private:
++ DpStream *m_pStream;
++ DpChannel *m_pChannel;
++ int32_t m_channelID;
++ DpBasicBufferPool *m_pSrcPool;
++ DpBasicBufferPool *m_pDstPool;
++ int32_t m_srcBuffer;
++ int32_t m_srcWidth;
++ int32_t m_srcHeight;
++ int32_t m_srcYPitch;
++ int32_t m_srcUVPitch;
++ DpColorFormat m_srcFormat;
++ DP_PROFILE_ENUM m_srcProfile;
++ DpSecure m_srcSecure;
++ bool m_srcFlush;
++ int32_t m_dstBuffer;
++ int32_t m_dstWidth;
++ int32_t m_dstHeight;
++ int32_t m_dstYPitch;
++ int32_t m_dstUVPitch;
++ DpColorFormat m_dstFormat;
++ DP_PROFILE_ENUM m_dstProfile;
++ DpSecure m_dstSecure;
++ bool m_dstFlush;
++ DpStream *m_pPqStream;
++ DpChannel *m_pPqChannel;
++ DpAutoBufferPool *m_pPqPool;
++ int32_t m_pqBuffer;
++ int32_t m_cropXStart;
++ int32_t m_cropYStart;
++ int32_t m_cropWidth;
++ int32_t m_cropHeight;
++ int32_t m_cropSubPixelX;
++ int32_t m_cropSubPixelY;
++ int32_t m_targetXStart;
++ int32_t m_targetYStart;
++ int32_t m_rotation;
++ bool m_frameChange;
++ bool m_flipStatus;
++ bool m_ditherStatus;
++ bool m_adaptiveLuma;
++ uint32_t m_userID;
++ DpPqConfig m_PqConfig;
++ uint32_t m_PqID;
++ uint32_t m_engFlag;
++ //Compatible to 89
++ int mRotate;
++ int mFlip;
++ int mTdshp;
++
++ DpStream *mStream;
++ DpChannel *mChannel;
++ DpBufferPool *mSrcPool;
++ DpBufferPool *mDstPool;
++ DpPortOption *mSrcPort;
++ DpPortOption *mDstPort;
++ int mSrcBufferId;
++ int mDstBufferId;
++ unsigned int mDumpRegFlags;
++};
++
++#endif // __DP_BLIT_STREAM_H__
+diff --git a/include/media/stagefright/dpframework/DpDataType.h b/include/media/stagefright/dpframework/DpDataType.h
+new file mode 100644
+index 000000000..d858e9727
+--- /dev/null
++++ b/include/media/stagefright/dpframework/DpDataType.h
+@@ -0,0 +1,450 @@
++#ifndef __DP_DATA_TYPE_H__
++#define __DP_DATA_TYPE_H__
++
++#ifndef __KERNEL__
++#include <stdio.h>
++#include <stdlib.h>
++#include <assert.h>
++#include <string.h>
++#include <math.h>
++#endif
++
++#ifndef MAX
++ #define MAX(x, y) ((x) >= (y))? (x): (y)
++#endif // MAX
++
++#ifndef MIN
++ #define MIN(x, y) ((x) <= (y))? (x): (y)
++#endif // MIN
++
++#ifndef __KERNEL__
++class DpStream;
++class DpChannel;
++
++class DpBasicBufferPool;
++class DpAutoBufferPool;
++class DpCommand;
++class DpBufferPool;
++#endif
++
++typedef unsigned long long DpJobID;
++typedef int DpEngineType;
++
++typedef enum DP_STATUS_ENUM
++{
++ DP_STATUS_ABORTED_BY_USER = 4,
++ DP_STATUS_ALL_TEST_DONE = 3,
++ DP_STATUS_ALL_TPIPE_DONE = 2,
++ DP_STATUS_BUFFER_DONE = 1,
++ DP_STATUS_RETURN_SUCCESS = 0,
++ DP_STATUS_INVALID_PARAX = -1,
++ DP_STATUS_INVALID_PORT = -2,
++ DP_STATUS_INVALID_PATH = -3,
++ DP_STATUS_INVALID_FILE = -4,
++ DP_STATUS_INVALID_CHANNEL = -5,
++ DP_STATUS_INVALID_BUFFER = -6,
++ DP_STATUS_INVALID_STATE = -7,
++ DP_STATUS_INVALID_ENGINE = -8,
++ DP_STATUS_INVALID_FORMAT = -9,
++ DP_STATUS_INVALID_X_INPUT = -10,
++ DP_STATUS_INVALID_Y_INPUT = -11,
++ DP_STATUS_INVALID_X_OUTPUT = -12,
++ DP_STATUS_INVALID_Y_OUTPUT = -13,
++ DP_STATUS_INVALID_X_ALIGN = -14,
++ DP_STATUS_INVALID_Y_ALIGN = -15,
++ DP_STATUS_INVALID_WIDTH = -16,
++ DP_STATUS_INVALID_HEIGHT = -17,
++ DP_STATUS_INVALID_CROP = -18,
++ DP_STATUS_INVALID_ANGLE = -19,
++ DP_STATUS_INVALID_EVENT = -20,
++ DP_STATUS_INVALID_OPCODE = -21,
++ DP_STATUS_CAN_NOT_MERGE = -22,
++ DP_STATUS_OUT_OF_MEMORY = -23,
++ DP_STATUS_BUFFER_FULL = -24,
++ DP_STATUS_BUFFER_EMPTY = -25,
++ DP_STATUS_OPERATION_FAILED = -26,
++ DP_STATUS_OVER_MAX_BRANCH = -27,
++ DP_STATUS_OVER_MAX_ENGINE = -28,
++ DP_STATUS_OVER_MAX_BACKUP = -29,
++ DP_STATUS_SCHEDULE_ERROR = -30,
++ DP_STATUS_OVER_MAX_WIDTH = -31,
++ DP_STATUS_OVER_MAX_HEIGHT = -32,
++ DP_STATUS_LEFT_EDGE_ERROR = -33,
++ DP_STATUS_RIGHT_EDGE_ERROR = -34,
++ DP_STATUS_TOP_EDGE_ERROR = -35,
++ DP_STATUS_BOTTOM_EDGE_ERROR = -36,
++ DP_STATUS_X_LESS_THAN_LAST = -37,
++ DP_STATUS_Y_LESS_THAN_LAST = -38,
++ DP_STATUS_UNWANTED_X_CAL = -39,
++ DP_STATUS_LOSS_OVER_WIDTH = -40,
++ DP_STATUS_LOSS_OVER_HEIGHT = -41,
++ DP_STATUS_X_ALIGN_ERROR = -42,
++ DP_STATUS_Y_ALIGN_ERROR = -43,
++ DP_STATUS_X_OUT_OVERLAP = -44,
++ DP_STATUS_Y_OUT_OVERLAP = -45,
++ DP_STATUS_BACK_LE_FORWARD = -46,
++ DP_STATUS_UNKNOWN_ERROR = -47,
++} DP_STATUS_ENUM;
++
++
++typedef enum DP_MEMORY_ENUM
++{
++ DP_MEMORY_VA,
++ DP_MEMORY_ION,
++ DP_MEMORY_PHY,
++ DP_MEMORY_MVA
++} DP_MEMORY_ENUM;
++
++typedef struct DpJPEGEnc_Config_st // for JPEG port only
++{
++ int32_t fileDesc;
++ uint32_t size;
++ uint32_t fQuality;
++ uint32_t soi_en;
++ void *memSWAddr[3];
++} DpJPEGEnc_Config;
++
++typedef struct DpVEnc_Config // for VENC port only
++{
++ /* Venc Modify + */
++ unsigned long rVencDrvHandle;
++ /* Venc Modify - */
++ uint32_t memYUVMVAAddr[3];
++ uint32_t memYUVMVASize[3];
++ void *memYUVSWAddr[3];
++ void *memOutputSWAddr[3];
++
++ uint32_t* pNumPABuffer;
++ uint32_t* pPABuffer;
++ uint64_t* pConfigFrameCount;
++ uint64_t* pDequeueFrameCount;
++ DpCommand* pVEncCommander;
++} DpVEnc_Config;
++
++
++#ifndef __KERNEL__
++class DpRect
++{
++public:
++
++ enum
++ {
++ eINVALID_VALUE = -1,
++ eINITIAL_VALUE = 0 //TBD, why to set as "0"?
++ };
++
++ inline DpRect(void)
++ : x(eINITIAL_VALUE), sub_x(eINITIAL_VALUE),
++ y(eINITIAL_VALUE), sub_y(eINITIAL_VALUE),
++ w(eINITIAL_VALUE), h(eINITIAL_VALUE)
++ {}
++
++ inline DpRect(int32_t in_x, int32_t in_y, int32_t in_w, int32_t in_h,
++ int32_t in_sub_x = 0, int32_t in_sub_y = 0)
++ : x(in_x),
++ sub_x(in_sub_x),
++ y(in_y),
++ sub_y(in_sub_y),
++ w(in_w),
++ h(in_h)
++ {}
++
++ inline DpRect(const DpRect& rt)
++ : x(rt.x),
++ sub_x(rt.sub_x),
++ y(rt.y),
++ sub_y(rt.sub_y),
++ w(rt.w),
++ h(rt.h)
++ {}
++
++ ~DpRect(void) {}
++
++ inline DpRect& operator= (const DpRect rval)
++ {
++ x = rval.x;
++ sub_x = rval.sub_x;
++ y = rval.y;
++ sub_y = rval.sub_y;
++ w = rval.w;
++ h = rval.h;
++ return *this;
++ }
++
++ int32_t x;
++ int32_t sub_x;
++ int32_t y;
++ int32_t sub_y;
++ int32_t w;
++ int32_t h;
++};
++#endif
++
++typedef enum DP_PROFILE_ENUM
++{
++ DP_PROFILE_BT601, //Limited range
++ DP_PROFILE_BT709,
++ DP_PROFILE_JPEG,
++ DP_PROFILE_FULL_BT601 = DP_PROFILE_JPEG
++} DP_PROFILE_ENUM;
++
++
++typedef enum DP_STREAM_ID_ENUM
++{
++ DP_BLITSTREAM = 0x10000000,
++ DP_FRAGSTREAM = 0x20000000,
++ DP_ISPSTREAM = 0x30000000,
++ DP_ASYNCBLITSTREAM = 0x40000000,
++ DP_UNKNOWN_STREAM = 0xF0000000,
++} DP_STREAM_ID_ENUM;
++
++typedef enum DP_MEDIA_TYPE_ENUM
++{
++ MEDIA_UNKNOWN,
++ MEDIA_VIDEO,
++ MEDIA_PICTURE,
++ MEDIA_ISP_PREVIEW
++} DP_MEDIA_TYPE_ENUM;
++
++typedef struct
++{
++ uint32_t id;
++ uint32_t timeStamp;
++ uint32_t reserved[28]; // padding and reserved
++} DpVideoParam;
++
++typedef struct
++{
++ bool withHist;
++ uint32_t info[20];
++ uint32_t reserved[9]; // padding and reserved
++} DpImageParam;
++
++struct DpPqParam {
++ bool enable;
++ DP_MEDIA_TYPE_ENUM scenario;
++
++ union {
++ DpVideoParam video;
++ DpImageParam image;
++ } u;
++};
++
++struct DpPqConfig {
++ uint32_t enSharp;
++ uint32_t enDC;
++ uint32_t enColor;
++};
++
++
++//FMT GROUP , 0-RGB , 1-YUV , 2-Bayer raw , 3-compressed format
++#define DP_COLORFMT_PACK(VIDEO, PLANE, COPLANE, HFACTOR, VFACTOR, BITS, GROUP ,SWAP_ENABLE, UNIQUEID) \
++ ((VIDEO << 27) | \
++ (PLANE << 24) | \
++ (COPLANE << 22) | \
++ (HFACTOR << 20) | \
++ (VFACTOR << 18) | \
++ (BITS << 8) | \
++ (GROUP << 6) | \
++ (SWAP_ENABLE << 5) | \
++ (UNIQUEID << 0))
++
++#define DP_COLOR_GET_UFP_ENABLE(color) ((0x20000000 & color) >> 29)
++#define DP_COLOR_GET_INTERLACED_MODE(color) ((0x10000000 & color) >> 28)
++#define DP_COLOR_GET_BLOCK_MODE(color) ((0x08000000 & color) >> 27)
++#define DP_COLOR_GET_PLANE_COUNT(color) ((0x07000000 & color) >> 24)
++#define DP_COLOR_IS_UV_COPLANE(color) ((0x00C00000 & color) >> 22)
++#define DP_COLOR_GET_H_SUBSAMPLE(color) ((0x00300000 & color) >> 20)
++#define DP_COLOR_GET_V_SUBSAMPLE(color) ((0x000C0000 & color) >> 18)
++#define DP_COLOR_BITS_PER_PIXEL(color) ((0x0003FF00 & color) >> 8)
++#define DP_COLOR_GET_COLOR_GROUP(color) ((0x000000C0 & color) >> 6)
++#define DP_COLOR_GET_SWAP_ENABLE(color) ((0x00000020 & color) >> 5)
++#define DP_COLOR_GET_UNIQUE_ID(color) ((0x0000001F & color) >> 0)
++#define DP_COLOR_GET_HW_FORMAT(color) ((0x0000001F & color) >> 0)
++
++typedef enum DP_COLOR_ENUM
++{
++ DP_COLOR_UNKNOWN = 0,
++ DP_COLOR_FULLG8 = DP_COLORFMT_PACK(0, 1, 0, 0, 0, 8, 3, 0, 20),
++ DP_COLOR_FULLG10 = DP_COLORFMT_PACK(0, 1, 0, 0, 0, 10, 3, 0, 21),
++ DP_COLOR_FULLG12 = DP_COLORFMT_PACK(0, 1, 0, 0, 0, 12, 3, 0, 22),
++ DP_COLOR_FULLG14 = DP_COLORFMT_PACK(0, 1, 0, 0, 0, 14, 3, 0, 26),
++ DP_COLOR_UFO10 = DP_COLORFMT_PACK(0, 1, 0, 0, 0, 10, 3, 0, 27),
++
++ DP_COLOR_BAYER8 = DP_COLORFMT_PACK(0, 1, 0, 0, 0, 8, 2, 0, 20),
++ DP_COLOR_BAYER10 = DP_COLORFMT_PACK(0, 1, 0, 0, 0, 10, 2, 0, 21),
++ DP_COLOR_BAYER12 = DP_COLORFMT_PACK(0, 1, 0, 0, 0, 12, 2, 0, 22),
++
++ // Unified format
++ DP_COLOR_GREY = DP_COLORFMT_PACK(0, 1, 0, 0, 0, 8, 1, 0, 7),
++
++ DP_COLOR_RGB565 = DP_COLORFMT_PACK(0, 1, 0, 0, 0, 16, 0, 0, 0),
++ DP_COLOR_BGR565 = DP_COLORFMT_PACK(0, 1, 0, 0, 0, 16, 0, 1, 0),
++ DP_COLOR_RGB888 = DP_COLORFMT_PACK(0, 1, 0, 0, 0, 24, 0, 1, 1),
++ DP_COLOR_BGR888 = DP_COLORFMT_PACK(0, 1, 0, 0, 0, 24, 0, 0, 1),
++ DP_COLOR_RGBA8888 = DP_COLORFMT_PACK(0, 1, 0, 0, 0, 32, 0, 1, 2),
++ DP_COLOR_BGRA8888 = DP_COLORFMT_PACK(0, 1, 0, 0, 0, 32, 0, 0, 2),
++ DP_COLOR_ARGB8888 = DP_COLORFMT_PACK(0, 1, 0, 0, 0, 32, 0, 1, 3),
++ DP_COLOR_ABGR8888 = DP_COLORFMT_PACK(0, 1, 0, 0, 0, 32, 0, 0, 3),
++
++ DP_COLOR_UYVY = DP_COLORFMT_PACK(0, 1, 0, 1, 0, 16, 1, 0, 4),
++ DP_COLOR_VYUY = DP_COLORFMT_PACK(0, 1, 0, 1, 0, 16, 1, 1, 4),
++ DP_COLOR_YUYV = DP_COLORFMT_PACK(0, 1, 0, 1, 0, 16, 1, 0, 5),
++ DP_COLOR_YVYU = DP_COLORFMT_PACK(0, 1, 0, 1, 0, 16, 1, 1, 5),
++
++ DP_COLOR_I420 = DP_COLORFMT_PACK(0, 3, 0, 1, 1, 8, 1, 0, 8),
++ DP_COLOR_YV12 = DP_COLORFMT_PACK(0, 3, 0, 1, 1, 8, 1, 1, 8),
++ DP_COLOR_I422 = DP_COLORFMT_PACK(0, 3, 0, 1, 0, 8, 1, 0, 9),
++ DP_COLOR_YV16 = DP_COLORFMT_PACK(0, 3, 0, 1, 0, 8, 1, 1, 9),
++ DP_COLOR_I444 = DP_COLORFMT_PACK(0, 3, 0, 0, 0, 8, 1, 0, 10),
++ DP_COLOR_YV24 = DP_COLORFMT_PACK(0, 3, 0, 0, 0, 8, 1, 1, 10),
++
++ DP_COLOR_NV12 = DP_COLORFMT_PACK(0, 2, 1, 1, 1, 8, 1, 0, 12),
++ DP_COLOR_NV21 = DP_COLORFMT_PACK(0, 2, 1, 1, 1, 8, 1, 1, 12),
++ DP_COLOR_NV16 = DP_COLORFMT_PACK(0, 2, 1, 1, 0, 8, 1, 0, 13),
++ DP_COLOR_NV61 = DP_COLORFMT_PACK(0, 2, 1, 1, 0, 8, 1, 1, 13),
++ DP_COLOR_NV24 = DP_COLORFMT_PACK(0, 2, 1, 0, 0, 8, 1, 0, 14),
++ DP_COLOR_NV42 = DP_COLORFMT_PACK(0, 2, 1, 0, 0, 8, 1, 1, 14),
++
++ // Mediatek proprietary format
++ DP_COLOR_420_BLKP_UFO = DP_COLORFMT_PACK(5, 2, 1, 1, 1, 256, 1, 0, 12),//Frame mode + Block mode
++ DP_COLOR_420_BLKP = DP_COLORFMT_PACK(1, 2, 1, 1, 1, 256, 1, 0, 12),//Frame mode + Block mode
++ DP_COLOR_420_BLKI = DP_COLORFMT_PACK(3, 2, 1, 1, 1, 256, 1, 0, 12),//Field mode + Block mode
++ DP_COLOR_422_BLKP = DP_COLORFMT_PACK(1, 1, 0, 1, 0, 512, 1, 0, 4), //Frame mode
++
++ DP_COLOR_PARGB8888 = DP_COLORFMT_PACK(0, 1, 0, 0, 0, 32, 0, 0, 26),
++ DP_COLOR_XARGB8888 = DP_COLORFMT_PACK(0, 1, 0, 0, 0, 32, 0, 0, 27),
++ DP_COLOR_PABGR8888 = DP_COLORFMT_PACK(0, 1, 0, 0, 0, 32, 0, 0, 28),
++ DP_COLOR_XABGR8888 = DP_COLORFMT_PACK(0, 1, 0, 0, 0, 32, 0, 0, 29),
++
++ DP_COLOR_IYU2 = DP_COLORFMT_PACK(0, 1, 0, 0, 0, 24, 1, 0, 25),
++ DP_COLOR_YUV444 = DP_COLORFMT_PACK(0, 1, 0, 0, 0, 24, 1, 0, 30),
++// DP_COLOR_YUV422I = DP_COLORFMT_PACK(1, 0, 1, 0, 16, 1, 41),//Dup to DP_COLOR_YUYV
++// DP_COLOR_Y800 = DP_COLORFMT_PACK(1, 0, 1, 0, 8, 1, 42),//Dup to DP_COLOR_GREY
++// DP_COLOR_COMPACT_RAW1 = DP_COLORFMT_PACK(1, 0, 1, 0, 10, 2, 43),//Dup to Bayer10
++// DP_COLOR_420_3P_YVU = DP_COLORFMT_PACK(3, 0, 1, 1, 8, 1, 44),//Dup to DP_COLOR_YV12
++} DP_COLOR_ENUM;
++
++// Legacy for 6589 compatible
++typedef DP_COLOR_ENUM DpColorFormat;
++
++#define eYUV_420_3P DP_COLOR_I420
++#define eYUV_420_2P_YUYV DP_COLOR_YUYV
++#define eYUV_420_2P_UYVY DP_COLOR_UYVY
++#define eYUV_420_2P_YVYU DP_COLOR_YVYU
++#define eYUV_420_2P_VYUY DP_COLOR_VYUY
++#define eYUV_420_2P_ISP_BLK DP_COLOR_420_BLKP
++#define eYUV_420_2P_VDO_BLK DP_COLOR_420_BLKI
++#define eYUV_422_3P DP_COLOR_I422
++#define eYUV_422_2P DP_COLOR_NV16
++#define eYUV_422_I DP_COLOR_YUYV
++#define eYUV_422_I_BLK DP_COLOR_422_BLKP
++#define eYUV_444_3P DP_COLOR_I444
++#define eYUV_444_2P DP_COLOR_NV24
++#define eYUV_444_1P DP_COLOR_YUV444
++#define eBAYER8 DP_COLOR_BAYER8
++#define eBAYER10 DP_COLOR_BAYER10
++#define eBAYER12 DP_COLOR_BAYER12
++#define eRGB565 DP_COLOR_RGB565
++#define eBGR565 DP_COLOR_BGR565
++#define eRGB888 DP_COLOR_RGB888
++#define eBGR888 DP_COLOR_BGR888
++#define eARGB8888 DP_COLOR_ARGB8888
++#define eABGR8888 DP_COLOR_ABGR8888
++#define DP_COLOR_XRGB8888 DP_COLOR_ARGB8888
++#define DP_COLOR_XBGR8888 DP_COLOR_ABGR8888
++#define eRGBA8888 DP_COLOR_RGBA8888
++#define eBGRA8888 DP_COLOR_BGRA8888
++#define eXRGB8888 DP_COLOR_XRGB8888
++#define eXBGR8888 DP_COLOR_XBGR8888
++#define DP_COLOR_RGBX8888 DP_COLOR_RGBA8888
++#define DP_COLOR_BGRX8888 DP_COLOR_BGRA8888
++#define eRGBX8888 DP_COLOR_RGBX8888
++#define eBGRX8888 DP_COLOR_BGRX8888
++#define ePARGB8888 DP_COLOR_PARGB8888
++#define eXARGB8888 DP_COLOR_XARGB8888
++#define ePABGR8888 DP_COLOR_PABGR8888
++#define eXABGR8888 DP_COLOR_XABGR8888
++#define eGREY DP_COLOR_GREY
++#define eI420 DP_COLOR_I420
++#define eYV12 DP_COLOR_YV12
++#define eIYU2 DP_COLOR_IYU2
++
++
++#define eYV21 DP_COLOR_I420
++#define eNV12_BLK DP_COLOR_420_BLKP
++#define eNV12_BLK_FCM DP_COLOR_420_BLKI
++#define eYUV_420_3P_YVU DP_COLOR_YV12
++
++#define eNV12_BP DP_COLOR_420_BLKP
++#define eNV12_BI DP_COLOR_420_BLKI
++#define eNV12 DP_COLOR_NV12
++#define eNV21 DP_COLOR_NV21
++#define eI422 DP_COLOR_I422
++#define eYV16 DP_COLOR_YV16
++#define eNV16 DP_COLOR_NV16
++#define eNV61 DP_COLOR_NV61
++#define eUYVY DP_COLOR_UYVY
++#define eVYUY DP_COLOR_VYUY
++#define eYUYV DP_COLOR_YUYV
++#define eYVYU DP_COLOR_YVYU
++#define eUYVY_BP DP_COLOR_422_BLKP
++#define eI444 DP_COLOR_I444
++#define eNV24 DP_COLOR_NV24
++#define eNV42 DP_COLOR_NV42
++#define DP_COLOR_YUY2 DP_COLOR_YUYV
++#define eYUY2 DP_COLOR_YUY2
++#define eY800 DP_COLOR_GREY
++//#define eIYU2
++#define eMTKYUV DP_COLOR_422_BLKP
++
++#define eCompactRaw1 DP_COLOR_BAYER10
++
++
++enum DpInterlaceFormat
++{
++ eInterlace_None,
++ eTop_Field,
++ eBottom_Field
++};
++
++enum DpSecure
++{
++ DP_SECURE_NONE = 0,
++ DP_SECURE = 1,
++ DP_SECURE_SHIFT = 8
++};
++
++#define MAX_NUM_READBACK_REGS (20)
++
++#define VENC_ENABLE_FLAG (0x08967)
++
++#ifndef __KERNEL__
++struct DpPortOption
++{
++ int width;
++ int height;
++ DpRect ROI;
++ DpColorFormat format;
++ DpInterlaceFormat interlace;
++
++ enum DpPort
++ {
++ eLCD0_PORT,
++ eLCD1_PORT,
++ eHDMI_PORT,
++ eTVOUT_PORT,
++ eOVERLAY_PORT,
++ eVIRTUAL_PORT,
++ eMEMORY_PORT
++ };
++
++ DpPort port;
++
++ int overlayID; // setting if choose port = eOVERLAY
++ int virtualID; // setting if choose port = eVIRTUAL_PORT
++ DpBufferPool *buffer; // setting if choose port = eMEMORY
++};
++#endif // __KERNEL__
++
++#endif // __DP_DATA_TYPE_H__
+diff --git a/media/libstagefright/Android.mk b/media/libstagefright/Android.mk
+index e708f68ae..322253c0d 100644
+--- a/media/libstagefright/Android.mk
++++ b/media/libstagefright/Android.mk
+@@ -202,6 +202,17 @@ LOCAL_C_INCLUDES += \
+ $(TOP)/hardware/samsung/exynos4/include
+ endif
+
++# Mediatek
++ifeq ($(strip $(BOARD_HAS_MTK_HARDWARE)),true)
++LOCAL_CFLAGS += -DMTK_HARDWARE
++
++LOCAL_C_INCLUDES += \
++ $(TOP)/hardware/mediatek/dpframework/inc
++
++LOCAL_SHARED_LIBRARIES += \
++ libdpframework
++endif
++
+ LOCAL_MODULE:= libstagefright
+
+ LOCAL_MODULE_TAGS := optional
+diff --git a/media/libstagefright/CameraSource.cpp b/media/libstagefright/CameraSource.cpp
+index aa582d2d2..8f542bf60 100644
+--- a/media/libstagefright/CameraSource.cpp
++++ b/media/libstagefright/CameraSource.cpp
+@@ -44,6 +44,10 @@
+ #define UNUSED_UNLESS_VERBOSE(x)
+ #endif
+
++#ifdef MTK_HARDWARE
++#define OMX_MTK_COLOR_FormatYV12 0x7F000200
++#endif
++
+ namespace android {
+
+ static const int64_t CAMERA_SOURCE_TIMEOUT_NS = 3000000000LL;
+@@ -119,7 +123,11 @@ static int32_t getColorFormat(const char* colorFormat) {
+ }
+
+ if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV420P)) {
++#ifdef MTK_HARDWARE
++ return OMX_MTK_COLOR_FormatYV12;
++#else
+ return OMX_COLOR_FormatYUV420Planar;
++#endif
+ }
+
+ if (!strcmp(colorFormat, CameraParameters::PIXEL_FORMAT_YUV422SP)) {
+diff --git a/media/libstagefright/OMXCodec.cpp b/media/libstagefright/OMXCodec.cpp
+new file mode 100644
+index 000000000..20c59750e
+--- /dev/null
++++ b/media/libstagefright/OMXCodec.cpp
+@@ -0,0 +1,4549 @@
++/*
++ * Copyright (C) 2009 The Android Open Source Project
++ *
++ * Licensed under the Apache License, Version 2.0 (the "License");
++ * you may not use this file except in compliance with the License.
++ * You may obtain a copy of the License at
++ *
++ * http://www.apache.org/licenses/LICENSE-2.0
++ *
++ * Unless required by applicable law or agreed to in writing, software
++ * distributed under the License is distributed on an "AS IS" BASIS,
++ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
++ * See the License for the specific language governing permissions and
++ * limitations under the License.
++ */
++
++#include <inttypes.h>
++
++//#define LOG_NDEBUG 0
++#define LOG_TAG "OMXCodec"
++
++#ifdef __LP64__
++#define OMX_ANDROID_COMPILE_AS_32BIT_ON_64BIT_PLATFORMS
++#endif
++
++#include <utils/Log.h>
++
++#include "include/AACEncoder.h"
++
++#include "include/ESDS.h"
++
++#include <binder/IServiceManager.h>
++#include <binder/MemoryDealer.h>
++#include <binder/ProcessState.h>
++#include <HardwareAPI.h>
++#include <media/stagefright/foundation/ADebug.h>
++#include <media/IMediaPlayerService.h>
++#include <media/stagefright/ACodec.h>
++#include <media/stagefright/MediaBuffer.h>
++#include <media/stagefright/MediaBufferGroup.h>
++#include <media/stagefright/MediaDefs.h>
++#include <media/stagefright/MediaCodecList.h>
++#include <media/stagefright/MediaExtractor.h>
++#include <media/stagefright/MetaData.h>
++#include <media/stagefright/OMXCodec.h>
++#include <media/stagefright/SurfaceUtils.h>
++#include <media/stagefright/Utils.h>
++#include <media/stagefright/SkipCutBuffer.h>
++#include <utils/Vector.h>
++
++#include <OMX_AudioExt.h>
++#include <OMX_Component.h>
++#include <OMX_IndexExt.h>
++#include <OMX_VideoExt.h>
++#include <OMX_AsString.h>
++
++#include "include/avc_utils.h"
++
++#ifdef USE_S3D_SUPPORT
++#include "Exynos_OMX_Def.h"
++#include "ExynosHWCService.h"
++#endif
++
++namespace android {
++
++// Treat time out as an error if we have not received any output
++// buffers after 3 seconds.
++const static int64_t kBufferFilledEventTimeOutNs = 3000000000LL;
++
++// OMX Spec defines less than 50 color formats. If the query for
++// color format is executed for more than kMaxColorFormatSupported,
++// the query will fail to avoid looping forever.
++// 1000 is more than enough for us to tell whether the omx
++// component in question is buggy or not.
++const static uint32_t kMaxColorFormatSupported = 1000;
++
++#define FACTORY_CREATE_ENCODER(name) \
++static sp<MediaSource> Make##name(const sp<MediaSource> &source, const sp<MetaData> &meta) { \
++ return new name(source, meta); \
++}
++
++#define FACTORY_REF(name) { #name, Make##name },
++
++FACTORY_CREATE_ENCODER(AACEncoder)
++
++static sp<MediaSource> InstantiateSoftwareEncoder(
++ const char *name, const sp<MediaSource> &source,
++ const sp<MetaData> &meta) {
++ struct FactoryInfo {
++ const char *name;
++ sp<MediaSource> (*CreateFunc)(const sp<MediaSource> &, const sp<MetaData> &);
++ };
++
++ static const FactoryInfo kFactoryInfo[] = {
++ FACTORY_REF(AACEncoder)
++ };
++ for (size_t i = 0;
++ i < sizeof(kFactoryInfo) / sizeof(kFactoryInfo[0]); ++i) {
++ if (!strcmp(name, kFactoryInfo[i].name)) {
++ return (*kFactoryInfo[i].CreateFunc)(source, meta);
++ }
++ }
++
++ return NULL;
++}
++
++#undef FACTORY_CREATE_ENCODER
++#undef FACTORY_REF
++
++#define CODEC_LOGI(x, ...) ALOGI("[%s] " x, mComponentName, ##__VA_ARGS__)
++#define CODEC_LOGV(x, ...) ALOGV("[%s] " x, mComponentName, ##__VA_ARGS__)
++#define CODEC_LOGW(x, ...) ALOGW("[%s] " x, mComponentName, ##__VA_ARGS__)
++#define CODEC_LOGE(x, ...) ALOGE("[%s] " x, mComponentName, ##__VA_ARGS__)
++
++struct OMXCodecObserver : public BnOMXObserver {
++ OMXCodecObserver() {
++ }
++
++ void setCodec(const sp<OMXCodec> &target) {
++ mTarget = target;
++ }
++
++ // from IOMXObserver
++ virtual void onMessages(const std::list<omx_message> &messages) {
++ sp<OMXCodec> codec = mTarget.promote();
++
++ if (codec.get() != NULL) {
++ Mutex::Autolock autoLock(codec->mLock);
++ for (std::list<omx_message>::const_iterator it = messages.cbegin();
++ it != messages.cend(); ++it) {
++ codec->on_message(*it);
++ }
++ codec.clear();
++ }
++ }
++
++protected:
++ virtual ~OMXCodecObserver() {}
++
++private:
++ wp<OMXCodec> mTarget;
++
++ OMXCodecObserver(const OMXCodecObserver &);
++ OMXCodecObserver &operator=(const OMXCodecObserver &);
++};
++
++template<class T>
++static void InitOMXParams(T *params) {
++ COMPILE_TIME_ASSERT_FUNCTION_SCOPE(sizeof(OMX_PTR) == 4); // check OMX_PTR is 4 bytes.
++ params->nSize = sizeof(T);
++ params->nVersion.s.nVersionMajor = 1;
++ params->nVersion.s.nVersionMinor = 0;
++ params->nVersion.s.nRevision = 0;
++ params->nVersion.s.nStep = 0;
++}
++
++static bool IsSoftwareCodec(const char *componentName) {
++ if (!strncmp("OMX.google.", componentName, 11)
++ || !strncmp("OMX.ffmpeg.", componentName, 11)) {
++ return true;
++ }
++
++ if (!strncmp("OMX.", componentName, 4)) {
++ return false;
++ }
++
++ return true;
++}
++
++// A sort order in which OMX software codecs are first, followed
++// by other (non-OMX) software codecs, followed by everything else.
++static int CompareSoftwareCodecsFirst(
++ const OMXCodec::CodecNameAndQuirks *elem1,
++ const OMXCodec::CodecNameAndQuirks *elem2) {
++ bool isOMX1 = !strncmp(elem1->mName.string(), "OMX.", 4);
++ bool isOMX2 = !strncmp(elem2->mName.string(), "OMX.", 4);
++
++ bool isSoftwareCodec1 = IsSoftwareCodec(elem1->mName.string());
++ bool isSoftwareCodec2 = IsSoftwareCodec(elem2->mName.string());
++
++ if (isSoftwareCodec1) {
++ if (!isSoftwareCodec2) { return -1; }
++
++ if (isOMX1) {
++ if (isOMX2) { return 0; }
++
++ return -1;
++ } else {
++ if (isOMX2) { return 0; }
++
++ return 1;
++ }
++
++ return -1;
++ }
++
++ if (isSoftwareCodec2) {
++ return 1;
++ }
++
++ return 0;
++}
++
++// static
++void OMXCodec::findMatchingCodecs(
++ const char *mime,
++ bool createEncoder, const char *matchComponentName,
++ uint32_t flags,
++ Vector<CodecNameAndQuirks> *matchingCodecs) {
++ matchingCodecs->clear();
++
++ const sp<IMediaCodecList> list = MediaCodecList::getInstance();
++ if (list == NULL) {
++ return;
++ }
++
++ size_t index = 0;
++ for (;;) {
++ ssize_t matchIndex =
++ list->findCodecByType(mime, createEncoder, index);
++
++ if (matchIndex < 0) {
++ break;
++ }
++
++ index = matchIndex + 1;
++
++ const sp<MediaCodecInfo> info = list->getCodecInfo(matchIndex);
++ CHECK(info != NULL);
++ const char *componentName = info->getCodecName();
++
++ // If a specific codec is requested, skip the non-matching ones.
++ if (matchComponentName && strcmp(componentName, matchComponentName)) {
++ continue;
++ }
++
++ // When requesting software-only codecs, only push software codecs
++ // When requesting hardware-only codecs, only push hardware codecs
++ // When there is request neither for software-only nor for
++ // hardware-only codecs, push all codecs
++ if (((flags & kSoftwareCodecsOnly) && IsSoftwareCodec(componentName)) ||
++ ((flags & kHardwareCodecsOnly) && !IsSoftwareCodec(componentName)) ||
++ (!(flags & (kSoftwareCodecsOnly | kHardwareCodecsOnly)))) {
++
++ ssize_t index = matchingCodecs->add();
++ CodecNameAndQuirks *entry = &matchingCodecs->editItemAt(index);
++ entry->mName = String8(componentName);
++ entry->mQuirks = getComponentQuirks(info);
++
++ ALOGV("matching '%s' quirks 0x%08x",
++ entry->mName.string(), entry->mQuirks);
++ }
++ }
++
++ if (flags & kPreferSoftwareCodecs) {
++ matchingCodecs->sort(CompareSoftwareCodecsFirst);
++ }
++}
++
++// static
++uint32_t OMXCodec::getComponentQuirks(
++ const sp<MediaCodecInfo> &info) {
++ uint32_t quirks = 0;
++ if (info->hasQuirk("requires-allocate-on-input-ports")) {
++ quirks |= kRequiresAllocateBufferOnInputPorts;
++ }
++ if (info->hasQuirk("requires-allocate-on-output-ports")) {
++ quirks |= kRequiresAllocateBufferOnOutputPorts;
++ }
++ if (info->hasQuirk("output-buffers-are-unreadable")) {
++ quirks |= kOutputBuffersAreUnreadable;
++ }
++
++ return quirks;
++}
++
++// static
++bool OMXCodec::findCodecQuirks(const char *componentName, uint32_t *quirks) {
++ const sp<IMediaCodecList> list = MediaCodecList::getInstance();
++ if (list == NULL) {
++ return false;
++ }
++
++ ssize_t index = list->findCodecByName(componentName);
++
++ if (index < 0) {
++ return false;
++ }
++
++ const sp<MediaCodecInfo> info = list->getCodecInfo(index);
++ CHECK(info != NULL);
++ *quirks = getComponentQuirks(info);
++
++ return true;
++}
++
++// static
++sp<MediaSource> OMXCodec::Create(
++ const sp<IOMX> &omx,
++ const sp<MetaData> &meta, bool createEncoder,
++ const sp<MediaSource> &source,
++ const char *matchComponentName,
++ uint32_t flags,
++ const sp<ANativeWindow> &nativeWindow) {
++ int32_t requiresSecureBuffers;
++ if (source->getFormat()->findInt32(
++ kKeyRequiresSecureBuffers,
++ &requiresSecureBuffers)
++ && requiresSecureBuffers) {
++ flags |= kIgnoreCodecSpecificData;
++ flags |= kUseSecureInputBuffers;
++ }
++
++ const char *mime;
++ bool success = meta->findCString(kKeyMIMEType, &mime);
++ CHECK(success);
++
++ Vector<CodecNameAndQuirks> matchingCodecs;
++ findMatchingCodecs(
++ mime, createEncoder, matchComponentName, flags, &matchingCodecs);
++
++ if (matchingCodecs.isEmpty()) {
++ ALOGV("No matching codecs! (mime: %s, createEncoder: %s, "
++ "matchComponentName: %s, flags: 0x%x)",
++ mime, createEncoder ? "true" : "false", matchComponentName, flags);
++ return NULL;
++ }
++
++ sp<OMXCodecObserver> observer = new OMXCodecObserver;
++ IOMX::node_id node = 0;
++
++ for (size_t i = 0; i < matchingCodecs.size(); ++i) {
++ const char *componentNameBase = matchingCodecs[i].mName.string();
++ uint32_t quirks = matchingCodecs[i].mQuirks;
++ const char *componentName = componentNameBase;
++
++ AString tmp;
++ if (flags & kUseSecureInputBuffers) {
++ tmp = componentNameBase;
++ tmp.append(".secure");
++
++ componentName = tmp.c_str();
++ }
++
++ if (createEncoder) {
++ sp<MediaSource> softwareCodec =
++ InstantiateSoftwareEncoder(componentName, source, meta);
++
++ if (softwareCodec != NULL) {
++ ALOGV("Successfully allocated software codec '%s'", componentName);
++
++ return softwareCodec;
++ }
++ }
++
++ ALOGV("Attempting to allocate OMX node '%s'", componentName);
++
++ status_t err = omx->allocateNode(componentName, observer, &node);
++ if (err == OK) {
++ ALOGV("Successfully allocated OMX node '%s'", componentName);
++
++ sp<OMXCodec> codec = new OMXCodec(
++ omx, node, quirks, flags,
++ createEncoder, mime, componentName,
++ source, nativeWindow);
++
++ observer->setCodec(codec);
++
++ err = codec->configureCodec(meta);
++ if (err == OK) {
++ return codec;
++ }
++
++ ALOGV("Failed to configure codec '%s'", componentName);
++ }
++ }
++
++ return NULL;
++}
++
++status_t OMXCodec::parseHEVCCodecSpecificData(
++ const void *data, size_t size,
++ unsigned *profile, unsigned *level) {
++ const uint8_t *ptr = (const uint8_t *)data;
++
++ // verify minimum size and configurationVersion == 1.
++ if (size < 23 || ptr[0] != 1) {
++ return ERROR_MALFORMED;
++ }
++
++ *profile = (ptr[1] & 31);
++ *level = ptr[12];
++
++ ptr += 22;
++ size -= 22;
++
++ size_t numofArrays = (char)ptr[0];
++ ptr += 1;
++ size -= 1;
++ size_t j = 0, i = 0;
++ for (i = 0; i < numofArrays; ++i) {
++ if (size < 3) {
++ return ERROR_MALFORMED;
++ }
++ ptr += 1;
++ size -= 1;
++
++ // Num of nals
++ size_t numofNals = U16_AT(ptr);
++ ptr += 2;
++ size -= 2;
++
++ for (j = 0; j < numofNals; ++j) {
++ if (size < 2) {
++ return ERROR_MALFORMED;
++ }
++
++ size_t length = U16_AT(ptr);
++
++ ptr += 2;
++ size -= 2;
++
++ if (size < length) {
++ return ERROR_MALFORMED;
++ }
++ addCodecSpecificData(ptr, length);
++
++ ptr += length;
++ size -= length;
++ }
++ }
++ return OK;
++}
++
++status_t OMXCodec::parseAVCCodecSpecificData(
++ const void *data, size_t size,
++ unsigned *profile, unsigned *level) {
++ const uint8_t *ptr = (const uint8_t *)data;
++
++ // verify minimum size and configurationVersion == 1.
++ if (size < 7 || ptr[0] != 1) {
++ return ERROR_MALFORMED;
++ }
++
++ *profile = ptr[1];
++ *level = ptr[3];
++
++ // There is decodable content out there that fails the following
++ // assertion, let's be lenient for now...
++ // CHECK((ptr[4] >> 2) == 0x3f); // reserved
++
++ size_t lengthSize __unused = 1 + (ptr[4] & 3);
++
++ // commented out check below as H264_QVGA_500_NO_AUDIO.3gp
++ // violates it...
++ // CHECK((ptr[5] >> 5) == 7); // reserved
++
++ size_t numSeqParameterSets = ptr[5] & 31;
++
++ ptr += 6;
++ size -= 6;
++
++ for (size_t i = 0; i < numSeqParameterSets; ++i) {
++ if (size < 2) {
++ return ERROR_MALFORMED;
++ }
++
++ size_t length = U16_AT(ptr);
++
++ ptr += 2;
++ size -= 2;
++
++ if (size < length) {
++ return ERROR_MALFORMED;
++ }
++
++ addCodecSpecificData(ptr, length);
++
++ ptr += length;
++ size -= length;
++ }
++
++ if (size < 1) {
++ return ERROR_MALFORMED;
++ }
++
++ size_t numPictureParameterSets = *ptr;
++ ++ptr;
++ --size;
++
++ for (size_t i = 0; i < numPictureParameterSets; ++i) {
++ if (size < 2) {
++ return ERROR_MALFORMED;
++ }
++
++ size_t length = U16_AT(ptr);
++
++ ptr += 2;
++ size -= 2;
++
++ if (size < length) {
++ return ERROR_MALFORMED;
++ }
++
++ addCodecSpecificData(ptr, length);
++
++ ptr += length;
++ size -= length;
++ }
++
++ return OK;
++}
++
++status_t OMXCodec::configureCodec(const sp<MetaData> &meta) {
++ ALOGV("configureCodec protected=%d",
++ (mFlags & kEnableGrallocUsageProtected) ? 1 : 0);
++
++ if (!(mFlags & kIgnoreCodecSpecificData)) {
++ uint32_t type;
++ const void *data;
++ size_t size;
++ if (meta->findData(kKeyESDS, &type, &data, &size)) {
++ ESDS esds((const char *)data, size);
++ CHECK_EQ(esds.InitCheck(), (status_t)OK);
++
++ const void *codec_specific_data;
++ size_t codec_specific_data_size;
++ esds.getCodecSpecificInfo(
++ &codec_specific_data, &codec_specific_data_size);
++
++ addCodecSpecificData(
++ codec_specific_data, codec_specific_data_size);
++ } else if (meta->findData(kKeyAVCC, &type, &data, &size)) {
++ // Parse the AVCDecoderConfigurationRecord
++
++ unsigned profile, level;
++ status_t err;
++ if ((err = parseAVCCodecSpecificData(
++ data, size, &profile, &level)) != OK) {
++ ALOGE("Malformed AVC codec specific data.");
++ return err;
++ }
++
++ CODEC_LOGI(
++ "AVC profile = %u (%s), level = %u",
++ profile, AVCProfileToString(profile), level);
++ } else if (meta->findData(kKeyHVCC, &type, &data, &size)) {
++ // Parse the HEVCDecoderConfigurationRecord
++
++ unsigned profile, level;
++ status_t err;
++ if ((err = parseHEVCCodecSpecificData(
++ data, size, &profile, &level)) != OK) {
++ ALOGE("Malformed HEVC codec specific data.");
++ return err;
++ }
++
++ CODEC_LOGI(
++ "HEVC profile = %u , level = %u",
++ profile, level);
++ } else if (meta->findData(kKeyVorbisInfo, &type, &data, &size)) {
++ addCodecSpecificData(data, size);
++
++ CHECK(meta->findData(kKeyVorbisBooks, &type, &data, &size));
++ addCodecSpecificData(data, size);
++ } else if (meta->findData(kKeyOpusHeader, &type, &data, &size)) {
++ addCodecSpecificData(data, size);
++
++ CHECK(meta->findData(kKeyOpusCodecDelay, &type, &data, &size));
++ addCodecSpecificData(data, size);
++ CHECK(meta->findData(kKeyOpusSeekPreRoll, &type, &data, &size));
++ addCodecSpecificData(data, size);
++ }
++ }
++
++ int32_t bitRate = 0;
++ if (mIsEncoder) {
++ CHECK(meta->findInt32(kKeyBitRate, &bitRate));
++ }
++ if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_AMR_NB, mMIME)) {
++ setAMRFormat(false /* isWAMR */, bitRate);
++ } else if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_AMR_WB, mMIME)) {
++ setAMRFormat(true /* isWAMR */, bitRate);
++ } else if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_AAC, mMIME)) {
++ int32_t numChannels, sampleRate, aacProfile;
++ CHECK(meta->findInt32(kKeyChannelCount, &numChannels));
++ CHECK(meta->findInt32(kKeySampleRate, &sampleRate));
++
++ if (!meta->findInt32(kKeyAACProfile, &aacProfile)) {
++ aacProfile = OMX_AUDIO_AACObjectNull;
++ }
++
++ int32_t isADTS;
++ if (!meta->findInt32(kKeyIsADTS, &isADTS)) {
++ isADTS = false;
++ }
++
++ status_t err = setAACFormat(numChannels, sampleRate, bitRate, aacProfile, isADTS);
++ if (err != OK) {
++ CODEC_LOGE("setAACFormat() failed (err = %d)", err);
++ return err;
++ }
++ } else if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_MPEG, mMIME)) {
++ int32_t numChannels, sampleRate;
++ if (meta->findInt32(kKeyChannelCount, &numChannels)
++ && meta->findInt32(kKeySampleRate, &sampleRate)) {
++ // Since we did not always check for these, leave them optional
++ // and have the decoder figure it all out.
++ setRawAudioFormat(
++ mIsEncoder ? kPortIndexInput : kPortIndexOutput,
++ sampleRate,
++ numChannels);
++ }
++ } else if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_AC3, mMIME)) {
++ int32_t numChannels;
++ int32_t sampleRate;
++ CHECK(meta->findInt32(kKeyChannelCount, &numChannels));
++ CHECK(meta->findInt32(kKeySampleRate, &sampleRate));
++
++ status_t err = setAC3Format(numChannels, sampleRate);
++ if (err != OK) {
++ CODEC_LOGE("setAC3Format() failed (err = %d)", err);
++ return err;
++ }
++ } else if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_G711_ALAW, mMIME)
++ || !strcasecmp(MEDIA_MIMETYPE_AUDIO_G711_MLAW, mMIME)) {
++ // These are PCM-like formats with a fixed sample rate but
++ // a variable number of channels.
++
++ int32_t sampleRate;
++ int32_t numChannels;
++ CHECK(meta->findInt32(kKeyChannelCount, &numChannels));
++ if (!meta->findInt32(kKeySampleRate, &sampleRate)) {
++ sampleRate = 8000;
++ }
++
++ setG711Format(sampleRate, numChannels);
++ } else if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_RAW, mMIME)) {
++ CHECK(!mIsEncoder);
++
++ int32_t numChannels, sampleRate;
++ CHECK(meta->findInt32(kKeyChannelCount, &numChannels));
++ CHECK(meta->findInt32(kKeySampleRate, &sampleRate));
++
++ setRawAudioFormat(kPortIndexInput, sampleRate, numChannels);
++ }
++
++ if (!strncasecmp(mMIME, "video/", 6)) {
++
++ if (mIsEncoder) {
++ setVideoInputFormat(mMIME, meta);
++ } else {
++ status_t err = setVideoOutputFormat(
++ mMIME, meta);
++
++ if (err != OK) {
++ return err;
++ }
++ }
++ }
++
++ int32_t maxInputSize;
++ if (meta->findInt32(kKeyMaxInputSize, &maxInputSize)) {
++ setMinBufferSize(kPortIndexInput, (OMX_U32)maxInputSize);
++ }
++
++ initOutputFormat(meta);
++
++ if (mNativeWindow != NULL
++ && !mIsEncoder
++ && !strncasecmp(mMIME, "video/", 6)
++ && !strncmp(mComponentName, "OMX.", 4)) {
++ status_t err = initNativeWindow();
++ if (err != OK) {
++ return err;
++ }
++ }
++
++ return OK;
++}
++
++void OMXCodec::setMinBufferSize(OMX_U32 portIndex, OMX_U32 size) {
++ OMX_PARAM_PORTDEFINITIONTYPE def;
++ InitOMXParams(&def);
++ def.nPortIndex = portIndex;
++
++ status_t err = mOMX->getParameter(
++ mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
++ CHECK_EQ(err, (status_t)OK);
++
++ if ((portIndex == kPortIndexInput && (mQuirks & kInputBufferSizesAreBogus))
++ || (def.nBufferSize < size)) {
++ def.nBufferSize = size;
++ }
++
++ err = mOMX->setParameter(
++ mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
++ CHECK_EQ(err, (status_t)OK);
++
++ err = mOMX->getParameter(
++ mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
++ CHECK_EQ(err, (status_t)OK);
++
++ // Make sure the setting actually stuck.
++ if (portIndex == kPortIndexInput
++ && (mQuirks & kInputBufferSizesAreBogus)) {
++ CHECK_EQ(def.nBufferSize, size);
++ } else {
++ CHECK(def.nBufferSize >= size);
++ }
++}
++
++status_t OMXCodec::setVideoPortFormatType(
++ OMX_U32 portIndex,
++ OMX_VIDEO_CODINGTYPE compressionFormat,
++ OMX_COLOR_FORMATTYPE colorFormat) {
++ OMX_VIDEO_PARAM_PORTFORMATTYPE format;
++ InitOMXParams(&format);
++ format.nPortIndex = portIndex;
++ format.nIndex = 0;
++ bool found = false;
++
++ OMX_U32 index = 0;
++ for (;;) {
++ format.nIndex = index;
++ status_t err = mOMX->getParameter(
++ mNode, OMX_IndexParamVideoPortFormat,
++ &format, sizeof(format));
++
++ if (err != OK) {
++ return err;
++ }
++
++ // The following assertion is violated by TI's video decoder.
++ // CHECK_EQ(format.nIndex, index);
++
++#if 1
++ CODEC_LOGV("portIndex: %u, index: %u, eCompressionFormat=%d eColorFormat=%d",
++ portIndex,
++ index, format.eCompressionFormat, format.eColorFormat);
++#endif
++
++ if (format.eCompressionFormat == compressionFormat
++ && format.eColorFormat == colorFormat) {
++ found = true;
++ break;
++ }
++
++ ++index;
++ if (index >= kMaxColorFormatSupported) {
++ CODEC_LOGE("color format %d or compression format %d is not supported",
++ colorFormat, compressionFormat);
++ return UNKNOWN_ERROR;
++ }
++ }
++
++ if (!found) {
++ return UNKNOWN_ERROR;
++ }
++
++ CODEC_LOGV("found a match.");
++ status_t err = mOMX->setParameter(
++ mNode, OMX_IndexParamVideoPortFormat,
++ &format, sizeof(format));
++
++ return err;
++}
++
++static size_t getFrameSize(
++ OMX_COLOR_FORMATTYPE colorFormat, int32_t width, int32_t height) {
++ switch (colorFormat) {
++ case OMX_COLOR_FormatYCbYCr:
++ case OMX_COLOR_FormatCbYCrY:
++ return width * height * 2;
++
++ case OMX_COLOR_FormatYUV420Planar:
++ case OMX_COLOR_FormatYUV420SemiPlanar:
++ case OMX_TI_COLOR_FormatYUV420PackedSemiPlanar:
++ /*
++ * FIXME: For the Opaque color format, the frame size does not
++ * need to be (w*h*3)/2. It just needs to
++ * be larger than certain minimum buffer size. However,
++ * currently, this opaque format has been tested only on
++ * YUV420 formats. If that is changed, then we need to revisit
++ * this part in the future
++ */
++ case OMX_COLOR_FormatAndroidOpaque:
++ return (width * height * 3) / 2;
++
++ default:
++ CHECK(!"Should not be here. Unsupported color format.");
++ break;
++ }
++ return 0;
++}
++
++status_t OMXCodec::findTargetColorFormat(
++ const sp<MetaData>& meta, OMX_COLOR_FORMATTYPE *colorFormat) {
++ ALOGV("findTargetColorFormat");
++ CHECK(mIsEncoder);
++
++ *colorFormat = OMX_COLOR_FormatYUV420SemiPlanar;
++ int32_t targetColorFormat;
++ if (meta->findInt32(kKeyColorFormat, &targetColorFormat)) {
++ *colorFormat = (OMX_COLOR_FORMATTYPE) targetColorFormat;
++ }
++
++ // Check whether the target color format is supported.
++ return isColorFormatSupported(*colorFormat, kPortIndexInput);
++}
++
++status_t OMXCodec::isColorFormatSupported(
++ OMX_COLOR_FORMATTYPE colorFormat, int portIndex) {
++ ALOGV("isColorFormatSupported: %d", static_cast<int>(colorFormat));
++
++ // Enumerate all the color formats supported by
++ // the omx component to see whether the given
++ // color format is supported.
++ OMX_VIDEO_PARAM_PORTFORMATTYPE portFormat;
++ InitOMXParams(&portFormat);
++ portFormat.nPortIndex = portIndex;
++ OMX_U32 index = 0;
++ portFormat.nIndex = index;
++ while (true) {
++ if (OMX_ErrorNone != mOMX->getParameter(
++ mNode, OMX_IndexParamVideoPortFormat,
++ &portFormat, sizeof(portFormat))) {
++ break;
++ }
++ // Make sure that omx component does not overwrite
++ // the incremented index (bug 2897413).
++ CHECK_EQ(index, portFormat.nIndex);
++ if (portFormat.eColorFormat == colorFormat) {
++ CODEC_LOGV("Found supported color format: %d", portFormat.eColorFormat);
++ return OK; // colorFormat is supported!
++ }
++ ++index;
++ portFormat.nIndex = index;
++
++ if (index >= kMaxColorFormatSupported) {
++ CODEC_LOGE("More than %u color formats are supported???", index);
++ break;
++ }
++ }
++
++ CODEC_LOGE("color format %d is not supported", colorFormat);
++ return UNKNOWN_ERROR;
++}
++
++void OMXCodec::setVideoInputFormat(
++ const char *mime, const sp<MetaData>& meta) {
++
++ int32_t width, height, frameRate, bitRate, stride, sliceHeight;
++ bool success = meta->findInt32(kKeyWidth, &width);
++ success = success && meta->findInt32(kKeyHeight, &height);
++ success = success && meta->findInt32(kKeyFrameRate, &frameRate);
++ success = success && meta->findInt32(kKeyBitRate, &bitRate);
++ success = success && meta->findInt32(kKeyStride, &stride);
++ success = success && meta->findInt32(kKeySliceHeight, &sliceHeight);
++ CHECK(success);
++ CHECK(stride != 0);
++
++ OMX_VIDEO_CODINGTYPE compressionFormat = OMX_VIDEO_CodingUnused;
++ if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_AVC, mime)) {
++ compressionFormat = OMX_VIDEO_CodingAVC;
++ } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_HEVC, mime)) {
++ compressionFormat = OMX_VIDEO_CodingHEVC;
++ } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_MPEG4, mime) ||
++ !strcasecmp(MEDIA_MIMETYPE_VIDEO_MPEG4_DP, mime)) {
++ compressionFormat = OMX_VIDEO_CodingMPEG4;
++ } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_H263, mime)) {
++ compressionFormat = OMX_VIDEO_CodingH263;
++ } else {
++ ALOGE("Not a supported video mime type: %s", mime);
++ CHECK(!"Should not be here. Not a supported video mime type.");
++ }
++
++ OMX_COLOR_FORMATTYPE colorFormat;
++ CHECK_EQ((status_t)OK, findTargetColorFormat(meta, &colorFormat));
++
++ status_t err;
++ OMX_PARAM_PORTDEFINITIONTYPE def;
++ OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video;
++
++ //////////////////////// Input port /////////////////////////
++ CHECK_EQ(setVideoPortFormatType(
++ kPortIndexInput, OMX_VIDEO_CodingUnused,
++ colorFormat), (status_t)OK);
++
++ InitOMXParams(&def);
++ def.nPortIndex = kPortIndexInput;
++
++ err = mOMX->getParameter(
++ mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
++ CHECK_EQ(err, (status_t)OK);
++
++ def.nBufferSize = getFrameSize(colorFormat,
++ stride > 0? stride: -stride, sliceHeight);
++
++ CHECK_EQ((int)def.eDomain, (int)OMX_PortDomainVideo);
++
++ video_def->nFrameWidth = width;
++ video_def->nFrameHeight = height;
++ video_def->nStride = stride;
++ video_def->nSliceHeight = sliceHeight;
++ video_def->xFramerate = (frameRate << 16); // Q16 format
++ video_def->eCompressionFormat = OMX_VIDEO_CodingUnused;
++ video_def->eColorFormat = colorFormat;
++
++ err = mOMX->setParameter(
++ mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
++ CHECK_EQ(err, (status_t)OK);
++
++ //////////////////////// Output port /////////////////////////
++ CHECK_EQ(setVideoPortFormatType(
++ kPortIndexOutput, compressionFormat, OMX_COLOR_FormatUnused),
++ (status_t)OK);
++ InitOMXParams(&def);
++ def.nPortIndex = kPortIndexOutput;
++
++ err = mOMX->getParameter(
++ mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
++
++ CHECK_EQ(err, (status_t)OK);
++ CHECK_EQ((int)def.eDomain, (int)OMX_PortDomainVideo);
++
++ video_def->nFrameWidth = width;
++ video_def->nFrameHeight = height;
++ video_def->xFramerate = 0; // No need for output port
++ video_def->nBitrate = bitRate; // Q16 format
++ video_def->eCompressionFormat = compressionFormat;
++ video_def->eColorFormat = OMX_COLOR_FormatUnused;
++ if (mQuirks & kRequiresLargerEncoderOutputBuffer) {
++ // Increases the output buffer size
++ def.nBufferSize = ((def.nBufferSize * 3) >> 1);
++ }
++
++ err = mOMX->setParameter(
++ mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
++ CHECK_EQ(err, (status_t)OK);
++
++ /////////////////// Codec-specific ////////////////////////
++ switch (compressionFormat) {
++ case OMX_VIDEO_CodingMPEG4:
++ {
++ CHECK_EQ(setupMPEG4EncoderParameters(meta), (status_t)OK);
++ break;
++ }
++
++ case OMX_VIDEO_CodingH263:
++ CHECK_EQ(setupH263EncoderParameters(meta), (status_t)OK);
++ break;
++
++ case OMX_VIDEO_CodingAVC:
++ {
++ CHECK_EQ(setupAVCEncoderParameters(meta), (status_t)OK);
++ break;
++ }
++
++ default:
++ CHECK(!"Support for this compressionFormat to be implemented.");
++ break;
++ }
++}
++
++static OMX_U32 setPFramesSpacing(int32_t iFramesInterval, int32_t frameRate) {
++ if (iFramesInterval < 0) {
++ return 0xFFFFFFFF;
++ } else if (iFramesInterval == 0) {
++ return 0;
++ }
++ OMX_U32 ret = frameRate * iFramesInterval - 1;
++ return ret;
++}
++
++status_t OMXCodec::setupErrorCorrectionParameters() {
++ OMX_VIDEO_PARAM_ERRORCORRECTIONTYPE errorCorrectionType;
++ InitOMXParams(&errorCorrectionType);
++ errorCorrectionType.nPortIndex = kPortIndexOutput;
++
++ status_t err = mOMX->getParameter(
++ mNode, OMX_IndexParamVideoErrorCorrection,
++ &errorCorrectionType, sizeof(errorCorrectionType));
++ if (err != OK) {
++ ALOGW("Error correction param query is not supported");
++ return OK; // Optional feature. Ignore this failure
++ }
++
++ errorCorrectionType.bEnableHEC = OMX_FALSE;
++ errorCorrectionType.bEnableResync = OMX_TRUE;
++ errorCorrectionType.nResynchMarkerSpacing = 256;
++ errorCorrectionType.bEnableDataPartitioning = OMX_FALSE;
++ errorCorrectionType.bEnableRVLC = OMX_FALSE;
++
++ err = mOMX->setParameter(
++ mNode, OMX_IndexParamVideoErrorCorrection,
++ &errorCorrectionType, sizeof(errorCorrectionType));
++ if (err != OK) {
++ ALOGW("Error correction param configuration is not supported");
++ }
++
++ // Optional feature. Ignore the failure.
++ return OK;
++}
++
++status_t OMXCodec::setupBitRate(int32_t bitRate) {
++ OMX_VIDEO_PARAM_BITRATETYPE bitrateType;
++ InitOMXParams(&bitrateType);
++ bitrateType.nPortIndex = kPortIndexOutput;
++
++ status_t err = mOMX->getParameter(
++ mNode, OMX_IndexParamVideoBitrate,
++ &bitrateType, sizeof(bitrateType));
++ CHECK_EQ(err, (status_t)OK);
++
++ bitrateType.eControlRate = OMX_Video_ControlRateVariable;
++ bitrateType.nTargetBitrate = bitRate;
++
++ err = mOMX->setParameter(
++ mNode, OMX_IndexParamVideoBitrate,
++ &bitrateType, sizeof(bitrateType));
++ CHECK_EQ(err, (status_t)OK);
++ return OK;
++}
++
++status_t OMXCodec::getVideoProfileLevel(
++ const sp<MetaData>& meta,
++ const CodecProfileLevel& defaultProfileLevel,
++ CodecProfileLevel &profileLevel) {
++ CODEC_LOGV("Default profile: %u, level #x%x",
++ defaultProfileLevel.mProfile, defaultProfileLevel.mLevel);
++
++ // Are the default profile and level overwritten?
++ int32_t profile, level;
++ if (!meta->findInt32(kKeyVideoProfile, &profile)) {
++ profile = defaultProfileLevel.mProfile;
++ }
++ if (!meta->findInt32(kKeyVideoLevel, &level)) {
++ level = defaultProfileLevel.mLevel;
++ }
++ CODEC_LOGV("Target profile: %d, level: %d", profile, level);
++
++ // Are the target profile and level supported by the encoder?
++ OMX_VIDEO_PARAM_PROFILELEVELTYPE param;
++ InitOMXParams(&param);
++ param.nPortIndex = kPortIndexOutput;
++ for (param.nProfileIndex = 0;; ++param.nProfileIndex) {
++ status_t err = mOMX->getParameter(
++ mNode, OMX_IndexParamVideoProfileLevelQuerySupported,
++ &param, sizeof(param));
++
++ if (err != OK) break;
++
++ int32_t supportedProfile = static_cast<int32_t>(param.eProfile);
++ int32_t supportedLevel = static_cast<int32_t>(param.eLevel);
++ CODEC_LOGV("Supported profile: %d, level %d",
++ supportedProfile, supportedLevel);
++
++ if (profile == supportedProfile &&
++ level <= supportedLevel) {
++ // We can further check whether the level is a valid
++ // value; but we will leave that to the omx encoder component
++ // via OMX_SetParameter call.
++ profileLevel.mProfile = profile;
++ profileLevel.mLevel = level;
++ return OK;
++ }
++ }
++
++ CODEC_LOGE("Target profile (%d) and level (%d) is not supported",
++ profile, level);
++ return BAD_VALUE;
++}
++
++status_t OMXCodec::setupH263EncoderParameters(const sp<MetaData>& meta) {
++ int32_t iFramesInterval, frameRate, bitRate;
++ bool success = meta->findInt32(kKeyBitRate, &bitRate);
++ success = success && meta->findInt32(kKeyFrameRate, &frameRate);
++ success = success && meta->findInt32(kKeyIFramesInterval, &iFramesInterval);
++ CHECK(success);
++ OMX_VIDEO_PARAM_H263TYPE h263type;
++ InitOMXParams(&h263type);
++ h263type.nPortIndex = kPortIndexOutput;
++
++ status_t err = mOMX->getParameter(
++ mNode, OMX_IndexParamVideoH263, &h263type, sizeof(h263type));
++ CHECK_EQ(err, (status_t)OK);
++
++ h263type.nAllowedPictureTypes =
++ OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP;
++
++ h263type.nPFrames = setPFramesSpacing(iFramesInterval, frameRate);
++ if (h263type.nPFrames == 0) {
++ h263type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI;
++ }
++ h263type.nBFrames = 0;
++
++ // Check profile and level parameters
++ CodecProfileLevel defaultProfileLevel, profileLevel;
++ defaultProfileLevel.mProfile = h263type.eProfile;
++ defaultProfileLevel.mLevel = h263type.eLevel;
++ err = getVideoProfileLevel(meta, defaultProfileLevel, profileLevel);
++ if (err != OK) return err;
++ h263type.eProfile = static_cast<OMX_VIDEO_H263PROFILETYPE>(profileLevel.mProfile);
++ h263type.eLevel = static_cast<OMX_VIDEO_H263LEVELTYPE>(profileLevel.mLevel);
++
++ h263type.bPLUSPTYPEAllowed = OMX_FALSE;
++ h263type.bForceRoundingTypeToZero = OMX_FALSE;
++ h263type.nPictureHeaderRepetition = 0;
++ h263type.nGOBHeaderInterval = 0;
++
++ err = mOMX->setParameter(
++ mNode, OMX_IndexParamVideoH263, &h263type, sizeof(h263type));
++ CHECK_EQ(err, (status_t)OK);
++
++ CHECK_EQ(setupBitRate(bitRate), (status_t)OK);
++ CHECK_EQ(setupErrorCorrectionParameters(), (status_t)OK);
++
++ return OK;
++}
++
++status_t OMXCodec::setupMPEG4EncoderParameters(const sp<MetaData>& meta) {
++ int32_t iFramesInterval, frameRate, bitRate;
++ bool success = meta->findInt32(kKeyBitRate, &bitRate);
++ success = success && meta->findInt32(kKeyFrameRate, &frameRate);
++ success = success && meta->findInt32(kKeyIFramesInterval, &iFramesInterval);
++ CHECK(success);
++ OMX_VIDEO_PARAM_MPEG4TYPE mpeg4type;
++ InitOMXParams(&mpeg4type);
++ mpeg4type.nPortIndex = kPortIndexOutput;
++
++ status_t err = mOMX->getParameter(
++ mNode, OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type));
++ CHECK_EQ(err, (status_t)OK);
++
++ mpeg4type.nSliceHeaderSpacing = 0;
++ mpeg4type.bSVH = OMX_FALSE;
++ mpeg4type.bGov = OMX_FALSE;
++
++ mpeg4type.nAllowedPictureTypes =
++ OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP;
++
++ mpeg4type.nPFrames = setPFramesSpacing(iFramesInterval, frameRate);
++ if (mpeg4type.nPFrames == 0) {
++ mpeg4type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI;
++ }
++ mpeg4type.nBFrames = 0;
++ mpeg4type.nIDCVLCThreshold = 0;
++ mpeg4type.bACPred = OMX_TRUE;
++ mpeg4type.nMaxPacketSize = 256;
++ mpeg4type.nTimeIncRes = 1000;
++ mpeg4type.nHeaderExtension = 0;
++ mpeg4type.bReversibleVLC = OMX_FALSE;
++
++ // Check profile and level parameters
++ CodecProfileLevel defaultProfileLevel, profileLevel;
++ defaultProfileLevel.mProfile = mpeg4type.eProfile;
++ defaultProfileLevel.mLevel = mpeg4type.eLevel;
++ err = getVideoProfileLevel(meta, defaultProfileLevel, profileLevel);
++ if (err != OK) return err;
++ mpeg4type.eProfile = static_cast<OMX_VIDEO_MPEG4PROFILETYPE>(profileLevel.mProfile);
++ mpeg4type.eLevel = static_cast<OMX_VIDEO_MPEG4LEVELTYPE>(profileLevel.mLevel);
++
++ err = mOMX->setParameter(
++ mNode, OMX_IndexParamVideoMpeg4, &mpeg4type, sizeof(mpeg4type));
++ CHECK_EQ(err, (status_t)OK);
++
++ CHECK_EQ(setupBitRate(bitRate), (status_t)OK);
++ CHECK_EQ(setupErrorCorrectionParameters(), (status_t)OK);
++
++ return OK;
++}
++
++status_t OMXCodec::setupAVCEncoderParameters(const sp<MetaData>& meta) {
++ int32_t iFramesInterval, frameRate, bitRate;
++ bool success = meta->findInt32(kKeyBitRate, &bitRate);
++ success = success && meta->findInt32(kKeyFrameRate, &frameRate);
++ success = success && meta->findInt32(kKeyIFramesInterval, &iFramesInterval);
++ CHECK(success);
++
++ OMX_VIDEO_PARAM_AVCTYPE h264type;
++ InitOMXParams(&h264type);
++ h264type.nPortIndex = kPortIndexOutput;
++
++ status_t err = mOMX->getParameter(
++ mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type));
++ CHECK_EQ(err, (status_t)OK);
++
++ h264type.nAllowedPictureTypes =
++ OMX_VIDEO_PictureTypeI | OMX_VIDEO_PictureTypeP;
++
++ // Check profile and level parameters
++ CodecProfileLevel defaultProfileLevel, profileLevel;
++ defaultProfileLevel.mProfile = h264type.eProfile;
++ defaultProfileLevel.mLevel = h264type.eLevel;
++ err = getVideoProfileLevel(meta, defaultProfileLevel, profileLevel);
++ if (err != OK) return err;
++ h264type.eProfile = static_cast<OMX_VIDEO_AVCPROFILETYPE>(profileLevel.mProfile);
++ h264type.eLevel = static_cast<OMX_VIDEO_AVCLEVELTYPE>(profileLevel.mLevel);
++
++ // XXX
++ if (h264type.eProfile != OMX_VIDEO_AVCProfileBaseline) {
++ ALOGW("Use baseline profile instead of %d for AVC recording",
++ h264type.eProfile);
++ h264type.eProfile = OMX_VIDEO_AVCProfileBaseline;
++ }
++
++ if (h264type.eProfile == OMX_VIDEO_AVCProfileBaseline) {
++ h264type.nSliceHeaderSpacing = 0;
++ h264type.bUseHadamard = OMX_TRUE;
++ h264type.nRefFrames = 1;
++ h264type.nBFrames = 0;
++ h264type.nPFrames = setPFramesSpacing(iFramesInterval, frameRate);
++ if (h264type.nPFrames == 0) {
++ h264type.nAllowedPictureTypes = OMX_VIDEO_PictureTypeI;
++ }
++ h264type.nRefIdx10ActiveMinus1 = 0;
++ h264type.nRefIdx11ActiveMinus1 = 0;
++ h264type.bEntropyCodingCABAC = OMX_FALSE;
++ h264type.bWeightedPPrediction = OMX_FALSE;
++ h264type.bconstIpred = OMX_FALSE;
++ h264type.bDirect8x8Inference = OMX_FALSE;
++ h264type.bDirectSpatialTemporal = OMX_FALSE;
++ h264type.nCabacInitIdc = 0;
++ }
++
++ if (h264type.nBFrames != 0) {
++ h264type.nAllowedPictureTypes |= OMX_VIDEO_PictureTypeB;
++ }
++
++ h264type.bEnableUEP = OMX_FALSE;
++ h264type.bEnableFMO = OMX_FALSE;
++ h264type.bEnableASO = OMX_FALSE;
++ h264type.bEnableRS = OMX_FALSE;
++ h264type.bFrameMBsOnly = OMX_TRUE;
++ h264type.bMBAFF = OMX_FALSE;
++ h264type.eLoopFilterMode = OMX_VIDEO_AVCLoopFilterEnable;
++
++ err = mOMX->setParameter(
++ mNode, OMX_IndexParamVideoAvc, &h264type, sizeof(h264type));
++ CHECK_EQ(err, (status_t)OK);
++
++ CHECK_EQ(setupBitRate(bitRate), (status_t)OK);
++
++ return OK;
++}
++
++status_t OMXCodec::setVideoOutputFormat(
++ const char *mime, const sp<MetaData>& meta) {
++
++ int32_t width, height;
++ bool success = meta->findInt32(kKeyWidth, &width);
++ success = success && meta->findInt32(kKeyHeight, &height);
++ CHECK(success);
++
++ CODEC_LOGV("setVideoOutputFormat width=%d, height=%d", width, height);
++
++ OMX_VIDEO_CODINGTYPE compressionFormat = OMX_VIDEO_CodingUnused;
++ if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_AVC, mime)) {
++ compressionFormat = OMX_VIDEO_CodingAVC;
++ } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_MPEG4, mime) ||
++ !strcasecmp(MEDIA_MIMETYPE_VIDEO_MPEG4_DP, mime)) {
++ compressionFormat = OMX_VIDEO_CodingMPEG4;
++ } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_HEVC, mime)) {
++ compressionFormat = OMX_VIDEO_CodingHEVC;
++ } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_H263, mime)) {
++ compressionFormat = OMX_VIDEO_CodingH263;
++ } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_VP8, mime)) {
++ compressionFormat = OMX_VIDEO_CodingVP8;
++ } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_VP9, mime)) {
++ compressionFormat = OMX_VIDEO_CodingVP9;
++ } else if (!strcasecmp(MEDIA_MIMETYPE_VIDEO_MPEG2, mime)) {
++ compressionFormat = OMX_VIDEO_CodingMPEG2;
++ } else {
++ ALOGE("Not a supported video mime type: %s", mime);
++ CHECK(!"Should not be here. Not a supported video mime type.");
++ }
++
++ status_t err = setVideoPortFormatType(
++ kPortIndexInput, compressionFormat, OMX_COLOR_FormatUnused);
++
++ if (err != OK) {
++ return err;
++ }
++
++#if 1
++ {
++ OMX_VIDEO_PARAM_PORTFORMATTYPE format;
++ InitOMXParams(&format);
++ format.nPortIndex = kPortIndexOutput;
++ format.nIndex = 0;
++
++ status_t err = mOMX->getParameter(
++ mNode, OMX_IndexParamVideoPortFormat,
++ &format, sizeof(format));
++ CHECK_EQ(err, (status_t)OK);
++ CHECK_EQ((int)format.eCompressionFormat, (int)OMX_VIDEO_CodingUnused);
++
++ int32_t colorFormat;
++ if (meta->findInt32(kKeyColorFormat, &colorFormat)
++ && colorFormat != OMX_COLOR_FormatUnused
++ && colorFormat != format.eColorFormat) {
++
++ while (OMX_ErrorNoMore != err) {
++                format.nIndex++;
++ err = mOMX->getParameter(
++ mNode, OMX_IndexParamVideoPortFormat,
++ &format, sizeof(format));
++ if (format.eColorFormat == colorFormat) {
++ break;
++ }
++ }
++ if (format.eColorFormat != colorFormat) {
++ CODEC_LOGE("Color format %d is not supported", colorFormat);
++ return ERROR_UNSUPPORTED;
++ }
++ }
++
++ err = mOMX->setParameter(
++ mNode, OMX_IndexParamVideoPortFormat,
++ &format, sizeof(format));
++
++ if (err != OK) {
++ return err;
++ }
++ }
++#endif
++
++ OMX_PARAM_PORTDEFINITIONTYPE def;
++ InitOMXParams(&def);
++ def.nPortIndex = kPortIndexInput;
++
++ OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video;
++
++ err = mOMX->getParameter(
++ mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
++
++ CHECK_EQ(err, (status_t)OK);
++
++#if 1
++ // XXX Need a (much) better heuristic to compute input buffer sizes.
++ const size_t X = 64 * 1024;
++ if (def.nBufferSize < X) {
++ def.nBufferSize = X;
++ }
++#endif
++
++ CHECK_EQ((int)def.eDomain, (int)OMX_PortDomainVideo);
++
++ video_def->nFrameWidth = width;
++ video_def->nFrameHeight = height;
++
++ video_def->eCompressionFormat = compressionFormat;
++ video_def->eColorFormat = OMX_COLOR_FormatUnused;
++
++ err = mOMX->setParameter(
++ mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
++
++ if (err != OK) {
++ return err;
++ }
++
++ ////////////////////////////////////////////////////////////////////////////
++
++ InitOMXParams(&def);
++ def.nPortIndex = kPortIndexOutput;
++
++ err = mOMX->getParameter(
++ mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
++ CHECK_EQ(err, (status_t)OK);
++ CHECK_EQ((int)def.eDomain, (int)OMX_PortDomainVideo);
++
++#if 0
++ def.nBufferSize =
++        (((width + 15) & -16) * ((height + 15) & -16) * 3) / 2; // YUV420
++#endif
++
++ video_def->nFrameWidth = width;
++ video_def->nFrameHeight = height;
++
++ err = mOMX->setParameter(
++ mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
++
++ return err;
++}
++
++OMXCodec::OMXCodec(
++ const sp<IOMX> &omx, IOMX::node_id node,
++ uint32_t quirks, uint32_t flags,
++ bool isEncoder,
++ const char *mime,
++ const char *componentName,
++ const sp<MediaSource> &source,
++ const sp<ANativeWindow> &nativeWindow)
++ : mOMX(omx),
++ mOMXLivesLocally(omx->livesLocally(node, getpid())),
++ mNode(node),
++ mQuirks(quirks),
++ mFlags(flags),
++ mIsEncoder(isEncoder),
++ mIsVideo(!strncasecmp("video/", mime, 6)),
++ mMIME(strdup(mime)),
++ mComponentName(strdup(componentName)),
++ mSource(source),
++ mCodecSpecificDataIndex(0),
++ mState(LOADED),
++ mInitialBufferSubmit(true),
++ mSignalledEOS(false),
++ mNoMoreOutputData(false),
++ mOutputPortSettingsHaveChanged(false),
++ mSeekTimeUs(-1),
++ mSeekMode(ReadOptions::SEEK_CLOSEST_SYNC),
++ mTargetTimeUs(-1),
++ mOutputPortSettingsChangedPending(false),
++ mSkipCutBuffer(NULL),
++ mLeftOverBuffer(NULL),
++ mPaused(false),
++ mNativeWindow(
++ (!strncmp(componentName, "OMX.google.", 11))
++ ? NULL : nativeWindow) {
++ mPortStatus[kPortIndexInput] = ENABLED;
++ mPortStatus[kPortIndexOutput] = ENABLED;
++
++ setComponentRole();
++}
++
++// static
++void OMXCodec::setComponentRole(
++ const sp<IOMX> &omx, IOMX::node_id node, bool isEncoder,
++ const char *mime) {
++ struct MimeToRole {
++ const char *mime;
++ const char *decoderRole;
++ const char *encoderRole;
++ };
++
++ static const MimeToRole kMimeToRole[] = {
++ { MEDIA_MIMETYPE_AUDIO_MPEG,
++ "audio_decoder.mp3", "audio_encoder.mp3" },
++ { MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_I,
++ "audio_decoder.mp1", "audio_encoder.mp1" },
++ { MEDIA_MIMETYPE_AUDIO_MPEG_LAYER_II,
++ "audio_decoder.mp2", "audio_encoder.mp2" },
++ { MEDIA_MIMETYPE_AUDIO_AMR_NB,
++ "audio_decoder.amrnb", "audio_encoder.amrnb" },
++ { MEDIA_MIMETYPE_AUDIO_AMR_WB,
++ "audio_decoder.amrwb", "audio_encoder.amrwb" },
++ { MEDIA_MIMETYPE_AUDIO_AAC,
++ "audio_decoder.aac", "audio_encoder.aac" },
++ { MEDIA_MIMETYPE_AUDIO_VORBIS,
++ "audio_decoder.vorbis", "audio_encoder.vorbis" },
++ { MEDIA_MIMETYPE_AUDIO_OPUS,
++ "audio_decoder.opus", "audio_encoder.opus" },
++ { MEDIA_MIMETYPE_AUDIO_G711_MLAW,
++ "audio_decoder.g711mlaw", "audio_encoder.g711mlaw" },
++ { MEDIA_MIMETYPE_AUDIO_G711_ALAW,
++ "audio_decoder.g711alaw", "audio_encoder.g711alaw" },
++ { MEDIA_MIMETYPE_VIDEO_AVC,
++ "video_decoder.avc", "video_encoder.avc" },
++ { MEDIA_MIMETYPE_VIDEO_HEVC,
++ "video_decoder.hevc", "video_encoder.hevc" },
++ { MEDIA_MIMETYPE_VIDEO_MPEG4,
++ "video_decoder.mpeg4", "video_encoder.mpeg4" },
++ { MEDIA_MIMETYPE_VIDEO_MPEG4_DP,
++ "video_decoder.mpeg4", NULL },
++ { MEDIA_MIMETYPE_VIDEO_H263,
++ "video_decoder.h263", "video_encoder.h263" },
++ { MEDIA_MIMETYPE_VIDEO_VP8,
++ "video_decoder.vp8", "video_encoder.vp8" },
++ { MEDIA_MIMETYPE_VIDEO_VP9,
++ "video_decoder.vp9", "video_encoder.vp9" },
++ { MEDIA_MIMETYPE_AUDIO_RAW,
++ "audio_decoder.raw", "audio_encoder.raw" },
++ { MEDIA_MIMETYPE_AUDIO_FLAC,
++ "audio_decoder.flac", "audio_encoder.flac" },
++ { MEDIA_MIMETYPE_AUDIO_MSGSM,
++ "audio_decoder.gsm", "audio_encoder.gsm" },
++ { MEDIA_MIMETYPE_VIDEO_MPEG2,
++ "video_decoder.mpeg2", "video_encoder.mpeg2" },
++ { MEDIA_MIMETYPE_AUDIO_AC3,
++ "audio_decoder.ac3", "audio_encoder.ac3" },
++ };
++
++ static const size_t kNumMimeToRole =
++ sizeof(kMimeToRole) / sizeof(kMimeToRole[0]);
++
++ size_t i;
++    for (i = 0; i < kNumMimeToRole; ++i) {
++ if (!strcasecmp(mime, kMimeToRole[i].mime)) {
++ break;
++ }
++ }
++
++ if (i == kNumMimeToRole) {
++ return;
++ }
++
++ const char *role =
++ isEncoder ? kMimeToRole[i].encoderRole
++ : kMimeToRole[i].decoderRole;
++
++ if (role != NULL) {
++ OMX_PARAM_COMPONENTROLETYPE roleParams;
++ InitOMXParams(&roleParams);
++
++ strncpy((char *)roleParams.cRole,
++ role, OMX_MAX_STRINGNAME_SIZE - 1);
++
++ roleParams.cRole[OMX_MAX_STRINGNAME_SIZE - 1] = '\0';
++
++ status_t err = omx->setParameter(
++ node, OMX_IndexParamStandardComponentRole,
++ &roleParams, sizeof(roleParams));
++
++ if (err != OK) {
++ ALOGW("Failed to set standard component role '%s'.", role);
++ }
++ }
++}
++
++void OMXCodec::setComponentRole() {
++ setComponentRole(mOMX, mNode, mIsEncoder, mMIME);
++}
++
++OMXCodec::~OMXCodec() {
++ mSource.clear();
++
++ CHECK(mState == LOADED || mState == ERROR || mState == LOADED_TO_IDLE);
++
++ status_t err = mOMX->freeNode(mNode);
++ CHECK_EQ(err, (status_t)OK);
++
++ mNode = 0;
++ setState(DEAD);
++
++ clearCodecSpecificData();
++
++ free(mComponentName);
++ mComponentName = NULL;
++
++ free(mMIME);
++ mMIME = NULL;
++}
++
++status_t OMXCodec::init() {
++ // mLock is held.
++
++ CHECK_EQ((int)mState, (int)LOADED);
++
++ status_t err;
++ if (!(mQuirks & kRequiresLoadedToIdleAfterAllocation)) {
++ err = mOMX->sendCommand(mNode, OMX_CommandStateSet, OMX_StateIdle);
++ CHECK_EQ(err, (status_t)OK);
++ setState(LOADED_TO_IDLE);
++ }
++
++ err = allocateBuffers();
++ if (err != (status_t)OK) {
++ return err;
++ }
++
++ if (mQuirks & kRequiresLoadedToIdleAfterAllocation) {
++ err = mOMX->sendCommand(mNode, OMX_CommandStateSet, OMX_StateIdle);
++ CHECK_EQ(err, (status_t)OK);
++
++ setState(LOADED_TO_IDLE);
++ }
++
++ while (mState != EXECUTING && mState != ERROR) {
++ mAsyncCompletion.wait(mLock);
++ }
++
++ return mState == ERROR ? UNKNOWN_ERROR : OK;
++}
++
++// static
++bool OMXCodec::isIntermediateState(State state) {
++ return state == LOADED_TO_IDLE
++ || state == IDLE_TO_EXECUTING
++ || state == EXECUTING_TO_IDLE
++ || state == PAUSING
++ || state == FLUSHING
++ || state == IDLE_TO_LOADED
++ || state == RECONFIGURING;
++}
++
++status_t OMXCodec::allocateBuffers() {
++ status_t err = allocateBuffersOnPort(kPortIndexInput);
++
++ if (err != OK) {
++ return err;
++ }
++
++ return allocateBuffersOnPort(kPortIndexOutput);
++}
++
++status_t OMXCodec::allocateBuffersOnPort(OMX_U32 portIndex) {
++ if (mNativeWindow != NULL && portIndex == kPortIndexOutput) {
++ return allocateOutputBuffersFromNativeWindow();
++ }
++
++ if ((mFlags & kEnableGrallocUsageProtected) && portIndex == kPortIndexOutput) {
++        ALOGE("protected output buffers must be sent to an ANativeWindow");
++ return PERMISSION_DENIED;
++ }
++
++ status_t err = OK;
++ if ((mFlags & kStoreMetaDataInVideoBuffers)
++ && portIndex == kPortIndexInput) {
++ err = mOMX->storeMetaDataInBuffers(mNode, kPortIndexInput, OMX_TRUE);
++ if (err != OK) {
++ ALOGE("Storing meta data in video buffers is not supported");
++ return err;
++ }
++ }
++
++ OMX_PARAM_PORTDEFINITIONTYPE def;
++ InitOMXParams(&def);
++ def.nPortIndex = portIndex;
++
++ err = mOMX->getParameter(
++ mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
++
++ if (err != OK) {
++ return err;
++ }
++
++ CODEC_LOGV("allocating %u buffers of size %u on %s port",
++ def.nBufferCountActual, def.nBufferSize,
++ portIndex == kPortIndexInput ? "input" : "output");
++
++ if (def.nBufferSize != 0 && def.nBufferCountActual > SIZE_MAX / def.nBufferSize) {
++ return BAD_VALUE;
++ }
++ size_t totalSize = def.nBufferCountActual * def.nBufferSize;
++ mDealer[portIndex] = new MemoryDealer(totalSize, "OMXCodec");
++
++    for (OMX_U32 i = 0; i < def.nBufferCountActual; ++i) {
++ sp<IMemory> mem = mDealer[portIndex]->allocate(def.nBufferSize);
++ if (mem == NULL || mem->pointer() == NULL) {
++ return NO_MEMORY;
++ }
++
++ BufferInfo info;
++ info.mData = NULL;
++ info.mSize = def.nBufferSize;
++
++ IOMX::buffer_id buffer;
++ if (portIndex == kPortIndexInput
++ && ((mQuirks & kRequiresAllocateBufferOnInputPorts)
++ || (mFlags & kUseSecureInputBuffers))) {
++ if (mOMXLivesLocally) {
++ mem.clear();
++
++ err = mOMX->allocateBuffer(
++ mNode, portIndex, def.nBufferSize, &buffer,
++ &info.mData);
++ } else {
++ err = mOMX->allocateBufferWithBackup(
++ mNode, portIndex, mem, &buffer, mem->size());
++ }
++ } else if (portIndex == kPortIndexOutput
++ && (mQuirks & kRequiresAllocateBufferOnOutputPorts)) {
++ if (mOMXLivesLocally) {
++ mem.clear();
++
++ err = mOMX->allocateBuffer(
++ mNode, portIndex, def.nBufferSize, &buffer,
++ &info.mData);
++ } else {
++ err = mOMX->allocateBufferWithBackup(
++ mNode, portIndex, mem, &buffer, mem->size());
++ }
++ } else {
++ err = mOMX->useBuffer(mNode, portIndex, mem, &buffer, mem->size());
++ }
++
++ if (err != OK) {
++ ALOGE("allocate_buffer_with_backup failed");
++ return err;
++ }
++
++ if (mem != NULL) {
++ info.mData = mem->pointer();
++ }
++
++ info.mBuffer = buffer;
++ info.mStatus = OWNED_BY_US;
++ info.mMem = mem;
++ info.mMediaBuffer = NULL;
++
++ if (portIndex == kPortIndexOutput) {
++ // Fail deferred MediaBuffer creation until FILL_BUFFER_DONE;
++ // this legacy mode is no longer supported.
++ LOG_ALWAYS_FATAL_IF((mOMXLivesLocally
++ && (mQuirks & kRequiresAllocateBufferOnOutputPorts)
++ && (mQuirks & kDefersOutputBufferAllocation)),
++ "allocateBuffersOnPort cannot defer buffer allocation");
++
++ info.mMediaBuffer = new MediaBuffer(info.mData, info.mSize);
++ info.mMediaBuffer->setObserver(this);
++ }
++
++ mPortBuffers[portIndex].push(info);
++
++ CODEC_LOGV("allocated buffer %u on %s port", buffer,
++ portIndex == kPortIndexInput ? "input" : "output");
++ }
++
++ if (portIndex == kPortIndexOutput) {
++
++ sp<MetaData> meta = mSource->getFormat();
++ int32_t delay = 0;
++ if (!meta->findInt32(kKeyEncoderDelay, &delay)) {
++ delay = 0;
++ }
++ int32_t padding = 0;
++ if (!meta->findInt32(kKeyEncoderPadding, &padding)) {
++ padding = 0;
++ }
++ int32_t numchannels = 0;
++        if (delay + padding) {
++ if (mOutputFormat->findInt32(kKeyChannelCount, &numchannels)) {
++ if (mSkipCutBuffer != NULL) {
++ size_t prevbuffersize = mSkipCutBuffer->size();
++ if (prevbuffersize != 0) {
++ ALOGW("Replacing SkipCutBuffer holding %zu bytes", prevbuffersize);
++ }
++ }
++ mSkipCutBuffer = new SkipCutBuffer(delay, padding, numchannels);
++ }
++ }
++ }
++
++ // dumpPortStatus(portIndex);
++
++ if (portIndex == kPortIndexInput && (mFlags & kUseSecureInputBuffers)) {
++ Vector<MediaBuffer *> buffers;
++        for (size_t i = 0; i < def.nBufferCountActual; ++i) {
++ const BufferInfo &info = mPortBuffers[kPortIndexInput].itemAt(i);
++
++ MediaBuffer *mbuf = new MediaBuffer(info.mData, info.mSize);
++ buffers.push(mbuf);
++ }
++
++ status_t err = mSource->setBuffers(buffers);
++
++ if (err != OK) {
++            for (size_t i = 0; i < def.nBufferCountActual; ++i) {
++ buffers.editItemAt(i)->release();
++ }
++ buffers.clear();
++
++ CODEC_LOGE(
++ "Codec requested to use secure input buffers but "
++ "upstream source didn't support that.");
++
++ return err;
++ }
++ }
++
++ return OK;
++}
++
++status_t OMXCodec::allocateOutputBuffersFromNativeWindow() {
++ // Get the number of buffers needed.
++ OMX_PARAM_PORTDEFINITIONTYPE def;
++ InitOMXParams(&def);
++ def.nPortIndex = kPortIndexOutput;
++
++ status_t err = mOMX->getParameter(
++ mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
++ if (err != OK) {
++ CODEC_LOGE("getParameter failed: %d", err);
++ return err;
++ }
++
++ sp<MetaData> meta = mSource->getFormat();
++
++ int32_t rotationDegrees;
++ if (!meta->findInt32(kKeyRotation, &rotationDegrees)) {
++ rotationDegrees = 0;
++ }
++
++ // Set up the native window.
++ OMX_U32 usage = 0;
++ err = mOMX->getGraphicBufferUsage(mNode, kPortIndexOutput, &usage);
++ if (err != 0) {
++ ALOGW("querying usage flags from OMX IL component failed: %d", err);
++ // XXX: Currently this error is logged, but not fatal.
++ usage = 0;
++ }
++
++ if (mFlags & kEnableGrallocUsageProtected) {
++ usage |= GRALLOC_USAGE_PROTECTED;
++#ifdef GRALLOC_USAGE_PRIVATE_NONSECURE
++ if (!(mFlags & kUseSecureInputBuffers))
++ usage |= GRALLOC_USAGE_PRIVATE_NONSECURE;
++#endif
++ }
++
++ err = setNativeWindowSizeFormatAndUsage(
++ mNativeWindow.get(),
++ def.format.video.nFrameWidth,
++ def.format.video.nFrameHeight,
++ def.format.video.eColorFormat,
++ rotationDegrees,
++ usage | GRALLOC_USAGE_HW_TEXTURE | GRALLOC_USAGE_EXTERNAL_DISP);
++ if (err != 0) {
++ return err;
++ }
++
++ int minUndequeuedBufs = 0;
++ err = mNativeWindow->query(mNativeWindow.get(),
++ NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS, &minUndequeuedBufs);
++ if (err != 0) {
++ ALOGE("NATIVE_WINDOW_MIN_UNDEQUEUED_BUFFERS query failed: %s (%d)",
++ strerror(-err), -err);
++ return err;
++ }
++ // FIXME: assume that surface is controlled by app (native window
++ // returns the number for the case when surface is not controlled by app)
++    // FIXME2: This means that minUndequeuedBufs can be 1 larger than reported
++ // For now, try to allocate 1 more buffer, but don't fail if unsuccessful
++
++ // Use conservative allocation while also trying to reduce starvation
++ //
++    // 1. allocate at least nBufferCountMin + minUndequeuedBuffers - that is the
++ // minimum needed for the consumer to be able to work
++ // 2. try to allocate two (2) additional buffers to reduce starvation from
++ // the consumer
++ // plus an extra buffer to account for incorrect minUndequeuedBufs
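++    //
++    // Illustrative example (numbers are hypothetical, not from any specific
++    // device): if the component reports nBufferCountMin = 4 and the native
++    // window reports minUndequeuedBufs = 2, the loop below first requests
++    // 4 + 2 + 3 = 9 buffers and then backs off one extra buffer at a time
++    // whenever setParameter rejects the count.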
++    CODEC_LOGI("OMX-buffers: min=%u actual=%u undeq=%d+1",
++ def.nBufferCountMin, def.nBufferCountActual, minUndequeuedBufs);
++#ifdef BOARD_CANT_REALLOCATE_OMX_BUFFERS
++ // Some devices don't like to set OMX_IndexParamPortDefinition at this
++ // point (even with an unmodified def), so skip it if possible.
++ // This check was present in KitKat.
++    if (def.nBufferCountActual < def.nBufferCountMin + minUndequeuedBufs) {
++#endif
++    for (OMX_U32 extraBuffers = 2 + 1; /* condition inside loop */; extraBuffers--) {
++ OMX_U32 newBufferCount =
++            def.nBufferCountMin + minUndequeuedBufs + extraBuffers;
++ def.nBufferCountActual = newBufferCount;
++ err = mOMX->setParameter(
++ mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
++
++ if (err == OK) {
++            minUndequeuedBufs += extraBuffers;
++ break;
++ }
++
++ CODEC_LOGW("setting nBufferCountActual to %u failed: %d",
++ newBufferCount, err);
++ /* exit condition */
++ if (extraBuffers == 0) {
++ return err;
++ }
++ }
++    CODEC_LOGI("OMX-buffers: min=%u actual=%u undeq=%d+1",
++ def.nBufferCountMin, def.nBufferCountActual, minUndequeuedBufs);
++#ifdef BOARD_CANT_REALLOCATE_OMX_BUFFERS
++ }
++#endif
++
++ err = native_window_set_buffer_count(
++ mNativeWindow.get(), def.nBufferCountActual);
++ if (err != 0) {
++ ALOGE("native_window_set_buffer_count failed: %s (%d)", strerror(-err),
++ -err);
++ return err;
++ }
++
++ CODEC_LOGV("allocating %u buffers from a native window of size %u on "
++ "output port", def.nBufferCountActual, def.nBufferSize);
++
++ // Dequeue buffers and send them to OMX
++    for (OMX_U32 i = 0; i < def.nBufferCountActual; ++i) {
++ ANativeWindowBuffer* buf;
++ err = native_window_dequeue_buffer_and_wait(mNativeWindow.get(), &buf);
++ if (err != 0) {
++ ALOGE("dequeueBuffer failed: %s (%d)", strerror(-err), -err);
++ break;
++ }
++
++ sp<GraphicBuffer> graphicBuffer(new GraphicBuffer(buf, false));
++ BufferInfo info;
++ info.mData = NULL;
++ info.mSize = def.nBufferSize;
++ info.mStatus = OWNED_BY_US;
++ info.mMem = NULL;
++ info.mMediaBuffer = new MediaBuffer(graphicBuffer);
++ info.mMediaBuffer->setObserver(this);
++ mPortBuffers[kPortIndexOutput].push(info);
++
++ IOMX::buffer_id bufferId;
++ err = mOMX->useGraphicBuffer(mNode, kPortIndexOutput, graphicBuffer,
++ &bufferId);
++ if (err != 0) {
++ CODEC_LOGE("registering GraphicBuffer with OMX IL component "
++ "failed: %d", err);
++ break;
++ }
++
++ mPortBuffers[kPortIndexOutput].editItemAt(i).mBuffer = bufferId;
++
++ CODEC_LOGV("registered graphic buffer with ID %u (pointer = %p)",
++ bufferId, graphicBuffer.get());
++ }
++
++ OMX_U32 cancelStart;
++ OMX_U32 cancelEnd;
++ if (err != 0) {
++ // If an error occurred while dequeuing we need to cancel any buffers
++ // that were dequeued.
++ cancelStart = 0;
++ cancelEnd = mPortBuffers[kPortIndexOutput].size();
++ } else {
++ // Return the last two buffers to the native window.
++ cancelStart = def.nBufferCountActual - minUndequeuedBufs;
++ cancelEnd = def.nBufferCountActual;
++ }
++
++    for (OMX_U32 i = cancelStart; i < cancelEnd; ++i) {
++ BufferInfo *info = &mPortBuffers[kPortIndexOutput].editItemAt(i);
++ cancelBufferToNativeWindow(info);
++ }
++
++ return err;
++}
++
++status_t OMXCodec::cancelBufferToNativeWindow(BufferInfo *info) {
++ CHECK_EQ((int)info->mStatus, (int)OWNED_BY_US);
++ CODEC_LOGV("Calling cancelBuffer on buffer %u", info->mBuffer);
++ int err = mNativeWindow->cancelBuffer(
++ mNativeWindow.get(), info->mMediaBuffer->graphicBuffer().get(), -1);
++ if (err != 0) {
++ CODEC_LOGE("cancelBuffer failed w/ error 0x%08x", err);
++
++ setState(ERROR);
++ return err;
++ }
++ info->mStatus = OWNED_BY_NATIVE_WINDOW;
++ return OK;
++}
++
++OMXCodec::BufferInfo* OMXCodec::dequeueBufferFromNativeWindow() {
++ // Dequeue the next buffer from the native window.
++ ANativeWindowBuffer* buf;
++ int err = native_window_dequeue_buffer_and_wait(mNativeWindow.get(), &buf);
++ if (err != 0) {
++ CODEC_LOGE("dequeueBuffer failed w/ error 0x%08x", err);
++
++ setState(ERROR);
++ return 0;
++ }
++
++ // Determine which buffer we just dequeued.
++ Vector<BufferInfo> *buffers = &mPortBuffers[kPortIndexOutput];
++ BufferInfo *bufInfo = 0;
++    for (size_t i = 0; i < buffers->size(); ++i) {
++ sp<GraphicBuffer> graphicBuffer = buffers->itemAt(i).
++ mMediaBuffer->graphicBuffer();
++ if (graphicBuffer->handle == buf->handle) {
++ bufInfo = &buffers->editItemAt(i);
++ break;
++ }
++ }
++
++ if (bufInfo == 0) {
++ CODEC_LOGE("dequeued unrecognized buffer: %p", buf);
++
++ setState(ERROR);
++ return 0;
++ }
++
++ // The native window no longer owns the buffer.
++ CHECK_EQ((int)bufInfo->mStatus, (int)OWNED_BY_NATIVE_WINDOW);
++ bufInfo->mStatus = OWNED_BY_US;
++
++ return bufInfo;
++}
++
++int64_t OMXCodec::getDecodingTimeUs() {
++ CHECK(mIsEncoder && mIsVideo);
++
++ if (mDecodingTimeList.empty()) {
++ CHECK(mSignalledEOS || mNoMoreOutputData);
++ // No corresponding input frame available.
++ // This could happen when EOS is reached.
++ return 0;
++ }
++
++ List<int64_t>::iterator it = mDecodingTimeList.begin();
++ int64_t timeUs = *it;
++ mDecodingTimeList.erase(it);
++ return timeUs;
++}
++
++void OMXCodec::on_message(const omx_message &msg) {
++ if (mState == ERROR) {
++ /*
++ * only drop EVENT messages, EBD and FBD are still
++ * processed for bookkeeping purposes
++ */
++ if (msg.type == omx_message::EVENT) {
++ ALOGW("Dropping OMX EVENT message - we're in ERROR state.");
++ return;
++ }
++ }
++
++ switch (msg.type) {
++ case omx_message::EVENT:
++ {
++ onEvent(
++ msg.u.event_data.event, msg.u.event_data.data1,
++                        "Codec's input buffers are too small to accommodate "
++
++ break;
++ }
++
++ case omx_message::EMPTY_BUFFER_DONE:
++ {
++ IOMX::buffer_id buffer = msg.u.extended_buffer_data.buffer;
++
++ CODEC_LOGV("EMPTY_BUFFER_DONE(buffer: %u)", buffer);
++
++ Vector<BufferInfo> *buffers = &mPortBuffers[kPortIndexInput];
++ size_t i = 0;
++ while (i < buffers->size() && (*buffers)[i].mBuffer != buffer) {
++                ++i;
++ }
++
++ CHECK(i < buffers->size());
++ if ((*buffers)[i].mStatus != OWNED_BY_COMPONENT) {
++ ALOGW("We already own input buffer %u, yet received "
++ "an EMPTY_BUFFER_DONE.", buffer);
++ }
++
++ BufferInfo* info = &buffers->editItemAt(i);
++ info->mStatus = OWNED_BY_US;
++
++ // Buffer could not be released until empty buffer done is called.
++ if (info->mMediaBuffer != NULL) {
++ info->mMediaBuffer->release();
++ info->mMediaBuffer = NULL;
++ }
++
++ if (mPortStatus[kPortIndexInput] == DISABLING) {
++ CODEC_LOGV("Port is disabled, freeing buffer %u", buffer);
++
++ status_t err = freeBuffer(kPortIndexInput, i);
++ CHECK_EQ(err, (status_t)OK);
++ } else if (mState != ERROR
++ && mPortStatus[kPortIndexInput] != SHUTTING_DOWN) {
++ CHECK_EQ((int)mPortStatus[kPortIndexInput], (int)ENABLED);
++
++ if (mFlags & kUseSecureInputBuffers) {
++ drainAnyInputBuffer();
++ } else {
++ drainInputBuffer(&buffers->editItemAt(i));
++ }
++ }
++ break;
++ }
++
++ case omx_message::FILL_BUFFER_DONE:
++ {
++ IOMX::buffer_id buffer = msg.u.extended_buffer_data.buffer;
++ OMX_U32 flags = msg.u.extended_buffer_data.flags;
++
++ CODEC_LOGV("FILL_BUFFER_DONE(buffer: %u, size: %u, flags: 0x%08x, timestamp: %lld us (%.2f secs))",
++ buffer,
++ msg.u.extended_buffer_data.range_length,
++ flags,
++ msg.u.extended_buffer_data.timestamp,
++ msg.u.extended_buffer_data.timestamp / 1E6);
++
++ Vector<BufferInfo> *buffers = &mPortBuffers[kPortIndexOutput];
++ size_t i = 0;
++ while (i < buffers->size() && (*buffers)[i].mBuffer != buffer) {
++                ++i;
++ }
++
++ CHECK(i < buffers->size());
++ BufferInfo *info = &buffers->editItemAt(i);
++
++ if (info->mStatus != OWNED_BY_COMPONENT) {
++ ALOGW("We already own output buffer %u, yet received "
++ "a FILL_BUFFER_DONE.", buffer);
++ }
++
++ info->mStatus = OWNED_BY_US;
++
++ if (mPortStatus[kPortIndexOutput] == DISABLING) {
++ CODEC_LOGV("Port is disabled, freeing buffer %u", buffer);
++
++ status_t err = freeBuffer(kPortIndexOutput, i);
++ CHECK_EQ(err, (status_t)OK);
++
++#if 0
++ } else if (mPortStatus[kPortIndexOutput] == ENABLED
++ && (flags & OMX_BUFFERFLAG_EOS)) {
++ CODEC_LOGV("No more output data.");
++ mNoMoreOutputData = true;
++ mBufferFilled.signal();
++#endif
++ } else if (mPortStatus[kPortIndexOutput] != SHUTTING_DOWN) {
++ CHECK_EQ((int)mPortStatus[kPortIndexOutput], (int)ENABLED);
++
++ MediaBuffer *buffer = info->mMediaBuffer;
++ bool isGraphicBuffer = buffer->graphicBuffer() != NULL;
++
++ if (!isGraphicBuffer
++ && msg.u.extended_buffer_data.range_offset
++                        + msg.u.extended_buffer_data.range_length
++ > buffer->size()) {
++ CODEC_LOGE(
++ "Codec lied about its buffer size requirements, "
++ "sending a buffer larger than the originally "
++ "advertised size in FILL_BUFFER_DONE!");
++ }
++ buffer->set_range(
++ msg.u.extended_buffer_data.range_offset,
++ msg.u.extended_buffer_data.range_length);
++
++ buffer->meta_data()->clear();
++
++ buffer->meta_data()->setInt64(
++ kKeyTime, msg.u.extended_buffer_data.timestamp);
++
++ if (msg.u.extended_buffer_data.flags & OMX_BUFFERFLAG_SYNCFRAME) {
++ buffer->meta_data()->setInt32(kKeyIsSyncFrame, true);
++ }
++ bool isCodecSpecific = false;
++ if (msg.u.extended_buffer_data.flags & OMX_BUFFERFLAG_CODECCONFIG) {
++ buffer->meta_data()->setInt32(kKeyIsCodecConfig, true);
++ isCodecSpecific = true;
++ }
++
++ if (isGraphicBuffer || mQuirks & kOutputBuffersAreUnreadable) {
++ buffer->meta_data()->setInt32(kKeyIsUnreadable, true);
++ }
++
++ buffer->meta_data()->setInt32(
++ kKeyBufferID,
++ msg.u.extended_buffer_data.buffer);
++
++ if (msg.u.extended_buffer_data.flags & OMX_BUFFERFLAG_EOS) {
++ CODEC_LOGV("No more output data.");
++ mNoMoreOutputData = true;
++ }
++
++ if (mIsEncoder && mIsVideo) {
++ int64_t decodingTimeUs = isCodecSpecific? 0: getDecodingTimeUs();
++ buffer->meta_data()->setInt64(kKeyDecodingTime, decodingTimeUs);
++ }
++
++ if (mTargetTimeUs >= 0) {
++ CHECK(msg.u.extended_buffer_data.timestamp <= mTargetTimeUs);
++
++ if (msg.u.extended_buffer_data.timestamp < mTargetTimeUs) {
++ CODEC_LOGV(
++ "skipping output buffer at timestamp %lld us",
++ msg.u.extended_buffer_data.timestamp);
++
++ fillOutputBuffer(info);
++ break;
++ }
++
++ CODEC_LOGV(
++ "returning output buffer at target timestamp "
++ "%lld us",
++ msg.u.extended_buffer_data.timestamp);
++
++ mTargetTimeUs = -1;
++ }
++
++ mFilledBuffers.push_back(i);
++ mBufferFilled.signal();
++ if (mIsEncoder) {
++ sched_yield();
++ }
++ }
++
++ break;
++ }
++
++ default:
++ {
++ CHECK(!"should not be here.");
++ break;
++ }
++ }
++}
++
++// Has the format changed in any way that the client would have to be aware of?
++static bool formatHasNotablyChanged(
++ const sp<MetaData> &from, const sp<MetaData> &to) {
++ if (from.get() == NULL && to.get() == NULL) {
++ return false;
++ }
++
++ if ((from.get() == NULL && to.get() != NULL)
++ || (from.get() != NULL && to.get() == NULL)) {
++ return true;
++ }
++
++ const char *mime_from, *mime_to;
++ CHECK(from->findCString(kKeyMIMEType, &mime_from));
++ CHECK(to->findCString(kKeyMIMEType, &mime_to));
++
++ if (strcasecmp(mime_from, mime_to)) {
++ return true;
++ }
++
++ if (!strcasecmp(mime_from, MEDIA_MIMETYPE_VIDEO_RAW)) {
++ int32_t colorFormat_from, colorFormat_to;
++ CHECK(from->findInt32(kKeyColorFormat, &colorFormat_from));
++ CHECK(to->findInt32(kKeyColorFormat, &colorFormat_to));
++
++ if (colorFormat_from != colorFormat_to) {
++ return true;
++ }
++
++ int32_t width_from, width_to;
++ CHECK(from->findInt32(kKeyWidth, &width_from));
++ CHECK(to->findInt32(kKeyWidth, &width_to));
++
++ if (width_from != width_to) {
++ return true;
++ }
++
++ int32_t height_from, height_to;
++ CHECK(from->findInt32(kKeyHeight, &height_from));
++ CHECK(to->findInt32(kKeyHeight, &height_to));
++
++ if (height_from != height_to) {
++ return true;
++ }
++
++ int32_t left_from, top_from, right_from, bottom_from;
++ CHECK(from->findRect(
++ kKeyCropRect,
++ &left_from, &top_from, &right_from, &bottom_from));
++
++ int32_t left_to, top_to, right_to, bottom_to;
++ CHECK(to->findRect(
++ kKeyCropRect,
++ &left_to, &top_to, &right_to, &bottom_to));
++
++ if (left_to != left_from || top_to != top_from
++ || right_to != right_from || bottom_to != bottom_from) {
++ return true;
++ }
++ } else if (!strcasecmp(mime_from, MEDIA_MIMETYPE_AUDIO_RAW)) {
++ int32_t numChannels_from, numChannels_to;
++ CHECK(from->findInt32(kKeyChannelCount, &numChannels_from));
++ CHECK(to->findInt32(kKeyChannelCount, &numChannels_to));
++
++ if (numChannels_from != numChannels_to) {
++ return true;
++ }
++
++ int32_t sampleRate_from, sampleRate_to;
++ CHECK(from->findInt32(kKeySampleRate, &sampleRate_from));
++ CHECK(to->findInt32(kKeySampleRate, &sampleRate_to));
++
++ if (sampleRate_from != sampleRate_to) {
++ return true;
++ }
++ }
++
++ return false;
++}
++
++void OMXCodec::onEvent(OMX_EVENTTYPE event, OMX_U32 data1, OMX_U32 data2) {
++ switch (event) {
++ case OMX_EventCmdComplete:
++ {
++ onCmdComplete((OMX_COMMANDTYPE)data1, data2);
++ break;
++ }
++
++ case OMX_EventError:
++ {
++ CODEC_LOGE("OMX_EventError(0x%08x, %u)", data1, data2);
++
++ setState(ERROR);
++ break;
++ }
++
++ case OMX_EventPortSettingsChanged:
++ {
++ CODEC_LOGV("OMX_EventPortSettingsChanged(port=%u, data2=0x%08x)",
++ data1, data2);
++
++ if (data2 == 0 || data2 == OMX_IndexParamPortDefinition) {
++ onPortSettingsChanged(data1);
++ } else if (data1 == kPortIndexOutput &&
++ (data2 == OMX_IndexConfigCommonOutputCrop ||
++ data2 == OMX_IndexConfigCommonScale)) {
++
++ sp<MetaData> oldOutputFormat = mOutputFormat;
++ initOutputFormat(mSource->getFormat());
++
++ if (data2 == OMX_IndexConfigCommonOutputCrop &&
++ formatHasNotablyChanged(oldOutputFormat, mOutputFormat)) {
++ mOutputPortSettingsHaveChanged = true;
++
++ } else if (data2 == OMX_IndexConfigCommonScale) {
++ OMX_CONFIG_SCALEFACTORTYPE scale;
++ InitOMXParams(&scale);
++ scale.nPortIndex = kPortIndexOutput;
++
++ // Change display dimension only when necessary.
++ if (OK == mOMX->getConfig(
++ mNode,
++ OMX_IndexConfigCommonScale,
++ &scale, sizeof(scale))) {
++ int32_t left, top, right, bottom;
++ CHECK(mOutputFormat->findRect(kKeyCropRect,
++ &left, &top,
++ &right, &bottom));
++
++ // The scale is in 16.16 format.
++ // scale 1.0 = 0x010000. When there is no
++ // need to change the display, skip it.
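++                            // Illustrative example: a 16.16 value of
++                            // 0x00008000 corresponds to a scale of 0.5, so a
++                            // 640-pixel wide crop becomes
++                            // (640 * 0x8000) >> 16 = 320 display pixels.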
++ ALOGV("Get OMX_IndexConfigScale: 0x%x/0x%x",
++ scale.xWidth, scale.xHeight);
++
++ if (scale.xWidth != 0x010000) {
++ mOutputFormat->setInt32(kKeyDisplayWidth,
++                                ((right - left + 1) * scale.xWidth) >> 16);
++ mOutputPortSettingsHaveChanged = true;
++ }
++
++ if (scale.xHeight != 0x010000) {
++ mOutputFormat->setInt32(kKeyDisplayHeight,
++                                ((bottom - top + 1) * scale.xHeight) >> 16);
++ mOutputPortSettingsHaveChanged = true;
++ }
++ }
++ }
++ }
++ break;
++ }
++
++#if 0
++ case OMX_EventBufferFlag:
++ {
++ CODEC_LOGV("EVENT_BUFFER_FLAG(%ld)", data1);
++
++ if (data1 == kPortIndexOutput) {
++ mNoMoreOutputData = true;
++ }
++ break;
++ }
++#endif
++#ifdef USE_S3D_SUPPORT
++ case (OMX_EVENTTYPE)OMX_EventS3DInformation:
++ {
++ sp<IServiceManager> sm = defaultServiceManager();
++ sp<android::IExynosHWCService> hwc = interface_cast<android::IExynosHWCService>(
++ sm->getService(String16("Exynos.HWCService")));
++ if (hwc != NULL) {
++ if (data1 == OMX_TRUE) {
++ int eS3DMode;
++ switch (data2) {
++ case OMX_SEC_FPARGMT_SIDE_BY_SIDE:
++ eS3DMode = S3D_SBS;
++ break;
++ case OMX_SEC_FPARGMT_TOP_BOTTOM:
++ eS3DMode = S3D_TB;
++ break;
++                        case OMX_SEC_FPARGMT_CHECKERBRD_INTERL: // unsupported format on HDMI
++ case OMX_SEC_FPARGMT_COLUMN_INTERL:
++ case OMX_SEC_FPARGMT_ROW_INTERL:
++ case OMX_SEC_FPARGMT_TEMPORAL_INTERL:
++ default:
++ eS3DMode = S3D_NONE;
++ }
++
++ hwc->setHdmiResolution(0, eS3DMode);
++ }
++ } else {
++ ALOGE("Exynos.HWCService is unavailable");
++ }
++ break;
++ }
++#endif
++ default:
++ {
++ CODEC_LOGV("EVENT(%d, %u, %u)", event, data1, data2);
++ break;
++ }
++ }
++}
++
++void OMXCodec::onCmdComplete(OMX_COMMANDTYPE cmd, OMX_U32 data) {
++ switch (cmd) {
++ case OMX_CommandStateSet:
++ {
++ onStateChange((OMX_STATETYPE)data);
++ break;
++ }
++
++ case OMX_CommandPortDisable:
++ {
++ OMX_U32 portIndex = data;
++ CODEC_LOGV("PORT_DISABLED(%u)", portIndex);
++
++ CHECK(mState == EXECUTING || mState == RECONFIGURING);
++ CHECK_EQ((int)mPortStatus[portIndex], (int)DISABLING);
++ CHECK_EQ(mPortBuffers[portIndex].size(), 0u);
++
++ mPortStatus[portIndex] = DISABLED;
++
++ if (mState == RECONFIGURING) {
++ CHECK_EQ(portIndex, (OMX_U32)kPortIndexOutput);
++
++ sp<MetaData> oldOutputFormat = mOutputFormat;
++ initOutputFormat(mSource->getFormat());
++
++ // Don't notify clients if the output port settings change
++ // wasn't of importance to them, i.e. it may be that just the
++ // number of buffers has changed and nothing else.
++ bool formatChanged = formatHasNotablyChanged(oldOutputFormat, mOutputFormat);
++ if (!mOutputPortSettingsHaveChanged) {
++ mOutputPortSettingsHaveChanged = formatChanged;
++ }
++
++ status_t err = enablePortAsync(portIndex);
++ if (err != OK) {
++ CODEC_LOGE("enablePortAsync(%u) failed (err = %d)", portIndex, err);
++ setState(ERROR);
++ } else {
++ err = allocateBuffersOnPort(portIndex);
++ if (err != OK) {
++ CODEC_LOGE("allocateBuffersOnPort (%s) failed "
++ "(err = %d)",
++ portIndex == kPortIndexInput
++ ? "input" : "output",
++ err);
++
++ setState(ERROR);
++ }
++ }
++ }
++ break;
++ }
++
++ case OMX_CommandPortEnable:
++ {
++ OMX_U32 portIndex = data;
++ CODEC_LOGV("PORT_ENABLED(%u)", portIndex);
++
++ CHECK(mState == EXECUTING || mState == RECONFIGURING);
++ CHECK_EQ((int)mPortStatus[portIndex], (int)ENABLING);
++
++ mPortStatus[portIndex] = ENABLED;
++
++ if (mState == RECONFIGURING) {
++ CHECK_EQ(portIndex, (OMX_U32)kPortIndexOutput);
++
++ setState(EXECUTING);
++
++ fillOutputBuffers();
++ }
++ break;
++ }
++
++ case OMX_CommandFlush:
++ {
++ OMX_U32 portIndex = data;
++
++ CODEC_LOGV("FLUSH_DONE(%u)", portIndex);
++
++ CHECK_EQ((int)mPortStatus[portIndex], (int)SHUTTING_DOWN);
++ mPortStatus[portIndex] = ENABLED;
++
++ CHECK_EQ(countBuffersWeOwn(mPortBuffers[portIndex]),
++ mPortBuffers[portIndex].size());
++
++ if (mSkipCutBuffer != NULL && mPortStatus[kPortIndexOutput] == ENABLED) {
++ mSkipCutBuffer->clear();
++ }
++
++ if (mState == RECONFIGURING) {
++ CHECK_EQ(portIndex, (OMX_U32)kPortIndexOutput);
++
++ disablePortAsync(portIndex);
++ } else if (mState == EXECUTING_TO_IDLE) {
++ if (mPortStatus[kPortIndexInput] == ENABLED
++ && mPortStatus[kPortIndexOutput] == ENABLED) {
++ CODEC_LOGV("Finished flushing both ports, now completing "
++ "transition from EXECUTING to IDLE.");
++
++ mPortStatus[kPortIndexInput] = SHUTTING_DOWN;
++ mPortStatus[kPortIndexOutput] = SHUTTING_DOWN;
++
++ status_t err =
++ mOMX->sendCommand(mNode, OMX_CommandStateSet, OMX_StateIdle);
++ CHECK_EQ(err, (status_t)OK);
++ }
++ } else {
++ // We're flushing both ports in preparation for seeking.
++
++ if (mPortStatus[kPortIndexInput] == ENABLED
++ && mPortStatus[kPortIndexOutput] == ENABLED) {
++ CODEC_LOGV("Finished flushing both ports, now continuing from"
++ " seek-time.");
++
++ // We implicitly resume pulling on our upstream source.
++ mPaused = false;
++ mNoMoreOutputData = false;
++
++ drainInputBuffers();
++ fillOutputBuffers();
++ }
++
++ if (mOutputPortSettingsChangedPending) {
++ CODEC_LOGV(
++ "Honoring deferred output port settings change.");
++
++ mOutputPortSettingsChangedPending = false;
++ onPortSettingsChanged(kPortIndexOutput);
++ }
++ }
++
++ break;
++ }
++
++ default:
++ {
++ CODEC_LOGV("CMD_COMPLETE(%d, %u)", cmd, data);
++ break;
++ }
++ }
++}
++
++void OMXCodec::onStateChange(OMX_STATETYPE newState) {
++ CODEC_LOGV("onStateChange %d", newState);
++
++ switch (newState) {
++ case OMX_StateIdle:
++ {
++ CODEC_LOGV("Now Idle.");
++ if (mState == LOADED_TO_IDLE) {
++ status_t err = mOMX->sendCommand(
++ mNode, OMX_CommandStateSet, OMX_StateExecuting);
++
++ CHECK_EQ(err, (status_t)OK);
++
++ setState(IDLE_TO_EXECUTING);
++ } else {
++ CHECK_EQ((int)mState, (int)EXECUTING_TO_IDLE);
++
++ if (countBuffersWeOwn(mPortBuffers[kPortIndexInput]) !=
++ mPortBuffers[kPortIndexInput].size()) {
++ ALOGE("Codec did not return all input buffers "
++ "(received %zu / %zu)",
++ countBuffersWeOwn(mPortBuffers[kPortIndexInput]),
++ mPortBuffers[kPortIndexInput].size());
++ TRESPASS();
++ }
++
++ if (countBuffersWeOwn(mPortBuffers[kPortIndexOutput]) !=
++ mPortBuffers[kPortIndexOutput].size()) {
++ ALOGE("Codec did not return all output buffers "
++ "(received %zu / %zu)",
++ countBuffersWeOwn(mPortBuffers[kPortIndexOutput]),
++ mPortBuffers[kPortIndexOutput].size());
++ TRESPASS();
++ }
++
++ status_t err = mOMX->sendCommand(
++ mNode, OMX_CommandStateSet, OMX_StateLoaded);
++
++ CHECK_EQ(err, (status_t)OK);
++
++ err = freeBuffersOnPort(kPortIndexInput);
++ CHECK_EQ(err, (status_t)OK);
++
++ err = freeBuffersOnPort(kPortIndexOutput);
++ CHECK_EQ(err, (status_t)OK);
++
++ mPortStatus[kPortIndexInput] = ENABLED;
++ mPortStatus[kPortIndexOutput] = ENABLED;
++
++ if ((mFlags & kEnableGrallocUsageProtected) &&
++ mNativeWindow != NULL) {
++ // We push enough 1x1 blank buffers to ensure that one of
++ // them has made it to the display. This allows the OMX
++ // component teardown to zero out any protected buffers
++ // without the risk of scanning out one of those buffers.
++ pushBlankBuffersToNativeWindow(mNativeWindow.get());
++ }
++
++ setState(IDLE_TO_LOADED);
++ }
++ break;
++ }
++
++ case OMX_StateExecuting:
++ {
++ CHECK_EQ((int)mState, (int)IDLE_TO_EXECUTING);
++
++ CODEC_LOGV("Now Executing.");
++
++ mOutputPortSettingsChangedPending = false;
++
++ setState(EXECUTING);
++
++ // Buffers will be submitted to the component in the first
++ // call to OMXCodec::read as mInitialBufferSubmit is true at
++ // this point. This ensures that this on_message call returns,
++ // releases the lock and ::init can notice the state change and
++ // itself return.
++ break;
++ }
++
++ case OMX_StateLoaded:
++ {
++ CHECK_EQ((int)mState, (int)IDLE_TO_LOADED);
++
++ CODEC_LOGV("Now Loaded.");
++
++ setState(LOADED);
++ break;
++ }
++
++ case OMX_StatePause:
++ {
++ CODEC_LOGV("Now paused.");
++ CHECK_EQ((int)mState, (int)PAUSING);
++ setState(PAUSED);
++ break;
++ }
++
++ case OMX_StateInvalid:
++ {
++ setState(ERROR);
++ break;
++ }
++
++ default:
++ {
++ CHECK(!"should not be here.");
++ break;
++ }
++ }
++}
++
++// static
++size_t OMXCodec::countBuffersWeOwn(const Vector<BufferInfo> &buffers) {
++ size_t n = 0;
++    for (size_t i = 0; i < buffers.size(); ++i) {
++ if (buffers[i].mStatus != OWNED_BY_COMPONENT) {
++            ++n;
++ }
++ }
++
++ return n;
++}
++
++status_t OMXCodec::freeBuffersOnPort(
++ OMX_U32 portIndex, bool onlyThoseWeOwn) {
++ Vector<BufferInfo> *buffers = &mPortBuffers[portIndex];
++
++ status_t stickyErr = OK;
++
++ for (size_t i = buffers->size(); i > 0;) {
++ i--;
++ BufferInfo *info = &buffers->editItemAt(i);
++
++ if (onlyThoseWeOwn && info->mStatus == OWNED_BY_COMPONENT) {
++ continue;
++ }
++
++ CHECK(info->mStatus == OWNED_BY_US
++ || info->mStatus == OWNED_BY_NATIVE_WINDOW);
++
++ CODEC_LOGV("freeing buffer %u on port %u", info->mBuffer, portIndex);
++
++ status_t err = freeBuffer(portIndex, i);
++
++ if (err != OK) {
++ stickyErr = err;
++ }
++
++ }
++
++ CHECK(onlyThoseWeOwn || buffers->isEmpty());
++
++ return stickyErr;
++}
++
++status_t OMXCodec::freeBuffer(OMX_U32 portIndex, size_t bufIndex) {
++ Vector<BufferInfo> *buffers = &mPortBuffers[portIndex];
++
++ BufferInfo *info = &buffers->editItemAt(bufIndex);
++
++ status_t err = mOMX->freeBuffer(mNode, portIndex, info->mBuffer);
++
++ if (err == OK && info->mMediaBuffer != NULL) {
++ CHECK_EQ(portIndex, (OMX_U32)kPortIndexOutput);
++ info->mMediaBuffer->setObserver(NULL);
++
++ // Make sure nobody but us owns this buffer at this point.
++ CHECK_EQ(info->mMediaBuffer->refcount(), 0);
++
++ // Cancel the buffer if it belongs to an ANativeWindow.
++ sp<GraphicBuffer> graphicBuffer = info->mMediaBuffer->graphicBuffer();
++ if (info->mStatus == OWNED_BY_US && graphicBuffer != 0) {
++ err = cancelBufferToNativeWindow(info);
++ }
++
++ info->mMediaBuffer->release();
++ info->mMediaBuffer = NULL;
++ }
++
++ if (err == OK) {
++ buffers->removeAt(bufIndex);
++ }
++
++ return err;
++}
++
++void OMXCodec::onPortSettingsChanged(OMX_U32 portIndex) {
++ CODEC_LOGV("PORT_SETTINGS_CHANGED(%u)", portIndex);
++
++ CHECK(mState == EXECUTING || mState == EXECUTING_TO_IDLE);
++ CHECK_EQ(portIndex, (OMX_U32)kPortIndexOutput);
++ CHECK(!mOutputPortSettingsChangedPending);
++
++ if (mPortStatus[kPortIndexOutput] != ENABLED) {
++ CODEC_LOGV("Deferring output port settings change.");
++ mOutputPortSettingsChangedPending = true;
++ return;
++ }
++
++ setState(RECONFIGURING);
++
++ if (mQuirks & kNeedsFlushBeforeDisable) {
++ if (!flushPortAsync(portIndex)) {
++ onCmdComplete(OMX_CommandFlush, portIndex);
++ }
++ } else {
++ disablePortAsync(portIndex);
++ }
++}
++
++bool OMXCodec::flushPortAsync(OMX_U32 portIndex) {
++ CHECK(mState == EXECUTING || mState == RECONFIGURING
++ || mState == EXECUTING_TO_IDLE || mState == FLUSHING);
++
++ CODEC_LOGV("flushPortAsync(%u): we own %zu out of %zu buffers already.",
++ portIndex, countBuffersWeOwn(mPortBuffers[portIndex]),
++ mPortBuffers[portIndex].size());
++
++ CHECK_EQ((int)mPortStatus[portIndex], (int)ENABLED);
++ mPortStatus[portIndex] = SHUTTING_DOWN;
++
++ if ((mQuirks & kRequiresFlushCompleteEmulation)
++ && countBuffersWeOwn(mPortBuffers[portIndex])
++ == mPortBuffers[portIndex].size()) {
++ // No flush is necessary and this component fails to send a
++ // flush-complete event in this case.
++
++ return false;
++ }
++
++ status_t err =
++ mOMX->sendCommand(mNode, OMX_CommandFlush, portIndex);
++ CHECK_EQ(err, (status_t)OK);
++
++ return true;
++}
++
++void OMXCodec::disablePortAsync(OMX_U32 portIndex) {
++ CHECK(mState == EXECUTING || mState == RECONFIGURING);
++
++ CHECK_EQ((int)mPortStatus[portIndex], (int)ENABLED);
++ mPortStatus[portIndex] = DISABLING;
++
++ CODEC_LOGV("sending OMX_CommandPortDisable(%u)", portIndex);
++ status_t err =
++ mOMX->sendCommand(mNode, OMX_CommandPortDisable, portIndex);
++ CHECK_EQ(err, (status_t)OK);
++
++ freeBuffersOnPort(portIndex, true);
++}
++
++status_t OMXCodec::enablePortAsync(OMX_U32 portIndex) {
++ CHECK(mState == EXECUTING || mState == RECONFIGURING);
++
++ CHECK_EQ((int)mPortStatus[portIndex], (int)DISABLED);
++ mPortStatus[portIndex] = ENABLING;
++
++ CODEC_LOGV("sending OMX_CommandPortEnable(%u)", portIndex);
++ return mOMX->sendCommand(mNode, OMX_CommandPortEnable, portIndex);
++}
++
++void OMXCodec::fillOutputBuffers() {
++ CHECK(mState == EXECUTING || mState == FLUSHING);
++
++ // This is a workaround for some decoders not properly reporting
++ // end-of-output-stream. If we own all input buffers and also own
++ // all output buffers and we already signalled end-of-input-stream,
++ // the end-of-output-stream is implied.
++ if (mSignalledEOS
++ && countBuffersWeOwn(mPortBuffers[kPortIndexInput])
++ == mPortBuffers[kPortIndexInput].size()
++ && countBuffersWeOwn(mPortBuffers[kPortIndexOutput])
++ == mPortBuffers[kPortIndexOutput].size()) {
++ mNoMoreOutputData = true;
++ mBufferFilled.signal();
++
++ return;
++ }
++
++ Vector<BufferInfo> *buffers = &mPortBuffers[kPortIndexOutput];
++    for (size_t i = 0; i < buffers->size(); ++i) {
++ BufferInfo *info = &buffers->editItemAt(i);
++ if (info->mStatus == OWNED_BY_US) {
++ fillOutputBuffer(&buffers->editItemAt(i));
++ }
++ }
++}
++
++void OMXCodec::drainInputBuffers() {
++ CHECK(mState == EXECUTING || mState == RECONFIGURING || mState == FLUSHING);
++
++ if (mFlags & kUseSecureInputBuffers) {
++ Vector<BufferInfo> *buffers = &mPortBuffers[kPortIndexInput];
++        for (size_t i = 0; i < buffers->size(); ++i) {
++ if (!drainAnyInputBuffer()
++ || (mFlags & kOnlySubmitOneInputBufferAtOneTime)) {
++ break;
++ }
++ }
++ } else {
++ Vector<BufferInfo> *buffers = &mPortBuffers[kPortIndexInput];
++        for (size_t i = 0; i < buffers->size(); ++i) {
++ BufferInfo *info = &buffers->editItemAt(i);
++
++ if (info->mStatus != OWNED_BY_US) {
++ continue;
++ }
++
++ if (!drainInputBuffer(info)) {
++ break;
++ }
++
++ if (mFlags & kOnlySubmitOneInputBufferAtOneTime) {
++ break;
++ }
++ }
++ }
++}
++
++bool OMXCodec::drainAnyInputBuffer() {
++ return drainInputBuffer((BufferInfo *)NULL);
++}
++
++OMXCodec::BufferInfo *OMXCodec::findInputBufferByDataPointer(void *ptr) {
++ Vector<BufferInfo> *infos = &mPortBuffers[kPortIndexInput];
++    for (size_t i = 0; i < infos->size(); ++i) {
++ BufferInfo *info = &infos->editItemAt(i);
++
++ if (info->mData == ptr) {
++ CODEC_LOGV(
++ "input buffer data ptr = %p, buffer_id = %u",
++ ptr,
++ info->mBuffer);
++
++ return info;
++ }
++ }
++
++ TRESPASS();
++}
++
++OMXCodec::BufferInfo *OMXCodec::findEmptyInputBuffer() {
++ Vector<BufferInfo> *infos = &mPortBuffers[kPortIndexInput];
++    for (size_t i = 0; i < infos->size(); ++i) {
++ BufferInfo *info = &infos->editItemAt(i);
++
++ if (info->mStatus == OWNED_BY_US) {
++ return info;
++ }
++ }
++
++ TRESPASS();
++}
++
++bool OMXCodec::drainInputBuffer(BufferInfo *info) {
++ if (info != NULL) {
++ CHECK_EQ((int)info->mStatus, (int)OWNED_BY_US);
++ }
++
++ if (mSignalledEOS) {
++ return false;
++ }
++
++ if (mCodecSpecificDataIndex < mCodecSpecificData.size()) {
++ CHECK(!(mFlags & kUseSecureInputBuffers));
++
++ const CodecSpecificData *specific =
++ mCodecSpecificData[mCodecSpecificDataIndex];
++
++ size_t size = specific->mSize;
++
++ if ((!strcasecmp(MEDIA_MIMETYPE_VIDEO_AVC, mMIME) ||
++ !strcasecmp(MEDIA_MIMETYPE_VIDEO_HEVC, mMIME))
++ && !(mQuirks & kWantsNALFragments)) {
++ static const uint8_t kNALStartCode[4] =
++ { 0x00, 0x00, 0x00, 0x01 };
++
++            CHECK(info->mSize >= specific->mSize + 4);
++
++            size += 4;
++
++ memcpy(info->mData, kNALStartCode, 4);
++            memcpy((uint8_t *)info->mData + 4,
++ specific->mData, specific->mSize);
++ } else {
++ CHECK(info->mSize >= specific->mSize);
++ memcpy(info->mData, specific->mData, specific->mSize);
++ }
++
++ mNoMoreOutputData = false;
++
++ CODEC_LOGV("calling emptyBuffer with codec specific data");
++
++ status_t err = mOMX->emptyBuffer(
++ mNode, info->mBuffer, 0, size,
++ OMX_BUFFERFLAG_ENDOFFRAME | OMX_BUFFERFLAG_CODECCONFIG,
++ 0);
++ CHECK_EQ(err, (status_t)OK);
++
++ info->mStatus = OWNED_BY_COMPONENT;
++
++        ++mCodecSpecificDataIndex;
++ return true;
++ }
++
++ if (mPaused) {
++ return false;
++ }
++
++ status_t err;
++
++ bool signalEOS = false;
++ int64_t timestampUs = 0;
++
++ size_t offset = 0;
++ int32_t n = 0;
++
++
++ for (;;) {
++ MediaBuffer *srcBuffer;
++ if (mSeekTimeUs >= 0) {
++ if (mLeftOverBuffer) {
++ mLeftOverBuffer->release();
++ mLeftOverBuffer = NULL;
++ }
++
++ MediaSource::ReadOptions options;
++ options.setSeekTo(mSeekTimeUs, mSeekMode);
++
++ mSeekTimeUs = -1;
++ mSeekMode = ReadOptions::SEEK_CLOSEST_SYNC;
++ mBufferFilled.signal();
++
++ err = mSource->read(&srcBuffer, &options);
++
++ if (err == OK) {
++ int64_t targetTimeUs;
++ if (srcBuffer->meta_data()->findInt64(
++ kKeyTargetTime, &targetTimeUs)
++ && targetTimeUs >= 0) {
++ CODEC_LOGV("targetTimeUs = %lld us", (long long)targetTimeUs);
++ mTargetTimeUs = targetTimeUs;
++ } else {
++ mTargetTimeUs = -1;
++ }
++ }
++ } else if (mLeftOverBuffer) {
++ srcBuffer = mLeftOverBuffer;
++ mLeftOverBuffer = NULL;
++
++ err = OK;
++ } else {
++ err = mSource->read(&srcBuffer);
++ }
++
++ if (err != OK) {
++ signalEOS = true;
++ mFinalStatus = err;
++ mSignalledEOS = true;
++ mBufferFilled.signal();
++ break;
++ }
++
++ if (mFlags & kUseSecureInputBuffers) {
++ info = findInputBufferByDataPointer(srcBuffer->data());
++ CHECK(info != NULL);
++ }
++
++ size_t remainingBytes = info->mSize - offset;
++
++ if (srcBuffer->range_length() > remainingBytes) {
++ if (offset == 0) {
++ CODEC_LOGE(
++ "Codec's input buffers are too small to accomodate "
++ "buffer read from source (info->mSize = %zu, srcLength = %zu)",
++ info->mSize, srcBuffer->range_length());
++
++ srcBuffer->release();
++ srcBuffer = NULL;
++
++ setState(ERROR);
++ return false;
++ }
++
++ mLeftOverBuffer = srcBuffer;
++ break;
++ }
++
++ bool releaseBuffer = true;
++ if (mFlags & kStoreMetaDataInVideoBuffers) {
++ releaseBuffer = false;
++ info->mMediaBuffer = srcBuffer;
++ }
++
++ if (mFlags & kUseSecureInputBuffers) {
++ // Data in "info" is already provided at this time.
++
++ releaseBuffer = false;
++
++ CHECK(info->mMediaBuffer == NULL);
++ info->mMediaBuffer = srcBuffer;
++ } else {
++            CHECK(srcBuffer->data() != NULL);
++            memcpy((uint8_t *)info->mData + offset,
++                    (const uint8_t *)srcBuffer->data()
++                        + srcBuffer->range_offset(),
++ srcBuffer->range_length());
++ }
++
++ int64_t lastBufferTimeUs;
++ CHECK(srcBuffer->meta_data()->findInt64(kKeyTime, &lastBufferTimeUs));
++ CHECK(lastBufferTimeUs >= 0);
++ if (mIsEncoder && mIsVideo) {
++ mDecodingTimeList.push_back(lastBufferTimeUs);
++ }
++
++ if (offset == 0) {
++ timestampUs = lastBufferTimeUs;
++ }
++
++        offset += srcBuffer->range_length();
++
++ if (!strcasecmp(MEDIA_MIMETYPE_AUDIO_VORBIS, mMIME)) {
++ CHECK(!(mQuirks & kSupportsMultipleFramesPerInputBuffer));
++            CHECK_GE(info->mSize, offset + sizeof(int32_t));
++
++ int32_t numPageSamples;
++ if (!srcBuffer->meta_data()->findInt32(
++ kKeyValidSamples, &numPageSamples)) {
++ numPageSamples = -1;
++ }
++
++            memcpy((uint8_t *)info->mData + offset,
++ &numPageSamples,
++ sizeof(numPageSamples));
++
++            offset += sizeof(numPageSamples);
++ }
++
++ if (releaseBuffer) {
++ srcBuffer->release();
++ srcBuffer = NULL;
++ }
++
++        ++n;
++
++ if (!(mQuirks & kSupportsMultipleFramesPerInputBuffer)) {
++ break;
++ }
++
++ int64_t coalescedDurationUs = lastBufferTimeUs - timestampUs;
++
++ if (coalescedDurationUs > 250000ll) {
++ // Don't coalesce more than 250ms worth of encoded data at once.
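++            // For example (illustrative only), with 20 ms audio frames this
++            // cap allows roughly a dozen frames to be packed into a single
++            // OMX input buffer before it is submitted.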
++ break;
++ }
++ }
++
++ if (n > 1) {
++ ALOGV("coalesced %d frames into one input buffer", n);
++ }
++
++ OMX_U32 flags = OMX_BUFFERFLAG_ENDOFFRAME;
++
++ if (signalEOS) {
++ flags |= OMX_BUFFERFLAG_EOS;
++ } else {
++ mNoMoreOutputData = false;
++ }
++
++ if (info == NULL) {
++ CHECK(mFlags & kUseSecureInputBuffers);
++ CHECK(signalEOS);
++
++ // This is fishy, there's still a MediaBuffer corresponding to this
++ // info available to the source at this point even though we're going
++ // to use it to signal EOS to the codec.
++ info = findEmptyInputBuffer();
++ }
++
++ CODEC_LOGV("Calling emptyBuffer on buffer %u (length %zu), "
++ "timestamp %lld us (%.2f secs)",
++ info->mBuffer, offset,
++ (long long)timestampUs, timestampUs / 1E6);
++
++ err = mOMX->emptyBuffer(
++ mNode, info->mBuffer, 0, offset,
++ flags, timestampUs);
++
++ if (err != OK) {
++ setState(ERROR);
++ return false;
++ }
++
++ info->mStatus = OWNED_BY_COMPONENT;
++
++ return true;
++}
++
++void OMXCodec::fillOutputBuffer(BufferInfo *info) {
++ CHECK_EQ((int)info->mStatus, (int)OWNED_BY_US);
++
++ if (mNoMoreOutputData) {
++ CODEC_LOGV("There is no more output data available, not "
++ "calling fillOutputBuffer");
++ return;
++ }
++
++ CODEC_LOGV("Calling fillBuffer on buffer %u", info->mBuffer);
++ status_t err = mOMX->fillBuffer(mNode, info->mBuffer);
++
++ if (err != OK) {
++ CODEC_LOGE("fillBuffer failed w/ error 0x%08x", err);
++
++ setState(ERROR);
++ return;
++ }
++
++ info->mStatus = OWNED_BY_COMPONENT;
++}
++
++bool OMXCodec::drainInputBuffer(IOMX::buffer_id buffer) {
++ Vector<BufferInfo> *buffers = &mPortBuffers[kPortIndexInput];
++    for (size_t i = 0; i < buffers->size(); ++i) {
++ if ((*buffers)[i].mBuffer == buffer) {
++ return drainInputBuffer(&buffers->editItemAt(i));
++ }
++ }
++
++ CHECK(!"should not be here.");
++
++ return false;
++}
++
++void OMXCodec::fillOutputBuffer(IOMX::buffer_id buffer) {
++ Vector<BufferInfo> *buffers = &mPortBuffers[kPortIndexOutput];
++    for (size_t i = 0; i < buffers->size(); ++i) {
++ if ((*buffers)[i].mBuffer == buffer) {
++ fillOutputBuffer(&buffers->editItemAt(i));
++ return;
++ }
++ }
++
++ CHECK(!"should not be here.");
++}
++
++void OMXCodec::setState(State newState) {
++ mState = newState;
++ mAsyncCompletion.signal();
++
++ // This may cause some spurious wakeups but is necessary to
++ // unblock the reader if we enter ERROR state.
++ mBufferFilled.signal();
++}
++
++status_t OMXCodec::waitForBufferFilled_l() {
++
++ if (mIsEncoder) {
++        // For timelapse video recording, the source may not deliver an
++        // input frame for a _long_ time. Do not use a timeout for video
++        // encoding.
++ return mBufferFilled.wait(mLock);
++ }
++ status_t err = mBufferFilled.waitRelative(mLock, kBufferFilledEventTimeOutNs);
++ if (err != OK) {
++ CODEC_LOGE("Timed out waiting for output buffers: %zu/%zu",
++ countBuffersWeOwn(mPortBuffers[kPortIndexInput]),
++ countBuffersWeOwn(mPortBuffers[kPortIndexOutput]));
++ }
++ return err;
++}
++
++void OMXCodec::setRawAudioFormat(
++ OMX_U32 portIndex, int32_t sampleRate, int32_t numChannels) {
++
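++ // Switch the port's encoding to PCM, then program 16-bit signed,
++ // interleaved linear PCM at the requested sample rate and channel count.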
++ // port definition
++ OMX_PARAM_PORTDEFINITIONTYPE def;
++ InitOMXParams(&def);
++ def.nPortIndex = portIndex;
++ status_t err = mOMX->getParameter(
++ mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
++ CHECK_EQ(err, (status_t)OK);
++ def.format.audio.eEncoding = OMX_AUDIO_CodingPCM;
++ CHECK_EQ(mOMX->setParameter(mNode, OMX_IndexParamPortDefinition,
++ &def, sizeof(def)), (status_t)OK);
++
++ // pcm param
++ OMX_AUDIO_PARAM_PCMMODETYPE pcmParams;
++ InitOMXParams(&pcmParams);
++ pcmParams.nPortIndex = portIndex;
++
++ err = mOMX->getParameter(
++ mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));
++
++ CHECK_EQ(err, (status_t)OK);
++
++ pcmParams.nChannels = numChannels;
++ pcmParams.eNumData = OMX_NumericalDataSigned;
++ pcmParams.bInterleaved = OMX_TRUE;
++ pcmParams.nBitPerSample = 16;
++ pcmParams.nSamplingRate = sampleRate;
++ pcmParams.ePCMMode = OMX_AUDIO_PCMModeLinear;
++
++ CHECK_EQ(getOMXChannelMapping(
++ numChannels, pcmParams.eChannelMapping), (status_t)OK);
++
++ err = mOMX->setParameter(
++ mNode, OMX_IndexParamAudioPcm, &pcmParams, sizeof(pcmParams));
++
++ CHECK_EQ(err, (status_t)OK);
++}
++
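++// Pick the smallest standard AMR band mode whose bitrate is at least the
++// requested one; requests above the top rate fall back to the highest mode
++// (23.85 kbps for AMR-WB, 12.2 kbps for AMR-NB).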
++static OMX_AUDIO_AMRBANDMODETYPE pickModeFromBitRate(bool isAMRWB, int32_t bps) {
++ if (isAMRWB) {
++ if (bps <= 6600) {
++ return OMX_AUDIO_AMRBandModeWB0;
++ } else if (bps <= 8850) {
++ return OMX_AUDIO_AMRBandModeWB1;
++ } else if (bps <= 12650) {
++ return OMX_AUDIO_AMRBandModeWB2;
++ } else if (bps <= 14250) {
++ return OMX_AUDIO_AMRBandModeWB3;
++ } else if (bps <= 15850) {
++ return OMX_AUDIO_AMRBandModeWB4;
++ } else if (bps <= 18250) {
++ return OMX_AUDIO_AMRBandModeWB5;
++ } else if (bps <= 19850) {
++ return OMX_AUDIO_AMRBandModeWB6;
++ } else if (bps <= 23050) {
++ return OMX_AUDIO_AMRBandModeWB7;
++ }
++
++ // 23850 bps
++ return OMX_AUDIO_AMRBandModeWB8;
++ } else { // AMRNB
++ if (bps <= 4750) {
++ return OMX_AUDIO_AMRBandModeNB0;
++ } else if (bps <= 5150) {
++ return OMX_AUDIO_AMRBandModeNB1;
++ } else if (bps <= 5900) {
++ return OMX_AUDIO_AMRBandModeNB2;
++ } else if (bps <= 6700) {
++ return OMX_AUDIO_AMRBandModeNB3;
++ } else if (bps <= 7400) {
++ return OMX_AUDIO_AMRBandModeNB4;
++ } else if (bps <= 7950) {
++ return OMX_AUDIO_AMRBandModeNB5;
++ } else if (bps <= 10200) {
++ return OMX_AUDIO_AMRBandModeNB6;
++ }
++
++ // 12200 bps
++ return OMX_AUDIO_AMRBandModeNB7;
++ }
++}
++
++void OMXCodec::setAMRFormat(bool isWAMR, int32_t bitRate) {
++ OMX_U32 portIndex = mIsEncoder ? kPortIndexOutput : kPortIndexInput;
++
++ OMX_AUDIO_PARAM_AMRTYPE def;
++ InitOMXParams(&def);
++ def.nPortIndex = portIndex;
++
++ status_t err =
++ mOMX->getParameter(mNode, OMX_IndexParamAudioAmr, &def, sizeof(def));
++
++ CHECK_EQ(err, (status_t)OK);
++
++ def.eAMRFrameFormat = OMX_AUDIO_AMRFrameFormatFSF;
++
++ def.eAMRBandMode = pickModeFromBitRate(isWAMR, bitRate);
++ err = mOMX->setParameter(mNode, OMX_IndexParamAudioAmr, &def, sizeof(def));
++ CHECK_EQ(err, (status_t)OK);
++
++ ////////////////////////
++
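++ // When encoding, the input port carries raw PCM; configure it to match
++ // the source's sample rate and channel count.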
++ if (mIsEncoder) {
++ sp<MetaData> format = mSource->getFormat();
++ int32_t sampleRate;
++ int32_t numChannels;
++ CHECK(format->findInt32(kKeySampleRate, &sampleRate));
++ CHECK(format->findInt32(kKeyChannelCount, &numChannels));
++
++ setRawAudioFormat(kPortIndexInput, sampleRate, numChannels);
++ }
++}
++
++status_t OMXCodec::setAACFormat(
++ int32_t numChannels, int32_t sampleRate, int32_t bitRate, int32_t aacProfile, bool isADTS) {
++ if (numChannels > 2) {
++ ALOGW("Number of channels: (%d) \n", numChannels);
++ }
++
++ if (mIsEncoder) {
++ if (isADTS) {
++ return -EINVAL;
++ }
++
++ //////////////// input port ////////////////////
++ setRawAudioFormat(kPortIndexInput, sampleRate, numChannels);
++
++ //////////////// output port ////////////////////
++ // format
++ OMX_AUDIO_PARAM_PORTFORMATTYPE format;
++ InitOMXParams(&format);
++ format.nPortIndex = kPortIndexOutput;
++ format.nIndex = 0;
++ status_t err = OMX_ErrorNone;
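++ // Enumerate the output port's supported formats until the AAC entry
++ // is found, then select it.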
++ while (OMX_ErrorNone == err) {
++ CHECK_EQ(mOMX->getParameter(mNode, OMX_IndexParamAudioPortFormat,
++ &format, sizeof(format)), (status_t)OK);
++ if (format.eEncoding == OMX_AUDIO_CodingAAC) {
++ break;
++ }
++ ++format.nIndex;
++ }
++ CHECK_EQ((status_t)OK, err);
++ CHECK_EQ(mOMX->setParameter(mNode, OMX_IndexParamAudioPortFormat,
++ &format, sizeof(format)), (status_t)OK);
++
++ // port definition
++ OMX_PARAM_PORTDEFINITIONTYPE def;
++ InitOMXParams(&def);
++ def.nPortIndex = kPortIndexOutput;
++ CHECK_EQ(mOMX->getParameter(mNode, OMX_IndexParamPortDefinition,
++ &def, sizeof(def)), (status_t)OK);
++ def.format.audio.bFlagErrorConcealment = OMX_TRUE;
++ def.format.audio.eEncoding = OMX_AUDIO_CodingAAC;
++ CHECK_EQ(mOMX->setParameter(mNode, OMX_IndexParamPortDefinition,
++ &def, sizeof(def)), (status_t)OK);
++
++ // profile
++ OMX_AUDIO_PARAM_AACPROFILETYPE profile;
++ InitOMXParams(&profile);
++ profile.nPortIndex = kPortIndexOutput;
++ CHECK_EQ(mOMX->getParameter(mNode, OMX_IndexParamAudioAac,
++ &profile, sizeof(profile)), (status_t)OK);
++ profile.nChannels = numChannels;
++ profile.eChannelMode = (numChannels == 1?
++ OMX_AUDIO_ChannelModeMono: OMX_AUDIO_ChannelModeStereo);
++ profile.nSampleRate = sampleRate;
++ profile.nBitRate = bitRate;
++ profile.nAudioBandWidth = 0;
++ profile.nFrameLength = 0;
++ profile.nAACtools = OMX_AUDIO_AACToolAll;
++ profile.nAACERtools = OMX_AUDIO_AACERNone;
++ profile.eAACProfile = (OMX_AUDIO_AACPROFILETYPE) aacProfile;
++ profile.eAACStreamFormat = OMX_AUDIO_AACStreamFormatMP4FF;
++ err = mOMX->setParameter(mNode, OMX_IndexParamAudioAac,
++ &profile, sizeof(profile));
++
++ if (err != OK) {
++ CODEC_LOGE("setParameter('OMX_IndexParamAudioAac') failed "
++ "(err = %d)",
++ err);
++ return err;
++ }
++ } else {
++ OMX_AUDIO_PARAM_AACPROFILETYPE profile;
++ InitOMXParams(&profile);
++ profile.nPortIndex = kPortIndexInput;
++
++ status_t err = mOMX->getParameter(
++ mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));
++ CHECK_EQ(err, (status_t)OK);
++
++ profile.nChannels = numChannels;
++ profile.nSampleRate = sampleRate;
++
++ profile.eAACStreamFormat =
++ isADTS
++ ? OMX_AUDIO_AACStreamFormatMP4ADTS
++ : OMX_AUDIO_AACStreamFormatMP4FF;
++
++ err = mOMX->setParameter(
++ mNode, OMX_IndexParamAudioAac, &profile, sizeof(profile));
++
++ if (err != OK) {
++ CODEC_LOGE("setParameter('OMX_IndexParamAudioAac') failed "
++ "(err = %d)",
++ err);
++ return err;
++ }
++ }
++
++ return OK;
++}
++
++status_t OMXCodec::setAC3Format(int32_t numChannels, int32_t sampleRate) {
++ OMX_AUDIO_PARAM_ANDROID_AC3TYPE def;
++ InitOMXParams(&def);
++ def.nPortIndex = kPortIndexInput;
++
++ status_t err = mOMX->getParameter(
++ mNode,
++ (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3,
++ &def,
++ sizeof(def));
++
++ if (err != OK) {
++ return err;
++ }
++
++ def.nChannels = numChannels;
++ def.nSampleRate = sampleRate;
++
++ return mOMX->setParameter(
++ mNode,
++ (OMX_INDEXTYPE)OMX_IndexParamAudioAndroidAc3,
++ &def,
++ sizeof(def));
++}
++
++void OMXCodec::setG711Format(int32_t sampleRate, int32_t numChannels) {
++ CHECK(!mIsEncoder);
++ setRawAudioFormat(kPortIndexInput, sampleRate, numChannels);
++}
++
++void OMXCodec::setImageOutputFormat(
++ OMX_COLOR_FORMATTYPE format, OMX_U32 width, OMX_U32 height) {
++ CODEC_LOGV("setImageOutputFormat(%u, %u)", width, height);
++
++#if 0
++ OMX_INDEXTYPE index;
++ status_t err = mOMX->get_extension_index(
++ mNode, "OMX.TI.JPEG.decode.Config.OutputColorFormat", &index);
++ CHECK_EQ(err, (status_t)OK);
++
++ err = mOMX->set_config(mNode, index, &format, sizeof(format));
++ CHECK_EQ(err, (status_t)OK);
++#endif
++
++ OMX_PARAM_PORTDEFINITIONTYPE def;
++ InitOMXParams(&def);
++ def.nPortIndex = kPortIndexOutput;
++
++ status_t err = mOMX->getParameter(
++ mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
++ CHECK_EQ(err, (status_t)OK);
++
++ CHECK_EQ((int)def.eDomain, (int)OMX_PortDomainImage);
++
++ OMX_IMAGE_PORTDEFINITIONTYPE *imageDef = &def.format.image;
++
++ CHECK_EQ((int)imageDef->eCompressionFormat, (int)OMX_IMAGE_CodingUnused);
++ imageDef->eColorFormat = format;
++ imageDef->nFrameWidth = width;
++ imageDef->nFrameHeight = height;
++
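++ // Size the output buffer for the uncompressed image: planar 4:2:0/4:1:1
++ // YUV is 12 bits per pixel, CbYCrY and the 16-bit RGB formats take
++ // 2 bytes per pixel, and ARGB8888 takes 4 bytes per pixel.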
++ switch (format) {
++ case OMX_COLOR_FormatYUV420PackedPlanar:
++ case OMX_COLOR_FormatYUV411Planar:
++ {
++ def.nBufferSize = (width * height * 3) / 2;
++ break;
++ }
++
++ case OMX_COLOR_FormatCbYCrY:
++ {
++ def.nBufferSize = width * height * 2;
++ break;
++ }
++
++ case OMX_COLOR_Format32bitARGB8888:
++ {
++ def.nBufferSize = width * height * 4;
++ break;
++ }
++
++ case OMX_COLOR_Format16bitARGB4444:
++ case OMX_COLOR_Format16bitARGB1555:
++ case OMX_COLOR_Format16bitRGB565:
++ case OMX_COLOR_Format16bitBGR565:
++ {
++ def.nBufferSize = width * height * 2;
++ break;
++ }
++
++ default:
++ CHECK(!"Should not be here. Unknown color format.");
++ break;
++ }
++
++ def.nBufferCountActual = def.nBufferCountMin;
++
++ err = mOMX->setParameter(
++ mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
++ CHECK_EQ(err, (status_t)OK);
++}
++
++void OMXCodec::setJPEGInputFormat(
++ OMX_U32 width, OMX_U32 height, OMX_U32 compressedSize) {
++ OMX_PARAM_PORTDEFINITIONTYPE def;
++ InitOMXParams(&def);
++ def.nPortIndex = kPortIndexInput;
++
++ status_t err = mOMX->getParameter(
++ mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
++ CHECK_EQ(err, (status_t)OK);
++
++ CHECK_EQ((int)def.eDomain, (int)OMX_PortDomainImage);
++ OMX_IMAGE_PORTDEFINITIONTYPE *imageDef = &def.format.image;
++
++ CHECK_EQ((int)imageDef->eCompressionFormat, (int)OMX_IMAGE_CodingJPEG);
++ imageDef->nFrameWidth = width;
++ imageDef->nFrameHeight = height;
++
++ def.nBufferSize = compressedSize;
++ def.nBufferCountActual = def.nBufferCountMin;
++
++ err = mOMX->setParameter(
++ mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
++ CHECK_EQ(err, (status_t)OK);
++}
++
++void OMXCodec::addCodecSpecificData(const void *data, size_t size) {
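++ // The trailing mData[] placeholder in CodecSpecificData is one byte, so
++ // allocate the struct plus (size - 1) extra bytes to hold the payload inline.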
++ CodecSpecificData *specific =
++ (CodecSpecificData *)malloc(sizeof(CodecSpecificData) + size - 1);
++
++ specific->mSize = size;
++ memcpy(specific->mData, data, size);
++
++ mCodecSpecificData.push(specific);
++}
++
++void OMXCodec::clearCodecSpecificData() {
++ for (size_t i = 0; i < mCodecSpecificData.size(); ++i) {
++ free(mCodecSpecificData.editItemAt(i));
++ }
++ mCodecSpecificData.clear();
++ mCodecSpecificDataIndex = 0;
++}
++
++status_t OMXCodec::start(MetaData *meta) {
++ Mutex::Autolock autoLock(mLock);
++
++ if (mPaused) {
++ status_t err = resumeLocked(true);
++ return err;
++ }
++
++ if (mState != LOADED) {
++ CODEC_LOGE("called start in the unexpected state: %d", mState);
++ return UNKNOWN_ERROR;
++ }
++
++ sp<MetaData> params = new MetaData;
++ if (mQuirks & kWantsNALFragments) {
++ params->setInt32(kKeyWantsNALFragments, true);
++ }
++ if (meta) {
++ int64_t startTimeUs = 0;
++ int64_t timeUs;
++ if (meta->findInt64(kKeyTime, &timeUs)) {
++ startTimeUs = timeUs;
++ }
++ params->setInt64(kKeyTime, startTimeUs);
++ }
++
++ mCodecSpecificDataIndex = 0;
++ mInitialBufferSubmit = true;
++ mSignalledEOS = false;
++ mNoMoreOutputData = false;
++ mOutputPortSettingsHaveChanged = false;
++ mSeekTimeUs = -1;
++ mSeekMode = ReadOptions::SEEK_CLOSEST_SYNC;
++ mTargetTimeUs = -1;
++ mFilledBuffers.clear();
++ mPaused = false;
++
++ status_t err;
++ if (mIsEncoder) {
++ // Calling init() before starting its source so that we can configure,
++ // if supported, the source to use exactly the same number of input
++ // buffers as requested by the encoder.
++ if ((err = init()) != OK) {
++ CODEC_LOGE("init failed: %d", err);
++ return err;
++ }
++
++ params->setInt32(kKeyNumBuffers, mPortBuffers[kPortIndexInput].size());
++ err = mSource->start(params.get());
++ if (err != OK) {
++ CODEC_LOGE("source failed to start: %d", err);
++ stopOmxComponent_l();
++ }
++ return err;
++ }
++
++ // Decoder case
++ if ((err = mSource->start(params.get())) != OK) {
++ CODEC_LOGE("source failed to start: %d", err);
++ return err;
++ }
++ return init();
++}
++
++status_t OMXCodec::stop() {
++ CODEC_LOGV("stop mState=%d", mState);
++ Mutex::Autolock autoLock(mLock);
++ status_t err = stopOmxComponent_l();
++ mSource->stop();
++
++ CODEC_LOGV("stopped in state %d", mState);
++ return err;
++}
++
++status_t OMXCodec::stopOmxComponent_l() {
++ CODEC_LOGV("stopOmxComponent_l mState=%d", mState);
++
++ while (isIntermediateState(mState)) {
++ mAsyncCompletion.wait(mLock);
++ }
++
++ bool isError = false;
++ switch (mState) {
++ case LOADED:
++ break;
++
++ case ERROR:
++ {
++ if (mPortStatus[kPortIndexOutput] == ENABLING) {
++ // Codec is in a wedged state (technical term)
++ // We've seen an output port settings change from the codec,
++ // We've disabled the output port, then freed the output
++ // buffers, initiated re-enabling the output port but
++ // failed to reallocate the output buffers.
++ // There doesn't seem to be a way to orderly transition
++ // from executing->idle and idle->loaded now that the
++ // output port hasn't been reenabled yet...
++ // Simply free as many resources as we can and pretend
++ // that we're in LOADED state so that the destructor
++ // will free the component instance without asserting.
++ freeBuffersOnPort(kPortIndexInput, true /* onlyThoseWeOwn */);
++ freeBuffersOnPort(kPortIndexOutput, true /* onlyThoseWeOwn */);
++ setState(LOADED);
++ break;
++ } else {
++ OMX_STATETYPE state = OMX_StateInvalid;
++ status_t err = mOMX->getState(mNode, &state);
++ CHECK_EQ(err, (status_t)OK);
++
++ if (state != OMX_StateExecuting) {
++ break;
++ }
++ // else fall through to the idling code
++ }
++
++ isError = true;
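++ // fall through: the EXECUTING case below idles the component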
++ }
++
++ case PAUSED:
++ case EXECUTING:
++ {
++ setState(EXECUTING_TO_IDLE);
++
++ if (mQuirks & kRequiresFlushBeforeShutdown) {
++ CODEC_LOGV("This component requires a flush before transitioning "
++ "from EXECUTING to IDLE...");
++
++ bool emulateInputFlushCompletion =
++ !flushPortAsync(kPortIndexInput);
++
++ bool emulateOutputFlushCompletion =
++ !flushPortAsync(kPortIndexOutput);
++
++ if (emulateInputFlushCompletion) {
++ onCmdComplete(OMX_CommandFlush, kPortIndexInput);
++ }
++
++ if (emulateOutputFlushCompletion) {
++ onCmdComplete(OMX_CommandFlush, kPortIndexOutput);
++ }
++ } else {
++ mPortStatus[kPortIndexInput] = SHUTTING_DOWN;
++ mPortStatus[kPortIndexOutput] = SHUTTING_DOWN;
++
++ status_t err =
++ mOMX->sendCommand(mNode, OMX_CommandStateSet, OMX_StateIdle);
++ CHECK_EQ(err, (status_t)OK);
++ }
++
++ while (mState != LOADED && mState != ERROR) {
++ mAsyncCompletion.wait(mLock);
++ }
++
++ if (isError) {
++ // We were in the ERROR state coming in, so restore that now
++ // that we've idled the OMX component.
++ setState(ERROR);
++ }
++
++ break;
++ }
++
++ default:
++ {
++ CHECK(!"should not be here.");
++ break;
++ }
++ }
++
++ if (mLeftOverBuffer) {
++ mLeftOverBuffer->release();
++ mLeftOverBuffer = NULL;
++ }
++
++ return OK;
++}
++
++sp<MetaData> OMXCodec::getFormat() {
++ Mutex::Autolock autoLock(mLock);
++
++ return mOutputFormat;
++}
++
++status_t OMXCodec::read(
++ MediaBuffer **buffer, const ReadOptions *options) {
++ status_t err = OK;
++ *buffer = NULL;
++
++ Mutex::Autolock autoLock(mLock);
++
++ if (mPaused) {
++ err = resumeLocked(false);
++ if(err != OK) {
++ CODEC_LOGE("Failed to restart codec err= %d", err);
++ return err;
++ }
++ }
++
++ if (mState != EXECUTING && mState != RECONFIGURING) {
++ return UNKNOWN_ERROR;
++ }
++
++ bool seeking = false;
++ int64_t seekTimeUs;
++ ReadOptions::SeekMode seekMode;
++ if (options && options->getSeekTo(&seekTimeUs, &seekMode)) {
++ seeking = true;
++ }
++
++ if (mInitialBufferSubmit) {
++ mInitialBufferSubmit = false;
++
++ if (seeking) {
++ CHECK(seekTimeUs >= 0);
++ mSeekTimeUs = seekTimeUs;
++ mSeekMode = seekMode;
++
++ // There's no reason to trigger the code below, there's
++ // nothing to flush yet.
++ seeking = false;
++ mPaused = false;
++ }
++
++ drainInputBuffers();
++
++ if (mState == EXECUTING) {
++ // Otherwise mState == RECONFIGURING and this code will trigger
++ // after the output port is reenabled.
++ fillOutputBuffers();
++ }
++ }
++
++ if (seeking) {
++ while (mState == RECONFIGURING) {
++ if ((err = waitForBufferFilled_l()) != OK) {
++ return err;
++ }
++ }
++
++ if (mState != EXECUTING) {
++ return UNKNOWN_ERROR;
++ }
++
++ CODEC_LOGV("seeking to %" PRId64 " us (%.2f secs)", seekTimeUs, seekTimeUs / 1E6);
++
++ mSignalledEOS = false;
++
++ CHECK(seekTimeUs >= 0);
++ mSeekTimeUs = seekTimeUs;
++ mSeekMode = seekMode;
++
++ mFilledBuffers.clear();
++
++ CHECK_EQ((int)mState, (int)EXECUTING);
++ // The DSP supports flushing both ports simultaneously; flushing an individual port is not supported.
++ setState(FLUSHING);
++
++ bool emulateInputFlushCompletion = !flushPortAsync(kPortIndexInput);
++ bool emulateOutputFlushCompletion = !flushPortAsync(kPortIndexOutput);
++
++ if (emulateInputFlushCompletion) {
++ onCmdComplete(OMX_CommandFlush, kPortIndexInput);
++ }
++
++ if (emulateOutputFlushCompletion) {
++ onCmdComplete(OMX_CommandFlush, kPortIndexOutput);
++ }
++
++ while (mSeekTimeUs >= 0) {
++ if ((err = waitForBufferFilled_l()) != OK) {
++ return err;
++ }
++ }
++ }
++
++ while (mState != ERROR && !mNoMoreOutputData && mFilledBuffers.empty()) {
++ if ((err = waitForBufferFilled_l()) != OK) {
++ return err;
++ }
++ }
++
++ if (mState == ERROR) {
++ return UNKNOWN_ERROR;
++ }
++
++ if (seeking) {
++ CHECK_EQ((int)mState, (int)FLUSHING);
++ setState(EXECUTING);
++ }
++
++ if (mFilledBuffers.empty()) {
++ return mSignalledEOS ? mFinalStatus : ERROR_END_OF_STREAM;
++ }
++
++ if (mOutputPortSettingsHaveChanged) {
++ mOutputPortSettingsHaveChanged = false;
++
++ return INFO_FORMAT_CHANGED;
++ }
++
++ size_t index = *mFilledBuffers.begin();
++ mFilledBuffers.erase(mFilledBuffers.begin());
++
++ BufferInfo *info = &mPortBuffers[kPortIndexOutput].editItemAt(index);
++ CHECK_EQ((int)info->mStatus, (int)OWNED_BY_US);
++ info->mStatus = OWNED_BY_CLIENT;
++
++ info->mMediaBuffer->add_ref();
++ if (mSkipCutBuffer != NULL) {
++ mSkipCutBuffer->submit(info->mMediaBuffer);
++ }
++ *buffer = info->mMediaBuffer;
++
++ return OK;
++}
++
++void OMXCodec::signalBufferReturned(MediaBuffer *buffer) {
++ Mutex::Autolock autoLock(mLock);
++
++ Vector<BufferInfo> *buffers = &mPortBuffers[kPortIndexOutput];
++ for (size_t i = 0; i < buffers->size(); ++i) {
++ BufferInfo *info = &buffers->editItemAt(i);
++
++ if (info->mMediaBuffer == buffer) {
++ CHECK_EQ((int)mPortStatus[kPortIndexOutput], (int)ENABLED);
++ CHECK_EQ((int)info->mStatus, (int)OWNED_BY_CLIENT);
++
++ info->mStatus = OWNED_BY_US;
++
++ if (buffer->graphicBuffer() == 0) {
++ fillOutputBuffer(info);
++ } else {
++ sp<MetaData> metaData = info->mMediaBuffer->meta_data();
++ int32_t rendered = 0;
++ if (!metaData->findInt32(kKeyRendered, &rendered)) {
++ rendered = 0;
++ }
++ if (!rendered) {
++ status_t err = cancelBufferToNativeWindow(info);
++ if (err < 0) {
++ return;
++ }
++ }
++
++ info->mStatus = OWNED_BY_NATIVE_WINDOW;
++
++ // Dequeue the next buffer from the native window.
++ BufferInfo *nextBufInfo = dequeueBufferFromNativeWindow();
++ if (nextBufInfo == 0) {
++ return;
++ }
++
++ // Give the buffer to the OMX node to fill.
++ fillOutputBuffer(nextBufInfo);
++ }
++ return;
++ }
++ }
++
++ CHECK(!"should not be here.");
++}
++
++void OMXCodec::dumpPortStatus(OMX_U32 portIndex) {
++ OMX_PARAM_PORTDEFINITIONTYPE def;
++ InitOMXParams(&def);
++ def.nPortIndex = portIndex;
++
++ status_t err = mOMX->getParameter(
++ mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
++ CHECK_EQ(err, (status_t)OK);
++
++ printf("%s Port = {\n", portIndex == kPortIndexInput ? "Input" : "Output");
++
++ CHECK((portIndex == kPortIndexInput && def.eDir == OMX_DirInput)
++ || (portIndex == kPortIndexOutput && def.eDir == OMX_DirOutput));
++
++ printf(" nBufferCountActual = %" PRIu32 "\n", def.nBufferCountActual);
++ printf(" nBufferCountMin = %" PRIu32 "\n", def.nBufferCountMin);
++ printf(" nBufferSize = %" PRIu32 "\n", def.nBufferSize);
++
++ switch (def.eDomain) {
++ case OMX_PortDomainImage:
++ {
++ const OMX_IMAGE_PORTDEFINITIONTYPE *imageDef = &def.format.image;
++
++ printf("\n");
++ printf(" // Image\n");
++ printf(" nFrameWidth = %" PRIu32 "\n", imageDef->nFrameWidth);
++ printf(" nFrameHeight = %" PRIu32 "\n", imageDef->nFrameHeight);
++ printf(" nStride = %" PRIu32 "\n", imageDef->nStride);
++
++ printf(" eCompressionFormat = %s\n",
++ asString(imageDef->eCompressionFormat));
++
++ printf(" eColorFormat = %s\n",
++ asString(imageDef->eColorFormat));
++
++ break;
++ }
++
++ case OMX_PortDomainVideo:
++ {
++ OMX_VIDEO_PORTDEFINITIONTYPE *videoDef = &def.format.video;
++
++ printf("\n");
++ printf(" // Video\n");
++ printf(" nFrameWidth = %" PRIu32 "\n", videoDef->nFrameWidth);
++ printf(" nFrameHeight = %" PRIu32 "\n", videoDef->nFrameHeight);
++ printf(" nStride = %" PRIu32 "\n", videoDef->nStride);
++
++ printf(" eCompressionFormat = %s\n",
++ asString(videoDef->eCompressionFormat));
++
++ printf(" eColorFormat = %s\n",
++ asString(videoDef->eColorFormat));
++
++ break;
++ }
++
++ case OMX_PortDomainAudio:
++ {
++ OMX_AUDIO_PORTDEFINITIONTYPE *audioDef = &def.format.audio;
++
++ printf("\n");
++ printf(" // Audio\n");
++ printf(" eEncoding = %s\n",
++ asString(audioDef->eEncoding));
++
++ if (audioDef->eEncoding == OMX_AUDIO_CodingPCM) {
++ OMX_AUDIO_PARAM_PCMMODETYPE params;
++ InitOMXParams(&params);
++ params.nPortIndex = portIndex;
++
++ err = mOMX->getParameter(
++ mNode, OMX_IndexParamAudioPcm, &params, sizeof(params));
++ CHECK_EQ(err, (status_t)OK);
++
++ printf(" nSamplingRate = %" PRIu32 "\n", params.nSamplingRate);
++ printf(" nChannels = %" PRIu32 "\n", params.nChannels);
++ printf(" bInterleaved = %d\n", params.bInterleaved);
++ printf(" nBitPerSample = %" PRIu32 "\n", params.nBitPerSample);
++
++ printf(" eNumData = %s\n",
++ params.eNumData == OMX_NumericalDataSigned
++ ? "signed" : "unsigned");
++
++ printf(" ePCMMode = %s\n", asString(params.ePCMMode));
++ } else if (audioDef->eEncoding == OMX_AUDIO_CodingAMR) {
++ OMX_AUDIO_PARAM_AMRTYPE amr;
++ InitOMXParams(&amr);
++ amr.nPortIndex = portIndex;
++
++ err = mOMX->getParameter(
++ mNode, OMX_IndexParamAudioAmr, &amr, sizeof(amr));
++ CHECK_EQ(err, (status_t)OK);
++
++ printf(" nChannels = %" PRIu32 "\n", amr.nChannels);
++ printf(" eAMRBandMode = %s\n",
++ asString(amr.eAMRBandMode));
++ printf(" eAMRFrameFormat = %s\n",
++ asString(amr.eAMRFrameFormat));
++ }
++
++ break;
++ }
++
++ default:
++ {
++ printf(" // Unknown\n");
++ break;
++ }
++ }
++
++ printf("}\n");
++}
++
++status_t OMXCodec::initNativeWindow() {
++ // Enable use of a GraphicBuffer as the output for this node. This must
++ // happen before getting the IndexParamPortDefinition parameter because it
++ // will affect the pixel format that the node reports.
++ status_t err = mOMX->enableGraphicBuffers(mNode, kPortIndexOutput, OMX_TRUE);
++ if (err != 0) {
++ return err;
++ }
++
++ return OK;
++}
++
++void OMXCodec::initNativeWindowCrop() {
++ int32_t left, top, right, bottom;
++
++ CHECK(mOutputFormat->findRect(
++ kKeyCropRect,
++ &left, &top, &right, &bottom));
++
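++ // kKeyCropRect stores inclusive edges, while android_native_rect_t
++ // expects exclusive right/bottom, hence the + 1.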
++ android_native_rect_t crop;
++ crop.left = left;
++ crop.top = top;
++ crop.right = right + 1;
++ crop.bottom = bottom + 1;
++
++ // We'll ignore any errors here, if the surface is
++ // already invalid, we'll know soon enough.
++ native_window_set_crop(mNativeWindow.get(), &crop);
++}
++
++void OMXCodec::initOutputFormat(const sp<MetaData> &inputFormat) {
++ mOutputFormat = new MetaData;
++ mOutputFormat->setCString(kKeyDecoderComponent, mComponentName);
++ if (mIsEncoder) {
++ int32_t timeScale;
++ if (inputFormat->findInt32(kKeyTimeScale, &timeScale)) {
++ mOutputFormat->setInt32(kKeyTimeScale, timeScale);
++ }
++ }
++
++ OMX_PARAM_PORTDEFINITIONTYPE def;
++ InitOMXParams(&def);
++ def.nPortIndex = kPortIndexOutput;
++
++ status_t err = mOMX->getParameter(
++ mNode, OMX_IndexParamPortDefinition, &def, sizeof(def));
++ CHECK_EQ(err, (status_t)OK);
++
++ switch (def.eDomain) {
++ case OMX_PortDomainImage:
++ {
++ OMX_IMAGE_PORTDEFINITIONTYPE *imageDef = &def.format.image;
++ CHECK_EQ((int)imageDef->eCompressionFormat,
++ (int)OMX_IMAGE_CodingUnused);
++
++ mOutputFormat->setCString(kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_RAW);
++ mOutputFormat->setInt32(kKeyColorFormat, imageDef->eColorFormat);
++ mOutputFormat->setInt32(kKeyWidth, imageDef->nFrameWidth);
++ mOutputFormat->setInt32(kKeyHeight, imageDef->nFrameHeight);
++ break;
++ }
++
++ case OMX_PortDomainAudio:
++ {
++ OMX_AUDIO_PORTDEFINITIONTYPE *audio_def = &def.format.audio;
++
++ if (audio_def->eEncoding == OMX_AUDIO_CodingPCM) {
++ OMX_AUDIO_PARAM_PCMMODETYPE params;
++ InitOMXParams(&params);
++ params.nPortIndex = kPortIndexOutput;
++
++ err = mOMX->getParameter(
++ mNode, OMX_IndexParamAudioPcm, &params, sizeof(params));
++ CHECK_EQ(err, (status_t)OK);
++
++ CHECK_EQ((int)params.eNumData, (int)OMX_NumericalDataSigned);
++ CHECK_EQ(params.nBitPerSample, 16u);
++ CHECK_EQ((int)params.ePCMMode, (int)OMX_AUDIO_PCMModeLinear);
++
++ int32_t numChannels, sampleRate;
++ inputFormat->findInt32(kKeyChannelCount, &numChannels);
++ inputFormat->findInt32(kKeySampleRate, &sampleRate);
++
++ if ((OMX_U32)numChannels != params.nChannels) {
++ ALOGV("Codec outputs a different number of channels than "
++ "the input stream contains (contains %d channels, "
++ "codec outputs %u channels).",
++ numChannels, params.nChannels);
++ }
++
++ if (sampleRate != (int32_t)params.nSamplingRate) {
++ ALOGV("Codec outputs at different sampling rate than "
++ "what the input stream contains (contains data at "
++ "%d Hz, codec outputs %u Hz)",
++ sampleRate, params.nSamplingRate);
++ }
++
++ mOutputFormat->setCString(
++ kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_RAW);
++
++ // Use the codec-advertised number of channels, as some
++ // codecs appear to output stereo even if the input data is
++ // mono. If we know the codec lies about this information,
++ // use the actual number of channels instead.
++ mOutputFormat->setInt32(
++ kKeyChannelCount,
++ (mQuirks & kDecoderLiesAboutNumberOfChannels)
++ ? numChannels : params.nChannels);
++
++ mOutputFormat->setInt32(kKeySampleRate, params.nSamplingRate);
++ } else if (audio_def->eEncoding == OMX_AUDIO_CodingAMR) {
++ OMX_AUDIO_PARAM_AMRTYPE amr;
++ InitOMXParams(&amr);
++ amr.nPortIndex = kPortIndexOutput;
++
++ err = mOMX->getParameter(
++ mNode, OMX_IndexParamAudioAmr, &amr, sizeof(amr));
++ CHECK_EQ(err, (status_t)OK);
++
++ CHECK_EQ(amr.nChannels, 1u);
++ mOutputFormat->setInt32(kKeyChannelCount, 1);
++
++ if (amr.eAMRBandMode >= OMX_AUDIO_AMRBandModeNB0
++ && amr.eAMRBandMode <= OMX_AUDIO_AMRBandModeNB7) {
++ mOutputFormat->setCString(
++ kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_AMR_NB);
++ mOutputFormat->setInt32(kKeySampleRate, 8000);
++ } else if (amr.eAMRBandMode >= OMX_AUDIO_AMRBandModeWB0
++ && amr.eAMRBandMode <= OMX_AUDIO_AMRBandModeWB8) {
++ mOutputFormat->setCString(
++ kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_AMR_WB);
++ mOutputFormat->setInt32(kKeySampleRate, 16000);
++ } else {
++ CHECK(!"Unknown AMR band mode.");
++ }
++ } else if (audio_def->eEncoding == OMX_AUDIO_CodingAAC) {
++ mOutputFormat->setCString(
++ kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_AAC);
++ int32_t numChannels, sampleRate, bitRate;
++ inputFormat->findInt32(kKeyChannelCount, &numChannels);
++ inputFormat->findInt32(kKeySampleRate, &sampleRate);
++ inputFormat->findInt32(kKeyBitRate, &bitRate);
++ mOutputFormat->setInt32(kKeyChannelCount, numChannels);
++ mOutputFormat->setInt32(kKeySampleRate, sampleRate);
++ mOutputFormat->setInt32(kKeyBitRate, bitRate);
++ } else if (audio_def->eEncoding ==
++ (OMX_AUDIO_CODINGTYPE)OMX_AUDIO_CodingAndroidAC3) {
++ mOutputFormat->setCString(
++ kKeyMIMEType, MEDIA_MIMETYPE_AUDIO_AC3);
++ int32_t numChannels, sampleRate, bitRate;
++ inputFormat->findInt32(kKeyChannelCount, &numChannels);
++ inputFormat->findInt32(kKeySampleRate, &sampleRate);
++ inputFormat->findInt32(kKeyBitRate, &bitRate);
++ mOutputFormat->setInt32(kKeyChannelCount, numChannels);
++ mOutputFormat->setInt32(kKeySampleRate, sampleRate);
++ mOutputFormat->setInt32(kKeyBitRate, bitRate);
++ } else {
++ CHECK(!"Should not be here. Unknown audio encoding.");
++ }
++ break;
++ }
++
++ case OMX_PortDomainVideo:
++ {
++ OMX_VIDEO_PORTDEFINITIONTYPE *video_def = &def.format.video;
++
++ if (video_def->eCompressionFormat == OMX_VIDEO_CodingUnused) {
++ mOutputFormat->setCString(
++ kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_RAW);
++ } else if (video_def->eCompressionFormat == OMX_VIDEO_CodingMPEG4) {
++ mOutputFormat->setCString(
++ kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_MPEG4);
++ } else if (video_def->eCompressionFormat == OMX_VIDEO_CodingH263) {
++ mOutputFormat->setCString(
++ kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_H263);
++ } else if (video_def->eCompressionFormat == OMX_VIDEO_CodingAVC) {
++ mOutputFormat->setCString(
++ kKeyMIMEType, MEDIA_MIMETYPE_VIDEO_AVC);
++ } else {
++ CHECK(!"Unknown compression format.");
++ }
++
++ mOutputFormat->setInt32(kKeyWidth, video_def->nFrameWidth);
++ mOutputFormat->setInt32(kKeyHeight, video_def->nFrameHeight);
++ mOutputFormat->setInt32(kKeyColorFormat, video_def->eColorFormat);
++
++ if (!mIsEncoder) {
++ OMX_CONFIG_RECTTYPE rect;
++ InitOMXParams(&rect);
++ rect.nPortIndex = kPortIndexOutput;
++ status_t err =
++ mOMX->getConfig(
++ mNode, OMX_IndexConfigCommonOutputCrop,
++ &rect, sizeof(rect));
++
++ CODEC_LOGI("video dimensions are %u x %u",
++ video_def->nFrameWidth, video_def->nFrameHeight);
++
++ if (err == OK) {
++ CHECK_GE(rect.nLeft, 0);
++ CHECK_GE(rect.nTop, 0);
++ CHECK_GE(rect.nWidth, 0u);
++ CHECK_GE(rect.nHeight, 0u);
++ CHECK_LE(rect.nLeft + rect.nWidth - 1, video_def->nFrameWidth);
++ CHECK_LE(rect.nTop + rect.nHeight - 1, video_def->nFrameHeight);
++
++ mOutputFormat->setRect(
++ kKeyCropRect,
++ rect.nLeft,
++ rect.nTop,
++ rect.nLeft + rect.nWidth - 1,
++ rect.nTop + rect.nHeight - 1);
++
++ CODEC_LOGI("Crop rect is %u x %u @ (%d, %d)",
++ rect.nWidth, rect.nHeight, rect.nLeft, rect.nTop);
++ } else {
++ mOutputFormat->setRect(
++ kKeyCropRect,
++ 0, 0,
++ video_def->nFrameWidth - 1,
++ video_def->nFrameHeight - 1);
++ }
++
++ if (mNativeWindow != NULL) {
++ initNativeWindowCrop();
++ }
++ }
++ break;
++ }
++
++ default:
++ {
++ CHECK(!"should not be here, neither audio nor video.");
++ break;
++ }
++ }
++
++ // If the input format contains rotation information, flag the output
++ // format accordingly.
++
++ int32_t rotationDegrees;
++ if (mSource->getFormat()->findInt32(kKeyRotation, &rotationDegrees)) {
++ mOutputFormat->setInt32(kKeyRotation, rotationDegrees);
++ }
++}
++
++status_t OMXCodec::pause() {
++ CODEC_LOGV("pause mState=%d", mState);
++
++ Mutex::Autolock autoLock(mLock);
++
++ if (mState != EXECUTING) {
++ return UNKNOWN_ERROR;
++ }
++
++ while (isIntermediateState(mState)) {
++ mAsyncCompletion.wait(mLock);
++ }
++ if (!strncmp(mComponentName, "OMX.qcom.", 9)) {
++ status_t err = mOMX->sendCommand(mNode,
++ OMX_CommandStateSet, OMX_StatePause);
++ CHECK_EQ(err, (status_t)OK);
++ setState(PAUSING);
++
++ mPaused = true;
++ while (mState != PAUSED && mState != ERROR) {
++ mAsyncCompletion.wait(mLock);
++ }
++ return mState == ERROR ? UNKNOWN_ERROR : OK;
++ } else {
++ mPaused = true;
++ return OK;
++ }
++
++}
++
++status_t OMXCodec::resumeLocked(bool drainInputBuf) {
++ CODEC_LOGV("resume mState=%d", mState);
++
++ if (!strncmp(mComponentName, "OMX.qcom.", 9)) {
++ while (isIntermediateState(mState)) {
++ mAsyncCompletion.wait(mLock);
++ }
++ CHECK_EQ(mState, (status_t)PAUSED);
++ status_t err = mOMX->sendCommand(mNode,
++ OMX_CommandStateSet, OMX_StateExecuting);
++ CHECK_EQ(err, (status_t)OK);
++ setState(IDLE_TO_EXECUTING);
++ mPaused = false;
++ while (mState != EXECUTING && mState != ERROR) {
++ mAsyncCompletion.wait(mLock);
++ }
++ if(drainInputBuf)
++ drainInputBuffers();
++ return mState == ERROR ? UNKNOWN_ERROR : OK;
++ } else { // SW Codec
++ mPaused = false;
++ if(drainInputBuf)
++ drainInputBuffers();
++ return OK;
++ }
++}
++
++////////////////////////////////////////////////////////////////////////////////
++
++status_t QueryCodecs(
++ const sp<IOMX> &omx,
++ const char *mime, bool queryDecoders, bool hwCodecOnly,
++ Vector<CodecCapabilities> *results) {
++ Vector<OMXCodec::CodecNameAndQuirks> matchingCodecs;
++ results->clear();
++
++ OMXCodec::findMatchingCodecs(mime,
++ !queryDecoders /*createEncoder*/,
++ NULL /*matchComponentName*/,
++ hwCodecOnly ? OMXCodec::kHardwareCodecsOnly : 0 /*flags*/,
++ &matchingCodecs);
++
++ for (size_t c = 0; c < matchingCodecs.size(); ++c) {
++ const char *componentName = matchingCodecs.itemAt(c).mName.string();
++
++ results->push();
++ CodecCapabilities *caps = &results->editItemAt(results->size() - 1);
++
++ status_t err =
++ QueryCodec(omx, componentName, mime, !queryDecoders, caps);
++
++ if (err != OK) {
++ results->removeAt(results->size() - 1);
++ }
++ }
++
++ return OK;
++}
++
++status_t QueryCodec(
++ const sp<IOMX> &omx,
++ const char *componentName, const char *mime,
++ bool isEncoder,
++ CodecCapabilities *caps) {
++ bool isVideo = !strncasecmp(mime, "video/", 6);
++
++ sp<OMXCodecObserver> observer = new OMXCodecObserver;
++ IOMX::node_id node;
++ status_t err = omx->allocateNode(componentName, observer, &node);
++
++ if (err != OK) {
++ return err;
++ }
++
++ OMXCodec::setComponentRole(omx, node, isEncoder, mime);
++
++ caps->mFlags = 0;
++ caps->mComponentName = componentName;
++
++ // NOTE: OMX does not provide a way to query AAC profile support
++ if (isVideo) {
++ OMX_VIDEO_PARAM_PROFILELEVELTYPE param;
++ InitOMXParams(&param);
++
++ param.nPortIndex = !isEncoder ? 0 : 1;
++
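++ // Step nProfileIndex to enumerate every supported profile/level pair;
++ // the component returns an error once the list is exhausted.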
++ for (param.nProfileIndex = 0;; ++param.nProfileIndex) {
++ err = omx->getParameter(
++ node, OMX_IndexParamVideoProfileLevelQuerySupported,
++ &param, sizeof(param));
++
++ if (err != OK) {
++ break;
++ }
++
++ CodecProfileLevel profileLevel;
++ profileLevel.mProfile = param.eProfile;
++ profileLevel.mLevel = param.eLevel;
++
++ caps->mProfileLevels.push(profileLevel);
++ }
++
++ // Color format query
++ // return colors in the order reported by the OMX component
++ // prefix "flexible" standard ones with the flexible equivalent
++ OMX_VIDEO_PARAM_PORTFORMATTYPE portFormat;
++ InitOMXParams(&portFormat);
++ portFormat.nPortIndex = !isEncoder ? 1 : 0;
++ for (portFormat.nIndex = 0;; ++portFormat.nIndex) {
++ err = omx->getParameter(
++ node, OMX_IndexParamVideoPortFormat,
++ &portFormat, sizeof(portFormat));
++ if (err != OK) {
++ break;
++ }
++
++ OMX_U32 flexibleEquivalent;
++ if (ACodec::isFlexibleColorFormat(
++ omx, node, portFormat.eColorFormat, false /* usingNativeWindow */,
++ &flexibleEquivalent)) {
++ bool marked = false;
++ for (size_t i = 0; i < caps->mColorFormats.size(); ++i) {
++ if (caps->mColorFormats.itemAt(i) == flexibleEquivalent) {
++ marked = true;
++ break;
++ }
++ }
++ if (!marked) {
++ caps->mColorFormats.push(flexibleEquivalent);
++ }
++ }
++ caps->mColorFormats.push(portFormat.eColorFormat);
++ }
++ }
++
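++ // Decoders that can either store meta data in buffers or prepare for
++ // adaptive playback (up to 1280x720 here) are flagged as supporting
++ // adaptive playback.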
++ if (isVideo && !isEncoder) {
++ if (omx->storeMetaDataInBuffers(
++ node, 1 /* port index */, OMX_TRUE) == OK ||
++ omx->prepareForAdaptivePlayback(
++ node, 1 /* port index */, OMX_TRUE,
++ 1280 /* width */, 720 /* height */) == OK) {
++ caps->mFlags |= CodecCapabilities::kFlagSupportsAdaptivePlayback;
++ }
++ }
++
++ CHECK_EQ(omx->freeNode(node), (status_t)OK);
++
++ return OK;
++}
++
++status_t QueryCodecs(
++ const sp<IOMX> &omx,
++ const char *mimeType, bool queryDecoders,
++ Vector<CodecCapabilities> *results) {
++ return QueryCodecs(omx, mimeType, queryDecoders, false /*hwCodecOnly*/, results);
++}
++
++// These are supposed to be equivalent to the logic in
++// "audio_channel_out_mask_from_count".
++status_t getOMXChannelMapping(size_t numChannels, OMX_AUDIO_CHANNELTYPE map[]) {
++ switch (numChannels) {
++ case 1:
++ map[0] = OMX_AUDIO_ChannelCF;
++ break;
++ case 2:
++ map[0] = OMX_AUDIO_ChannelLF;
++ map[1] = OMX_AUDIO_ChannelRF;
++ break;
++ case 3:
++ map[0] = OMX_AUDIO_ChannelLF;
++ map[1] = OMX_AUDIO_ChannelRF;
++ map[2] = OMX_AUDIO_ChannelCF;
++ break;
++ case 4:
++ map[0] = OMX_AUDIO_ChannelLF;
++ map[1] = OMX_AUDIO_ChannelRF;
++ map[2] = OMX_AUDIO_ChannelLR;
++ map[3] = OMX_AUDIO_ChannelRR;
++ break;
++ case 5:
++ map[0] = OMX_AUDIO_ChannelLF;
++ map[1] = OMX_AUDIO_ChannelRF;
++ map[2] = OMX_AUDIO_ChannelCF;
++ map[3] = OMX_AUDIO_ChannelLR;
++ map[4] = OMX_AUDIO_ChannelRR;
++ break;
++ case 6:
++ map[0] = OMX_AUDIO_ChannelLF;
++ map[1] = OMX_AUDIO_ChannelRF;
++ map[2] = OMX_AUDIO_ChannelCF;
++ map[3] = OMX_AUDIO_ChannelLFE;
++ map[4] = OMX_AUDIO_ChannelLR;
++ map[5] = OMX_AUDIO_ChannelRR;
++ break;
++ case 7:
++ map[0] = OMX_AUDIO_ChannelLF;
++ map[1] = OMX_AUDIO_ChannelRF;
++ map[2] = OMX_AUDIO_ChannelCF;
++ map[3] = OMX_AUDIO_ChannelLFE;
++ map[4] = OMX_AUDIO_ChannelLR;
++ map[5] = OMX_AUDIO_ChannelRR;
++ map[6] = OMX_AUDIO_ChannelCS;
++ break;
++ case 8:
++ map[0] = OMX_AUDIO_ChannelLF;
++ map[1] = OMX_AUDIO_ChannelRF;
++ map[2] = OMX_AUDIO_ChannelCF;
++ map[3] = OMX_AUDIO_ChannelLFE;
++ map[4] = OMX_AUDIO_ChannelLR;
++ map[5] = OMX_AUDIO_ChannelRR;
++ map[6] = OMX_AUDIO_ChannelLS;
++ map[7] = OMX_AUDIO_ChannelRS;
++ break;
++ default:
++ return -EINVAL;
++ }
++
++ return OK;
++}
++
++} // namespace android
+diff --git a/media/libstagefright/colorconversion/Android.mk b/media/libstagefright/colorconversion/Android.mk
+index 0bf9701c8..334aa7e5e 100644
+--- a/media/libstagefright/colorconversion/Android.mk
++++ b/media/libstagefright/colorconversion/Android.mk
+@@ -13,6 +13,13 @@ LOCAL_C_INCLUDES := \
+ LOCAL_STATIC_LIBRARIES := \
+ libyuv_static \
+
++ifeq ($(BOARD_HAS_MTK_HARDWARE),true)
++LOCAL_CFLAGS += -DMTK_HARDWARE
++
++LOCAL_C_INCLUDES += \
++ $(TOP)/frameworks/av/include/media/stagefright/dpframework
++endif
++
+ LOCAL_CFLAGS += -Werror
+ LOCAL_CLANG := true
+ LOCAL_SANITIZE := signed-integer-overflow
+diff --git a/media/libstagefright/colorconversion/ColorConverter.cpp b/media/libstagefright/colorconversion/ColorConverter.cpp
+index 3ca7cc05c..b250342e7 100644
+--- a/media/libstagefright/colorconversion/ColorConverter.cpp
++++ b/media/libstagefright/colorconversion/ColorConverter.cpp
+@@ -26,6 +26,14 @@
+
+ #define USE_LIBYUV
+
++#ifdef MTK_HARDWARE
++#include <DpBlitStream.h>
++
++const OMX_COLOR_FORMATTYPE OMX_MTK_COLOR_FormatYV12 = (OMX_COLOR_FORMATTYPE)0x7F000200;
++const OMX_COLOR_FORMATTYPE OMX_COLOR_FormatVendorMTKYUV = (OMX_COLOR_FORMATTYPE)0x7F000001;
++const OMX_COLOR_FORMATTYPE OMX_COLOR_FormatVendorMTKYUV_FCM = (OMX_COLOR_FORMATTYPE)0x7F000002;
++#endif
++
+ namespace android {
+
+ ColorConverter::ColorConverter(
+@@ -51,6 +59,11 @@ bool ColorConverter::isValid() const {
+ case OMX_QCOM_COLOR_FormatYVU420SemiPlanar:
+ case OMX_COLOR_FormatYUV420SemiPlanar:
+ case OMX_TI_COLOR_FormatYUV420PackedSemiPlanar:
++#ifdef MTK_HARDWARE
++ case OMX_MTK_COLOR_FormatYV12:
++ case OMX_COLOR_FormatVendorMTKYUV:
++ case OMX_COLOR_FormatVendorMTKYUV_FCM:
++#endif
+ return true;
+
+ default:
+@@ -557,4 +570,92 @@ uint8_t *ColorConverter::initClip() {
+ return &mClip[-kClipMin];
+ }
+
++#ifdef MTK_HARDWARE
++status_t ColorConverter::convertYUVToRGBHW(const BitmapParams &src, const BitmapParams &dst) {
++ DpBlitStream blitStream;
++ unsigned int srcWStride = src.mWidth;
++ unsigned int srcHStride = src.mHeight;
++
++ DpRect srcRoi;
++ srcRoi.x = src.mCropLeft;
++ srcRoi.y = src.mCropTop;
++ srcRoi.w = src.mCropRight - src.mCropLeft;
++ srcRoi.h = src.mCropBottom - src.mCropTop;
++
++ unsigned int dstWStride = dst.mWidth;
++ unsigned int dstHStride = dst.mHeight;
++ char name_yuv[100];
++ char retriever_yuv_propty[100];
++ char name_rgb[100];
++ char retriever_propty_rgb[100];
++
++ if (mSrcFormat == OMX_COLOR_FormatYUV420Planar) {
++ char* planar[3];
++ unsigned int length[3];
++ planar[0] = (char*)src.mBits;
++ length[0] = srcWStride*srcHStride;
++ planar[1] = planar[0] + length[0];
++ length[1] = srcWStride*srcHStride/4;
++ planar[2] = planar[1] + length[1];
++ length[2] = length[1];
++
++ blitStream.setSrcBuffer((void**)planar, (unsigned int*)length, 3);
++ blitStream.setSrcConfig(srcWStride, srcHStride, eYUV_420_3P, eInterlace_None, &srcRoi);
++ }
++ else if (mSrcFormat == OMX_MTK_COLOR_FormatYV12) {
++ char* planar[3];
++ unsigned int length[3];
++ planar[0] = (char*)src.mBits;
++ length[0] = srcWStride*srcHStride;
++ planar[1] = planar[0] + length[0];
++ length[1] = srcWStride*srcHStride/4;
++ planar[2] = planar[1] + length[1];
++ length[2] = length[1];
++
++ blitStream.setSrcBuffer((void**)planar, (unsigned int*)length, 3);
++ blitStream.setSrcConfig(srcWStride, srcHStride, eYV12, eInterlace_None, &srcRoi);
++ }
++ else if (mSrcFormat == OMX_COLOR_FormatVendorMTKYUV) {
++ char* planar[2];
++ unsigned int length[2];
++ planar[0] = (char*)src.mBits;
++ length[0] = srcWStride*srcHStride;
++ planar[1] = planar[0] + length[0];
++ length[1] = srcWStride*srcHStride/2;
++
++ blitStream.setSrcBuffer((void**)planar, (unsigned int*)length, 2);
++ blitStream.setSrcConfig(srcWStride, srcHStride, srcWStride * 32, srcWStride * 16, eNV12_BLK, DP_PROFILE_BT601, eInterlace_None, &srcRoi);
++ }
++ else if (mSrcFormat == OMX_COLOR_FormatVendorMTKYUV_FCM) {
++ char* planar[2];
++ unsigned int length[2];
++ planar[0] = (char*)src.mBits;
++ length[0] = srcWStride*srcHStride;
++ planar[1] = planar[0] + length[0];
++ length[1] = srcWStride*srcHStride/2;
++
++ blitStream.setSrcBuffer((void**)planar, (unsigned int*)length, 2);
++ blitStream.setSrcConfig(srcWStride, srcHStride, srcWStride * 32, srcWStride * 16, eNV12_BLK_FCM, DP_PROFILE_BT601, eInterlace_None, &srcRoi);
++ }
++
++ if (mDstFormat == OMX_COLOR_Format16bitRGB565) {
++ blitStream.setDstBuffer(dst.mBits, dst.mWidth * dst.mHeight * 2);
++ blitStream.setDstConfig(dst.mWidth, dst.mHeight, eRGB565);
++ }
++ else if (mDstFormat == OMX_COLOR_Format32bitARGB8888) {
++ blitStream.setDstBuffer(dst.mBits, dst.mWidth * dst.mHeight * 4);
++ blitStream.setDstConfig(dst.mWidth, dst.mHeight, eRGBA8888);
++ }
++
++ // Add Sharpness in Video Thumbnail
++ blitStream.setTdshp(1);
++ bool bRet = blitStream.invalidate();
++
++ if (!bRet)
++ return OK;
++ else
++ return UNKNOWN_ERROR;
++}
++#endif
++
+ } // namespace android
+--
+2.11.0
+