hardware: samsung_slsi: libcamera2: Stability Enhancement
author: Sungjoong Kang <sj3.kang@samsung.com>
Thu, 23 Aug 2012 23:16:44 +0000 (16:16 -0700)
committer: Rebecca Schultz Zavin <rebecca@android.com>
Tue, 28 Aug 2012 16:41:08 +0000 (09:41 -0700)
Also added supported resolutions

Change-Id: Ie53adbbfd3b115811625ca85df05c370074107e6
Signed-off-by: Sungjoong Kang <sj3.kang@samsung.com>
libcamera2/ExynosCamera2.cpp
libcamera2/ExynosCameraHWInterface2.cpp
libcamera2/ExynosCameraHWInterface2.h
libcamera2/MetadataConverter.cpp

index cbdea7e967a476ebb24f03f72cf1e600789aaf43..aea8970bffebd4512388d37f55d7e9f7965bdcc6 100644 (file)
@@ -118,13 +118,21 @@ const int32_t scalerResolutionS5K4E5[] =
     1920, 1080,
     1440, 1080,
     1280,  720,
+     640,  480,
+     320,  240,
+     176,  144,
 };
 
 const int32_t jpegResolutionS5K4E5[] =
 {
     2560, 1920,
-    2560, 1440,
-    1280,  720,
+    2048, 1536,
+    1600, 1200,
+    1280, 1024,
+    1280,  960,
+    1152,  864,
+     640,  480,
+     320,  240,
 };
 
 ExynosCamera2InfoS5K4E5::ExynosCamera2InfoS5K4E5()
@@ -142,16 +150,28 @@ ExynosCamera2InfoS5K4E5::ExynosCamera2InfoS5K4E5()
     aperture            = 2.7f;
 }
 
+ExynosCamera2InfoS5K4E5::~ExynosCamera2InfoS5K4E5()
+{
+    ALOGV("%s", __FUNCTION__);
+}
 const int32_t scalerResolutionS5K6A3[] =
 {
     1280,  960,
     1280,  720,
+     640,  480,
+     176,  144,
 };
 
 const int32_t jpegResolutionS5K6A3[] =
 {
+    1392, 1392,
+    1392, 1040,
+    1392,  784,
+    1280, 1024,
     1280,  960,
-    1280,  720,
+    1152,  864,
+     640,  480,
+     320,  240,
 };
 
 ExynosCamera2InfoS5K6A3::ExynosCamera2InfoS5K6A3()
@@ -169,6 +189,10 @@ ExynosCamera2InfoS5K6A3::ExynosCamera2InfoS5K6A3()
     aperture            = 2.8f;
 }
 
+ExynosCamera2InfoS5K6A3::~ExynosCamera2InfoS5K6A3()
+{
+    ALOGV("%s", __FUNCTION__);
+}
 ExynosCamera2::ExynosCamera2(int cameraId):
     m_cameraId(cameraId)
 {
@@ -180,7 +204,9 @@ ExynosCamera2::ExynosCamera2(int cameraId):
 
 ExynosCamera2::~ExynosCamera2()
 {
+    ALOGV("%s", __FUNCTION__);
        delete m_curCameraInfo;
+    m_curCameraInfo = NULL;
 }
 
 int32_t ExynosCamera2::getSensorW()
index 07e1de2c9928f428a1586b324ee428e544cdf62d..b0778a2907bb419644299d91056ec4b3884f80d6 100644 (file)
@@ -286,6 +286,12 @@ RequestManager::RequestManager(SignalDrivenThread* main_thread):
 
 RequestManager::~RequestManager()
 {
+    ALOGV("%s", __FUNCTION__);
+    if (m_metadataConverter != NULL) {
+        delete m_metadataConverter;
+        m_metadataConverter = NULL;
+    }
+
     return;
 }
 
@@ -804,6 +810,7 @@ ExynosCameraHWInterface2::ExynosCameraHWInterface2(int cameraId, camera2_device_
             m_IsAfModeUpdateRequired(false),
             m_IsAfTriggerRequired(false),
             m_IsAfLockRequired(false),
+            m_wideAspect(false),
             m_afTriggerId(0),
             m_halDevice(dev),
             m_need_streamoff(0),
@@ -844,7 +851,8 @@ ExynosCameraHWInterface2::ExynosCameraHWInterface2(int cameraId, camera2_device_
     m_exynosVideoCSC = csc_init(cscMethod);
     if (m_exynosVideoCSC == NULL)
         ALOGE("ERR(%s): csc_init() fail", __FUNCTION__);
-    csc_set_hw_property(m_exynosVideoCSC, CSC_HW_PROPERTY_FIXED_NODE, PREVIEW_GSC_NODE_NUM);
+    csc_set_hw_property(m_exynosVideoCSC, CSC_HW_PROPERTY_FIXED_NODE, VIDEO_GSC_NODE_NUM);
+
 
     ALOGV("DEBUG(%s): END", __FUNCTION__);
 }
@@ -892,6 +900,7 @@ void ExynosCameraHWInterface2::release()
         csc_deinit(m_exynosVideoCSC);
     m_exynosVideoCSC = NULL;
 
+
     if (m_streamThreads[1] != NULL) {
         while (!m_streamThreads[1]->IsTerminated())
         {
@@ -937,7 +946,17 @@ void ExynosCameraHWInterface2::release()
         m_mainThread = NULL;
     }
 
-    for(i = 0; i < m_camera_info.sensor.buffers; i++)
+    if (m_requestManager != NULL) {
+        delete m_requestManager;
+        m_requestManager = NULL;
+    }
+
+    if (m_BayerManager != NULL) {
+        delete m_BayerManager;
+        m_BayerManager = NULL;
+    }
+//    for(i = 0; i < m_camera_info.sensor.buffers; i++)
+    for (i = 0; i < NUM_BAYER_BUFFERS; i++)
         freeCameraMemory(&m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes);
 
     for(i = 0; i < m_camera_info.capture.buffers; i++)
@@ -1150,7 +1169,11 @@ void ExynosCameraHWInterface2::InitializeISPChain()
     }
 
     ALOGV("== stream_on :: capture");
-    cam_int_streamon(&(m_camera_info.capture));
+    if (cam_int_streamon(&(m_camera_info.capture)) < 0) {
+        ALOGE("ERR(%s): capture stream on fail", __FUNCTION__);
+    } else {
+        m_camera_info.capture.status = true;
+    }
 }
 
 void ExynosCameraHWInterface2::StartISP()
@@ -1284,6 +1307,14 @@ int ExynosCameraHWInterface2::allocateStream(uint32_t width, uint32_t height, in
                 allocCase = 2;
             }
         }
+        if ((width == 1920 && height == 1080) || (width == 1280 && height == 720)) {
+            m_wideAspect = true;
+        }
+        else {
+            m_wideAspect = false;
+        }
+        ALOGV("DEBUG(%s): m_wideAspect (%d)", __FUNCTION__, m_wideAspect);
+
         if (allocCase == 0 || allocCase == 2) {
             *stream_id = 0;
 
@@ -1366,8 +1397,23 @@ int ExynosCameraHWInterface2::allocateStream(uint32_t width, uint32_t height, in
 
         *stream_id = 1;
 
-        m_streamThreads[1]  = new StreamThread(this, *stream_id);
-        AllocatedStream = (StreamThread*)(m_streamThreads[1].get());
+        if (!(m_streamThreads[*stream_id].get())) {
+            ALOGV("DEBUG(%s): stream 0 not exist", __FUNCTION__);
+            m_streamThreads[1]  = new StreamThread(this, *stream_id);
+            allocCase = 0;
+        }
+        else {
+            if ((m_streamThreads[*stream_id].get())->m_activated == true) {
+                ALOGV("DEBUG(%s): stream 0 exists and activated.", __FUNCTION__);
+                allocCase = 1;
+            }
+            else {
+                ALOGV("DEBUG(%s): stream 0 exists and deactivated.", __FUNCTION__);
+                allocCase = 2;
+            }
+        }
+
+        AllocatedStream = (StreamThread*)(m_streamThreads[*stream_id].get());
 
         fd = m_camera_info.capture.fd;
         usleep(100000); // TODO : guarantee the codes below will be run after readyToRunInternal()
@@ -1426,8 +1472,28 @@ int ExynosCameraHWInterface2::registerStreamBuffers(uint32_t stream_id,
     }
     else if (stream_id == 1) {
         targetStreamParms = &(m_streamThreads[1]->m_parameters);
+        // TODO : make clear stream off case
+        m_need_streamoff = 0;
+
+        if (m_camera_info.capture.status == false) {
+            /* capture */
+            m_camera_info.capture.buffers = 8;
+            cam_int_s_fmt(&(m_camera_info.capture));
+            cam_int_reqbufs(&(m_camera_info.capture));
+            for (i = 0; i < m_camera_info.capture.buffers; i++) {
+                ALOGV("DEBUG(%s): capture initial QBUF [%d]",  __FUNCTION__, i);
+                cam_int_qbuf(&(m_camera_info.capture), i);
+            }
+
+            if (cam_int_streamon(&(m_camera_info.capture)) < 0) {
+                ALOGE("ERR(%s): capture stream on fail", __FUNCTION__);
+            } else {
+                m_camera_info.capture.status = true;
+            }
+        }
     }
     else if (stream_id == 2) {
+        m_need_streamoff = 0;
         targetRecordParms = &(m_streamThreads[0]->m_recordParameters);
 
         targetRecordParms->numSvcBuffers = num_buffers;
@@ -1493,8 +1559,21 @@ int ExynosCameraHWInterface2::registerStreamBuffers(uint32_t stream_id,
 
     if (targetStreamParms->streamType == STREAM_TYPE_DIRECT) {
         if(m_need_streamoff == 1) {
+            if (m_sensorThread != NULL) {
+                m_sensorThread->release();
+                /* TODO */
+                usleep(500000);
+            } else {
+                ALOGE("+++++++ sensor thread is NULL %d", __LINE__);
+            }
+
             ALOGV("(%s): calling capture streamoff", __FUNCTION__);
-            cam_int_streamoff(&(m_camera_info.capture));
+            if (cam_int_streamoff(&(m_camera_info.capture)) < 0) {
+                ALOGE("ERR(%s): capture stream off fail", __FUNCTION__);
+            } else {
+                m_camera_info.capture.status = false;
+            }
+
             ALOGV("(%s): calling capture streamoff done", __FUNCTION__);
 
             m_camera_info.capture.buffers = 0;
@@ -1502,24 +1581,18 @@ int ExynosCameraHWInterface2::registerStreamBuffers(uint32_t stream_id,
             cam_int_reqbufs(&(m_camera_info.capture));
             ALOGV("DEBUG(%s): capture calling reqbuf 0 done", __FUNCTION__);
 
-            if (m_sensorThread != NULL) {
-                m_sensorThread->release();
-                usleep(500000);
-            } else {
-                ALOGE("+++++++ sensor thread is NULL %d", __LINE__);
-            }
             m_isIspStarted = false;
         }
 
-        cam_int_s_input(currentNode, m_camera_info.sensor_id);
-        cam_int_s_fmt(currentNode);
-        cam_int_reqbufs(currentNode);
-
         if (m_need_streamoff == 1) {
             m_camera_info.sensor.buffers = NUM_BAYER_BUFFERS;
             m_camera_info.isp.buffers = m_camera_info.sensor.buffers;
             m_camera_info.capture.buffers = 8;
+            /* isp */
+            cam_int_s_fmt(&(m_camera_info.isp));
             cam_int_reqbufs(&(m_camera_info.isp));
+            /* sensor */
+            cam_int_s_fmt(&(m_camera_info.sensor));
             cam_int_reqbufs(&(m_camera_info.sensor));
 
             for (i = 0; i < 8; i++) {
@@ -1531,15 +1604,20 @@ int ExynosCameraHWInterface2::registerStreamBuffers(uint32_t stream_id,
                 cam_int_qbuf(&(m_camera_info.sensor), i);
             }
 
+            /* capture */
+            cam_int_s_fmt(&(m_camera_info.capture));
             cam_int_reqbufs(&(m_camera_info.capture));
-            cam_int_streamon(&(m_camera_info.capture));
-            cam_int_streamon(&(m_camera_info.sensor));
+            for (i = 0; i < m_camera_info.capture.buffers; i++) {
+                ALOGV("DEBUG(%s): capture initial QBUF [%d]",  __FUNCTION__, i);
+                cam_int_qbuf(&(m_camera_info.capture), i);
+            }
+
+       }
+
+        cam_int_s_input(currentNode, m_camera_info.sensor_id);
+        cam_int_s_fmt(currentNode);
+        cam_int_reqbufs(currentNode);
 
-            m_need_streamoff = 0;
-            m_requestManager->SetInitialSkip(2);
-            m_sensorThread->Start("SensorThread", PRIORITY_DEFAULT, 0);
-            m_mainThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY);
-        }
     }
     else if (targetStreamParms->streamType == STREAM_TYPE_INDIRECT) {
         for(i = 0; i < currentNode->buffers; i++){
@@ -1615,20 +1693,36 @@ int ExynosCameraHWInterface2::registerStreamBuffers(uint32_t stream_id,
     }
 
     ALOGV("DEBUG(%s): calling  streamon", __FUNCTION__);
-    if (targetStreamParms->streamType == 0) {
+    if (targetStreamParms->streamType == STREAM_TYPE_DIRECT) {
         ALOGD("%s(%d), stream id = %d", __FUNCTION__, __LINE__, stream_id);
         cam_int_streamon(&(targetStreamParms->node));
     }
 
+    if (m_need_streamoff == 1) {
+        if (cam_int_streamon(&(m_camera_info.capture)) < 0) {
+            ALOGE("ERR(%s): capture stream on fail", __FUNCTION__);
+        } else {
+            m_camera_info.capture.status = true;
+        }
+
+        cam_int_streamon(&(m_camera_info.sensor));
+    }
+
     ALOGV("DEBUG(%s): calling  streamon END", __FUNCTION__);
     ALOGV("DEBUG(%s): END registerStreamBuffers", __FUNCTION__);
 
     if(!m_isIspStarted) {
         m_isIspStarted = true;
         StartISP();
-        m_need_streamoff = 1;
     }
 
+    if (m_need_streamoff == 1) {
+        m_requestManager->SetInitialSkip(8);
+        m_sensorThread->Start("SensorThread", PRIORITY_DEFAULT, 0);
+        m_mainThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY);
+    }
+    m_need_streamoff = 1;
+
     return 0;
 }
 
@@ -1655,12 +1749,11 @@ int ExynosCameraHWInterface2::releaseStream(uint32_t stream_id)
     }
 
     targetStream->m_releasing = true;
-    targetStream->release();
-    while (targetStream->m_releasing)
-    {
-        ALOGD("stream thread release fail %d", __LINE__);
-        usleep(2000);
-    }
+    do {
+        ALOGD("stream thread release %d", __LINE__);
+        targetStream->release();
+        usleep(33000);
+    } while (targetStream->m_releasing);
     targetStream->m_activated = false;
     ALOGV("DEBUG(%s): DONE", __FUNCTION__);
     return 0;
@@ -1876,6 +1969,11 @@ BayerBufManager::BayerBufManager()
     numOnHalEmpty = NUM_BAYER_BUFFERS;
 }
 
+BayerBufManager::~BayerBufManager()
+{
+    ALOGV("%s", __FUNCTION__);
+}
+
 int     BayerBufManager::GetIndexForSensorEnqueue()
 {
     int ret = 0;
@@ -2255,6 +2353,14 @@ void ExynosCameraHWInterface2::m_sensorThreadFunc(SignalDrivenThread * self)
             else {
                 m_requestManager->UpdateIspParameters(shot_ext, matchedFrameCnt, false);
             }
+            if (m_wideAspect) {
+//                shot_ext->setfile = ISS_SUB_SCENARIO_VIDEO;
+                shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 30;
+                shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
+            }
+            else {
+//                shot_ext->setfile = ISS_SUB_SCENARIO_STILL;
+            }
             if (triggered)
                 shot_ext->shot.ctl.aa.afTrigger = 1;
 
@@ -2338,6 +2444,9 @@ void ExynosCameraHWInterface2::m_sensorThreadFunc(SignalDrivenThread * self)
                 m_streamThreads[0]->SetSignal(SIGNAL_STREAM_DATA_COMING);
             }
 
+            if (shot_ext->request_scc) {
+                m_streamThreads[1]->SetSignal(SIGNAL_STREAM_DATA_COMING);
+            }
 
             ALOGV("(%s): SCP_CLOSING check sensor(%d) scc(%d) scp(%d) ", __FUNCTION__,
                shot_ext->request_sensor, shot_ext->request_scc, shot_ext->request_scp);
@@ -2369,7 +2478,7 @@ void ExynosCameraHWInterface2::m_sensorThreadFunc(SignalDrivenThread * self)
             shot_ext->request_sensor = 0;
         }
 
-        ALOGV("### Sensor Qbuf start(%d) SCP(%d) SCC(%d) DIS(%d)", index, shot_ext->request_scp, shot_ext->request_scc, shot_ext->dis_bypass);
+//        ALOGD("### Sensor Qbuf start(%d) SCP(%d) SCC(%d) DIS(%d)", index, shot_ext->request_scp, shot_ext->request_scc, shot_ext->dis_bypass);
 
         cam_int_qbuf(&(m_camera_info.sensor), index);
         ALOGV("### Sensor QBUF done");
@@ -2445,28 +2554,33 @@ void ExynosCameraHWInterface2::m_streamThreadFunc(SignalDrivenThread * self)
                 if (selfStreamParms->svcBufStatus[i] ==ON_DRIVER) cnt_to_dq++;
             }
 
-            m_scp_closing = true;
             ALOGV("DEBUG(%s): calling stream(%d) streamoff (fd:%d)", __FUNCTION__,
             selfThread->m_index, selfStreamParms->fd);
-            cam_int_streamoff(&(selfStreamParms->node));
-            ALOGV("DEBUG(%s): calling stream(%d) streamoff done", __FUNCTION__, selfThread->m_index);
-            if (selfStreamParms->streamType == STREAM_TYPE_DIRECT) {
-                ALOGV("DEBUG(%s): calling stream(%d) reqbuf 0 (fd:%d)", __FUNCTION__,
-                selfThread->m_index, selfStreamParms->fd);
-                currentNode->buffers = 0;
-                cam_int_reqbufs(currentNode);
-                ALOGV("DEBUG(%s): calling stream(%d) reqbuf 0 DONE(fd:%d)", __FUNCTION__,
-                selfThread->m_index, selfStreamParms->fd);
+            if (cam_int_streamoff(&(selfStreamParms->node)) < 0 ){
+                ALOGE("ERR(%s): stream off fail", __FUNCTION__);
+            } else {
+                if (selfStreamParms->streamType == STREAM_TYPE_DIRECT) {
+                    m_scp_closing = true;
+                } else {
+                    m_camera_info.capture.status = false;
+                }
             }
+            ALOGV("DEBUG(%s): calling stream(%d) streamoff done", __FUNCTION__, selfThread->m_index);
+            ALOGV("DEBUG(%s): calling stream(%d) reqbuf 0 (fd:%d)", __FUNCTION__,
+                    selfThread->m_index, selfStreamParms->fd);
+            currentNode->buffers = 0;
+            cam_int_reqbufs(currentNode);
+            ALOGV("DEBUG(%s): calling stream(%d) reqbuf 0 DONE(fd:%d)", __FUNCTION__,
+                    selfThread->m_index, selfStreamParms->fd);
         }
-            selfThread->m_releasing = false;
-        ALOGD("m_releasing set false");
         if (selfThread->m_index == 1 && m_resizeBuf.size.s != 0) {
             freeCameraMemory(&m_resizeBuf, 1);
         }
         selfThread->m_isBufferInit = false;
         selfThread->m_index = 255;
 
+        selfThread->m_releasing = false;
+
         ALOGV("DEBUG(%s): processing SIGNAL_THREAD_RELEASE DONE", __FUNCTION__);
 
         return;
@@ -2715,7 +2829,6 @@ void ExynosCameraHWInterface2::m_streamThreadFunc(SignalDrivenThread * self)
                 ALOGD("DEBUG(%s): stream(%d) type(%d) DQBUF done index(%d)",__FUNCTION__,
                     selfThread->m_index, selfStreamParms->streamType, index);
 
-                m_jpegEncodingFrameCnt = m_ispProcessingFrameCnt;
 
                 for (int i = 0; i < selfStreamParms->numSvcBuffers ; i++) {
                     if (selfStreamParms->svcBufStatus[selfStreamParms->svcBufIndex] == ON_HAL) {
@@ -2803,8 +2916,7 @@ void ExynosCameraHWInterface2::m_streamThreadFunc(SignalDrivenThread * self)
 
                 m_resizeBuf = resizeBufInfo;
 
-                res = selfStreamParms->streamOps->enqueue_buffer(selfStreamParms->streamOps,
-                        m_requestManager->GetTimestamp(m_jpegEncodingFrameCnt), &(selfStreamParms->svcBufHandle[selfStreamParms->svcBufIndex]));
+                res = selfStreamParms->streamOps->enqueue_buffer(selfStreamParms->streamOps, systemTime(), &(selfStreamParms->svcBufHandle[selfStreamParms->svcBufIndex]));
 
                 ALOGV("DEBUG(%s): stream(%d) enqueue_buffer index(%d) to svc done res(%d)",
                         __FUNCTION__, selfThread->m_index, selfStreamParms->svcBufIndex, res);
@@ -3841,11 +3953,14 @@ void ExynosCameraHWInterface2::freeCameraMemory(ExynosBuffer *buf, int iMemoryNu
 {
 
     int i =0 ;
+    int ret = 0;
 
     for (i=0;i<iMemoryNum;i++) {
         if (buf->fd.extFd[i] != -1) {
             if (buf->virt.extP[i] != (char *)MAP_FAILED) {
-                ion_unmap(buf->virt.extP[i], buf->size.extS[i]);
+                ret = ion_unmap(buf->virt.extP[i], buf->size.extS[i]);
+                if (ret < 0)
+                    ALOGE("ERR(%s)", __FUNCTION__);
             }
             ion_free(buf->fd.extFd[i]);
         }
@@ -3885,6 +4000,16 @@ static int HAL2_camera_device_close(struct hw_device_t* device)
         free(cam_device);
         g_camera_vaild = false;
     }
+    if (g_camera2[0] != NULL) {
+        delete static_cast<ExynosCamera2 *>(g_camera2[0]);
+        g_camera2[0] = NULL;
+    }
+
+    if (g_camera2[1] != NULL) {
+        delete static_cast<ExynosCamera2 *>(g_camera2[1]);
+        g_camera2[1] = NULL;
+    }
+
     ALOGV("%s: EXIT", __FUNCTION__);
     return 0;
 }
index 44e143662734cb9a05c5e4e13403a52229b143e6..250a34495b64981c9e35d956fede9139ce2565ed 100644 (file)
@@ -65,8 +65,8 @@ namespace android {
 #define NUM_BAYER_BUFFERS           (8)
 #define NUM_SENSOR_QBUF             (3)
 
-#define PREVIEW_GSC_NODE_NUM (1)
 #define PICTURE_GSC_NODE_NUM (2)
+#define VIDEO_GSC_NODE_NUM (1)
 
 #define STREAM_TYPE_DIRECT   (0)
 #define STREAM_TYPE_INDIRECT (1)
@@ -103,6 +103,14 @@ enum sensor_name {
     SENSOR_NAME_END
 };
 
+enum is_subscenario_id {
+       ISS_SUB_SCENARIO_STILL,
+       ISS_SUB_SCENARIO_VIDEO,
+       ISS_SUB_SCENARIO_SCENE1,
+       ISS_SUB_SCENARIO_SCENE2,
+       ISS_SUB_SCENARIO_SCENE3,
+       ISS_SUB_END
+};
 
 typedef struct node_info {
     int fd;
@@ -115,6 +123,7 @@ typedef struct node_info {
     enum v4l2_buf_type type;
     ion_client ionClient;
     ExynosBuffer buffer[NUM_MAX_CAMERA_BUFFERS];
+    int status;
 } node_info_t;
 
 
@@ -342,6 +351,7 @@ class MainThread : public SignalDrivenThread {
             return;
         }
         void        release(void);
+        bool        m_releasing;
     };
 
     class SensorThread : public SignalDrivenThread {
@@ -364,6 +374,7 @@ class MainThread : public SignalDrivenThread {
     //private:
         bool            m_isBayerOutputEnabled;
         int             m_sensorFd;
+        bool            m_releasing;
     };
 
     class IspThread : public SignalDrivenThread {
@@ -384,6 +395,7 @@ class MainThread : public SignalDrivenThread {
         void            release(void);
     //private:
         int             m_ispFd;
+        bool            m_releasing;
     };
 
     class StreamThread : public SignalDrivenThread {
@@ -504,7 +516,6 @@ class MainThread : public SignalDrivenThread {
 
     bool                                m_initFlag1;
     bool                                m_initFlag2;
-    int                                 m_ispProcessingFrameCnt;
 
     int                                 indexToQueue[3+1];
     int                                 m_fd_scp;
@@ -520,6 +531,7 @@ class MainThread : public SignalDrivenThread {
     int                                            m_cameraId;
     bool                                m_scp_closing;
     bool                                m_scp_closed;
+    bool                                m_wideAspect;
 
     mutable Mutex    m_qbufLock;
 
index 38c9ea7b3ce61a0829568401d668ab39e24bf1c0..869fae32e2bf61676b2ef9635af341a7b8512369 100644 (file)
@@ -44,7 +44,7 @@ MetadataConverter::MetadataConverter()
 
 MetadataConverter::~MetadataConverter()
 {
-    ALOGV("DEBUG(%s):", __FUNCTION__);
+    ALOGE("DEBUG(%s)destroy!!:", __FUNCTION__);
     return;
 }
 
@@ -151,90 +151,6 @@ status_t MetadataConverter::ToInternalShot(camera_metadata_t * request, struct c
 
 
 
-            case ANDROID_HOT_PIXEL_MODE:
-                if (NO_ERROR != CheckEntryTypeMismatch(&curr_entry, TYPE_BYTE, 1))
-                    break;
-                dst->ctl.hotpixel.mode = (enum processing_mode)curr_entry.data.u8[0];
-                break;
-
-
-
-            case ANDROID_DEMOSAIC_MODE:
-                if (NO_ERROR != CheckEntryTypeMismatch(&curr_entry, TYPE_BYTE, 1))
-                    break;
-                dst->ctl.demosaic.mode = (enum processing_mode)curr_entry.data.u8[0];
-                break;
-
-
-
-            case ANDROID_SHADING_MODE:
-                if (NO_ERROR != CheckEntryTypeMismatch(&curr_entry, TYPE_BYTE, 1))
-                    break;
-                dst->ctl.shading.mode = (enum processing_mode)curr_entry.data.u8[0];
-                break;
-
-
-
-            case ANDROID_GEOMETRIC_MODE:
-                if (NO_ERROR != CheckEntryTypeMismatch(&curr_entry, TYPE_BYTE, 1))
-                    break;
-                dst->ctl.geometric.mode = (enum processing_mode)curr_entry.data.u8[0];
-                break;
-
-
-
-            case ANDROID_COLOR_MODE:
-                if (NO_ERROR != CheckEntryTypeMismatch(&curr_entry, TYPE_BYTE, 1))
-                    break;
-                dst->ctl.color.mode = (enum colorcorrection_mode)curr_entry.data.u8[0];
-                break;
-
-            case ANDROID_COLOR_TRANSFORM:
-                if (NO_ERROR != CheckEntryTypeMismatch(&curr_entry, TYPE_FLOAT, 9))
-                    break;
-                for (i=0 ; i<curr_entry.count ; i++)
-                    dst->ctl.color.transform[i] = curr_entry.data.f[i];
-                break;
-
-
-
-            case ANDROID_TONEMAP_MODE:
-                if (NO_ERROR != CheckEntryTypeMismatch(&curr_entry, TYPE_BYTE, 1))
-                    break;
-                dst->ctl.tonemap.mode = (enum tonemap_mode)curr_entry.data.u8[0];
-                break;
-
-            case ANDROID_TONEMAP_CURVE_RED:
-                if (NO_ERROR != CheckEntryTypeMismatch(&curr_entry, TYPE_FLOAT, 32))
-                    break;
-                for (i=0 ; i<curr_entry.count ; i++)
-                    dst->ctl.tonemap.curveRed[i] = curr_entry.data.f[i];
-                break;
-
-            case ANDROID_TONEMAP_CURVE_GREEN:
-                if (NO_ERROR != CheckEntryTypeMismatch(&curr_entry, TYPE_FLOAT, 32))
-                    break;
-                for (i=0 ; i<curr_entry.count ; i++)
-                    dst->ctl.tonemap.curveGreen[i] = curr_entry.data.f[i];
-                break;
-
-            case ANDROID_TONEMAP_CURVE_BLUE:
-                if (NO_ERROR != CheckEntryTypeMismatch(&curr_entry, TYPE_FLOAT, 32))
-                    break;
-                for (i=0 ; i<curr_entry.count ; i++)
-                    dst->ctl.tonemap.curveBlue[i] = curr_entry.data.f[i];
-                break;
-
-
-
-
-            case ANDROID_SCALER_CROP_REGION:
-                if (NO_ERROR != CheckEntryTypeMismatch(&curr_entry, TYPE_INT32, 3))
-                    break;
-                for (i=0 ; i<curr_entry.count ; i++)
-                    dst->ctl.scaler.cropRegion[i] = curr_entry.data.i32[i];
-                break;
-
 
 
             case ANDROID_JPEG_QUALITY:
@@ -287,23 +203,9 @@ status_t MetadataConverter::ToInternalShot(camera_metadata_t * request, struct c
             case ANDROID_STATS_FACE_DETECT_MODE:
                 if (NO_ERROR != CheckEntryTypeMismatch(&curr_entry, TYPE_BYTE, 1))
                     break;
-                dst->ctl.stats.faceDetectMode = (enum facedetect_mode)curr_entry.data.u8[0];
-                break;
-
-            case ANDROID_STATS_HISTOGRAM_MODE:
-                if (NO_ERROR != CheckEntryTypeMismatch(&curr_entry, TYPE_BYTE, 1))
-                    break;
-                dst->ctl.stats.histogramMode = (enum stats_mode)curr_entry.data.u8[0];
+                dst->ctl.stats.faceDetectMode = (enum facedetect_mode)(curr_entry.data.u8[0] + 1);
                 break;
 
-            case ANDROID_STATS_SHARPNESS_MAP_MODE:
-                if (NO_ERROR != CheckEntryTypeMismatch(&curr_entry, TYPE_BYTE, 1))
-                    break;
-                dst->ctl.stats.sharpnessMapMode = (enum stats_mode)curr_entry.data.u8[0];
-                break;
-
-
-
             case ANDROID_CONTROL_CAPTURE_INTENT:
                 if (NO_ERROR != CheckEntryTypeMismatch(&curr_entry, TYPE_BYTE, 1))
                     break;
@@ -337,13 +239,6 @@ status_t MetadataConverter::ToInternalShot(camera_metadata_t * request, struct c
                 dst->ctl.aa.aeExpCompensation = curr_entry.data.i32[0] + 5;
                 break;
 
-
-            case ANDROID_CONTROL_AE_ANTIBANDING_MODE:
-                if (NO_ERROR != CheckEntryTypeMismatch(&curr_entry, TYPE_BYTE, 1))
-                    break;
-                dst->ctl.aa.aeAntibandingMode = (enum aa_ae_antibanding_mode)curr_entry.data.u8[0];
-                break;
-
             case ANDROID_CONTROL_AWB_MODE:
                 if (NO_ERROR != CheckEntryTypeMismatch(&curr_entry, TYPE_BYTE, 1))
                     break;
@@ -590,6 +485,10 @@ status_t MetadataConverter::ToDynamicMetadata(struct camera2_shot_ext * metadata
                 &intData, 1))
         return NO_MEMORY;
 
+    byteData = ANDROID_STATS_FACE_DETECTION_OFF;
+    if (0 != add_camera_metadata_entry(dst, ANDROID_STATS_FACE_DETECT_MODE,
+                &byteData, 1))
+        return NO_MEMORY;
 
     ALOGV("(%s): AWB(%d) AE(%d) SCENE(%d)  AEComp(%d)", __FUNCTION__,
        metadata->dm.aa.awbMode - 1, metadata->dm.aa.aeMode - 1, metadata->ctl.aa.sceneMode - 1,