hardware: samsung_slsi: libcamera2: 4th release of camera
author Sungjoong Kang <sj3.kang@samsung.com>
Fri, 17 Aug 2012 19:34:33 +0000 (12:34 -0700)
committer Rebecca Schultz Zavin <rebecca@android.com>
Wed, 22 Aug 2012 00:22:01 +0000 (17:22 -0700)
Redesigned the HAL to improve stability.
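
Main changes:
- V4L2 node setup for the sensor, ISP and scaler paths moves out of
  m_sensorThreadInitialize() into InitializeISPChain(), with a separate
  StartISP() that queues the initial ISP buffers and enables streaming.
- RequestManager now matches sensor output to request entries by frame
  count (FindFrameCnt(), ApplyDynamicMetadata()) instead of walking the
  circular buffer in order, and reports the completed entry through
  SetFrameIndex()/GetFrameIndex().
- registerStreamBuffers() can stream off and re-initialize the capture,
  sensor and ISP nodes when a direct stream is re-registered
  (m_need_streamoff), applying a short initial skip via SetInitialSkip()
  and GetSkipCnt().
- Per-frame logs drop from ALOGD to ALOGV; Dump() and abnormal paths are
  raised to ALOGD/ALOGE.
- New sizes: 1440x1080 (scaler) and 2560x1440 (JPEG) for S5K4E5,
  1280x960 for S5K6A3.

The sketch below illustrates the frame-count matching idea with
simplified stand-in types; it is not the HAL's code, which operates on
request_manager_entry and camera2_shot_ext (see FindFrameCnt() and
ApplyDynamicMetadata() in the diff):

    // Illustrative sketch only -- simplified stand-ins, not the HAL's types.
    enum EntryStatus { EMPTY, REGISTERED, REQUESTED, CAPTURED };

    struct Entry {
        int         frameCount;  // shot.ctl.request.frameCount of the request
        EntryStatus status;
    };

    // Find the entry whose frame count matches the shot just dequeued from
    // the sensor; return -1 if none, in which case the buffer is treated as
    // a bubble frame.
    static int findEntryByFrameCount(const Entry *entries, int n,
                                     int frameCount, EntryStatus wanted)
    {
        for (int i = 0; i < n; i++) {
            if (entries[i].frameCount == frameCount &&
                entries[i].status == wanted)
                return i;
        }
        return -1;
    }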

Change-Id: If04eb3af7675e04fc9b59775e6e4ca396f1ed64a
Signed-off-by: Sungjoong Kang <sj3.kang@samsung.com>
libcamera2/ExynosCamera2.cpp
libcamera2/ExynosCameraHWInterface2.cpp
libcamera2/ExynosCameraHWInterface2.h
libcamera2/MetadataConverter.cpp
libcamera2/fimc-is-metadata.h

diff --git a/libcamera2/ExynosCamera2.cpp b/libcamera2/ExynosCamera2.cpp
index 66176a444623871f67ddb32a1a6730693f0f889a..9fdf83b8e356bf50be070927d21fc3d97d6bb07a 100644
@@ -116,12 +116,14 @@ const uint64_t kAvailableJpegMinDurations[1] = {
 const int32_t scalerResolutionS5K4E5[] =
 {
     1920, 1080,
+    1440, 1080,
     1280,  720,
 };
 
 const int32_t jpegResolutionS5K4E5[] =
 {
     2560, 1920,
+    2560, 1440,
     1280,  720,
 };
 
@@ -139,11 +141,13 @@ ExynosCamera2InfoS5K4E5::ExynosCamera2InfoS5K4E5()
 
 const int32_t scalerResolutionS5K6A3[] =
 {
+    1280,  960,
     1280,  720,
 };
 
 const int32_t jpegResolutionS5K6A3[] =
 {
+    1280,  960,
     1280,  720,
 };
 
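The scaler and JPEG tables above are flat arrays of (width, height)
pairs, so the number of supported sizes is half the element count. A
minimal sketch of walking such a table, assuming only that layout (the
helper below is illustrative, not part of the HAL):

    #include <cstddef>
    #include <cstdint>
    #include <cstdio>

    // Hypothetical helper: list every width x height pair in a table.
    static void dumpResolutions(const int32_t *table, size_t elements)
    {
        for (size_t i = 0; i + 1 < elements; i += 2)
            std::printf("%dx%d\n", (int)table[i], (int)table[i + 1]);
    }

    // Usage: dumpResolutions(scalerResolutionS5K4E5,
    //            sizeof(scalerResolutionS5K4E5) / sizeof(int32_t));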
diff --git a/libcamera2/ExynosCameraHWInterface2.cpp b/libcamera2/ExynosCameraHWInterface2.cpp
index 3fd954b9e64e047a07d1736bd9b0a4f8c829db9a..1be6cdcf37a7732020edcef906752b525e61dee7 100644
@@ -49,14 +49,14 @@ void m_savePostView(const char *fname, uint8_t *buf, uint32_t size)
     int cnt = 0;
     uint32_t written = 0;
 
-    ALOGD("opening file [%s], address[%x], size(%d)", fname, (unsigned int)buf, size);
+    ALOGV("opening file [%s], address[%x], size(%d)", fname, (unsigned int)buf, size);
     int fd = open(fname, O_RDWR | O_CREAT, 0644);
     if (fd < 0) {
         ALOGE("failed to create file [%s]: %s", fname, strerror(errno));
         return;
     }
 
-    ALOGD("writing %d bytes to file [%s]", size, fname);
+    ALOGV("writing %d bytes to file [%s]", size, fname);
     while (written < size) {
         nw = ::write(fd, buf + written, size - written);
         if (nw < 0) {
@@ -66,7 +66,7 @@ void m_savePostView(const char *fname, uint8_t *buf, uint32_t size)
         written += nw;
         cnt++;
     }
-    ALOGD("done writing %d bytes to file [%s] in %d passes",size, fname, cnt);
+    ALOGV("done writing %d bytes to file [%s] in %d passes",size, fname, cnt);
     ::close(fd);
 }
 
@@ -139,7 +139,6 @@ int cam_int_s_fmt(node_info_t *node)
     if (ret < 0)
         ALOGE("%s: exynos_v4l2_s_fmt fail (%d)",__FUNCTION__, ret);
 
-    node->streamOn = false;
 
     return ret;
 }
@@ -192,15 +191,11 @@ int cam_int_streamon(node_info_t *node)
     enum v4l2_buf_type type = node->type;
     int ret;
 
-    if (node->streamOn)
-        return 0;
 
     ret = exynos_v4l2_streamon(node->fd, type);
 
     if (ret < 0)
-        ALOGE("%s: VIDIOC_STREAMON failed (%d)",__FUNCTION__, ret);
-    else
-        node->streamOn = true;
+        ALOGE("%s: VIDIOC_STREAMON failed [%d] (%d)",__FUNCTION__, node->fd, ret);
 
     ALOGV("On streaming I/O... ... fd(%d)", node->fd);
 
@@ -209,35 +204,31 @@ int cam_int_streamon(node_info_t *node)
 
 int cam_int_streamoff(node_info_t *node)
 {
-       enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
-       int ret;
+    enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+    int ret;
 
-    if (!node->streamOn)
-        return 0;
 
-       ALOGV("Off streaming I/O... fd(%d)", node->fd);
-       ret = exynos_v4l2_streamoff(node->fd, type);
+    ALOGV("Off streaming I/O... fd(%d)", node->fd);
+    ret = exynos_v4l2_streamoff(node->fd, type);
 
     if (ret < 0)
         ALOGE("%s: VIDIOC_STREAMOFF failed (%d)",__FUNCTION__, ret);
-    else
-        node->streamOn = false;
 
-       return ret;
+    return ret;
 }
 
 int isp_int_streamoff(node_info_t *node)
 {
-       enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
-       int ret;
+    enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
+    int ret;
 
-       ALOGV("Off streaming I/O... fd(%d)", node->fd);
-       ret = exynos_v4l2_streamoff(node->fd, type);
+    ALOGV("Off streaming I/O... fd(%d)", node->fd);
+    ret = exynos_v4l2_streamoff(node->fd, type);
 
     if (ret < 0)
         ALOGE("%s: VIDIOC_STREAMOFF failed (%d)",__FUNCTION__, ret);
 
-       return ret;
+    return ret;
 }
 
 int cam_int_dqbuf(node_info_t *node)
@@ -274,9 +265,10 @@ gralloc_module_t const* ExynosCameraHWInterface2::m_grallocHal;
 
 RequestManager::RequestManager(SignalDrivenThread* main_thread):
     m_numOfEntries(0),
-    m_entryInsertionIndex(0),
-    m_entryProcessingIndex(0),
-    m_entryFrameOutputIndex(0)
+    m_entryInsertionIndex(-1),
+    m_entryProcessingIndex(-1),
+    m_entryFrameOutputIndex(-1),
+    m_frameIndex(-1)
 {
     m_metadataConverter = new MetadataConverter;
     m_mainThread = main_thread;
@@ -284,7 +276,7 @@ RequestManager::RequestManager(SignalDrivenThread* main_thread):
         memset(&(entries[i]), 0x00, sizeof(request_manager_entry_t));
         entries[i].internal_shot.shot.ctl.request.frameCount = -1;
     }
-    m_sensorPipelineSkipCnt = 8;
+    m_sensorPipelineSkipCnt = 0;
     return;
 }
 
@@ -331,7 +323,7 @@ void RequestManager::RegisterRequest(camera_metadata_t * new_request)
     }
     newEntry->status = REGISTERED;
     newEntry->original_request = new_request;
-    // TODO : allocate internal_request dynamically
+    memset(&(newEntry->internal_shot), 0, sizeof(struct camera2_shot_ext));
     m_metadataConverter->ToInternalShot(new_request, &(newEntry->internal_shot));
     newEntry->output_stream_count = newEntry->internal_shot.shot.ctl.request.outputStreams[15];
 
@@ -346,12 +338,15 @@ void RequestManager::RegisterRequest(camera_metadata_t * new_request)
 void RequestManager::DeregisterRequest(camera_metadata_t ** deregistered_request)
 {
     ALOGV("DEBUG(%s):", __FUNCTION__);
-    Mutex::Autolock lock(m_requestMutex);
+    int frame_index;
+    request_manager_entry * currentEntry;
 
-    request_manager_entry * currentEntry =  &(entries[m_entryFrameOutputIndex]);
+    Mutex::Autolock lock(m_requestMutex);
 
+    frame_index = GetFrameIndex();
+    currentEntry =  &(entries[frame_index]);
     if (currentEntry->status != CAPTURED) {
-        ALOGD("DBG(%s): Circular buffer abnormal. processing(%d), frame(%d), status(%d) ", __FUNCTION__
+        ALOGV("DBG(%s): Circular buffer abnormal. processing(%d), frame(%d), status(%d) ", __FUNCTION__
         , m_entryProcessingIndex, m_entryFrameOutputIndex,(int)(currentEntry->status));
         return;
     }
@@ -364,7 +359,6 @@ void RequestManager::DeregisterRequest(camera_metadata_t ** deregistered_request
     currentEntry->output_stream_count = 0;
     currentEntry->dynamic_meta_vaild = false;
     m_numOfEntries--;
-    // Dump();
     ALOGV("## DeRegistReq DONE num(%d), insert(%d), processing(%d), frame(%d)",
      m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex);
 
@@ -377,34 +371,13 @@ bool RequestManager::PrepareFrame(size_t* num_entries, size_t* frame_size,
     ALOGV("DEBUG(%s):", __FUNCTION__);
     Mutex::Autolock lock(m_requestMutex);
     status_t res = NO_ERROR;
-    int tempFrameOutputIndex = GetNextIndex(m_entryFrameOutputIndex);
+    int tempFrameOutputIndex = GetFrameIndex();
     request_manager_entry * currentEntry =  &(entries[tempFrameOutputIndex]);
     ALOGV("DEBUG(%s): processing(%d), frameOut(%d), insert(%d) recentlycompleted(%d)", __FUNCTION__,
         m_entryProcessingIndex, m_entryFrameOutputIndex, m_entryInsertionIndex, m_completedIndex);
 
-    if (m_completedIndex != tempFrameOutputIndex) {
-        ALOGV("DEBUG(%s): frame left behind : completed(%d), preparing(%d)", __FUNCTION__, m_completedIndex,tempFrameOutputIndex);
-
-        request_manager_entry * currentEntry2 =  &(entries[tempFrameOutputIndex]);
-        currentEntry2->status = EMPTY;
-        currentEntry2->original_request = NULL;
-        memset(&(currentEntry2->internal_shot), 0, sizeof(struct camera2_shot_ext));
-        currentEntry2->internal_shot.shot.ctl.request.frameCount = -1;
-        currentEntry2->output_stream_count = 0;
-        currentEntry2->dynamic_meta_vaild = false;
-        m_numOfEntries--;
-        // Dump();
-        tempFrameOutputIndex = m_completedIndex;
-        currentEntry =  &(entries[tempFrameOutputIndex]);
-    }
-
-    if (currentEntry->output_stream_count!=0) {
-        ALOGD("DBG(%s): Circular buffer has remaining output : stream_count(%d)", __FUNCTION__, currentEntry->output_stream_count);
-        return false;
-    }
-
     if (currentEntry->status != CAPTURED) {
-        ALOGD("DBG(%s): Circular buffer abnormal status(%d)", __FUNCTION__, (int)(currentEntry->status));
+        ALOGV("DBG(%s): Circular buffer abnormal status(%d)", __FUNCTION__, (int)(currentEntry->status));
 
         return false;
     }
@@ -427,54 +400,57 @@ bool RequestManager::PrepareFrame(size_t* num_entries, size_t* frame_size,
 
 int RequestManager::MarkProcessingRequest(ExynosBuffer* buf)
 {
-    ALOGV("DEBUG(%s):", __FUNCTION__);
+
     Mutex::Autolock lock(m_requestMutex);
     struct camera2_shot_ext * shot_ext;
     struct camera2_shot_ext * request_shot;
     int targetStreamIndex = 0;
+    request_manager_entry * newEntry = NULL;
 
     if (m_numOfEntries == 0)  {
-        ALOGV("DEBUG(%s): Request Manager Empty ", __FUNCTION__);
+        ALOGD("DEBUG(%s): Request Manager Empty ", __FUNCTION__);
         return -1;
     }
 
     if ((m_entryProcessingIndex == m_entryInsertionIndex)
         && (entries[m_entryProcessingIndex].status == REQUESTED || entries[m_entryProcessingIndex].status == CAPTURED)) {
-        ALOGV("## MarkProcReq skipping(request underrun) -  num(%d), insert(%d), processing(%d), frame(%d)",
+        ALOGD("## MarkProcReq skipping(request underrun) -  num(%d), insert(%d), processing(%d), frame(%d)",
          m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex);
         return -1;
     }
 
-    request_manager_entry * newEntry = NULL;
     int newProcessingIndex = GetNextIndex(m_entryProcessingIndex);
+    ALOGV("DEBUG(%s): index(%d)", __FUNCTION__, newProcessingIndex);
 
     newEntry = &(entries[newProcessingIndex]);
-    request_shot = &newEntry->internal_shot;
+    request_shot = &(newEntry->internal_shot);
     if (newEntry->status != REGISTERED) {
-        ALOGV("DEBUG(%s): Circular buffer abnormal ", __FUNCTION__);
+        ALOGD("DEBUG(%s)(%d): Circular buffer abnormal ", __FUNCTION__, newProcessingIndex);
         return -1;
     }
+
     newEntry->status = REQUESTED;
 
-    shot_ext = (struct camera2_shot_ext *)(buf->virt.extP[1]);
-    ALOGV("DEBUG(%s):Writing the info of Framecnt(%d)", __FUNCTION__, request_shot->shot.ctl.request.frameCount);
-    memcpy(shot_ext, &newEntry->internal_shot, sizeof(struct camera2_shot_ext));
+    shot_ext = (struct camera2_shot_ext *)buf->virt.extP[1];
 
+    memset(shot_ext, 0x00, sizeof(struct camera2_shot_ext));
+    shot_ext->shot.ctl.request.frameCount = request_shot->shot.ctl.request.frameCount;
     shot_ext->request_sensor = 1;
     shot_ext->dis_bypass = 1;
     shot_ext->dnr_bypass = 1;
+    shot_ext->fd_bypass = 1;
+    shot_ext->setfile = 0;
+
     for (int i = 0; i < newEntry->output_stream_count; i++) {
         targetStreamIndex = newEntry->internal_shot.shot.ctl.request.outputStreams[i];
 
         if (targetStreamIndex==0) {
             ALOGV("DEBUG(%s): outputstreams(%d) is for scalerP", __FUNCTION__, i);
             shot_ext->request_scp = 1;
-            shot_ext->shot.ctl.request.outputStreams[0] = 1;
         }
         else if (targetStreamIndex == 1) {
             ALOGV("DEBUG(%s): outputstreams(%d) is for scalerC", __FUNCTION__, i);
             shot_ext->request_scc = 1;
-            shot_ext->shot.ctl.request.outputStreams[1] = 1;
         }
         else if (targetStreamIndex == 2) {
             ALOGV("DEBUG(%s): outputstreams(%d) is for scalerP (record)", __FUNCTION__, i);
@@ -485,20 +461,21 @@ int RequestManager::MarkProcessingRequest(ExynosBuffer* buf)
             ALOGV("DEBUG(%s): outputstreams(%d) has abnormal value(%d)", __FUNCTION__, i, targetStreamIndex);
         }
     }
-    if (shot_ext->shot.ctl.aa.aeMode == AA_AEMODE_ON) {
-        ALOGV("(%s): AE_ON => ignoring some params", __FUNCTION__);
-        shot_ext->shot.ctl.sensor.exposureTime = 0;
-        shot_ext->shot.ctl.sensor.sensitivity = 0;
-        shot_ext->shot.ctl.sensor.frameDuration = 33*1000*1000;
-        // TODO : check frameDuration
-    }
-    m_entryProcessingIndex = newProcessingIndex;
 
-    //    Dump();
-    ALOGV("## MarkProcReq DONE totalentry(%d), insert(%d), processing(%d), frame(%d) frameCnt(%d)",
-    m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex, newEntry->internal_shot.shot.ctl.request.frameCount);
+    shot_ext->shot.ctl.aa.mode = AA_CONTROL_AUTO;
+    shot_ext->shot.ctl.request.metadataMode = METADATA_MODE_FULL;
+    shot_ext->shot.ctl.stats.faceDetectMode = FACEDETECT_MODE_FULL;
+    shot_ext->shot.magicNumber = 0x23456789;
+    shot_ext->shot.ctl.sensor.exposureTime = 0;
+    shot_ext->shot.ctl.sensor.frameDuration = 33*1000*1000;
+    shot_ext->shot.ctl.sensor.sensitivity = 0;
+
+    shot_ext->shot.ctl.scaler.cropRegion[0] = 0;
+    shot_ext->shot.ctl.scaler.cropRegion[1] = 0;
+    shot_ext->shot.ctl.scaler.cropRegion[2] = m_cropX;
 
-    return m_entryProcessingIndex;
+    m_entryProcessingIndex = newProcessingIndex;
+    return newProcessingIndex;
 }
 
 void RequestManager::NotifyStreamOutput(int frameCnt, int stream_id)
@@ -522,37 +499,50 @@ void RequestManager::NotifyStreamOutput(int frameCnt, int stream_id)
 void RequestManager::CheckCompleted(int index)
 {
     ALOGV("DEBUG(%s): reqIndex(%d) current Count(%d)", __FUNCTION__, index, entries[index].output_stream_count);
-    if (entries[index].output_stream_count == 0 && entries[index].dynamic_meta_vaild) {
-        ALOGV("DEBUG(%s): index[%d] completed and sending SIGNAL_MAIN_STREAM_OUTPUT_DONE", __FUNCTION__, index);
-        // Dump();
-        m_completedIndex = index;
-        m_mainThread->SetSignal(SIGNAL_MAIN_STREAM_OUTPUT_DONE);
-    }
+    SetFrameIndex(index);
+    m_mainThread->SetSignal(SIGNAL_MAIN_STREAM_OUTPUT_DONE);
     return;
 }
 
-void RequestManager::ApplyDynamicMetadata(struct camera2_shot_ext *shot_ext, int frameCnt)
+void RequestManager::SetFrameIndex(int index)
+{
+    Mutex::Autolock lock(m_requestMutex);
+    m_frameIndex = index;
+}
+
+int RequestManager::GetFrameIndex()
+{
+    return m_frameIndex;
+}
+
+void RequestManager::ApplyDynamicMetadata(struct camera2_shot_ext *shot_ext)
 {
     int index;
     struct camera2_shot_ext * request_shot;
     nsecs_t timeStamp;
+    int i;
 
-    ALOGV("DEBUG(%s): frameCnt(%d)", __FUNCTION__, frameCnt);
+    ALOGV("DEBUG(%s): frameCnt(%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
 
-    index = FindEntryIndexByFrameCnt(frameCnt);
-    if (index == -1) {
-        ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
+    for (i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
+        if((entries[i].internal_shot.shot.ctl.request.frameCount == shot_ext->shot.ctl.request.frameCount)
+            && (entries[i].status == CAPTURED))
+            break;
+    }
+
+    if (i == NUM_MAX_REQUEST_MGR_ENTRY){
+        ALOGE("[%s] no entry found(framecount:%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
         return;
     }
 
-    request_manager_entry * newEntry = &(entries[index]);
+    request_manager_entry * newEntry = &(entries[i]);
     request_shot = &(newEntry->internal_shot);
 
     newEntry->dynamic_meta_vaild = true;
     timeStamp = request_shot->shot.dm.sensor.timeStamp;
-    memcpy(&request_shot->shot.dm, &shot_ext->shot.dm, sizeof(struct camera2_dm));
+    memcpy(&(request_shot->shot.dm), &(shot_ext->shot.dm), sizeof(struct camera2_dm));
     request_shot->shot.dm.sensor.timeStamp = timeStamp;
-    CheckCompleted(index);
+    CheckCompleted(i);
 }
 
 void RequestManager::DumpInfoWithIndex(int index)
@@ -590,38 +580,42 @@ void    RequestManager::UpdateIspParameters(struct camera2_shot_ext *shot_ext, i
     }
 
     request_manager_entry * newEntry = &(entries[index]);
-    request_shot = &newEntry->internal_shot;
+    request_shot = &(newEntry->internal_shot);
     shot_ext->request_sensor = 1;
+    shot_ext->dis_bypass = 1;
+    shot_ext->dnr_bypass = 1;
+    shot_ext->fd_bypass = 1;
+    shot_ext->setfile = 0;
+
     shot_ext->request_scc = 0;
     shot_ext->request_scp = 0;
+
     shot_ext->shot.ctl.request.outputStreams[0] = 0;
     shot_ext->shot.ctl.request.outputStreams[1] = 0;
     shot_ext->shot.ctl.request.outputStreams[2] = 0;
 
-    memcpy(&shot_ext->shot.ctl, &request_shot->shot.ctl, sizeof(struct camera2_ctl));
+
     for (int i = 0; i < newEntry->output_stream_count; i++) {
-        // TODO : match with actual stream index;
-        targetStreamIndex = request_shot->shot.ctl.request.outputStreams[i];
+        targetStreamIndex = newEntry->internal_shot.shot.ctl.request.outputStreams[i];
 
         if (targetStreamIndex==0) {
-            ALOGV("DEBUG(%s): outputstreams item[%d] is for scalerP", __FUNCTION__, i);
+            ALOGV("DEBUG(%s): outputstreams(%d) is for scalerP", __FUNCTION__, i);
             shot_ext->request_scp = 1;
-            shot_ext->shot.ctl.request.outputStreams[0] = 1;
         }
         else if (targetStreamIndex == 1) {
-            ALOGV("DEBUG(%s): outputstreams item[%d] is for scalerC", __FUNCTION__, i);
+            ALOGV("DEBUG(%s): outputstreams(%d) is for scalerC", __FUNCTION__, i);
             shot_ext->request_scc = 1;
-            shot_ext->shot.ctl.request.outputStreams[1] = 1;
         }
         else if (targetStreamIndex == 2) {
-            ALOGV("DEBUG(%s): outputstreams item[%d] is for scalerP (record)", __FUNCTION__, i);
+            ALOGV("DEBUG(%s): outputstreams(%d) is for scalerP (record)", __FUNCTION__, i);
             shot_ext->request_scp = 1;
             shot_ext->shot.ctl.request.outputStreams[2] = 1;
         }
         else {
-            ALOGV("DEBUG(%s): outputstreams item[%d] has abnormal value(%d)", __FUNCTION__, i, targetStreamIndex);
+            ALOGV("DEBUG(%s): outputstreams(%d) has abnormal value(%d)", __FUNCTION__, i, targetStreamIndex);
         }
     }
+
 }
 
 int     RequestManager::FindEntryIndexByFrameCnt(int frameCnt)
@@ -647,11 +641,11 @@ void    RequestManager::RegisterTimestamp(int frameCnt, nsecs_t * frameTime)
         index, frameCnt, currentEntry->internal_shot.shot.dm.sensor.timeStamp);
 }
 
-uint64_t  RequestManager::GetTimestamp(int frameCnt)
+uint64_t  RequestManager::GetTimestamp(int index)
 {
-    int index = FindEntryIndexByFrameCnt(frameCnt);
+
     if (index == -1) {
-        ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
+        ALOGE("ERR(%s): Cannot find entry ", __FUNCTION__);
         return 0;
     }
 
@@ -663,31 +657,26 @@ uint64_t  RequestManager::GetTimestamp(int frameCnt)
 
 int     RequestManager::FindFrameCnt(struct camera2_shot_ext * shot_ext)
 {
-       int tempIndex, i;
-    if (m_sensorPipelineSkipCnt > 0) {
-        m_sensorPipelineSkipCnt--;
-        return -1;
-    }
+    int i;
+
     if (m_numOfEntries == 0) {
-        ALOGD("(%s): No Entry found", __FUNCTION__);
+        ALOGV("(%s): No Entry found", __FUNCTION__);
         return -1;
     }
-    tempIndex = GetNextIndex(m_entryFrameOutputIndex);
+
     for (i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
-        if (entries[tempIndex].status == REQUESTED) {
-            entries[tempIndex].status = CAPTURED;
-            return entries[tempIndex].internal_shot.shot.ctl.request.frameCount;
-        }
-        else if (entries[tempIndex].status == CAPTURED) {
-            tempIndex = GetNextIndex(tempIndex);
+        if(entries[i].internal_shot.shot.ctl.request.frameCount != shot_ext->shot.ctl.request.frameCount)
             continue;
+
+        if (entries[i].status == REQUESTED) {
+            entries[i].status = CAPTURED;
+            return entries[i].internal_shot.shot.ctl.request.frameCount;
         }
-        else {
-            ALOGE("(%s): enry state abnormal status(%d)", __FUNCTION__, entries[tempIndex].status);
-            Dump();
-            return -1;
-        }
+
     }
+
+    ALOGD("(%s): No Entry found", __FUNCTION__);
+
     return -1;
 }
 
@@ -698,18 +687,29 @@ void     RequestManager::SetInitialSkip(int count)
         m_sensorPipelineSkipCnt = count;
 }
 
+int     RequestManager::GetSkipCnt()
+{
+    ALOGV("(%s): skip cnt(%d)", __FUNCTION__, m_sensorPipelineSkipCnt);
+    if (m_sensorPipelineSkipCnt == 0)
+        return m_sensorPipelineSkipCnt;
+    else
+        return --m_sensorPipelineSkipCnt;
+}
+
 void RequestManager::Dump(void)
 {
     int i = 0;
     request_manager_entry * currentEntry;
-    ALOGV("## Dump  totalentry(%d), insert(%d), processing(%d), frame(%d)",
+    ALOGD("## Dump  totalentry(%d), insert(%d), processing(%d), frame(%d)",
     m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex);
 
     for (i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
         currentEntry =  &(entries[i]);
-        ALOGV("[%2d] status[%d] frameCnt[%3d] numOutput[%d]", i,
+        ALOGD("[%2d] status[%d] frameCnt[%3d] numOutput[%d] outstream[0]-%d outstream[1]-%d", i,
         currentEntry->status, currentEntry->internal_shot.shot.ctl.request.frameCount,
-            currentEntry->output_stream_count);
+            currentEntry->output_stream_count,
+            currentEntry->internal_shot.shot.ctl.request.outputStreams[0],
+            currentEntry->internal_shot.shot.ctl.request.outputStreams[1]);
     }
 }
 
@@ -731,6 +731,7 @@ ExynosCameraHWInterface2::ExynosCameraHWInterface2(int cameraId, camera2_device_
             m_isRequestQueueNull(true),
             m_isSensorThreadOn(false),
             m_isSensorStarted(false),
+            m_isIspStarted(false),
             m_ionCameraClient(0),
             m_initFlag1(false),
             m_initFlag2(false),
@@ -742,6 +743,7 @@ ExynosCameraHWInterface2::ExynosCameraHWInterface2(int cameraId, camera2_device_
             m_scp_closing(false),
             m_scp_closed(false),
             m_halDevice(dev),
+            m_need_streamoff(0),
             m_cameraId(cameraId)
 {
     ALOGV("DEBUG(%s):", __FUNCTION__);
@@ -764,11 +766,10 @@ ExynosCameraHWInterface2::ExynosCameraHWInterface2(int cameraId, camera2_device_
 
     m_BayerManager = new BayerBufManager();
     m_mainThread    = new MainThread(this);
+    InitializeISPChain();
     m_sensorThread  = new SensorThread(this);
-    m_ispThread     = new IspThread(this);
     m_mainThread->Start("MainThread", PRIORITY_DEFAULT, 0);
     ALOGV("DEBUG(%s): created sensorthread ################", __FUNCTION__);
-    usleep(1600000);
 
     m_requestManager = new RequestManager((SignalDrivenThread*)(m_mainThread.get()));
     CSC_METHOD cscMethod = CSC_METHOD_HW;
@@ -787,9 +788,9 @@ ExynosCameraHWInterface2::ExynosCameraHWInterface2(int cameraId, camera2_device_
 
 ExynosCameraHWInterface2::~ExynosCameraHWInterface2()
 {
-    ALOGD("%s: ENTER", __FUNCTION__);
+    ALOGV("%s: ENTER", __FUNCTION__);
     this->release();
-    ALOGD("%s: EXIT", __FUNCTION__);
+    ALOGV("%s: EXIT", __FUNCTION__);
 }
 
 void ExynosCameraHWInterface2::release()
@@ -798,8 +799,16 @@ void ExynosCameraHWInterface2::release()
     ALOGD("%s: ENTER", __func__);
     m_closing = true;
 
-    while (!m_scp_closed)
-        usleep(1000);
+    if (m_streamThreads[1] != NULL) {
+        m_streamThreads[1]->release();
+        m_streamThreads[1]->SetSignal(SIGNAL_THREAD_TERMINATE);
+    }
+
+    if (m_streamThreads[0] != NULL) {
+        m_streamThreads[0]->release();
+        m_streamThreads[0]->SetSignal(SIGNAL_THREAD_TERMINATE);
+    }
+
     if (m_ispThread != NULL) {
         m_ispThread->release();
     }
@@ -812,17 +821,6 @@ void ExynosCameraHWInterface2::release()
         m_mainThread->release();
     }
 
-    if (m_streamThreads[0] != NULL) {
-        m_streamThreads[0]->release();
-        m_streamThreads[0]->SetSignal(SIGNAL_THREAD_TERMINATE);
-    }
-
-    if (m_streamThreads[1] != NULL) {
-        m_streamThreads[1]->release();
-        m_streamThreads[1]->SetSignal(SIGNAL_THREAD_TERMINATE);        
-    }
-
-
     if (m_exynosPictureCSC)
         csc_deinit(m_exynosPictureCSC);
     m_exynosPictureCSC = NULL;
@@ -831,36 +829,51 @@ void ExynosCameraHWInterface2::release()
         csc_deinit(m_exynosVideoCSC);
     m_exynosVideoCSC = NULL;
 
+    if (m_streamThreads[1] != NULL) {
+        while (!m_streamThreads[1]->IsTerminated())
+        {
+            ALOGD("Waiting for stream thread 1 to terminate");
+            usleep(100000);
+        }
+        m_streamThreads[1] = NULL;
+    }
+
+    if (m_streamThreads[0] != NULL) {
+        while (!m_streamThreads[0]->IsTerminated())
+        {
+            ALOGD("Waiting for stream thread 0 to terminate");
+            usleep(100000);
+        }
+        m_streamThreads[0] = NULL;
+    }
+
     if (m_ispThread != NULL) {
         while (!m_ispThread->IsTerminated())
-            usleep(1000);
+        {
+            ALOGD("Waiting for ISP thread to terminate");
+            usleep(100000);
+        }
         m_ispThread = NULL;
     }
 
     if (m_sensorThread != NULL) {
         while (!m_sensorThread->IsTerminated())
-            usleep(1000);
+        {
+            ALOGD("Waiting for sensor thread to terminate");
+            usleep(100000);
+        }
         m_sensorThread = NULL;
     }
 
-    if (m_mainThread != NULL) {   
+    if (m_mainThread != NULL) {
         while (!m_mainThread->IsTerminated())
-            usleep(1000);        
+        {
+            ALOGD("Waiting for main thread to terminate");
+            usleep(100000);
+        }
         m_mainThread = NULL;
     }
 
-    if (m_streamThreads[0] != NULL) {
-        while (!m_streamThreads[0]->IsTerminated())
-            usleep(1000);
-        m_streamThreads[0] = NULL;
-    }
-
-    if (m_streamThreads[1] != NULL) {
-        while (!m_streamThreads[1]->IsTerminated())
-            usleep(1000);
-        m_streamThreads[1] = NULL;
-    }
-    
     for(i = 0; i < m_camera_info.sensor.buffers; i++)
         freeCameraMemory(&m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes);
 
@@ -886,14 +899,214 @@ void ExynosCameraHWInterface2::release()
     }
 
     ALOGV("DEBUG(%s): calling exynos_v4l2_close - scp", __FUNCTION__);
-    res = exynos_v4l2_close(m_fd_scp); 
+    res = exynos_v4l2_close(m_fd_scp);
     if (res != NO_ERROR ) {
         ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
     }
     ALOGV("DEBUG(%s): calling deleteIonClient", __FUNCTION__);
     deleteIonClient(m_ionCameraClient);
-    
-    ALOGD("%s: EXIT", __func__);
+
+    ALOGV("%s: EXIT", __func__);
+}
+
+void ExynosCameraHWInterface2::InitializeISPChain()
+{
+    char node_name[30];
+    int fd = 0;
+    int i;
+
+    /* Open Sensor */
+    memset(&node_name, 0x00, sizeof(char[30]));
+    sprintf(node_name, "%s%d", NODE_PREFIX, 40);
+    fd = exynos_v4l2_open(node_name, O_RDWR, 0);
+
+    if (fd < 0) {
+        ALOGE("ERR(%s): failed to open sensor video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
+    }
+    else {
+        ALOGV("DEBUG(%s): sensor video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
+    }
+    m_camera_info.sensor.fd = fd;
+
+    /* Open ISP */
+    memset(&node_name, 0x00, sizeof(char[30]));
+    sprintf(node_name, "%s%d", NODE_PREFIX, 41);
+    fd = exynos_v4l2_open(node_name, O_RDWR, 0);
+
+    if (fd < 0) {
+        ALOGE("ERR(%s): failed to open isp video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
+    }
+    else {
+        ALOGV("DEBUG(%s): isp video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
+    }
+    m_camera_info.isp.fd = fd;
+
+    /* Open ScalerC */
+    memset(&node_name, 0x00, sizeof(char[30]));
+    sprintf(node_name, "%s%d", NODE_PREFIX, 42);
+    fd = exynos_v4l2_open(node_name, O_RDWR, 0);
+
+    if (fd < 0) {
+        ALOGE("ERR(%s): failed to open capture video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
+    }
+    else {
+        ALOGV("DEBUG(%s): capture video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
+    }
+    m_camera_info.capture.fd = fd;
+
+    /* Open ScalerP */
+    memset(&node_name, 0x00, sizeof(char[30]));
+    sprintf(node_name, "%s%d", NODE_PREFIX, 44);
+    fd = exynos_v4l2_open(node_name, O_RDWR, 0);
+    if (fd < 0) {
+        ALOGE("DEBUG(%s): failed to open preview video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
+    }
+    else {
+        ALOGV("DEBUG(%s): preview video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
+    }
+    m_fd_scp = fd;
+
+    if(m_cameraId == 0)
+        m_camera_info.sensor_id = SENSOR_NAME_S5K4E5;
+    else
+        m_camera_info.sensor_id = SENSOR_NAME_S5K6A3;
+
+    memset(&m_camera_info.dummy_shot, 0x00, sizeof(struct camera2_shot_ext));
+    m_camera_info.dummy_shot.shot.ctl.request.metadataMode = METADATA_MODE_FULL;
+    m_camera_info.dummy_shot.shot.magicNumber = 0x23456789;
+
+    m_camera_info.dummy_shot.dis_bypass = 1;
+    m_camera_info.dummy_shot.dnr_bypass = 1;
+    m_camera_info.dummy_shot.fd_bypass = 1;
+
+    /*sensor setting*/
+    m_camera_info.dummy_shot.shot.ctl.sensor.exposureTime = 0;
+    m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 0;
+    m_camera_info.dummy_shot.shot.ctl.sensor.sensitivity = 0;
+
+    m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[0] = 0;
+    m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[1] = 0;
+
+    /*request setting*/
+    m_camera_info.dummy_shot.request_sensor = 1;
+    m_camera_info.dummy_shot.request_scc = 0;
+    m_camera_info.dummy_shot.request_scp = 0;
+    m_camera_info.dummy_shot.shot.ctl.request.outputStreams[0] = 0;
+    m_camera_info.dummy_shot.shot.ctl.request.outputStreams[1] = 0;
+    m_camera_info.dummy_shot.shot.ctl.request.outputStreams[2] = 0;
+
+    m_camera_info.sensor.width = m_camera2->getSensorRawW();
+    m_camera_info.sensor.height = m_camera2->getSensorRawH();
+
+    m_camera_info.sensor.format = V4L2_PIX_FMT_SBGGR16;
+    m_camera_info.sensor.planes = 2;
+    m_camera_info.sensor.buffers = NUM_BAYER_BUFFERS;
+    m_camera_info.sensor.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+    m_camera_info.sensor.memory = V4L2_MEMORY_DMABUF;
+    m_camera_info.sensor.ionClient = m_ionCameraClient;
+
+    for(i = 0; i < m_camera_info.sensor.buffers; i++){
+        initCameraMemory(&m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes);
+        m_camera_info.sensor.buffer[i].size.extS[0] = m_camera_info.sensor.width*m_camera_info.sensor.height*2;
+        m_camera_info.sensor.buffer[i].size.extS[1] = 8*1024; // HACK: driver uses 8*1024, should use a predefined value
+        allocCameraMemory(m_camera_info.sensor.ionClient, &m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes);
+    }
+
+    m_camera_info.isp.width = m_camera_info.sensor.width;
+    m_camera_info.isp.height = m_camera_info.sensor.height;
+    m_camera_info.isp.format = m_camera_info.sensor.format;
+    m_camera_info.isp.planes = m_camera_info.sensor.planes;
+    m_camera_info.isp.buffers = m_camera_info.sensor.buffers;
+    m_camera_info.isp.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
+    m_camera_info.isp.memory = V4L2_MEMORY_DMABUF;
+    m_camera_info.isp.ionClient = m_ionCameraClient;
+
+    for(i = 0; i < m_camera_info.isp.buffers; i++){
+        initCameraMemory(&m_camera_info.isp.buffer[i], m_camera_info.isp.planes);
+        m_camera_info.isp.buffer[i].size.extS[0]    = m_camera_info.sensor.buffer[i].size.extS[0];
+        m_camera_info.isp.buffer[i].size.extS[1]    = m_camera_info.sensor.buffer[i].size.extS[1];
+        m_camera_info.isp.buffer[i].fd.extFd[0]     = m_camera_info.sensor.buffer[i].fd.extFd[0];
+        m_camera_info.isp.buffer[i].fd.extFd[1]     = m_camera_info.sensor.buffer[i].fd.extFd[1];
+        m_camera_info.isp.buffer[i].virt.extP[0]    = m_camera_info.sensor.buffer[i].virt.extP[0];
+        m_camera_info.isp.buffer[i].virt.extP[1]    = m_camera_info.sensor.buffer[i].virt.extP[1];
+    }
+
+    /* init ISP */
+    cam_int_s_input(&(m_camera_info.isp), m_camera_info.sensor_id);
+    cam_int_s_fmt(&(m_camera_info.isp));
+    ALOGV("DEBUG(%s): isp calling reqbuf", __FUNCTION__);
+    cam_int_reqbufs(&(m_camera_info.isp));
+    ALOGV("DEBUG(%s): isp calling querybuf", __FUNCTION__);
+    ALOGV("DEBUG(%s): isp mem alloc done",  __FUNCTION__);
+
+    /* init Sensor */
+    cam_int_s_input(&(m_camera_info.sensor), m_camera_info.sensor_id);
+    ALOGV("DEBUG(%s): sensor s_input done",  __FUNCTION__);
+    if (cam_int_s_fmt(&(m_camera_info.sensor))< 0) {
+        ALOGE("ERR(%s): sensor s_fmt fail",  __FUNCTION__);
+    }
+    ALOGV("DEBUG(%s): sensor s_fmt done",  __FUNCTION__);
+    cam_int_reqbufs(&(m_camera_info.sensor));
+    ALOGV("DEBUG(%s): sensor reqbuf done",  __FUNCTION__);
+    for (i = 0; i < m_camera_info.sensor.buffers; i++) {
+        ALOGV("DEBUG(%s): sensor initial QBUF [%d]",  __FUNCTION__, i);
+        memcpy( m_camera_info.sensor.buffer[i].virt.extP[1], &(m_camera_info.dummy_shot),
+                sizeof(struct camera2_shot_ext));
+        m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 33*1000*1000; // apply from frame #1
+        m_camera_info.dummy_shot.shot.ctl.request.frameCount = -1;
+        cam_int_qbuf(&(m_camera_info.sensor), i);
+    }
+    ALOGV("== stream_on :: .sensor");
+    cam_int_streamon(&(m_camera_info.sensor));
+
+    /* init Capture */
+    m_camera_info.capture.width = m_camera2->getSensorW();
+    m_camera_info.capture.height = m_camera2->getSensorH();
+    m_camera_info.capture.format = V4L2_PIX_FMT_YUYV;
+    m_camera_info.capture.planes = 1;
+    m_camera_info.capture.buffers = 8;
+    m_camera_info.capture.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+    m_camera_info.capture.memory = V4L2_MEMORY_DMABUF;
+    m_camera_info.capture.ionClient = m_ionCameraClient;
+
+    for(i = 0; i < m_camera_info.capture.buffers; i++){
+        initCameraMemory(&m_camera_info.capture.buffer[i], m_camera_info.capture.planes);
+        m_camera_info.capture.buffer[i].size.extS[0] = m_camera_info.capture.width*m_camera_info.capture.height*2;
+        allocCameraMemory(m_camera_info.capture.ionClient, &m_camera_info.capture.buffer[i], m_camera_info.capture.planes);
+    }
+
+    cam_int_s_input(&(m_camera_info.capture), m_camera_info.sensor_id);
+    cam_int_s_fmt(&(m_camera_info.capture));
+    ALOGV("DEBUG(%s): capture calling reqbuf", __FUNCTION__);
+    cam_int_reqbufs(&(m_camera_info.capture));
+    ALOGV("DEBUG(%s): capture calling querybuf", __FUNCTION__);
+
+    for (i = 0; i < m_camera_info.capture.buffers; i++) {
+        ALOGV("DEBUG(%s): capture initial QBUF [%d]",  __FUNCTION__, i);
+        cam_int_qbuf(&(m_camera_info.capture), i);
+    }
+
+    ALOGV("== stream_on :: capture");
+    cam_int_streamon(&(m_camera_info.capture));
+}
+
+void ExynosCameraHWInterface2::StartISP()
+{
+    int i;
+
+    for (i = 0; i < m_camera_info.isp.buffers; i++) {
+        ALOGV("DEBUG(%s): isp initial QBUF [%d]",  __FUNCTION__, i);
+        cam_int_qbuf(&(m_camera_info.isp), i);
+    }
+
+    ALOGV("== stream_on :: isp");
+    cam_int_streamon(&(m_camera_info.isp));
+
+    for (i = 0; i < m_camera_info.isp.buffers; i++) {
+        ALOGV("DEBUG(%s): isp initial DQBUF [%d]",  __FUNCTION__, i);
+        cam_int_dqbuf(&(m_camera_info.isp));
+    }
+    exynos_v4l2_s_ctrl(m_camera_info.sensor.fd, V4L2_CID_IS_S_STREAM, IS_ENABLE_STREAM);
 }
 
 int ExynosCameraHWInterface2::getCameraId() const
@@ -923,8 +1136,6 @@ int ExynosCameraHWInterface2::notifyRequestQueueNotEmpty()
         return 0;
     }
     m_isRequestQueueNull = false;
-    if (m_requestManager->GetNumEntries() == 0)
-        m_requestManager->SetInitialSkip(5);
     m_mainThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY);
     return 0;
 }
@@ -986,7 +1197,7 @@ int ExynosCameraHWInterface2::constructDefaultRequest(int request_template, came
 int ExynosCameraHWInterface2::allocateStream(uint32_t width, uint32_t height, int format, const camera2_stream_ops_t *stream_ops,
                                     uint32_t *stream_id, uint32_t *format_actual, uint32_t *usage, uint32_t *max_buffers)
 {
-    ALOGD("DEBUG(%s): allocate stream width(%d) height(%d) format(%x)", __FUNCTION__,  width, height, format);
+    ALOGV("DEBUG(%s): allocate stream width(%d) height(%d) format(%x)", __FUNCTION__,  width, height, format);
     char node_name[30];
     int fd = 0, allocCase = 0;
     StreamThread *AllocatedStream;
@@ -1013,18 +1224,6 @@ int ExynosCameraHWInterface2::allocateStream(uint32_t width, uint32_t height, in
 
             if (allocCase == 0) {
                 m_streamThreads[0]  = new StreamThread(this, *stream_id);
-           
-            
-                memset(&node_name, 0x00, sizeof(char[30]));
-                sprintf(node_name, "%s%d", NODE_PREFIX, 44);
-                fd = exynos_v4l2_open(node_name, O_RDWR, 0);
-                if (fd < 0) {
-                    ALOGE("DEBUG(%s): failed to open preview video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
-                }
-                else {
-                    ALOGV("DEBUG(%s): preview video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
-                }
-                m_fd_scp = fd; 
              }
             AllocatedStream = (StreamThread*)(m_streamThreads[0].get());
             m_scp_flushing = false;
@@ -1061,8 +1260,8 @@ int ExynosCameraHWInterface2::allocateStream(uint32_t width, uint32_t height, in
             m_scp_flushing = false;
             m_scp_closing = false;
             m_scp_closed = false;
-            m_requestManager->SetDefaultParameters(width);
-            m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[2] = width;           
+            m_requestManager->SetDefaultParameters(m_camera2->getSensorW());
+            m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[2] = m_camera2->getSensorW();
             return 0;
         }
         else if (allocCase == 1) {
@@ -1097,7 +1296,7 @@ int ExynosCameraHWInterface2::allocateStream(uint32_t width, uint32_t height, in
             return 0;
         }
     }
-    else if (format == HAL_PIXEL_FORMAT_BLOB 
+    else if (format == HAL_PIXEL_FORMAT_BLOB
             && m_camera2->isSupportedJpegResolution(width, height)) {
 
         *stream_id = 1;
@@ -1228,9 +1427,54 @@ int ExynosCameraHWInterface2::registerStreamBuffers(uint32_t stream_id,
     currentNode->ionClient  = targetStreamParms->ionClient;
 
     if (targetStreamParms->streamType == STREAM_TYPE_DIRECT) {
+        if(m_need_streamoff == 1) {
+            ALOGV("(%s): calling capture streamoff", __FUNCTION__);
+            cam_int_streamoff(&(m_camera_info.capture));
+            ALOGV("(%s): calling capture streamoff done", __FUNCTION__);
+
+            m_camera_info.capture.buffers = 0;
+            ALOGV("DEBUG(%s): capture calling reqbuf 0 ", __FUNCTION__);
+            cam_int_reqbufs(&(m_camera_info.capture));
+            ALOGV("DEBUG(%s): capture calling reqbuf 0 done", __FUNCTION__);
+
+            if (m_sensorThread != NULL) {
+                m_sensorThread->release();
+                usleep(500000);
+            } else {
+                ALOGE("+++++++ sensor thread is NULL %d", __LINE__);
+            }
+            m_isIspStarted = false;
+        }
+
         cam_int_s_input(currentNode, m_camera_info.sensor_id);
         cam_int_s_fmt(currentNode);
         cam_int_reqbufs(currentNode);
+
+        if (m_need_streamoff == 1) {
+            m_camera_info.sensor.buffers = NUM_BAYER_BUFFERS;
+            m_camera_info.isp.buffers = m_camera_info.sensor.buffers;
+            m_camera_info.capture.buffers = 8;
+            cam_int_reqbufs(&(m_camera_info.isp));
+            cam_int_reqbufs(&(m_camera_info.sensor));
+
+            for (i = 0; i < 8; i++) {
+                ALOGV("DEBUG(%s): sensor initial QBUF [%d]",  __FUNCTION__, i);
+                memcpy( m_camera_info.sensor.buffer[i].virt.extP[1], &(m_camera_info.dummy_shot),
+                        sizeof(struct camera2_shot_ext));
+                m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 33*1000*1000; // apply from frame #1
+                m_camera_info.dummy_shot.shot.ctl.request.frameCount = -1;
+                cam_int_qbuf(&(m_camera_info.sensor), i);
+            }
+
+            cam_int_reqbufs(&(m_camera_info.capture));
+            cam_int_streamon(&(m_camera_info.capture));
+            cam_int_streamon(&(m_camera_info.sensor));
+
+            m_need_streamoff = 0;
+            m_requestManager->SetInitialSkip(2);
+            m_sensorThread->Start("SensorThread", PRIORITY_DEFAULT, 0);
+            m_mainThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY);
+        }
     }
     else if (targetStreamParms->streamType == STREAM_TYPE_INDIRECT) {
         for(i = 0; i < currentNode->buffers; i++){
@@ -1304,10 +1548,22 @@ int ExynosCameraHWInterface2::registerStreamBuffers(uint32_t stream_id,
             }
         }
     }
+
     ALOGV("DEBUG(%s): calling  streamon", __FUNCTION__);
-    cam_int_streamon(&(targetStreamParms->node));
-    ALOGV("DEBUG(%s): calling  streamon END", __FUNCTION__);  
+    if (targetStreamParms->streamType == 0) {
+        ALOGD("%s(%d), stream id = %d", __FUNCTION__, __LINE__, stream_id);
+        cam_int_streamon(&(targetStreamParms->node));
+    }
+
+    ALOGV("DEBUG(%s): calling  streamon END", __FUNCTION__);
     ALOGV("DEBUG(%s): END registerStreamBuffers", __FUNCTION__);
+
+    if(!m_isIspStarted) {
+        m_isIspStarted = true;
+        StartISP();
+        m_need_streamoff = 1;
+    }
+
     return 0;
 }
 
@@ -1325,6 +1581,7 @@ int ExynosCameraHWInterface2::releaseStream(uint32_t stream_id)
     }
     else if (stream_id == 2 && m_recordingEnabled) {
         m_recordingEnabled = false;
+        m_needsRecordBufferInit = true;
         return 0;
     }
     else {
@@ -1335,7 +1592,10 @@ int ExynosCameraHWInterface2::releaseStream(uint32_t stream_id)
     targetStream->m_releasing = true;
     targetStream->release();
     while (targetStream->m_releasing)
+    {
+        ALOGD("waiting for stream thread release (%d)", __LINE__);
         usleep(2000);
+    }
     targetStream->m_activated = false;
     ALOGV("DEBUG(%s): DONE", __FUNCTION__);
     return 0;
@@ -1577,25 +1837,16 @@ int    BayerBufManager::MarkSensorDequeue(int index, int reqFrameCnt, nsecs_t *t
 {
     ALOGV("DEBUG(%s)    : BayerIndex[%d] reqFrameCnt(%d)", __FUNCTION__, index, reqFrameCnt);
 
-    // sanity check
-    if (index != sensorDequeueHead) {
-        ALOGV("DEBUG(%s)    : Abnormal BayerIndex[%d] - expected[%d]", __FUNCTION__, index, sensorDequeueHead);
-        return -1;
-    }
     if (entries[index].status != BAYER_ON_SENSOR) {
-        ALOGV("DEBUG(%s)    : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
+        ALOGE("DEBUG(%s)    : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
             index, entries[index].status, BAYER_ON_SENSOR);
         return -1;
     }
 
     entries[index].status = BAYER_ON_HAL_FILLED;
-    entries[index].reqFrameCnt = reqFrameCnt;
-    entries[index].timeStamp = *timeStamp;
     numOnHalFilled++;
     numOnSensor--;
-    sensorDequeueHead = GetNextIndex(index);
-    ALOGV("DEBUG(%s) END: HAL-e(%d) HAL-f(%d) Sensor(%d) ISP(%d) ",
-        __FUNCTION__, numOnHalEmpty, numOnHalFilled, numOnSensor, numOnIsp);
+
     return 0;
 }
 
@@ -1710,6 +1961,8 @@ void ExynosCameraHWInterface2::m_mainThreadFunc(SignalDrivenThread * self)
     MainThread *  selfThread      = ((MainThread*)self);
     int res = 0;
 
+    int ret;
+
     ALOGV("DEBUG(%s): m_mainThreadFunc (%x)", __FUNCTION__, currentSignal);
 
     if (currentSignal & SIGNAL_THREAD_RELEASE) {
@@ -1722,11 +1975,10 @@ void ExynosCameraHWInterface2::m_mainThreadFunc(SignalDrivenThread * self)
 
     if (currentSignal & SIGNAL_MAIN_REQ_Q_NOT_EMPTY) {
         ALOGV("DEBUG(%s): MainThread processing SIGNAL_MAIN_REQ_Q_NOT_EMPTY", __FUNCTION__);
-        if (m_requestManager->IsRequestQueueFull()==false
-                && m_requestManager->GetNumEntries()<NUM_MAX_DEQUEUED_REQUEST) {
+        if (m_requestManager->IsRequestQueueFull()==false) {
             m_requestQueueOps->dequeue_request(m_requestQueueOps, &currentRequest);
             if (NULL == currentRequest) {
-                ALOGV("DEBUG(%s): dequeue_request returned NULL ", __FUNCTION__);
+                ALOGE("DEBUG(%s)(0x%x): dequeue_request returned NULL ", __FUNCTION__, currentSignal);
                 m_isRequestQueueNull = true;
             }
             else {
@@ -1734,8 +1986,7 @@ void ExynosCameraHWInterface2::m_mainThreadFunc(SignalDrivenThread * self)
 
                 m_numOfRemainingReqInSvc = m_requestQueueOps->request_count(m_requestQueueOps);
                 ALOGV("DEBUG(%s): remaining req cnt (%d)", __FUNCTION__, m_numOfRemainingReqInSvc);
-                if (m_requestManager->IsRequestQueueFull()==false
-                    && m_requestManager->GetNumEntries()<NUM_MAX_DEQUEUED_REQUEST)
+                if (m_requestManager->IsRequestQueueFull()==false)
                     selfThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY); // dequeue repeatedly
 
                 m_sensorThread->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING);
@@ -1749,12 +2000,22 @@ void ExynosCameraHWInterface2::m_mainThreadFunc(SignalDrivenThread * self)
     if (currentSignal & SIGNAL_MAIN_STREAM_OUTPUT_DONE) {
         ALOGV("DEBUG(%s): MainThread processing SIGNAL_MAIN_STREAM_OUTPUT_DONE", __FUNCTION__);
         /*while (1)*/ {
-            m_requestManager->PrepareFrame(&numEntries, &frameSize, &preparedFrame);
+            ret = m_requestManager->PrepareFrame(&numEntries, &frameSize, &preparedFrame);
+            if (ret == false)
+                ALOGD("++++++ PrepareFrame ret = %d", ret);
+
             m_requestManager->DeregisterRequest(&deregisteredRequest);
-            m_requestQueueOps->free_request(m_requestQueueOps, deregisteredRequest);
-            m_frameQueueOps->dequeue_frame(m_frameQueueOps, numEntries, frameSize, &currentFrame);
+
+            ret = m_requestQueueOps->free_request(m_requestQueueOps, deregisteredRequest);
+            if (ret < 0)
+                ALOGD("++++++ free_request ret = %d", ret);
+
+            ret = m_frameQueueOps->dequeue_frame(m_frameQueueOps, numEntries, frameSize, &currentFrame);
+            if (ret < 0)
+                ALOGD("++++++ dequeue_frame ret = %d", ret);
+
             if (currentFrame==NULL) {
-                ALOGD("DBG(%s): frame dequeue returned NULL",__FUNCTION__ );
+                ALOGV("DBG(%s): frame dequeue returned NULL",__FUNCTION__ );
             }
             else {
                 ALOGV("DEBUG(%s): frame dequeue done. numEntries(%d) frameSize(%d)",__FUNCTION__ , numEntries, frameSize);
@@ -1763,108 +2024,39 @@ void ExynosCameraHWInterface2::m_mainThreadFunc(SignalDrivenThread * self)
             if (res==0) {
                 ALOGV("DEBUG(%s): frame metadata append success",__FUNCTION__);
                 m_frameQueueOps->enqueue_frame(m_frameQueueOps, currentFrame);
-            }
-            else {
-                ALOGE("ERR(%s): frame metadata append fail (%d)",__FUNCTION__, res);
-            }
-        }
-        if (!m_isRequestQueueNull) {
-            selfThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY);
-        }
-
-        if (getInProgressCount()>0) {
-            ALOGV("DEBUG(%s): STREAM_OUTPUT_DONE and signalling REQ_PROCESSING",__FUNCTION__);
-            m_sensorThread->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING);
-        }
-    }
-    ALOGV("DEBUG(%s): MainThread Exit", __FUNCTION__);
-    return;
-}
-
-void ExynosCameraHWInterface2::m_sensorThreadInitialize(SignalDrivenThread * self)
-{
-    ALOGV("DEBUG(%s): ", __FUNCTION__ );
-    SensorThread * selfThread = ((SensorThread*)self);
-    char node_name[30];
-    int fd = 0;
-    int i =0, j=0;
-
-    if(m_cameraId == 0)
-        m_camera_info.sensor_id = SENSOR_NAME_S5K4E5;
-    else
-        m_camera_info.sensor_id = SENSOR_NAME_S5K6A3;
-
-    memset(&m_camera_info.dummy_shot, 0x00, sizeof(struct camera2_shot_ext));
-       m_camera_info.dummy_shot.shot.ctl.request.metadataMode = METADATA_MODE_FULL;
-       m_camera_info.dummy_shot.shot.magicNumber = 0x23456789;
-
-    m_camera_info.dummy_shot.dis_bypass = 1;
-    m_camera_info.dummy_shot.dnr_bypass = 1;
-
-    /*sensor setting*/
-       m_camera_info.dummy_shot.shot.ctl.sensor.exposureTime = 0;
-       m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 0;
-       m_camera_info.dummy_shot.shot.ctl.sensor.sensitivity = 0;
-
-    m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[0] = 0;
-    m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[1] = 0;
-    //m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[2] = 1920;
-
-       /*request setting*/
-       m_camera_info.dummy_shot.request_sensor = 1;
-       m_camera_info.dummy_shot.request_scc = 0;
-       m_camera_info.dummy_shot.request_scp = 0;
-    m_camera_info.dummy_shot.shot.ctl.request.outputStreams[0] = 0;
-    m_camera_info.dummy_shot.shot.ctl.request.outputStreams[1] = 0;
-    m_camera_info.dummy_shot.shot.ctl.request.outputStreams[2] = 0;
-
-    /*sensor init*/
-    memset(&node_name, 0x00, sizeof(char[30]));
-    sprintf(node_name, "%s%d", NODE_PREFIX, 40);
-    fd = exynos_v4l2_open(node_name, O_RDWR, 0);
-
-    if (fd < 0) {
-        ALOGE("ERR(%s): failed to open sensor video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
-    }
-    else {
-        ALOGV("DEBUG(%s): sensor video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
-    }
-    m_camera_info.sensor.fd = fd;
-
-    m_camera_info.sensor.width = m_camera2->getSensorRawW();
-    m_camera_info.sensor.height = m_camera2->getSensorRawH();
-
-    m_camera_info.sensor.format = V4L2_PIX_FMT_SBGGR16;
-    m_camera_info.sensor.planes = 2;
-    m_camera_info.sensor.buffers = NUM_BAYER_BUFFERS;
-    m_camera_info.sensor.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
-    m_camera_info.sensor.memory = V4L2_MEMORY_DMABUF;
-    m_camera_info.sensor.ionClient = m_ionCameraClient;
+            }
+            else {
+                ALOGE("ERR(%s): frame metadata append fail (%d)",__FUNCTION__, res);
+            }
+        }
+        if (!m_isRequestQueueNull) {
+            selfThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY);
+        }
 
-    for(i = 0; i < m_camera_info.sensor.buffers; i++){
-        initCameraMemory(&m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes);
-        m_camera_info.sensor.buffer[i].size.extS[0] = m_camera_info.sensor.width*m_camera_info.sensor.height*2;
-        m_camera_info.sensor.buffer[i].size.extS[1] = 8*1024; // HACK, driver use 8*1024, should be use predefined value
-        allocCameraMemory(m_camera_info.sensor.ionClient, &m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes);
+        if (getInProgressCount()>0) {
+            ALOGV("DEBUG(%s): STREAM_OUTPUT_DONE and signalling REQ_PROCESSING",__FUNCTION__);
+            m_sensorThread->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING);
+        }
     }
+    ALOGV("DEBUG(%s): MainThread Exit", __FUNCTION__);
+    return;
+}
 
-    m_initFlag1 = true;
-
-
-    while (!m_initFlag2) // temp
-        usleep(100000);
-    ALOGV("DEBUG(%s): END of SensorThreadInitialize ", __FUNCTION__);
+void ExynosCameraHWInterface2::m_sensorThreadInitialize(SignalDrivenThread * self)
+{
+    ALOGV("DEBUG(%s): ", __FUNCTION__ );
+    /* will add */
     return;
 }
 
 
 void ExynosCameraHWInterface2::DumpInfoWithShot(struct camera2_shot_ext * shot_ext)
 {
-    ALOGV("####  common Section");
-    ALOGV("####                 magic(%x) ",
+    ALOGD("####  common Section");
+    ALOGD("####                 magic(%x) ",
         shot_ext->shot.magicNumber);
-    ALOGV("####  ctl Section");
-    ALOGV("####     meta(%d) aper(%f) exp(%lld) duration(%lld) ISO(%d) AWB(%d)",
+    ALOGD("####  ctl Section");
+    ALOGD("####     meta(%d) aper(%f) exp(%lld) duration(%lld) ISO(%d) AWB(%d)",
         shot_ext->shot.ctl.request.metadataMode,
         shot_ext->shot.ctl.lens.aperture,
         shot_ext->shot.ctl.sensor.exposureTime,
@@ -1872,13 +2064,13 @@ void ExynosCameraHWInterface2::DumpInfoWithShot(struct camera2_shot_ext * shot_e
         shot_ext->shot.ctl.sensor.sensitivity,
         shot_ext->shot.ctl.aa.awbMode);
 
-    ALOGV("####                 OutputStream Sensor(%d) SCP(%d) SCC(%d) pv(%d) rec(%d)",
+    ALOGD("####                 OutputStream Sensor(%d) SCP(%d) SCC(%d) pv(%d) rec(%d)",
         shot_ext->request_sensor, shot_ext->request_scp, shot_ext->request_scc,
         shot_ext->shot.ctl.request.outputStreams[0],
         shot_ext->shot.ctl.request.outputStreams[2]);
 
-    ALOGV("####  DM Section");
-    ALOGV("####     meta(%d) aper(%f) exp(%lld) duration(%lld) ISO(%d) timestamp(%lld) AWB(%d) cnt(%d)",
+    ALOGD("####  DM Section");
+    ALOGD("####     meta(%d) aper(%f) exp(%lld) duration(%lld) ISO(%d) timestamp(%lld) AWB(%d) cnt(%d)",
         shot_ext->shot.dm.request.metadataMode,
         shot_ext->shot.dm.lens.aperture,
         shot_ext->shot.dm.sensor.exposureTime,
@@ -1894,13 +2086,16 @@ void ExynosCameraHWInterface2::m_sensorThreadFunc(SignalDrivenThread * self)
     uint32_t        currentSignal = self->GetProcessingSignal();
     SensorThread *  selfThread      = ((SensorThread*)self);
     int index;
+    int index_isp;
     status_t res;
     nsecs_t frameTime;
     int bayersOnSensor = 0, bayersOnIsp = 0;
+    int j = 0;
+    bool isCapture = false;
     ALOGV("DEBUG(%s): m_sensorThreadFunc (%x)", __FUNCTION__, currentSignal);
 
     if (currentSignal & SIGNAL_THREAD_RELEASE) {
-        ALOGD("(%s): ENTER processing SIGNAL_THREAD_RELEASE", __FUNCTION__);
+        ALOGV("(%s): ENTER processing SIGNAL_THREAD_RELEASE", __FUNCTION__);
 
         ALOGV("(%s): calling sensor streamoff", __FUNCTION__);
         cam_int_streamoff(&(m_camera_info.sensor));
@@ -1910,11 +2105,11 @@ void ExynosCameraHWInterface2::m_sensorThreadFunc(SignalDrivenThread * self)
         ALOGV("DEBUG(%s): sensor calling reqbuf 0 ", __FUNCTION__);
         cam_int_reqbufs(&(m_camera_info.sensor));
         ALOGV("DEBUG(%s): sensor calling reqbuf 0 done", __FUNCTION__);
-        
+
         ALOGV("(%s): calling ISP streamoff", __FUNCTION__);
         isp_int_streamoff(&(m_camera_info.isp));
         ALOGV("(%s): calling ISP streamoff done", __FUNCTION__);
-        
+
         m_camera_info.isp.buffers = 0;
         ALOGV("DEBUG(%s): isp calling reqbuf 0 ", __FUNCTION__);
         cam_int_reqbufs(&(m_camera_info.isp));
@@ -1922,7 +2117,7 @@ void ExynosCameraHWInterface2::m_sensorThreadFunc(SignalDrivenThread * self)
 
         exynos_v4l2_s_ctrl(m_camera_info.sensor.fd, V4L2_CID_IS_S_STREAM, IS_DISABLE_STREAM);
 
-        ALOGD("(%s): EXIT processing SIGNAL_THREAD_RELEASE", __FUNCTION__);
+        ALOGV("(%s): EXIT processing SIGNAL_THREAD_RELEASE", __FUNCTION__);
         selfThread->SetSignal(SIGNAL_THREAD_TERMINATE);
         return;
     }
@@ -1931,331 +2126,150 @@ void ExynosCameraHWInterface2::m_sensorThreadFunc(SignalDrivenThread * self)
     {
         ALOGV("DEBUG(%s): SensorThread processing SIGNAL_SENSOR_START_REQ_PROCESSING", __FUNCTION__);
         int targetStreamIndex = 0, i=0;
-        int matchedFrameCnt, processingReqIndex;
+        int matchedFrameCnt = -1, processingReqIndex;
         struct camera2_shot_ext *shot_ext;
-        if (!m_isSensorStarted)
-        {
-            m_isSensorStarted = true;
-            ALOGD("(%s): calling preview streamon", __FUNCTION__);
-            cam_int_streamon(&(m_streamThreads[0]->m_parameters.node));
-            ALOGD("(%s): calling isp streamon done", __FUNCTION__);
-            for (i = 0; i < m_camera_info.isp.buffers; i++) {
-                ALOGV("DEBUG(%s): isp initial QBUF [%d]",  __FUNCTION__, i);
-                cam_int_qbuf(&(m_camera_info.isp), i);
-            }
-
-            cam_int_streamon(&(m_camera_info.isp));
-
-            for (i = 0; i < m_camera_info.isp.buffers; i++) {
-                ALOGV("DEBUG(%s): isp initial DQBUF [%d]",  __FUNCTION__, i);
-                cam_int_dqbuf(&(m_camera_info.isp));
-            }
+        struct camera2_shot_ext *shot_ext_capture;
 
-            ALOGV("DEBUG(%s): calling isp sctrl done", __FUNCTION__);
-            exynos_v4l2_s_ctrl(m_camera_info.sensor.fd, V4L2_CID_IS_S_STREAM, IS_ENABLE_STREAM);
-            ALOGV("DEBUG(%s): calling sensor sctrl done", __FUNCTION__);
-
-        }
-
-        ALOGV("### Sensor DQBUF start");
+        /* dqbuf from sensor */
+        ALOGV("Sensor DQbuf start");
         index = cam_int_dqbuf(&(m_camera_info.sensor));
-        frameTime = systemTime();
-        ALOGV("### Sensor DQBUF done BayerIndex(%d)", index);
         shot_ext = (struct camera2_shot_ext *)(m_camera_info.sensor.buffer[index].virt.extP[1]);
+
+        m_recordOutput = shot_ext->shot.ctl.request.outputStreams[2];
+
         matchedFrameCnt = m_requestManager->FindFrameCnt(shot_ext);
-        ALOGV("### Matched(%d) last(%d), dqbuf timestamp(%lld)", matchedFrameCnt, lastFrameCnt
-            , shot_ext->shot.dm.sensor.timeStamp);
+
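+        /* FindFrameCnt() returns -1 when no pending request matches this bayer frame */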
         if (matchedFrameCnt != -1) {
-            while (matchedFrameCnt == lastFrameCnt) {
-                 m_BayerManager->MarkSensorDequeue(index, -1, &frameTime);
-                ALOGV("### Sensor DQBUF start");
-                index = cam_int_dqbuf(&(m_camera_info.sensor));
                 frameTime = systemTime();
-                ALOGV("### Sensor DQBUF done BayerIndex(%d)", index);
-                shot_ext = (struct camera2_shot_ext *)(m_camera_info.sensor.buffer[index].virt.extP[1]);
-                matchedFrameCnt = m_requestManager->FindFrameCnt(shot_ext);
-                ALOGV("### Matched(%d) last(%d)", matchedFrameCnt, lastFrameCnt);
-            }
-            lastFrameCnt = matchedFrameCnt;
-                       m_scp_closing = false;
-                       m_scp_closed = false;
-        }
-        m_BayerManager->MarkSensorDequeue(index, matchedFrameCnt, &frameTime);
-
         m_requestManager->RegisterTimestamp(matchedFrameCnt, &frameTime);
-        ALOGV("### Sensor DQed BayerIndex[%d] passing to ISP. frameCnt(%d) timestamp(%lld)",
-            index, matchedFrameCnt, frameTime);
-
-        if (!(m_ispThread.get()))
-            return;
-
-        m_ispThread->SetSignal(SIGNAL_ISP_START_BAYER_INPUT);
-
-        while (m_BayerManager->GetNumOnSensor() <= NUM_SENSOR_QBUF) {
-
-            index = m_BayerManager->GetIndexForSensorEnqueue();
-            if (index == -1) {
-                ALOGE("ERR(%s) No free Bayer buffer", __FUNCTION__);
-                break;
-            }
-            processingReqIndex = m_requestManager->MarkProcessingRequest(&(m_camera_info.sensor.buffer[index]));
-
-            shot_ext = (struct camera2_shot_ext *)(m_camera_info.sensor.buffer[index].virt.extP[1]);
-            if (processingReqIndex == -1) {
-                ALOGV("DEBUG(%s) req underrun => inserting bubble to BayerIndex(%d)", __FUNCTION__, index);
-                memcpy(shot_ext, &(m_camera_info.dummy_shot), sizeof(struct camera2_shot_ext));
+            m_requestManager->UpdateIspParameters(shot_ext, matchedFrameCnt);
+            ALOGD("### Isp Qbuf start(%d) count (%d), SCP(%d) SCC(%d) DIS(%d) shot_size(%d)",
+                index,
+                shot_ext->shot.ctl.request.frameCount,
+                shot_ext->request_scp,
+                shot_ext->request_scc,
+                shot_ext->dis_bypass, sizeof(camera2_shot));
+
+            if(shot_ext->request_scc == 1) {
+                isCapture = true;
             }
 
-            m_BayerManager->MarkSensorEnqueue(index);
-            if (m_scp_closing || m_scp_closed) {
-                ALOGV("(%s): SCP_CLOSING(%d) SCP_CLOSED(%d)", __FUNCTION__, m_scp_closing, m_scp_closed);
-                shot_ext->request_scc = 0;
-                shot_ext->request_scp = 0;
-                shot_ext->request_sensor = 0;
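+            /* still capture: request SCC (capture) output on every queued ISP buffer */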
+            if(isCapture)
+            {
+                for(j = 0; j < m_camera_info.isp.buffers; j++)
+                {
+                    shot_ext_capture = (struct camera2_shot_ext *)(m_camera_info.isp.buffer[j].virt.extP[1]);
+                    shot_ext_capture->request_scc = 1;
+                }
             }
-            ALOGV("### Sensor QBUF start BayerIndex[%d]", index);
-            cam_int_qbuf(&(m_camera_info.sensor), index);
-            ALOGV("### Sensor QBUF done");
-        }
-        if (!m_closing){
-            selfThread->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING);
-        }
-        return;
-    }
-    return;
-}
 
-void ExynosCameraHWInterface2::m_ispThreadInitialize(SignalDrivenThread * self)
-{
-    ALOGV("DEBUG(%s): ", __FUNCTION__ );
-    IspThread * selfThread = ((IspThread*)self);
-    char node_name[30];
-    int fd = 0;
-    int i =0, j=0;
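+            /* queue the bayer buffer, with its updated shot settings, to the ISP */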
+            cam_int_qbuf(&(m_camera_info.isp), index);
+            //m_ispThread->SetSignal(SIGNAL_ISP_START_BAYER_DEQUEUE);
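+            /* brief fixed delay (10 ms) before dequeuing the processed frame from the ISP */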
 
+            usleep(10000);
+            if(isCapture)
+            {
+                for(j = 0; j < m_camera_info.isp.buffers; j++)
+                {
+                    shot_ext_capture = (struct camera2_shot_ext *)(m_camera_info.isp.buffer[j].virt.extP[1]);
+                    ALOGD("shot_ext_capture[%d] scp = %d, scc = %d", j, shot_ext_capture->request_scp, shot_ext_capture->request_scc);
+//                    DumpInfoWithShot(shot_ext_capture);
+                }
+            }
 
-    while (!m_initFlag1) //temp
-        usleep(100000);
 
-    /*isp init*/
-    memset(&node_name, 0x00, sizeof(char[30]));
-    sprintf(node_name, "%s%d", NODE_PREFIX, 41);
-    fd = exynos_v4l2_open(node_name, O_RDWR, 0);
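+            /* block until the ISP returns the processed buffer */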
+            ALOGV("### isp DQBUF start");
+            index_isp = cam_int_dqbuf(&(m_camera_info.isp));
+            //m_previewOutput = 0;
 
-    if (fd < 0) {
-        ALOGE("ERR(%s): failed to open isp video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
-    }
-    else {
-        ALOGV("DEBUG(%s): isp video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
-    }
-    m_camera_info.isp.fd = fd;
+            if(isCapture)
+            {
+                for(j = 0; j < m_camera_info.isp.buffers; j++)
+                {
+                    shot_ext_capture = (struct camera2_shot_ext *)(m_camera_info.isp.buffer[j].virt.extP[1]);
+                    ALOGD("shot_ext_capture[%d] scp = %d, scc = %d", j, shot_ext_capture->request_scp, shot_ext_capture->request_scc);
+//                    DumpInfoWithShot(shot_ext_capture);
+                }
+            }
+            shot_ext = (struct camera2_shot_ext *)(m_camera_info.isp.buffer[index_isp].virt.extP[1]);
 
-    m_camera_info.isp.width = m_camera_info.sensor.width;
-    m_camera_info.isp.height = m_camera_info.sensor.height;
-    m_camera_info.isp.format = m_camera_info.sensor.format;
-    m_camera_info.isp.planes = m_camera_info.sensor.planes;
-    m_camera_info.isp.buffers = m_camera_info.sensor.buffers;
-    m_camera_info.isp.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
-    m_camera_info.isp.memory = V4L2_MEMORY_DMABUF;
+            ALOGV("### Isp DQbuf done(%d) count (%d), SCP(%d) SCC(%d) DIS(%d) shot_size(%d)",
+                index_isp,
+                shot_ext->shot.ctl.request.frameCount,
+                shot_ext->request_scp,
+                shot_ext->request_scc,
+                shot_ext->dis_bypass, sizeof(camera2_shot));
 
-    for(i = 0; i < m_camera_info.isp.buffers; i++){
-        initCameraMemory(&m_camera_info.isp.buffer[i], m_camera_info.isp.planes);
-        m_camera_info.isp.buffer[i].size.extS[0]    = m_camera_info.sensor.buffer[i].size.extS[0];
-        m_camera_info.isp.buffer[i].size.extS[1]    = m_camera_info.sensor.buffer[i].size.extS[1];
-        m_camera_info.isp.buffer[i].fd.extFd[0]     = m_camera_info.sensor.buffer[i].fd.extFd[0];
-        m_camera_info.isp.buffer[i].fd.extFd[1]     = m_camera_info.sensor.buffer[i].fd.extFd[1];
-        m_camera_info.isp.buffer[i].virt.extP[0]    = m_camera_info.sensor.buffer[i].virt.extP[0];
-        m_camera_info.isp.buffer[i].virt.extP[1]    = m_camera_info.sensor.buffer[i].virt.extP[1];
-    };
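+            /* capture frame ready: wake the capture (SCC) stream thread, then clear the SCC request flags */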
+            if(isCapture) {
+                ALOGD("======= request_scc is 1");
+                m_streamThreads[1]->SetSignal(SIGNAL_STREAM_DATA_COMING);
 
-    cam_int_s_input(&(m_camera_info.isp), m_camera_info.sensor_id);
-    cam_int_s_fmt(&(m_camera_info.isp));
-    ALOGV("DEBUG(%s): isp calling reqbuf", __FUNCTION__);
-    cam_int_reqbufs(&(m_camera_info.isp));
-    ALOGV("DEBUG(%s): isp calling querybuf", __FUNCTION__);
-    ALOGV("DEBUG(%s): isp mem alloc done",  __FUNCTION__);
+                for(j = 0; j < m_camera_info.isp.buffers; j++)
+                {
+                    shot_ext_capture = (struct camera2_shot_ext *)(m_camera_info.isp.buffer[j].virt.extP[1]);
+                    shot_ext_capture->request_scc = 0;
+                }
 
-    cam_int_s_input(&(m_camera_info.sensor), m_camera_info.sensor_id);
-    ALOGV("DEBUG(%s): sensor s_input done",  __FUNCTION__);
-    if (cam_int_s_fmt(&(m_camera_info.sensor))< 0) {
-        ALOGE("ERR(%s): sensor s_fmt fail",  __FUNCTION__);
-    }
-    ALOGV("DEBUG(%s): sensor s_fmt done",  __FUNCTION__);
-    cam_int_reqbufs(&(m_camera_info.sensor));
-    ALOGV("DEBUG(%s): sensor reqbuf done",  __FUNCTION__);
-    for (i = 0; i < m_camera_info.sensor.buffers; i++) {
-        ALOGV("DEBUG(%s): sensor initial QBUF [%d]",  __FUNCTION__, i);
-        memcpy( m_camera_info.sensor.buffer[i].virt.extP[1], &(m_camera_info.dummy_shot),
-                sizeof(struct camera2_shot_ext));
-        m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 33*1000*1000; // apply from frame #1
+                isCapture = false;
+            }
 
-        cam_int_qbuf(&(m_camera_info.sensor), i);
-        m_BayerManager->MarkSensorEnqueue(i);
-    }
-    ALOGE("== stream_on :: m_camera_info.sensor");
-    cam_int_streamon(&(m_camera_info.sensor));
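+            /* preview (SCP) output requested: wake the preview stream thread */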
+            if (shot_ext->request_scp) {
+                m_previewOutput = 1;
+                m_streamThreads[0]->SetSignal(SIGNAL_STREAM_DATA_COMING);
+            }
 
 
+            ALOGV("(%s): SCP_CLOSING check sensor(%d) scc(%d) scp(%d) ", __FUNCTION__,
+               shot_ext->request_sensor, shot_ext->request_scc, shot_ext->request_scp);
+            if (shot_ext->request_scc + shot_ext->request_scp + shot_ext->request_sensor == 0) {
+                ALOGV("(%s): SCP_CLOSING check OK ", __FUNCTION__);
+                m_scp_closed = true;
+            }
+            else
+                m_scp_closed = false;
 
-/*capture init*/
-    memset(&node_name, 0x00, sizeof(char[30]));
-    sprintf(node_name, "%s%d", NODE_PREFIX, 42);
-    fd = exynos_v4l2_open(node_name, O_RDWR, 0);
+            m_requestManager->ApplyDynamicMetadata(shot_ext);
+        }
 
-    if (fd < 0) {
-        ALOGE("ERR(%s): failed to open capture video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
-    }
-    else {
-        ALOGV("DEBUG(%s): capture video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
-    }
-    m_camera_info.capture.fd = fd;
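+        /* bind this bayer buffer to the next pending request before returning it to the sensor */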
+        processingReqIndex = m_requestManager->MarkProcessingRequest(&(m_camera_info.sensor.buffer[index]));
+        if (processingReqIndex == -1)
+        {
+            ALOGE("ERR(%s): req underrun => inserting bubble to BayerIndex(%d)", __FUNCTION__, index);
+        }
 
-    m_camera_info.capture.width = m_camera2->getSensorW();
-    m_camera_info.capture.height = m_camera2->getSensorH();
-    m_camera_info.capture.format = V4L2_PIX_FMT_YUYV;
-    m_camera_info.capture.planes = 1;
-    m_camera_info.capture.buffers = 8;
-    m_camera_info.capture.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
-    m_camera_info.capture.memory = V4L2_MEMORY_DMABUF;
-    m_camera_info.capture.ionClient = m_ionCameraClient;
+        shot_ext = (struct camera2_shot_ext *)(m_camera_info.sensor.buffer[index].virt.extP[1]);
+        if (m_scp_closing || m_scp_closed) {
+            ALOGD("(%s): SCP_CLOSING(%d) SCP_CLOSED(%d)", __FUNCTION__, m_scp_closing, m_scp_closed);
+            shot_ext->request_scc = 0;
+            shot_ext->request_scp = 0;
+            shot_ext->request_sensor = 0;
+        }
 
-    for(i = 0; i < m_camera_info.capture.buffers; i++){
-        initCameraMemory(&m_camera_info.capture.buffer[i], m_camera_info.capture.planes);
-        m_camera_info.capture.buffer[i].size.extS[0] = m_camera_info.capture.width*m_camera_info.capture.height*2;
-        allocCameraMemory(m_camera_info.capture.ionClient, &m_camera_info.capture.buffer[i], m_camera_info.capture.planes);
-    }
+        ALOGD("### Sensor Qbuf start(%d) SCP(%d) SCC(%d) DIS(%d)", index, shot_ext->request_scp, shot_ext->request_scc, shot_ext->dis_bypass);
 
-    cam_int_s_input(&(m_camera_info.capture), m_camera_info.sensor_id);
-    cam_int_s_fmt(&(m_camera_info.capture));
-    ALOGV("DEBUG(%s): capture calling reqbuf", __FUNCTION__);
-    cam_int_reqbufs(&(m_camera_info.capture));
-    ALOGV("DEBUG(%s): capture calling querybuf", __FUNCTION__);
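+        /* re-queue the bayer buffer to the sensor node */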
+        cam_int_qbuf(&(m_camera_info.sensor), index);
+        ALOGV("### Sensor QBUF done");
 
-    for (i = 0; i < m_camera_info.capture.buffers; i++) {
-        ALOGV("DEBUG(%s): capture initial QBUF [%d]",  __FUNCTION__, i);
-        cam_int_qbuf(&(m_camera_info.capture), i);
+        if (!m_closing){
+            selfThread->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING);
+        }
+        return;
     }
+    return;
+}
 
-    ALOGE("== stream_on :: m_camera_info.capture");
-    cam_int_streamon(&(m_camera_info.capture));
-
-    m_initFlag2 = true;
-    ALOGV("DEBUG(%s): END of IspThreadInitialize ", __FUNCTION__);
+void ExynosCameraHWInterface2::m_ispThreadInitialize(SignalDrivenThread * self)
+{
+    ALOGV("DEBUG(%s): ", __FUNCTION__ );
+    /* will add */
     return;
 }
 
 
 void ExynosCameraHWInterface2::m_ispThreadFunc(SignalDrivenThread * self)
 {
-    uint32_t        currentSignal = self->GetProcessingSignal();
-    IspThread *  selfThread      = ((IspThread*)self);
-    int index;
-    status_t res;
-    ALOGV("DEBUG(%s): m_ispThreadFunc (%x)", __FUNCTION__, currentSignal);
-
-    if (currentSignal & SIGNAL_THREAD_RELEASE) {
-        ALOGD("(%s): ENTER processing SIGNAL_THREAD_RELEASE", __FUNCTION__);
-
-        ALOGV("(%s): calling capture streamoff", __FUNCTION__);
-        cam_int_streamoff(&(m_camera_info.capture));
-        ALOGV("(%s): calling capture streamoff done", __FUNCTION__);
-
-        m_camera_info.capture.buffers = 0;
-        ALOGV("DEBUG(%s): capture calling reqbuf 0 ", __FUNCTION__);
-        cam_int_reqbufs(&(m_camera_info.capture));
-        ALOGV("DEBUG(%s): capture calling reqbuf 0 done", __FUNCTION__);
-
-        ALOGD("(%s): EXIT  processing SIGNAL_THREAD_RELEASE ", __FUNCTION__);
-        selfThread->SetSignal(SIGNAL_THREAD_TERMINATE);
-        return;
-    }
-
-    if (currentSignal & SIGNAL_ISP_START_BAYER_INPUT)
-    {
-        struct camera2_shot_ext *shot_ext;
-        int bayerIndexToEnqueue = 0;
-        int processingFrameCnt = 0;
-
-           ALOGV("DEBUG(%s): IspThread processing SIGNAL_ISP_START_BAYER_INPUT", __FUNCTION__);
-
-        bayerIndexToEnqueue = m_BayerManager->GetIndexForIspEnqueue(&processingFrameCnt);
-        shot_ext = (struct camera2_shot_ext *)(m_camera_info.sensor.buffer[bayerIndexToEnqueue].virt.extP[1]);
-
-        ALOGV("### isp QBUF start bayerIndex[%d] for frameCnt(%d)", bayerIndexToEnqueue, processingFrameCnt);
-
-        if (processingFrameCnt != -1) {
-            ALOGV("### writing output stream info");
-            m_requestManager->UpdateIspParameters(shot_ext, processingFrameCnt);
-        }
-        else {
-            memcpy(shot_ext, &(m_camera_info.dummy_shot), sizeof(struct camera2_shot_ext));
-        }
-        if (m_scp_flushing) {
-            shot_ext->request_scp = 1;
-        }
-        if (m_scp_closing || m_scp_closed) {
-            ALOGV("(%s): SCP_CLOSING(%d) SCP_CLOSED(%d)", __FUNCTION__, m_scp_closing, m_scp_closed);
-            shot_ext->request_scc = 0;
-            shot_ext->request_scp = 0;
-            shot_ext->request_sensor = 0;
-        }
-        cam_int_qbuf(&(m_camera_info.isp), bayerIndexToEnqueue);
-        ALOGV("### isp QBUF done bayerIndex[%d] scp(%d)", bayerIndexToEnqueue, shot_ext->request_scp);
-        m_BayerManager->MarkIspEnqueue(bayerIndexToEnqueue);
-
-        if (m_BayerManager->GetNumOnHalFilled() != 0) {
-            // input has priority
-            selfThread->SetSignal(SIGNAL_ISP_START_BAYER_INPUT);
-            return;
-        }
-        else {
-            selfThread->SetSignal(SIGNAL_ISP_START_BAYER_DEQUEUE);
-        }
-    }
-
-    if (currentSignal & SIGNAL_ISP_START_BAYER_DEQUEUE)
-    {
-        struct camera2_shot_ext *shot_ext;
-        int bayerIndexToDequeue = 0;
-        int processingFrameCnt = 0;
-           ALOGV("DEBUG(%s): IspThread processing SIGNAL_ISP_START_BAYER_DEQUEUE", __FUNCTION__);
-        bayerIndexToDequeue = m_BayerManager->GetIndexForIspDequeue(&processingFrameCnt);
-        m_ispProcessingFrameCnt = processingFrameCnt;
-        m_previewOutput = 0;
-        m_recordOutput = 0;
-        shot_ext = (struct camera2_shot_ext *)(m_camera_info.sensor.buffer[bayerIndexToDequeue].virt.extP[1]);
-        if (processingFrameCnt != -1 || m_scp_flushing) // bubble
-        {
-            if (shot_ext->request_scc) {
-                m_streamThreads[1]->SetSignal(SIGNAL_STREAM_DATA_COMING);
-            }
-            m_previewOutput = shot_ext->shot.ctl.request.outputStreams[0];
-            m_recordOutput = shot_ext->shot.ctl.request.outputStreams[2];
-            if (m_previewOutput || m_recordOutput) {
-                m_streamThreads[0]->SetSignal(SIGNAL_STREAM_DATA_COMING);
-            }            
-        }
-        ALOGV("### isp DQBUF start");
-        index = cam_int_dqbuf(&(m_camera_info.isp));
-        ALOGV("### isp DQBUF done bayerIndex(%d) for frameCnt(%d)", index, processingFrameCnt);
-        shot_ext = (struct camera2_shot_ext *)(m_camera_info.sensor.buffer[index].virt.extP[1]);        
-        ALOGV("(%s): SCP_CLOSING check sensor(%d) scc(%d) scp(%d) ", __FUNCTION__, 
-           shot_ext->request_sensor, shot_ext->request_scc, shot_ext->request_scp);
-        if (shot_ext->request_scc + shot_ext->request_scp + shot_ext->request_sensor == 0) {
-            ALOGV("(%s): SCP_CLOSING check OK ", __FUNCTION__);
-            m_scp_closed = true;
-        } 
-        else
-            m_scp_closed = false;
-        if (processingFrameCnt != -1) {
-            m_requestManager->ApplyDynamicMetadata(shot_ext, processingFrameCnt);
-        }
-        m_BayerManager->MarkIspDequeue(index);
-        if (m_BayerManager->GetNumOnIsp() != 0) {
-            selfThread->SetSignal(SIGNAL_ISP_START_BAYER_DEQUEUE);
-        }
-    }
-
+    ALOGV("DEBUG(%s): ", __FUNCTION__ );
+    /* will add */
     return;
 }
 
@@ -2298,8 +2312,7 @@ void ExynosCameraHWInterface2::m_streamThreadFunc(SignalDrivenThread * self)
         int i, index = -1, cnt_to_dq = 0;
         status_t res;
         ALOGV("DEBUG(%s): processing SIGNAL_THREAD_RELEASE", __FUNCTION__);
-
-
+        ALOGD("(%s):(%d) SIGNAL_THREAD_RELEASE", __FUNCTION__, selfStreamParms->streamType);
 
         if (selfThread->m_isBufferInit) {
             for ( i=0 ; i < selfStreamParms->numSvcBuffers; i++) {
@@ -2321,8 +2334,9 @@ void ExynosCameraHWInterface2::m_streamThreadFunc(SignalDrivenThread * self)
                 ALOGV("DEBUG(%s): calling stream(%d) reqbuf 0 DONE(fd:%d)", __FUNCTION__,
                 selfThread->m_index, selfStreamParms->fd);
             }
-            selfThread->m_releasing = false;
         }
+        selfThread->m_releasing = false;
+        ALOGD("m_releasing set false");
         if (selfThread->m_index == 1 && m_resizeBuf.size.s != 0) {
             freeCameraMemory(&m_resizeBuf, 1);
         }
@@ -2340,8 +2354,17 @@ void ExynosCameraHWInterface2::m_streamThreadFunc(SignalDrivenThread * self)
         void *virtAddr[3];
         int i, j;
         int index;
+        nsecs_t timestamp;
+
         ALOGV("DEBUG(%s): stream(%d) processing SIGNAL_STREAM_DATA_COMING",
             __FUNCTION__,selfThread->m_index);
+
+        if (selfStreamParms->streamType == STREAM_TYPE_INDIRECT)
+        {
+            ALOGD("DEBUG(%s): stream(%d) processing SIGNAL_STREAM_DATA_COMING",
+                __FUNCTION__, selfThread->m_index);
+        }
+
         if (!(selfThread->m_isBufferInit)) {
             for ( i=0 ; i < selfStreamParms->numSvcBuffers; i++) {
                 res = selfStreamParms->streamOps->dequeue_buffer(selfStreamParms->streamOps, &buf);
@@ -2386,12 +2409,12 @@ void ExynosCameraHWInterface2::m_streamThreadFunc(SignalDrivenThread * self)
             }
             selfThread->m_isBufferInit = true;
         }
-        
+
         if (m_recordingEnabled && m_needsRecordBufferInit) {
             ALOGV("DEBUG(%s): Recording Buffer Initialization numsvcbuf(%d)",
                 __FUNCTION__, selfRecordParms->numSvcBuffers);
-            int checkingIndex = 0;            
-            bool found = false;            
+            int checkingIndex = 0;
+            bool found = false;
             for ( i=0 ; i < selfRecordParms->numSvcBuffers; i++) {
                 res = selfRecordParms->streamOps->dequeue_buffer(selfRecordParms->streamOps, &buf);
                 if (res != NO_ERROR || buf == NULL) {
@@ -2407,10 +2430,9 @@ void ExynosCameraHWInterface2::m_streamThreadFunc(SignalDrivenThread * self)
                        selfRecordParms->outputWidth, selfRecordParms->outputHeight, virtAddr) != 0) {
                     ALOGE("ERR(%s): could not obtain gralloc buffer", __FUNCTION__);
                 }
-                else {  
+                else {
                       ALOGV("DEBUG(%s): [record] locked img buf plane0(%x) plane1(%x) plane2(%x)",
                         __FUNCTION__, (unsigned int)virtAddr[0], (unsigned int)virtAddr[1], (unsigned int)virtAddr[2]);
-
                 }
                 found = false;
                 for (checkingIndex = 0; checkingIndex < selfRecordParms->numSvcBuffers ; checkingIndex++) {
@@ -2421,11 +2443,11 @@ void ExynosCameraHWInterface2::m_streamThreadFunc(SignalDrivenThread * self)
                 }
                 ALOGV("DEBUG(%s): [record] found(%d) - index[%d]", __FUNCTION__, found, checkingIndex);
                 if (!found) break;
-                index = checkingIndex;
 
+                index = checkingIndex;
 
                 if (index == -1) {
-                    ALOGD("ERR(%s): could not find buffer index", __FUNCTION__);
+                    ALOGV("ERR(%s): could not find buffer index", __FUNCTION__);
                 }
                 else {
                     ALOGV("DEBUG(%s): found buffer index[%d] - status(%d)",
@@ -2457,7 +2479,7 @@ void ExynosCameraHWInterface2::m_streamThreadFunc(SignalDrivenThread * self)
 
 
                 if (selfStreamParms->svcBufStatus[index] !=  ON_DRIVER)
-                    ALOGD("DBG(%s): DQed buffer status abnormal (%d) ",
+                    ALOGV("DBG(%s): DQed buffer status abnormal (%d) ",
                            __FUNCTION__, selfStreamParms->svcBufStatus[index]);
                 selfStreamParms->svcBufStatus[index] = ON_HAL;
 
@@ -2522,7 +2544,7 @@ void ExynosCameraHWInterface2::m_streamThreadFunc(SignalDrivenThread * self)
                     }
 
                     res = selfRecordParms->streamOps->enqueue_buffer(selfRecordParms->streamOps,
-                            m_requestManager->GetTimestamp(m_ispProcessingFrameCnt),
+                            systemTime(),
                             &(selfRecordParms->svcBufHandle[selfRecordParms->svcBufIndex]));
                     ALOGV("DEBUG(%s): stream(%d) record enqueue_buffer to svc done res(%d)", __FUNCTION__,
                         selfThread->m_index, res);
@@ -2530,13 +2552,14 @@ void ExynosCameraHWInterface2::m_streamThreadFunc(SignalDrivenThread * self)
                         selfRecordParms->svcBufStatus[selfRecordParms->svcBufIndex] = ON_SERVICE;
                         selfRecordParms->numSvcBufsInHal--;
                     }
-
-                    m_requestManager->NotifyStreamOutput(m_ispProcessingFrameCnt, 2);
-
                 }
-                if (m_previewOutput) {
+                if (m_previewOutput && m_requestManager->GetSkipCnt() <= 0) {
+
+                    ALOGV("** Display Preview(frameCnt:%d)", m_requestManager->GetFrameIndex());
                     res = selfStreamParms->streamOps->enqueue_buffer(selfStreamParms->streamOps,
-                            m_requestManager->GetTimestamp(m_ispProcessingFrameCnt), &(selfStreamParms->svcBufHandle[index]));
+                            m_requestManager->GetTimestamp(m_requestManager->GetFrameIndex()),
+                            &(selfStreamParms->svcBufHandle[index]));
+
                     ALOGV("DEBUG(%s): stream(%d) enqueue_buffer to svc done res(%d)", __FUNCTION__, selfThread->m_index, res);
                 }
                 else {
@@ -2551,7 +2574,6 @@ void ExynosCameraHWInterface2::m_streamThreadFunc(SignalDrivenThread * self)
                 else {
                     selfStreamParms->svcBufStatus[index] = ON_HAL;
                 }
-                m_requestManager->NotifyStreamOutput(m_ispProcessingFrameCnt, selfThread->m_index);
             }
             else if (selfStreamParms->streamType == STREAM_TYPE_INDIRECT) {
                 ExynosRect jpegRect;
@@ -2563,10 +2585,10 @@ void ExynosCameraHWInterface2::m_streamThreadFunc(SignalDrivenThread * self)
                 ExynosBuffer resizeBufInfo;
                 ExynosRect   m_orgPictureRect;
 
-                ALOGV("DEBUG(%s): stream(%d) type(%d) DQBUF START ",__FUNCTION__,
+                ALOGD("DEBUG(%s): stream(%d) type(%d) DQBUF START ",__FUNCTION__,
                     selfThread->m_index, selfStreamParms->streamType);
                 index = cam_int_dqbuf(&(selfStreamParms->node));
-                ALOGV("DEBUG(%s): stream(%d) type(%d) DQBUF done index(%d)",__FUNCTION__,
+                ALOGD("DEBUG(%s): stream(%d) type(%d) DQBUF done index(%d)",__FUNCTION__,
                     selfThread->m_index, selfStreamParms->streamType, index);
 
                 m_jpegEncodingFrameCnt = m_ispProcessingFrameCnt;
@@ -2600,7 +2622,7 @@ void ExynosCameraHWInterface2::m_streamThreadFunc(SignalDrivenThread * self)
                                    m_orgPictureRect.w, m_orgPictureRect.h,
                                    &cropX, &cropY,
                                    &cropW, &cropH,
-                                   0); 
+                                   0);
 
                     ALOGV("DEBUG(%s):cropX = %d, cropY = %d, cropW = %d, cropH = %d",
                           __FUNCTION__, cropX, cropY, cropW, cropH);
@@ -2649,7 +2671,6 @@ void ExynosCameraHWInterface2::m_streamThreadFunc(SignalDrivenThread * self)
                 jpegRect.h = m_orgPictureRect.h;
                 jpegRect.colorFormat = V4L2_PIX_FMT_NV16;
 
-                m_requestManager->NotifyStreamOutput(m_jpegEncodingFrameCnt, selfThread->m_index);
                 if (yuv2Jpeg(&m_resizeBuf, &selfStreamParms->svcBuffers[selfStreamParms->svcBufIndex], &jpegRect) == false)
                     ALOGE("ERR(%s):yuv2Jpeg() fail", __FUNCTION__);
                 cam_int_qbuf(&(selfStreamParms->node), index);
@@ -2690,8 +2711,8 @@ void ExynosCameraHWInterface2::m_streamThreadFunc(SignalDrivenThread * self)
                 selfRecordParms->numSvcBufsInHal ++;
                 ALOGV("DEBUG(%s): record got buf(%x) numBufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf),
                    selfRecordParms->numSvcBufsInHal, ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
+
                 const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf);
-                
                 bool found = false;
                 int checkingIndex = 0;
                 for (checkingIndex = 0; checkingIndex < selfRecordParms->numSvcBuffers ; checkingIndex++) {
@@ -2701,14 +2722,18 @@ void ExynosCameraHWInterface2::m_streamThreadFunc(SignalDrivenThread * self)
                     }
                 }
                 ALOGV("DEBUG(%s): recording dequeueed_buffer found index(%d)", __FUNCTION__, found);
-                if (!found) break;
+
+                if (!found) {
+                     break;
+                }
+
                 index = checkingIndex;
                 if (selfRecordParms->svcBufStatus[index] == ON_SERVICE) {
                     selfRecordParms->svcBufStatus[index] = ON_HAL;
                 }
                 else {
                     ALOGV("DEBUG(%s): record bufstatus abnormal [%d]  status = %d", __FUNCTION__,
-                        index,  selfRecordParms->svcBufStatus[index]);    
+                        index,  selfRecordParms->svcBufStatus[index]);
                 }
             } while (0);
         }
@@ -2750,8 +2775,8 @@ void ExynosCameraHWInterface2::m_streamThreadFunc(SignalDrivenThread * self)
                     v4l2_buf.length     = currentNode->planes;
 
                     v4l2_buf.m.planes[0].m.fd = priv_handle->fd;
-                       v4l2_buf.m.planes[2].m.fd = priv_handle->fd1;
-                       v4l2_buf.m.planes[1].m.fd = priv_handle->fd2;
+                    v4l2_buf.m.planes[2].m.fd = priv_handle->fd1;
+                    v4l2_buf.m.planes[1].m.fd = priv_handle->fd2;
                     for (plane_index=0 ; plane_index < v4l2_buf.length ; plane_index++) {
                         v4l2_buf.m.planes[plane_index].length  = currentBuf->size.extS[plane_index];
                         ALOGV("DEBUG(%s): plane(%d): fd(%d)  length(%d)",
@@ -2840,35 +2865,9 @@ bool ExynosCameraHWInterface2::yuv2Jpeg(ExynosBuffer *yuvBuf,
         ALOGE("ERR(%s):jpegEnc.setJpegFormat() fail", __FUNCTION__);
         goto jpeg_encode_done;
     }
-#if 0
-    if (m_curCameraInfo->thumbnailW != 0 && m_curCameraInfo->thumbnailH != 0) {
-        int thumbW = 0, thumbH = 0;
-        mExifInfo.enableThumb = true;
-        if (rect->w < 320 || rect->h < 240) {
-            thumbW = 160;
-            thumbH = 120;
-        } else {
-            thumbW = m_curCameraInfo->thumbnailW;
-            thumbH = m_curCameraInfo->thumbnailH;
-        }
-        if (jpegEnc.setThumbnailSize(thumbW, thumbH)) {
-            LOGE("ERR(%s):jpegEnc.setThumbnailSize(%d, %d) fail", __FUNCTION__, thumbW, thumbH);
-            goto jpeg_encode_done;
-        }
 
-        if (0 < m_jpegThumbnailQuality && m_jpegThumbnailQuality <= 100) {
-            if (jpegEnc.setThumbnailQuality(m_jpegThumbnailQuality)) {
-                LOGE("ERR(%s):jpegEnc.setThumbnailQuality(%d) fail", __FUNCTION__, m_jpegThumbnailQuality);
-                goto jpeg_encode_done;
-            }
-        }
+    mExifInfo.enableThumb = false;
 
-        m_setExifChangedAttribute(&mExifInfo, rect);
-    } else
-#endif
-    {
-        mExifInfo.enableThumb = false;
-    }
     ALOGV("DEBUG(%s):calling jpegEnc.setInBuf() yuvSize(%d)", __FUNCTION__, *yuvSize);
     if (jpegEnc.setInBuf((int *)&(yuvBuf->fd.fd), (int *)yuvSize)) {
         ALOGE("ERR(%s):jpegEnc.setInBuf() fail", __FUNCTION__);
@@ -2904,40 +2903,40 @@ jpeg_encode_done:
 
 ExynosCameraHWInterface2::MainThread::~MainThread()
 {
-    ALOGD("(%s):", __FUNCTION__);
+    ALOGV("(%s):", __FUNCTION__);
 }
 
 void ExynosCameraHWInterface2::MainThread::release()
 {
-    ALOGD("(%s):", __func__);
+    ALOGV("(%s):", __func__);
     SetSignal(SIGNAL_THREAD_RELEASE);
 }
 
 ExynosCameraHWInterface2::SensorThread::~SensorThread()
 {
-    ALOGD("(%s):", __FUNCTION__);
+    ALOGV("(%s):", __FUNCTION__);
 }
 
 void ExynosCameraHWInterface2::SensorThread::release()
 {
-    ALOGD("(%s):", __func__);
+    ALOGV("(%s):", __func__);
     SetSignal(SIGNAL_THREAD_RELEASE);
 }
 
 ExynosCameraHWInterface2::IspThread::~IspThread()
 {
-    ALOGD("(%s):", __FUNCTION__);
+    ALOGV("(%s):", __FUNCTION__);
 }
 
 void ExynosCameraHWInterface2::IspThread::release()
 {
-    ALOGD("(%s):", __func__);
+    ALOGV("(%s):", __func__);
     SetSignal(SIGNAL_THREAD_RELEASE);
 }
 
 ExynosCameraHWInterface2::StreamThread::~StreamThread()
 {
-    ALOGD("(%s):", __FUNCTION__);
+    ALOGV("(%s):", __FUNCTION__);
 }
 
 void ExynosCameraHWInterface2::StreamThread::setParameter(stream_parameters_t * new_parameters)
@@ -3081,18 +3080,18 @@ ExynosCamera2 * g_camera2[2] = { NULL, NULL };
 
 static int HAL2_camera_device_close(struct hw_device_t* device)
 {
-    ALOGD("%s: ENTER", __FUNCTION__);
+    ALOGV("%s: ENTER", __FUNCTION__);
     if (device) {
 
         camera2_device_t *cam_device = (camera2_device_t *)device;
-        ALOGD("cam_device(0x%08x):", (unsigned int)cam_device);
-        ALOGD("g_cam2_device(0x%08x):", (unsigned int)g_cam2_device);
+        ALOGV("cam_device(0x%08x):", (unsigned int)cam_device);
+        ALOGV("g_cam2_device(0x%08x):", (unsigned int)g_cam2_device);
         delete static_cast<ExynosCameraHWInterface2 *>(cam_device->priv);
-        g_cam2_device = NULL;        
+        g_cam2_device = NULL;
         free(cam_device);
         g_camera_vaild = false;
     }
-    ALOGD("%s: EXIT", __FUNCTION__);
+    ALOGV("%s: EXIT", __FUNCTION__);
     return 0;
 }
 
@@ -3172,7 +3171,7 @@ static int HAL2_device_release_stream(
         const struct camera2_device *dev,
             uint32_t stream_id)
 {
-    ALOGD("DEBUG(%s)(id: %d):", __FUNCTION__, stream_id);
+    ALOGV("DEBUG(%s)(id: %d):", __FUNCTION__, stream_id);
     if (!g_camera_vaild)
         return 0;
     return obj(dev)->releaseStream(stream_id);
@@ -3245,9 +3244,9 @@ static int HAL2_getNumberOfCameras()
 
 static int HAL2_getCameraInfo(int cameraId, struct camera_info *info)
 {
-    ALOGD("DEBUG(%s): cameraID: %d", __FUNCTION__, cameraId);
+    ALOGV("DEBUG(%s): cameraID: %d", __FUNCTION__, cameraId);
     static camera_metadata_t * mCameraInfo[2] = {NULL, NULL};
-    
+
     status_t res;
 
     if (cameraId == 0) {
@@ -3315,13 +3314,13 @@ static int HAL2_camera_device_open(const struct hw_module_t* module,
     int cameraId = atoi(id);
 
     g_camera_vaild = false;
-    ALOGD("\n\n>>> I'm Samsung's CameraHAL_2(ID:%d) <<<\n\n", cameraId);
+    ALOGV("\n\n>>> I'm Samsung's CameraHAL_2(ID:%d) <<<\n\n", cameraId);
     if (cameraId < 0 || cameraId >= HAL2_getNumberOfCameras()) {
         ALOGE("ERR(%s):Invalid camera ID %s", __FUNCTION__, id);
         return -EINVAL;
     }
 
-    ALOGD("g_cam2_device : 0x%08x", (unsigned int)g_cam2_device);
+    ALOGV("g_cam2_device : 0x%08x", (unsigned int)g_cam2_device);
     if (g_cam2_device) {
         if (obj(g_cam2_device)->getCameraId() == cameraId) {
             ALOGV("DEBUG(%s):returning existing camera ID %s", __FUNCTION__, id);
@@ -3330,14 +3329,11 @@ static int HAL2_camera_device_open(const struct hw_module_t* module,
 
             while (g_cam2_device)
                 usleep(10000);
-            /*ALOGE("ERR(%s):Cannot open camera %d. camera %d is already running!",
-                    __FUNCTION__, cameraId, obj(g_cam2_device)->getCameraId());
-            return -ENOSYS;*/
         }
     }
 
     g_cam2_device = (camera2_device_t *)malloc(sizeof(camera2_device_t));
-    ALOGD("g_cam2_device : 0x%08x", (unsigned int)g_cam2_device);
+    ALOGV("g_cam2_device : 0x%08x", (unsigned int)g_cam2_device);
 
     if (!g_cam2_device)
         return -ENOMEM;
index 68aa50e354846d71c19c58254614100add5dc736..c09f0ea559f03aa821c43b70a530fcfc55d75e0c 100644 (file)
@@ -58,9 +58,8 @@ namespace android {
 #define NODE_PREFIX     "/dev/video"
 
 #define NUM_MAX_STREAM_THREAD       (5)
-#define NUM_MAX_DEQUEUED_REQUEST    (6)
-/* #define NUM_MAX_REQUEST_MGR_ENTRY   NUM_MAX_DEQUEUED_REQUEST */
 #define NUM_MAX_REQUEST_MGR_ENTRY   (10)
+#define NUM_MAX_DEQUEUED_REQUEST    NUM_MAX_REQUEST_MGR_ENTRY
 #define MAX_CAMERA_MEMORY_PLANE_NUM    (4)
 #define NUM_MAX_CAMERA_BUFFERS      (16)
 #define NUM_BAYER_BUFFERS           (8)
@@ -108,7 +107,6 @@ typedef struct node_info {
     enum v4l2_buf_type type;
     ion_client ionClient;
     ExynosBuffer buffer[NUM_MAX_CAMERA_BUFFERS];
-    bool streamOn;
 } node_info_t;
 
 
@@ -153,7 +151,7 @@ public:
     int   MarkProcessingRequest(ExynosBuffer *buf);
     void      NotifyStreamOutput(int frameCnt, int stream_id);
     void    DumpInfoWithIndex(int index);
-    void    ApplyDynamicMetadata(struct camera2_shot_ext *shot_ext, int frameCnt);
+    void    ApplyDynamicMetadata(struct camera2_shot_ext *shot_ext);
     void    CheckCompleted(int index);
     void    UpdateIspParameters(struct camera2_shot_ext *shot_ext, int frameCnt);
     void    RegisterTimestamp(int frameCnt, nsecs_t *frameTime);
@@ -164,6 +162,9 @@ public:
     int     GetNextIndex(int index);
     void    SetDefaultParameters(int cropX);
     void    SetInitialSkip(int count);
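+    /* skip-count / frame-index accessors used when enqueueing preview buffers */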
+    int     GetSkipCnt();
+    void    SetFrameIndex(int index);
+    int     GetFrameIndex();
 private:
 
     MetadataConverter               *m_metadataConverter;
@@ -183,7 +184,7 @@ private:
 
     int                             m_sensorPipelineSkipCnt;
     int                             m_cropX;
-
+    int                             m_frameIndex;
 };
 
 
@@ -443,6 +444,8 @@ class MainThread : public SignalDrivenThread {
     bool            yuv2Jpeg(ExynosBuffer *yuvBuf,
                             ExynosBuffer *jpegBuf,
                             ExynosRect *rect);
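+    /* ISP pipeline initialization and start helpers */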
+    void            InitializeISPChain();
+    void            StartISP();
     exif_attribute_t    mExifInfo;
     void               *m_exynosPictureCSC;
     void               *m_exynosVideoCSC;
@@ -467,8 +470,9 @@ class MainThread : public SignalDrivenThread {
 
     bool                                m_isSensorThreadOn;
     bool                                m_isSensorStarted;
+    bool                                m_isIspStarted;
 
-
+    int                                 m_need_streamoff;
 
     bool                                m_initFlag1;
     bool                                m_initFlag2;
@@ -488,7 +492,9 @@ class MainThread : public SignalDrivenThread {
     int                                            m_cameraId;
     bool                                m_scp_closing;
     bool                                m_scp_closed;
-    
+
+    mutable Mutex    m_qbufLock;
+
 };
 
 }; // namespace android
index 8da90300617db04a93c28cea427dd64df40962fd..42bdf6f7f950d63dc504991857c4b1a4ab7e2878 100644 (file)
@@ -27,7 +27,7 @@
  *   Initial Release
  */
 
-//#define LOG_NDEBUG 1
+//#define LOG_NDEBUG 0
 #define LOG_TAG "MetadataConverter"
 #include <utils/Log.h>
 
@@ -360,17 +360,6 @@ status_t MetadataConverter::ToInternalShot(camera_metadata_t * request, struct c
                 dst->ctl.aa.mode = (enum aa_mode)curr_entry.data.u8[0];
                 break;
 
-            case ANDROID_CONTROL_EFFECT_MODE:
-                if (NO_ERROR != CheckEntryTypeMismatch(&curr_entry, TYPE_BYTE, 1))
-                    break;
-                dst->ctl.aa.effectMode = (enum aa_effect_mode)(curr_entry.data.u8[0] + 1);
-                break;
-
-            case ANDROID_CONTROL_SCENE_MODE:
-                if (NO_ERROR != CheckEntryTypeMismatch(&curr_entry, TYPE_BYTE, 1))
-                    break;
-                dst->ctl.aa.sceneMode = (enum aa_scene_mode)(curr_entry.data.u8[0] + 1);
-                break;
 
             case ANDROID_CONTROL_VIDEO_STABILIZATION_MODE:
                 if (NO_ERROR != CheckEntryTypeMismatch(&curr_entry, TYPE_BYTE, 1))
@@ -455,10 +444,12 @@ status_t MetadataConverter::ToInternalShot(camera_metadata_t * request, struct c
             case ANDROID_REQUEST_OUTPUT_STREAMS:
                 if (NO_ERROR != CheckEntryTypeMismatch(&curr_entry, TYPE_BYTE))
                     break;
+
                 for (i=0 ; i<curr_entry.count ; i++) {
                     dst->ctl.request.outputStreams[i] = curr_entry.data.u8[i];
                     ALOGV("DEBUG(%s): OUTPUT_STREAM[%d] = %d ",  __FUNCTION__, i, (int)(dst->ctl.request.outputStreams[i]));
                 }
+
                 dst->ctl.request.outputStreams[15] = curr_entry.count;
                 break;
 
@@ -470,7 +461,7 @@ status_t MetadataConverter::ToInternalShot(camera_metadata_t * request, struct c
                 break;
 
             default:
-                ALOGD("DEBUG(%s):Bad Metadata tag (%d)",  __FUNCTION__, curr_entry.tag);
+                ALOGV("DEBUG(%s):Bad Metadata tag (%d)",  __FUNCTION__, curr_entry.tag);
                 break;
             }
         }
@@ -533,20 +524,15 @@ status_t MetadataConverter::ToDynamicMetadata(struct camera2_shot_ext * metadata
                 &byteData, 1))
         return NO_MEMORY;
 
-    byteData = metadata->ctl.aa.effectMode - 1;
-    if (0 != add_camera_metadata_entry(dst, ANDROID_CONTROL_EFFECT_MODE,
-                &byteData, 1))
-        return NO_MEMORY;
-
     intData = metadata->ctl.aa.aeExpCompensation;
     if (0 != add_camera_metadata_entry(dst, ANDROID_CONTROL_AE_EXP_COMPENSATION,
                 &intData, 1))
         return NO_MEMORY;
 
 
-    ALOGV("(%s): AWB(%d) AE(%d) SCENE(%d) EFFECT(%d) AEComp(%d)", __FUNCTION__,
-      metadata->dm.aa.awbMode - 1, metadata->dm.aa.aeMode - 1, metadata->ctl.aa.sceneMode - 1,
-      metadata->ctl.aa.effectMode - 1, metadata->ctl.aa.aeExpCompensation );
+    ALOGV("(%s): AWB(%d) AE(%d) SCENE(%d) AEComp(%d)", __FUNCTION__,
+       metadata->dm.aa.awbMode - 1, metadata->dm.aa.aeMode - 1, metadata->ctl.aa.sceneMode - 1,
+       metadata->ctl.aa.aeExpCompensation );
 
 
     if (metadata->ctl.request.metadataMode == METADATA_MODE_NONE) {
index d9dbe4863f67d72ab8c326285541ac1a63465778..811341ff2b6e49922d37ada1a52a9c9ca5b6895c 100644 (file)
@@ -15,6 +15,7 @@
 /*2012.04.23 Version 0.2 Added static metadata (draft)*/
 /*2012.07.04 Version 0.3 Applied google's undocumented changes (draft)*/
 /*2012.07.11 Version 0.4 Added FD parameters */
+/*2012.07.27 Version 0.5 Modified HSB control and DM */
 
 
 #ifndef FIMC_IS_METADATA_H_
@@ -67,7 +68,7 @@ enum lens_facing {
 };
 
 struct camera2_lens_ctl {
-       float                                   focusDistance;
+       uint32_t                                focusDistance;
        float                                   aperture;
        float                                   focalLength;
        float                                   filterDensity;
@@ -76,7 +77,7 @@ struct camera2_lens_ctl {
 };
 
 struct camera2_lens_dm {
-       float                                   focusDistance;
+       uint32_t                                focusDistance;
        float                                   aperture;
        float                                   focalLength;
        float                                   filterDensity;
@@ -182,7 +183,8 @@ struct camera2_sensor_sm {
 enum flash_mode {
        CAM2_FLASH_MODE_OFF = 1,
        CAM2_FLASH_MODE_SINGLE,
-       CAM2_FLASH_MODE_TORCH
+       CAM2_FLASH_MODE_TORCH,
+       CAM2_FLASH_MODE_BEST
 };
 
 struct camera2_flash_ctl {
@@ -193,10 +195,12 @@ struct camera2_flash_ctl {
 
 struct camera2_flash_dm {
        enum flash_mode         flashMode;
-       uint8_t                 firingPower;
        /*10 is max power*/
-       uint64_t                firingTime;
+       uint8_t                 firingPower;
        /*unit : microseconds*/
+       uint64_t                firingTime;
+       /*1 : stable, 0 : unstable*/
+       uint32_t                firingStable;
 };
 
 struct camera2_flash_sm {
@@ -279,20 +283,41 @@ struct camera2_geometric_dm {
 enum colorcorrection_mode {
        COLORCORRECTION_MODE_FAST = 1,
        COLORCORRECTION_MODE_HIGH_QUALITY,
-       COLORCORRECTION_MODE_TRANSFORM_MATRIX
+       COLORCORRECTION_MODE_TRANSFORM_MATRIX,
+       COLORCORRECTION_MODE_EFFECT_MONO,
+       COLORCORRECTION_MODE_EFFECT_NEGATIVE,
+       COLORCORRECTION_MODE_EFFECT_SOLARIZE,
+       COLORCORRECTION_MODE_EFFECT_SEPIA,
+       COLORCORRECTION_MODE_EFFECT_POSTERIZE,
+       COLORCORRECTION_MODE_EFFECT_WHITEBOARD,
+       COLORCORRECTION_MODE_EFFECT_BLACKBOARD,
+       COLORCORRECTION_MODE_EFFECT_AQUA
 };
 
 
 struct camera2_colorcorrection_ctl {
        enum colorcorrection_mode       mode;
        float                           transform[9];
+       uint32_t                        hue;
+       uint32_t                        saturation;
+       uint32_t                        brightness;
 };
 
 struct camera2_colorcorrection_dm {
        enum colorcorrection_mode       mode;
        float                           transform[9];
+       uint32_t                        hue;
+       uint32_t                        saturation;
+       uint32_t                        brightness;
 };
 
+struct camera2_colorcorrection_sm {
+       /*assuming 10 supported modes*/
+       uint8_t                 availableModes[CAMERA2_MAX_AVAILABLE_MODE];
+       uint32_t                hueRange[2];
+       uint32_t                saturationRange[2];
+       uint32_t                brightnessRange[2];
+};
 
 
 /* android.tonemap */
@@ -455,7 +480,8 @@ enum aa_capture_intent {
 };
 
 enum aa_mode {
-       AA_CONTROL_OFF = 1,
+       AA_CONTROL_NONE = 0,
+       AA_CONTROL_OFF,
        AA_CONTROL_AUTO,
        AA_CONTROL_USE_SCENE_MODE
 };
@@ -501,6 +527,17 @@ enum aa_aemode {
        AA_AEMODE_ON_AUTO_FLASH_REDEYE
 };
 
+enum aa_ae_flashmode {
+       /*all flash control stop*/
+       AA_FLASHMODE_OFF = 1,
+       /*internal 3A can control flash*/
+       AA_FLASHMODE_ON,
+       /*internal 3A can do auto flash algorithm*/
+       AA_FLASHMODE_AUTO,
+       /*internal 3A can fire flash by auto result*/
+       AA_FLASHMODE_CAPTURE
+};
+
 enum aa_ae_antibanding_mode {
        AA_AE_ANTIBANDING_OFF = 1,
        AA_AE_ANTIBANDING_50HZ,
@@ -522,6 +559,7 @@ enum aa_awbmode {
 };
 
 enum aa_afmode {
+       NO_CHANGE = 0,
        AA_AFMODE_OFF = 1,
        AA_AFMODE_AUTO,
        AA_AFMODE_MACRO,
@@ -538,10 +576,14 @@ enum aa_afstate {
        AA_AFSTATE_AF_FAILED_FOCUS
 };
 
+enum aa_isomode {
+       AA_ISOMODE_AUTO = 1,
+       AA_ISOMODE_MANUAL,
+};
+
 struct camera2_aa_ctl {
        enum aa_capture_intent          captureIntent;
        enum aa_mode                    mode;
-       enum aa_effect_mode             effectMode;
        enum aa_scene_mode              sceneMode;
        uint8_t                         videoStabilizationMode;
        enum aa_aemode                  aeMode;
@@ -550,13 +592,16 @@ struct camera2_aa_ctl {
        int32_t                         aeExpCompensation;
        uint32_t                        aeTargetFpsRange[2];
        enum aa_ae_antibanding_mode     aeAntibandingMode;
+       enum aa_ae_flashmode            aeflashMode;
        enum aa_awbmode                 awbMode;
        uint32_t                        awbRegions[5];
        /*5 per region(x1,y1,x2,y2,weight). currently assuming 1 region.*/
        enum aa_afmode                  afMode;
        uint32_t                        afRegions[5];
        /*5 per region(x1,y1,x2,y2,weight). currently assuming 1 region.*/
-       uint8_t                         afState;
+       uint8_t                         afTrigger;
+       enum aa_isomode                 isoMode;
+       uint32_t                        isoValue;
 
 };
 
@@ -570,6 +615,7 @@ struct camera2_aa_dm {
        uint32_t                                aeRegions[5];
        /*5 per region(x1,y1,x2,y2,weight). currently assuming 1 region.*/
        uint8_t                                 aeState;
+       enum aa_ae_flashmode                    aeflashMode;
        /*needs check*/
        enum aa_awbmode                         awbMode;
        uint32_t                                awbRegions[5];
@@ -579,6 +625,8 @@ struct camera2_aa_dm {
        uint32_t                                afRegions[5];
        /*5 per region(x1,y1,x2,y2,weight). currently assuming 1 region*/
        enum aa_afstate                         afState;
+       enum aa_isomode                         isoMode;
+       uint32_t                                isoValue;
 };
 
 struct camera2_aa_sm {
@@ -598,6 +646,7 @@ struct camera2_aa_sm {
        /*assuming # of afAvailableModes = 4*/
        uint8_t availableVideoStabilizationModes[4];
        /*assuming # of availableVideoStabilizationModes = 4*/
+       uint32_t        isoRange[2];
 };
 
 struct camera2_lens_usm {
@@ -661,6 +710,7 @@ struct camera2_sm {
        struct camera2_lens_sm                  lens;
        struct camera2_sensor_sm                sensor;
        struct camera2_flash_sm                 flash;
+       struct camera2_colorcorrection_sm       color;
        struct camera2_tonemap_sm               tonemap;
        struct camera2_scaler_sm                scaler;
        struct camera2_jpeg_sm                  jpeg;
@@ -734,8 +784,8 @@ struct camera2_uctl {
        Set sensor, lens, flash control for next frame.
        \remarks
        This flag can be combined.
-       [0 bit] sensor
-       [1 bit] lens
+       [0 bit] lens
+       [1 bit] sensor
        [2 bit] flash
        */
        uint32_t uUpdateBitMap;
@@ -772,6 +822,13 @@ struct camera2_shot {
        Structure for interfacing between HAL and driver.
 */
 struct camera2_shot_ext {
+       /**     \brief
+               setfile change
+               \remarks
+               [x] mode for setfile
+       */
+       uint32_t                setfile;
+
        /**     \brief
                stream control
                \remarks
@@ -797,8 +854,29 @@ struct camera2_shot_ext {
                [1] bypass on
        */
        uint32_t                dnr_bypass;
+
+       /**     \brief
+               post processing control(FD)
+               \remarks
+               [0] bypass off
+               [1] bypass on
+       */
+       uint32_t                fd_bypass;
+
+       /**     \brief
+               processing time debugging
+               \remarks
+               taken time(unit : struct timeval)
+               [0][x] flite start
+               [1][x] flite end
+               [2][x] DRV Shot
+               [3][x] DRV Shot done
+               [4][x] DRV Meta done
+       */
+       uint32_t                timeZone[10][2];
+
        struct camera2_shot     shot;
-    uint8_t            gpsProcessingMethod[32]; // HACK
+    uint8_t            gpsProcessingMethod[32];
 };
 
 #define CAM_LENS_CMD           (0x1 << 0x0)