hardware: samsung_slsi: libcamera2: Camera 2.0 2nd release
author: Sungjoong Kang <sj3.kang@samsung.com>
Sat, 14 Jul 2012 01:20:39 +0000 (10:20 +0900)
committer: Alex Ray <aray@google.com>
Mon, 30 Jul 2012 21:13:23 +0000 (14:13 -0700)
This version includes HAL implementation for supporting stillshot.
This version requires a dma_buf-enabled gscaler and jpeg encoder.
Fixed Color Inversion.

Change-Id: Id4386733d1f3e87ac6fa9b15bb3ed93b2f5f25a5
Signed-off-by: Sungjoong Kang <sj3.kang@samsung.com>
13 files changed:
libcamera2/Android.mk
libcamera2/ExynosCameraHWInterface2.cpp
libcamera2/ExynosCameraHWInterface2.h
libcamera2/ExynosExif.h [new file with mode: 0644]
libcamera2/ExynosJpegEncoderForCamera.cpp [new file with mode: 0644]
libcamera2/ExynosJpegEncoderForCamera.h [new file with mode: 0644]
libcamera2/MetadataConverter.cpp
libcamera2/MetadataConverter.h
libcamera2/SignalDrivenThread.cpp
libcamera2/SignalDrivenThread.h
libcamera2/StaticInformation_temp.cpp
libcamera2/camera2_internal_metadata.h
libcamera2/fimc-is-metadata.h

index d72fe389e997de6d15d06e094b20add2fb5f086d..7e6b9c8513555aec0e8f0b6f91575f846c10bd5b 100644 (file)
@@ -10,19 +10,13 @@ LOCAL_C_INCLUDES += \
        hardware/samsung_slsi/exynos5/include \
        frameworks/native/include \
        system/media/camera/include
-#      $(LOCAL_PATH)/../include \
-#      $(TOP)/system/media/camera/include \
-#      $(TOP)/system/media/camera/include/system \
-#      $(TOP)/device/samsung/$(TARGET_BOARD_PLATFORM)/include \
-#      $(TOP)/device/samsung/$(TARGET_BOARD_PLATFORM)/libcamera \
-#      $(TOP)/device/samsung/smdk_common/libexynosutils \
-#      $(TOP)/device/samsung/smdk_common/libcsc
 
 LOCAL_SRC_FILES:= \
        SignalDrivenThread.cpp \
        MetadataConverter.cpp \
        ExynosCameraHWInterface2.cpp \
-       StaticInformation_temp.cpp
+       StaticInformation_temp.cpp \
+       ExynosJpegEncoderForCamera.cpp
 
 
 LOCAL_SHARED_LIBRARIES:= libutils libcutils libbinder liblog libcamera_client libhardware
@@ -30,7 +24,6 @@ LOCAL_SHARED_LIBRARIES:= libutils libcutils libbinder liblog libcamera_client li
 LOCAL_CFLAGS += -DGAIA_FW_BETA
 
 LOCAL_SHARED_LIBRARIES += libexynosutils libhwjpeg libexynosv4l2 libcsc libion_exynos libcamera_metadata
-#LOCAL_SHARED_LIBRARIES += libexynosutils libhwjpeg libexynosv4l2 libcsc libion libexynoscamera libcamera_metadata
 
 LOCAL_MODULE := camera.exynos5
 
index ebd22cca4d8815f84a8b81d10de0747377114769..3f51918833840adceed489d566bedeb9b5d57ebd 100644 (file)
  * \file      ExynosCameraHWInterface2.cpp
  * \brief     source file for Android Camera API 2.0 HAL
  * \author    Sungjoong Kang(sj3.kang@samsung.com)
- * \date      2012/05/31
+ * \date      2012/07/10
  *
  * <b>Revision History: </b>
  * - 2012/05/31 : Sungjoong Kang(sj3.kang@samsung.com) \n
  *   Initial Release
+ *
+ * - 2012/07/10 : Sungjoong Kang(sj3.kang@samsung.com) \n
+ *   2nd Release
+ *
  */
 
 //#define LOG_NDEBUG 0
 namespace android {
 
 
+// temporarily copied from EmulatedFakeCamera2
+// TODO : implement our own codes
+status_t constructDefaultRequestInternal(
+        int request_template,
+        camera_metadata_t **request,
+        bool sizeRequest);
+
+status_t constructStaticInfo(
+        camera_metadata_t **info,
+        bool sizeRequest);
 
 int get_pixel_depth(uint32_t fmt)
 {
@@ -81,7 +95,7 @@ int get_pixel_depth(uint32_t fmt)
     }
 
     return depth;
-}   
+}
 
 int cam_int_s_fmt(node_info_t *node)
 {
@@ -100,14 +114,14 @@ int cam_int_s_fmt(node_info_t *node)
         v4l2_fmt.fmt.pix_mp.pixelformat = node->format;
         v4l2_fmt.fmt.pix_mp.field       = V4L2_FIELD_ANY;
     } else {
-        ALOGE("%s:S_FMT, Out of bound : Number of element plane",__func__);
+        ALOGE("%s:S_FMT, Out of bound : Number of element plane",__FUNCTION__);
     }
 
     /* Set up for capture */
     ret = exynos_v4l2_s_fmt(node->fd, &v4l2_fmt);
 
     if (ret < 0)
-        ALOGE("%s: exynos_v4l2_s_fmt fail (%d)",__func__, ret); 
+        ALOGE("%s: exynos_v4l2_s_fmt fail (%d)",__FUNCTION__, ret);
 
     return ret;
 }
@@ -124,7 +138,7 @@ int cam_int_reqbufs(node_info_t *node)
     ret = exynos_v4l2_reqbufs(node->fd, &req);
 
     if (ret < 0)
-        ALOGE("%s: VIDIOC_REQBUFS (fd:%d) failed (%d)",__func__,node->fd, ret);
+        ALOGE("%s: VIDIOC_REQBUFS (fd:%d) failed (%d)",__FUNCTION__,node->fd, ret);
 
     return req.count;
 }
@@ -143,14 +157,14 @@ int cam_int_qbuf(node_info_t *node, int index)
     v4l2_buf.length     = node->planes;
 
     for(i = 0; i < node->planes; i++){
-        v4l2_buf.m.planes[i].m.fd = (int)(node->buffer[index].ionBuffer[i]);
-        v4l2_buf.m.planes[i].length  = (unsigned long)(node->buffer[index].size[i]);
+        v4l2_buf.m.planes[i].m.fd = (int)(node->buffer[index].fd.extFd[i]);
+        v4l2_buf.m.planes[i].length  = (unsigned long)(node->buffer[index].size.extS[i]);
     }
 
     ret = exynos_v4l2_qbuf(node->fd, &v4l2_buf);
 
     if (ret < 0)
-        ALOGE("%s: cam_int_qbuf failed (index:%d)(ret:%d)",__func__, index, ret);
+        ALOGE("%s: cam_int_qbuf failed (index:%d)(ret:%d)",__FUNCTION__, index, ret);
 
     return ret;
 }
@@ -163,13 +177,27 @@ int cam_int_streamon(node_info_t *node)
     ret = exynos_v4l2_streamon(node->fd, type);
 
     if (ret < 0)
-        ALOGE("%s: VIDIOC_STREAMON failed (%d)",__func__, ret);
+        ALOGE("%s: VIDIOC_STREAMON failed (%d)",__FUNCTION__, ret);
 
     ALOGV("On streaming I/O... ... fd(%d)", node->fd);
 
     return ret;
 }
 
+int cam_int_streamoff(node_info_t *node)
+{
+       enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+       int ret;
+
+       ALOGV("Off streaming I/O... fd(%d)", node->fd);
+       ret = exynos_v4l2_streamoff(node->fd, type);
+
+    if (ret < 0)
+        ALOGE("%s: VIDIOC_STREAMOFF failed (%d)",__FUNCTION__, ret);
+
+       return ret;
+}
+
 int cam_int_dqbuf(node_info_t *node)
 {
     struct v4l2_buffer v4l2_buf;
@@ -183,7 +211,7 @@ int cam_int_dqbuf(node_info_t *node)
 
     ret = exynos_v4l2_dqbuf(node->fd, &v4l2_buf);
     if (ret < 0)
-        ALOGE("%s: VIDIOC_DQBUF failed (%d)",__func__, ret);
+        ALOGE("%s: VIDIOC_DQBUF failed (%d)",__FUNCTION__, ret);
 
     return v4l2_buf.index;
 }
@@ -191,10 +219,10 @@ int cam_int_dqbuf(node_info_t *node)
 int cam_int_s_input(node_info_t *node, int index)
 {
     int ret;
-    
+
     ret = exynos_v4l2_s_input(node->fd, index);
     if (ret < 0)
-        ALOGE("%s: VIDIOC_S_INPUT failed (%d)",__func__, ret);
+        ALOGE("%s: VIDIOC_S_INPUT failed (%d)",__FUNCTION__, ret);
 
     return ret;
 }
@@ -210,8 +238,11 @@ RequestManager::RequestManager(SignalDrivenThread* main_thread):
 {
     m_metadataConverter = new MetadataConverter;
     m_mainThread = main_thread;
-    for (int i=0 ; i<NUM_MAX_REQUEST_MGR_ENTRY; i++)
-        entries[i].status = EMPTY;
+    for (int i=0 ; i<NUM_MAX_REQUEST_MGR_ENTRY; i++) {
+        //entries[i].status = EMPTY;
+        memset(&(entries[i]), 0x00, sizeof(request_manager_entry_t));
+        entries[i].internal_shot.ctl.request.frameCount = -1;
+    }
     return;
 }
 
@@ -236,131 +267,334 @@ bool RequestManager::IsRequestQueueFull()
 
 void RequestManager::RegisterRequest(camera_metadata_t * new_request)
 {
-    ALOGV("DEBUG(%s):", __func__);
-    
+    ALOGV("DEBUG(%s):", __FUNCTION__);
+
     Mutex::Autolock lock(m_requestMutex);
-    
+
     request_manager_entry * newEntry = NULL;
     int newInsertionIndex = ++m_entryInsertionIndex;
     if (newInsertionIndex >= NUM_MAX_REQUEST_MGR_ENTRY)
         newInsertionIndex = 0;
-    ALOGV("DEBUG(%s): got lock, new insertIndex(%d), cnt before reg(%d)", __func__,newInsertionIndex,m_numOfEntries );         
+    ALOGV("DEBUG(%s): got lock, new insertIndex(%d), cnt before reg(%d)", __FUNCTION__,newInsertionIndex,m_numOfEntries );
+
 
-    
     newEntry = &(entries[newInsertionIndex]);
 
     if (newEntry->status!=EMPTY) {
-        ALOGE("ERROR(%s): Circular buffer abnormal ", __func__);
-        return;    
+        ALOGV("DEBUG(%s): Circular buffer abnormal ", __FUNCTION__);
+        return;
     }
     newEntry->status = REGISTERED;
     newEntry->original_request = new_request;
     // TODO : allocate internal_request dynamically
-    m_metadataConverter->ToInternalCtl(new_request, &(newEntry->internal_request));
-    newEntry->output_stream_count = newEntry->internal_request.ctl.request.numOutputStream;
+    m_metadataConverter->ToInternalShot(new_request, &(newEntry->internal_shot));
+    newEntry->output_stream_count = newEntry->internal_shot.ctl.request.numOutputStream;
 
     m_numOfEntries++;
     m_entryInsertionIndex = newInsertionIndex;
 
-    
+
+        Dump();
     ALOGV("## RegisterReq DONE num(%d), insert(%d), processing(%d), frame(%d), (frameCnt(%d))",
-     m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex, newEntry->internal_request.ctl.request.frameCount);
-    
+     m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex, newEntry->internal_shot.ctl.request.frameCount);
 }
 
 void RequestManager::DeregisterRequest(camera_metadata_t ** deregistered_request)
 {
-    ALOGV("DEBUG(%s):", __func__);
+    ALOGV("DEBUG(%s):", __FUNCTION__);
     Mutex::Autolock lock(m_requestMutex);
 
     request_manager_entry * currentEntry =  &(entries[m_entryFrameOutputIndex]);
-    
+
     if (currentEntry->status!=PROCESSING) {
-        ALOGE("ERROR(%s): Circular buffer abnormal. processing(%d), frame(%d), status(%d) ", __func__
+        ALOGD("DBG(%s): Circular buffer abnormal. processing(%d), frame(%d), status(%d) ", __FUNCTION__
         , m_entryProcessingIndex, m_entryFrameOutputIndex,(int)(currentEntry->status));
-        return;    
+        return;
     }
-    *deregistered_request = currentEntry->original_request;
-    
+    if (deregistered_request)  *deregistered_request = currentEntry->original_request;
+
     currentEntry->status = EMPTY;
     currentEntry->original_request = NULL;
-    memset(&(currentEntry->internal_request), 0, sizeof(camera2_ctl_metadata_NEW_t));
+    memset(&(currentEntry->internal_shot), 0, sizeof(camera2_ctl_metadata_NEW_t));
+    currentEntry->internal_shot.ctl.request.frameCount = -1;
     currentEntry->output_stream_count = 0;
+    currentEntry->dynamic_meta_vaild = false;
     m_numOfEntries--;
+    Dump();
     ALOGV("## DeRegistReq DONE num(%d), insert(%d), processing(%d), frame(%d)",
      m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex);
-    
+
     return;
-  
 }
 
-void RequestManager::PrepareFrame(size_t* num_entries, size_t* frame_size, 
+bool RequestManager::PrepareFrame(size_t* num_entries, size_t* frame_size,
                 camera_metadata_t ** prepared_frame)
 {
-    ALOGV("DEBUG(%s):", __func__);
+    ALOGV("DEBUG(%s):", __FUNCTION__);
     Mutex::Autolock lock(m_requestMutex);
     status_t res = NO_ERROR;
-    m_entryFrameOutputIndex++;
-    if (m_entryFrameOutputIndex >= NUM_MAX_REQUEST_MGR_ENTRY)
-        m_entryFrameOutputIndex = 0;
-    request_manager_entry * currentEntry =  &(entries[m_entryFrameOutputIndex]);
-    ALOGV("DEBUG(%s): processing(%d), frame(%d), insert(%d)", __func__, 
-        m_entryProcessingIndex, m_entryFrameOutputIndex, m_entryInsertionIndex);
-    if (currentEntry->status!=PROCESSING) {
-        ALOGE("ERROR(%s): Circular buffer abnormal status(%d)", __func__, (int)(currentEntry->status));
-        return;    
+    int tempFrameOutputIndex = m_entryFrameOutputIndex + 1;
+    if (tempFrameOutputIndex >= NUM_MAX_REQUEST_MGR_ENTRY)
+        tempFrameOutputIndex = 0;
+    request_manager_entry * currentEntry =  &(entries[tempFrameOutputIndex]);
+    ALOGV("DEBUG(%s): processing(%d), frameOut(%d), insert(%d) recentlycompleted(%d)", __FUNCTION__,
+        m_entryProcessingIndex, m_entryFrameOutputIndex, m_entryInsertionIndex, m_completedIndex);
+
+    if (m_completedIndex != tempFrameOutputIndex) {
+        ALOGV("DEBUG(%s): frame left behind : completed(%d), preparing(%d)", __FUNCTION__, m_completedIndex,tempFrameOutputIndex);
+
+        request_manager_entry * currentEntry2 =  &(entries[tempFrameOutputIndex]);
+        currentEntry2->status = EMPTY;
+        currentEntry2->original_request = NULL;
+        memset(&(currentEntry2->internal_shot), 0, sizeof(camera2_ctl_metadata_NEW_t));
+        currentEntry2->internal_shot.ctl.request.frameCount = -1;
+        currentEntry2->output_stream_count = 0;
+        currentEntry2->dynamic_meta_vaild = false;
+        m_numOfEntries--;
+        Dump();
+        tempFrameOutputIndex = m_completedIndex;
+        currentEntry =  &(entries[tempFrameOutputIndex]);
+    }
+
+    if (currentEntry->output_stream_count!=0) {
+        ALOGD("DBG(%s): Circular buffer has remaining output : stream_count(%d)", __FUNCTION__, currentEntry->output_stream_count);
+        return false;
     }
 
+
+
+    if (currentEntry->status!=PROCESSING) {
+        ALOGD("DBG(%s): Circular buffer abnormal status(%d)", __FUNCTION__, (int)(currentEntry->status));
+
+        return false;
+    }
+    m_entryFrameOutputIndex = tempFrameOutputIndex;
     m_tempFrameMetadata = place_camera_metadata(m_tempFrameMetadataBuf, 2000, 10, 500); //estimated
-    res = m_metadataConverter->ToDynamicMetadata(&(currentEntry->internal_request),
+    res = m_metadataConverter->ToDynamicMetadata(&(currentEntry->internal_shot),
                 m_tempFrameMetadata);
     if (res!=NO_ERROR) {
-        ALOGE("ERROR(%s): ToDynamicMetadata (%d) ", __func__, res);
-        return;    
+        ALOGE("ERROR(%s): ToDynamicMetadata (%d) ", __FUNCTION__, res);
+        return false;
     }
     *num_entries = get_camera_metadata_entry_count(m_tempFrameMetadata);
     *frame_size = get_camera_metadata_size(m_tempFrameMetadata);
     *prepared_frame = m_tempFrameMetadata;
-    ALOGV("## PrepareFrame DONE: frame(%d) frameCnt(%d)", m_entryFrameOutputIndex,\r
-        currentEntry->internal_request.ctl.request.frameCount);
-    
-    return;
+    ALOGV("## PrepareFrame DONE: frameOut(%d) frameCnt-req(%d)", m_entryFrameOutputIndex,
+        currentEntry->internal_shot.ctl.request.frameCount);
+        Dump();
+    return true;
 }
 
-void RequestManager::MarkProcessingRequest(exynos_camera_memory_t* buf)
+int RequestManager::MarkProcessingRequest(ExynosBuffer* buf)
 {
-    ALOGV("DEBUG(%s):", __func__);
+    ALOGV("DEBUG(%s):", __FUNCTION__);
     Mutex::Autolock lock(m_requestMutex);
-    camera2_shot_t * current_shot;
+    struct camera2_shot_ext * shot_ext;
+    int targetStreamIndex = 0;
+
+    // TODO : in the case of Request underrun, insert a bubble
+
+    if (m_numOfEntries == 0)  {
+        ALOGV("DEBUG(%s): Request Manager Empty ", __FUNCTION__);
+        return -1;
+    }
+
+    if ((m_entryProcessingIndex == m_entryInsertionIndex)
+        && (entries[m_entryProcessingIndex].status == PROCESSING)) {
+        ALOGV("## MarkProcReq skipping(request underrun) -  num(%d), insert(%d), processing(%d), frame(%d)",
+         m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex);
+        return -1;
+    }
 
     request_manager_entry * newEntry = NULL;
     int newProcessingIndex = m_entryProcessingIndex + 1;
     if (newProcessingIndex >= NUM_MAX_REQUEST_MGR_ENTRY)
         newProcessingIndex = 0;
 
-    
     newEntry = &(entries[newProcessingIndex]);
 
     if (newEntry->status!=REGISTERED) {
-        ALOGE("ERROR(%s): Circular buffer abnormal ", __func__);
-        return;    
+        ALOGV("DEBUG(%s): Circular buffer abnormal ", __FUNCTION__);
+        Dump();
+        return -1;
     }
     newEntry->status = PROCESSING;
+    // TODO : replace the codes below with a single memcpy of pre-converted 'shot'
 
-    
-    m_entryProcessingIndex = newProcessingIndex; 
-    ALOGV("## MarkProcReq DONE num(%d), insert(%d), processing(%d), frame(%d)",
-     m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex);
-    
-        
+    shot_ext = (struct camera2_shot_ext *)(buf->virt.extP[1]);
+    memset(shot_ext, 0x00, sizeof(struct camera2_shot_ext));
+
+    shot_ext->request_sensor = 1;
+    for (int i = 0; i < newEntry->output_stream_count; i++) {
+        // TODO : match with actual stream index;
+        targetStreamIndex = newEntry->internal_shot.ctl.request.outputStreams[i];
+
+        if (targetStreamIndex==0) {
+            ALOGV("DEBUG(%s): outputstreams(%d) is for scalerP", __FUNCTION__, i);
+            shot_ext->request_scp = 1;
+        }
+        else if (targetStreamIndex==1) {
+            ALOGV("DEBUG(%s): outputstreams(%d) is for scalerC", __FUNCTION__, i);
+            shot_ext->request_scc = 1;
+        }
+        else {
+            ALOGV("DEBUG(%s): outputstreams(%d) has abnormal value(%d)", __FUNCTION__, i, targetStreamIndex);
+        }
+    }
+       shot_ext->shot.ctl.request.metadataMode = METADATA_MODE_FULL;
+       shot_ext->shot.magicNumber = 0x23456789;
+       shot_ext->shot.ctl.sensor.exposureTime = 0;
+       shot_ext->shot.ctl.sensor.frameDuration = 33*1000*1000;
+       shot_ext->shot.ctl.sensor.sensitivity = 0;
+
+    shot_ext->shot.ctl.scaler.cropRegion[0] = 0;
+    shot_ext->shot.ctl.scaler.cropRegion[1] = 0;
+    shot_ext->shot.ctl.scaler.cropRegion[2] = 1920;
+
+    // HACK : use id field for identifier
+    shot_ext->shot.ctl.request.id = newEntry->internal_shot.ctl.request.frameCount;
+
+    //newEntry->request_serial_number = m_request_serial_number;
+
+    //m_request_serial_number++;
+
+    m_entryProcessingIndex = newProcessingIndex;
+
+    Dump();
+    ALOGV("## MarkProcReq DONE totalentry(%d), insert(%d), processing(%d), frame(%d) frameCnt(%d)",
+    m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex, newEntry->internal_shot.ctl.request.frameCount);
+
+    return m_entryProcessingIndex;
 }
 
-void RequestManager::NotifyStreamOutput(uint32_t stream_id)
+void RequestManager::NotifyStreamOutput(int index, int stream_id)
 {
-    ALOGV("DEBUG(%s):", __func__);
-    m_mainThread->SetSignal(SIGNAL_MAIN_STREAM_OUTPUT_DONE);
+    ALOGV("DEBUG(%s): reqIndex(%d), stream_id(%d)", __FUNCTION__, index, stream_id);
+    if (index < 0) return;
+    entries[index].output_stream_count--;  //TODO : match stream id also
+    CheckCompleted(index);
+
+    return;
+}
+
+void RequestManager::CheckCompleted(int index)
+{
+    ALOGV("DEBUG(%s): reqIndex(%d)", __FUNCTION__, index);
+    if (entries[index].output_stream_count==0 && entries[index].dynamic_meta_vaild) {
+        ALOGV("DEBUG(%s): index[%d] completed and sending SIGNAL_MAIN_STREAM_OUTPUT_DONE", __FUNCTION__, index);
+        Dump();
+        m_completedIndex = index;
+        m_mainThread->SetSignal(SIGNAL_MAIN_STREAM_OUTPUT_DONE);
+    }
     return;
 }
+/*
+int RequestManager::FindEntryIndexByRequestSerialNumber(int serial_num)
+{
+    for (int i=0 ; i<NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
+        if (entries[i].internal_shot.ctl.request.frameCount == serial_num) {
+            if (entries[i].status == PROCESSING) {
+                return i;
+            }
+            else {
+                ALOGD("DBG(%s): abnormal entry[%d] status(%d)", __FUNCTION__, i, entries[i].status);
+
+            }
+        }
+    }
+    return -1;
+}
+*/
+void RequestManager::ApplyDynamicMetadata(int index)
+{
+    ALOGV("DEBUG(%s): reqIndex(%d)", __FUNCTION__, index);
+    entries[index].dynamic_meta_vaild = true;
+
+    // TODO : move some code of PrepareFrame here
+
+    CheckCompleted(index);
+}
+
+void RequestManager::DumpInfoWithIndex(int index)
+{
+    camera2_ctl_metadata_NEW_t * currMetadata = &(entries[index].internal_shot);
+
+    ALOGV("####   frameCount(%d) exposureTime(%lld) ISO(%d)",
+        currMetadata->ctl.request.frameCount,
+        currMetadata->ctl.sensor.exposureTime,
+        currMetadata->ctl.sensor.sensitivity);
+    if (currMetadata->ctl.request.numOutputStream==0)
+        ALOGV("####   No output stream selected");
+    else if (currMetadata->ctl.request.numOutputStream==1)
+        ALOGV("####   OutputStreamId : %d", currMetadata->ctl.request.outputStreams[0]);
+    else if (currMetadata->ctl.request.numOutputStream==2)
+        ALOGV("####   OutputStreamId : %d, %d", currMetadata->ctl.request.outputStreams[0],
+            currMetadata->ctl.request.outputStreams[1]);
+    else
+        ALOGV("####   OutputStream num (%d) abnormal ", currMetadata->ctl.request.numOutputStream);
+}
+
+void    RequestManager::UpdateOutputStreamInfo(struct camera2_shot_ext *shot_ext, int index)
+{
+    ALOGV("DEBUG(%s): updating info with reqIndex(%d)", __FUNCTION__, index);
+    if (index<0)
+        return;
+    int targetStreamIndex = 0;
+    request_manager_entry * newEntry = &(entries[index]);
+    shot_ext->request_sensor = 1;
+    shot_ext->request_scc = 0;
+    shot_ext->request_scp = 0;
+    for (int i = 0; i < newEntry->output_stream_count; i++) {
+        // TODO : match with actual stream index;
+        targetStreamIndex = newEntry->internal_shot.ctl.request.outputStreams[i];
+
+        if (targetStreamIndex==0) {
+            ALOGV("DEBUG(%s): outputstreams(%d) is for scalerP", __FUNCTION__, i);
+            shot_ext->request_scp = 1;
+        }
+        else if (targetStreamIndex==1) {
+            ALOGV("DEBUG(%s): outputstreams(%d) is for scalerC", __FUNCTION__, i);
+            shot_ext->request_scc = 1;
+        }
+        else {
+            ALOGV("DEBUG(%s): outputstreams(%d) has abnormal value(%d)", __FUNCTION__, i, targetStreamIndex);
+        }
+    }
+}
+
+void    RequestManager::RegisterTimestamp(int index, nsecs_t * frameTime)
+{
+    ALOGD("DEBUG(%s): updating timestamp for reqIndex(%d) (%lld)", __FUNCTION__, index, *frameTime);
+    request_manager_entry * currentEntry = &(entries[index]);
+    currentEntry->internal_shot.dm.sensor.timeStamp = *((uint64_t*)frameTime);
+    ALOGD("DEBUG(%s): applied timestamp for reqIndex(%d) (%lld)", __FUNCTION__,
+        index, currentEntry->internal_shot.dm.sensor.timeStamp);
+}
+
+uint64_t  RequestManager::GetTimestamp(int index)
+{
+    request_manager_entry * currentEntry = &(entries[index]);
+    uint64_t frameTime = currentEntry->internal_shot.dm.sensor.timeStamp;
+    ALOGD("DEBUG(%s): Returning timestamp for reqIndex(%d) (%lld)", __FUNCTION__, index, frameTime);
+    return frameTime;
+}
+
+
+void RequestManager::Dump(void)
+{
+//    ALOGV("DEBUG(%s): updating timestamp for reqIndex(%d) (%lld)", __FUNCTION__, index, *frameTime);
+    int i = 0;
+    request_manager_entry * currentEntry;
+    ALOGV("## Dump  totalentry(%d), insert(%d), processing(%d), frame(%d)",
+    m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex);
+
+    for (i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
+        currentEntry =  &(entries[i]);
+        ALOGV("[%2d] status[%d] frameCnt[%3d] numOutput[%d]", i,
+        currentEntry->status, currentEntry->internal_shot.ctl.request.frameCount,
+            currentEntry->output_stream_count);
+    }
+}
 
 ExynosCameraHWInterface2::ExynosCameraHWInterface2(int cameraId, camera2_device_t *dev):
             m_requestQueueOps(NULL),
@@ -368,105 +602,190 @@ ExynosCameraHWInterface2::ExynosCameraHWInterface2(int cameraId, camera2_device_
             m_callbackCookie(NULL),
             m_numOfRemainingReqInSvc(0),
             m_isRequestQueuePending(false),
+            m_isRequestQueueNull(true),
             m_isSensorThreadOn(false),
-            m_isStreamStarted(false),
-            m_isBufferInit(false),
-            m_halDevice(dev),
-            m_ionCameraClient(0)
-{
-    ALOGV("DEBUG(%s):", __func__);
+            m_isSensorStarted(false),
+            m_ionCameraClient(0),
+            m_initFlag1(false),
+            m_initFlag2(false),
+            m_numExpRemainingOutScp(0),
+            m_numExpRemainingOutScc(0),
+            m_numBayerQueueList(0),
+            m_numBayerDequeueList(0),
+            m_numBayerQueueListRemainder(0),
+            m_scp_flushing(false),
+            m_closing(false),
+            m_ispInputIndex(-2),
+            m_lastTimeStamp(0),
+            m_halDevice(dev)
+{
+    ALOGV("DEBUG(%s):", __FUNCTION__);
     int ret = 0;
 
+    for (int i=0 ; i < NUM_BAYER_BUFFERS ; i++) {
+        m_bayerBufStatus[i] = 0;
+        m_bayerDequeueList[i] = -1;
+    }
+    for (int i=0 ; i < NUM_BAYER_BUFFERS+SHOT_FRAME_DELAY ; i++) {
+        m_bayerQueueList[i] = -1;
+        m_bayerQueueRequestList[i] = -1;
+    }
+    m_exynosPictureCSC = NULL;
+
     if (!m_grallocHal) {
         ret = hw_get_module(GRALLOC_HARDWARE_MODULE_ID, (const hw_module_t **)&m_grallocHal);
         if (ret)
-            ALOGE("ERR(%s):Fail on loading gralloc HAL", __func__);
-    }  
+            ALOGE("ERR(%s):Fail on loading gralloc HAL", __FUNCTION__);
+    }
 
     m_ionCameraClient = createIonClient(m_ionCameraClient);
     if(m_ionCameraClient == 0)
-        ALOGE("ERR(%s):Fail onion_client_create", __func__);
+        ALOGE("ERR(%s):Fail on ion_client_create", __FUNCTION__);
 
     m_mainThread    = new MainThread(this);
     m_sensorThread  = new SensorThread(this);
-    usleep(200000);
+    m_ispThread     = new IspThread(this);
+    m_mainThread->Start("MainThread", PRIORITY_DEFAULT, 0);
+    ALOGV("DEBUG(%s): created sensorthread ################", __FUNCTION__);
+    usleep(1600000);
+
+    ALOGV("DEBUG(%s): sleep end            ################", __FUNCTION__);
     m_requestManager = new RequestManager((SignalDrivenThread*)(m_mainThread.get()));
-    m_streamThread  = new StreamThread(this, 0);
+    CSC_METHOD cscMethod = CSC_METHOD_HW;
+    m_exynosPictureCSC = csc_init(cscMethod);
+    if (m_exynosPictureCSC == NULL)
+        ALOGE("ERR(%s): csc_init() fail", __FUNCTION__);
+    csc_set_hw_property(m_exynosPictureCSC, CSC_HW_PROPERTY_FIXED_NODE, PICTURE_GSC_NODE_NUM);
 
+    ALOGV("DEBUG(%s): END", __FUNCTION__);
 }
 
 ExynosCameraHWInterface2::~ExynosCameraHWInterface2()
 {
-    ALOGV("DEBUG(%s):", __func__);
+    ALOGV("DEBUG(%s):", __FUNCTION__);
     this->release();
 }
 
 void ExynosCameraHWInterface2::release()
 {
-    int i;
+    int i, res;
     ALOGV("DEBUG(%s):", __func__);
+    m_closing = true;
+    if (m_ispThread != NULL) {
+        m_ispThread->release();
+        m_ispThread->requestExitAndWait();
+        ALOGV("DEBUG(%s):Release ISPthread Done", __func__);
+        m_ispThread = NULL;
+    }
+
+    if (m_sensorThread != NULL) {
+        m_sensorThread->release();
+        m_sensorThread->requestExitAndWait();
+        ALOGV("DEBUG(%s):Release Sensorthread Done", __func__);
+        m_sensorThread = NULL;
+    }
 
     if (m_mainThread != NULL) {
-        
+        m_mainThread->release();
+        m_mainThread->requestExitAndWait();
+        ALOGV("DEBUG(%s):Release Mainthread Done", __func__);
+        m_mainThread = NULL;
+    }
+
+    if (m_streamThreads[0] != NULL) {
+        m_streamThreads[0]->release();
+        m_streamThreads[0]->requestExitAndWait();
+        ALOGV("DEBUG(%s):Release streamThread[0] Done", __FUNCTION__);
+        m_streamThreads[0] = NULL;
     }
-    
-    if (m_sensorThread != NULL){
 
+    if (m_streamThreads[1] != NULL) {
+        m_streamThreads[1]->release();
+        m_streamThreads[1]->requestExitAndWait();
+        ALOGV("DEBUG(%s):Release streamThread[1] Done", __FUNCTION__);
+        m_streamThreads[1] = NULL;
     }
 
+
+    if (m_exynosPictureCSC)
+        csc_deinit(m_exynosPictureCSC);
+    m_exynosPictureCSC = NULL;
+
     for(i = 0; i < m_camera_info.sensor.buffers; i++)
         freeCameraMemory(&m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes);
 
-    for(i = 0; i < m_camera_info.isp.buffers; i++)
-        freeCameraMemory(&m_camera_info.isp.buffer[i], m_camera_info.isp.planes);
-
     for(i = 0; i < m_camera_info.capture.buffers; i++)
         freeCameraMemory(&m_camera_info.capture.buffer[i], m_camera_info.capture.planes);
 
+    ALOGV("DEBUG(%s): calling exynos_v4l2_close - sensor", __func__);
+    res = exynos_v4l2_close(m_camera_info.sensor.fd);
+    if (res != NO_ERROR ) {
+        ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__func__ , res);
+    }
+
+    ALOGV("DEBUG(%s): calling exynos_v4l2_close - isp", __func__);
+    res = exynos_v4l2_close(m_camera_info.isp.fd);
+    if (res != NO_ERROR ) {
+        ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__func__ , res);
+    }
+
+    ALOGV("DEBUG(%s): calling exynos_v4l2_close - capture", __func__);
+    res = exynos_v4l2_close(m_camera_info.capture.fd);
+    if (res != NO_ERROR ) {
+        ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__func__ , res);
+    }
+
+    ALOGV("DEBUG(%s): calling exynos_v4l2_close - scp", __func__);
+    res = exynos_v4l2_close(m_fd_scp); // HACK
+    if (res != NO_ERROR ) {
+        ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__func__ , res);
+    }
+    ALOGV("DEBUG(%s): calling deleteIonClient", __func__);
     deleteIonClient(m_ionCameraClient);
-}    
-    
+    ALOGV("DEBUG(%s): DONE", __func__);
+}
+
 int ExynosCameraHWInterface2::getCameraId() const
 {
     return 0;
 }
-    
 
 int ExynosCameraHWInterface2::setRequestQueueSrcOps(const camera2_request_queue_src_ops_t *request_src_ops)
 {
-    ALOGV("DEBUG(%s):", __func__);
+    ALOGV("DEBUG(%s):", __FUNCTION__);
     if ((NULL != request_src_ops) && (NULL != request_src_ops->dequeue_request)
             && (NULL != request_src_ops->free_request) && (NULL != request_src_ops->request_count)) {
         m_requestQueueOps = (camera2_request_queue_src_ops_t*)request_src_ops;
         return 0;
     }
     else {
-        ALOGE("DEBUG(%s):setRequestQueueSrcOps : NULL arguments", __func__);
+        ALOGE("DEBUG(%s):setRequestQueueSrcOps : NULL arguments", __FUNCTION__);
         return 1;
     }
 }
 
 int ExynosCameraHWInterface2::notifyRequestQueueNotEmpty()
 {
-    ALOGV("DEBUG(%s):setting [SIGNAL_MAIN_REQ_Q_NOT_EMPTY]", __func__);
+    ALOGV("DEBUG(%s):setting [SIGNAL_MAIN_REQ_Q_NOT_EMPTY]", __FUNCTION__);
     if ((NULL==m_frameQueueOps)|| (NULL==m_requestQueueOps)) {
-        ALOGE("DEBUG(%s):queue ops NULL. ignoring request", __func__);
+        ALOGE("DEBUG(%s):queue ops NULL. ignoring request", __FUNCTION__);
         return 0;
     }
+    m_isRequestQueueNull = false;
     m_mainThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY);
     return 0;
 }
 
 int ExynosCameraHWInterface2::setFrameQueueDstOps(const camera2_frame_queue_dst_ops_t *frame_dst_ops)
 {
-    ALOGV("DEBUG(%s):", __func__);
+    ALOGV("DEBUG(%s):", __FUNCTION__);
     if ((NULL != frame_dst_ops) && (NULL != frame_dst_ops->dequeue_frame)
             && (NULL != frame_dst_ops->cancel_frame) && (NULL !=frame_dst_ops->enqueue_frame)) {
         m_frameQueueOps = (camera2_frame_queue_dst_ops_t *)frame_dst_ops;
         return 0;
     }
     else {
-        ALOGE("DEBUG(%s):setFrameQueueDstOps : NULL arguments", __func__);
+        ALOGE("DEBUG(%s):setFrameQueueDstOps : NULL arguments", __FUNCTION__);
         return 1;
     }
 }
@@ -474,7 +793,7 @@ int ExynosCameraHWInterface2::setFrameQueueDstOps(const camera2_frame_queue_dst_
 int ExynosCameraHWInterface2::getInProgressCount()
 {
     int inProgressCount = m_requestManager->GetNumEntries();
-    ALOGV("DEBUG(%s): # of dequeued req (%d)", __func__, inProgressCount);
+    ALOGV("DEBUG(%s): # of dequeued req (%d)", __FUNCTION__, inProgressCount);
     return inProgressCount;
 }
 
@@ -483,16 +802,9 @@ int ExynosCameraHWInterface2::flushCapturesInProgress()
     return 0;
 }
 
-// temporarily copied from EmulatedFakeCamera2
-// TODO : implement our own codes
-status_t constructDefaultRequestInternal(
-        int request_template,
-        camera_metadata_t **request,
-        bool sizeRequest);
-        
 int ExynosCameraHWInterface2::constructDefaultRequest(int request_template, camera_metadata_t **request)
 {
-    ALOGV("DEBUG(%s): making template (%d) ", __func__, request_template);
+    ALOGV("DEBUG(%s): making template (%d) ", __FUNCTION__, request_template);
 
     if (request == NULL) return BAD_VALUE;
     if (request_template < 0 || request_template >= CAMERA2_TEMPLATE_COUNT) {
@@ -521,130 +833,270 @@ int ExynosCameraHWInterface2::constructDefaultRequest(int request_template, came
 int ExynosCameraHWInterface2::allocateStream(uint32_t width, uint32_t height, int format, const camera2_stream_ops_t *stream_ops,
                                     uint32_t *stream_id, uint32_t *format_actual, uint32_t *usage, uint32_t *max_buffers)
 {
-    ALOGV("DEBUG(%s): allocate with width(%d) height(%d) format(%x)", __func__,  width, height, format);
+    ALOGD("DEBUG(%s): allocate stream width(%d) height(%d) format(%x)", __FUNCTION__,  width, height, format);
     char node_name[30];
-    int fd = 0, i, j;
-    *stream_id = 0;
-    *format_actual = HAL_PIXEL_FORMAT_YV12;
-    *usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
-    *max_buffers = 8;
+    int fd = 0;
+    StreamThread *AllocatedStream;
+    stream_parameters_t newParameters;
 
+    if (format == CAMERA2_HAL_PIXEL_FORMAT_OPAQUE && width==1920 && height==1080) {
 
-     m_streamThread->SetParameter(*stream_id, width, height, *format_actual, stream_ops, *usage, fd, &(m_camera_info.preview));
-    return 0;
+        *stream_id = 0;
+
+        m_streamThreads[0]  = new StreamThread(this, *stream_id);
+        AllocatedStream = (StreamThread*)(m_streamThreads[0].get());
+        memset(&node_name, 0x00, sizeof(char[30]));
+        sprintf(node_name, "%s%d", NODE_PREFIX, 44);
+        fd = exynos_v4l2_open(node_name, O_RDWR, 0);
+        if (fd < 0) {
+            ALOGE("DEBUG(%s): failed to open preview video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
+        }
+        else {
+            ALOGV("DEBUG(%s): preview video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
+        }
+        m_fd_scp = fd; // HACK
+
+        usleep(100000); // TODO : guarantee the code below runs after readyToRunInternal()
+
+        *format_actual = HAL_PIXEL_FORMAT_YV12;
+        *usage = GRALLOC_USAGE_SW_WRITE_OFTEN | GRALLOC_USAGE_YUV_ADDR;
+        *max_buffers = 8;
+
+        newParameters.streamType    = 0;
+        newParameters.outputWidth   = width;
+        newParameters.outputHeight  = height;
+        newParameters.nodeWidth     = width;
+        newParameters.nodeHeight    = height;
+        newParameters.outputFormat  = *format_actual;
+        newParameters.nodeFormat    = HAL_PIXEL_FORMAT_2_V4L2_PIX(*format_actual);
+        newParameters.streamOps     = stream_ops;
+        newParameters.usage         = *usage;
+        newParameters.numHwBuffers  = *max_buffers;
+        newParameters.fd            = fd;
+        newParameters.nodePlanes    = 3;
+        newParameters.svcPlanes     = 3;
+        newParameters.halBuftype    = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+        newParameters.memory        = V4L2_MEMORY_DMABUF;
+
+        AllocatedStream->setParameter(&newParameters);
+        return 0;
+    }
+    else if (format == HAL_PIXEL_FORMAT_BLOB) {
+
+        *stream_id = 1;
+
+        m_streamThreads[1]  = new StreamThread(this, *stream_id);
+        AllocatedStream = (StreamThread*)(m_streamThreads[1].get());
+/*
+        memset(&node_name, 0x00, sizeof(char[30]));
+        sprintf(node_name, "%s%d", NODE_PREFIX, 42);
+        fd = exynos_v4l2_open(node_name, O_RDWR, 0);
+        if (fd < 0) {
+            ALOGE("DEBUG(%s): failed to open capture video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
+        }
+        else {
+            ALOGV("DEBUG(%s): capture video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
+        }
+*/
+        fd = m_camera_info.capture.fd;
+        usleep(100000); // TODO : guarantee the code below runs after readyToRunInternal()
+
+        *format_actual = HAL_PIXEL_FORMAT_BLOB;
+
+        *usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
+        *max_buffers = 8;
+
+        newParameters.streamType    = 1;
+        newParameters.outputWidth   = width;
+        newParameters.outputHeight  = height;
+        newParameters.nodeWidth     = 2560;
+        newParameters.nodeHeight    = 1920;
+        newParameters.outputFormat  = *format_actual;
+        newParameters.nodeFormat    = V4L2_PIX_FMT_YUYV;
+        newParameters.streamOps     = stream_ops;
+        newParameters.usage         = *usage;
+        newParameters.numHwBuffers  = *max_buffers;
+        newParameters.fd            = fd;
+        newParameters.nodePlanes    = 1;
+        newParameters.svcPlanes     = 1;
+        newParameters.halBuftype    = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+        newParameters.memory        = V4L2_MEMORY_DMABUF;
+        newParameters.ionClient     = m_ionCameraClient;
+
+        AllocatedStream->setParameter(&newParameters);
+        return 0;
+    }
+    ALOGE("DEBUG(%s): Unsupported Pixel Format", __FUNCTION__);
+    return 1; // TODO : check proper error code
 }
 
-int ExynosCameraHWInterface2::registerStreamBuffers(uint32_t stream_id, int num_buffers, buffer_handle_t *buffers)
+int ExynosCameraHWInterface2::registerStreamBuffers(uint32_t stream_id,
+        int num_buffers, buffer_handle_t *registeringBuffers)
 {
-    int i,j;
-    void *virtAddr[3];
-    int fd = 0, plane_index = 0;;
-    char node_name[30];
+    int                     i,j;
+    void                    *virtAddr[3];
+    uint32_t                plane_index = 0;
+    stream_parameters_t     *targetStreamParms;
+    node_info_t             *currentNode;
+
     struct v4l2_buffer v4l2_buf;
     struct v4l2_plane  planes[VIDEO_MAX_PLANES];
-    
-    ALOGV("DEBUG(%s): streamID (%d), num_buff(%d), handle(%x) ", __func__, stream_id, num_buffers, (uint32_t)buffers);
+
+    ALOGV("DEBUG(%s): streamID (%d), num_buff(%d), handle(%x) ", __FUNCTION__,
+        stream_id, num_buffers, (uint32_t)registeringBuffers);
+
     if (stream_id == 0) {
+        targetStreamParms = &(m_streamThreads[0]->m_parameters);
+    }
+    else if (stream_id == 1) {
+        targetStreamParms = &(m_streamThreads[1]->m_parameters);
+    }
+    else {
+        ALOGE("ERR(%s) unregisterd stream id (%d)", __FUNCTION__, stream_id);
+        return 1; // TODO : proper error code?
+    }
 
-        memset(&node_name, 0x00, sizeof(char[30]));
-        sprintf(node_name, "%s%d", NODE_PREFIX, 44);
-        fd = exynos_v4l2_open(node_name, O_RDWR, 0);
-        if (fd < 0) {
-            ALOGV("DEBUG(%s): failed to open preview video node (%s) fd (%d)", __func__,node_name, fd);
-        }
-        m_camera_info.preview.fd    = fd;
-        m_camera_info.preview.width = 1920;  // to modify
-        m_camera_info.preview.height = 1080; // to modify
-        m_camera_info.preview.format = V4L2_PIX_FMT_YVU420M;
-        m_camera_info.preview.planes = 3;
-        m_camera_info.preview.buffers = 8;  // to modify
-        m_camera_info.preview.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
-        m_camera_info.preview.memory = V4L2_MEMORY_DMABUF;
-
-
-        cam_int_s_input(&(m_camera_info.preview), m_camera_info.sensor_id); 
-        cam_int_s_fmt(&(m_camera_info.preview));
-        ALOGV("DEBUG(%s): preview calling reqbuf", __func__);
-        cam_int_reqbufs(&(m_camera_info.preview));
-        
-        for (i=0 ; i<m_camera_info.preview.buffers ; i++) {
-            ALOGV("Registering Stream Buffers[%d] (%x) width(%d), height(%d)", i, 
-                    (uint32_t)(buffers[i]), m_streamThread->m_parameters.width, m_streamThread->m_parameters.height);
-                    
-            if (m_grallocHal) {
-                if (m_grallocHal->lock(m_grallocHal, buffers[i],
-                       m_streamThread->m_parameters.usage,
-                       0, 0, m_streamThread->m_parameters.width, m_streamThread->m_parameters.height, virtAddr) != 0) {
-                       
-                    ALOGE("ERR(%s):could not obtain gralloc buffer", __func__);
-                }
-                else {
-                    v4l2_buf.m.planes = planes;
-                    v4l2_buf.type       = m_camera_info.preview.type;
-                    v4l2_buf.memory     = m_camera_info.preview.memory;
-                    v4l2_buf.index      = i;
-                    v4l2_buf.length     = 3;
+    if (targetStreamParms->streamType ==0) {
+        if (num_buffers < targetStreamParms->numHwBuffers) {
+            ALOGE("ERR(%s) registering insufficient num of buffers (%d) < (%d)",
+                __FUNCTION__, num_buffers, targetStreamParms->numHwBuffers);
+            return 1; // TODO : proper error code?
+        }
+    }
+    ALOGV("DEBUG(%s): format(%x) width(%d), height(%d) svcPlanes(%d)",
+            __FUNCTION__, targetStreamParms->outputFormat, targetStreamParms->outputWidth,
+            targetStreamParms->outputHeight, targetStreamParms->svcPlanes);
+
+    targetStreamParms->numSvcBuffers = num_buffers;
+    currentNode = &(targetStreamParms->node); // TODO : remove this temporary alias
+
+    currentNode->fd         = targetStreamParms->fd;
+    currentNode->width      = targetStreamParms->nodeWidth;
+    currentNode->height     = targetStreamParms->nodeHeight;
+    currentNode->format     = targetStreamParms->nodeFormat;
+    currentNode->planes     = targetStreamParms->nodePlanes;
+    currentNode->buffers    = targetStreamParms->numHwBuffers;
+    currentNode->type       = targetStreamParms->halBuftype;
+    currentNode->memory     = targetStreamParms->memory;
+    currentNode->ionClient  = targetStreamParms->ionClient;
+
+    if (targetStreamParms->streamType == 0) {
+        cam_int_s_input(currentNode, m_camera_info.sensor_id);
+        cam_int_s_fmt(currentNode);
+        cam_int_reqbufs(currentNode);
+    }
+    else if (targetStreamParms->streamType == 1) {
+        for(i = 0; i < currentNode->buffers; i++){
+            memcpy(&(currentNode->buffer[i]), &(m_camera_info.capture.buffer[i]), sizeof(ExynosBuffer));
+        }
+    }
+
+    for (i = 0 ; i<targetStreamParms->numSvcBuffers ; i++) {
+        ALOGV("DEBUG(%s): registering Stream Buffers[%d] (%x) ", __FUNCTION__,
+            i, (uint32_t)(registeringBuffers[i]));
+        if (m_grallocHal) {
+            if (m_grallocHal->lock(m_grallocHal, registeringBuffers[i],
+                   targetStreamParms->usage, 0, 0,
+                   currentNode->width, currentNode->height, virtAddr) != 0) {
+                ALOGE("ERR(%s): could not obtain gralloc buffer", __FUNCTION__);
+            }
+            else {
+                v4l2_buf.m.planes   = planes;
+                v4l2_buf.type       = currentNode->type;
+                v4l2_buf.memory     = currentNode->memory;
+                v4l2_buf.index      = i;
+                v4l2_buf.length     = currentNode->planes;
 
-                    const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(buffers[i]);
+                ExynosBuffer currentBuf;
+                const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(registeringBuffers[i]);
+
+                m_getAlignedYUVSize(currentNode->format,
+                    currentNode->width, currentNode->height, &currentBuf);
 
                     v4l2_buf.m.planes[0].m.fd = priv_handle->fd;
-                    v4l2_buf.m.planes[1].m.fd = priv_handle->fd1;
-                    v4l2_buf.m.planes[2].m.fd = priv_handle->fd2;
-
-                    // HACK
-                    m_streamThread->m_parameters.grallocVirtAddr[i] = virtAddr[0];
-                    v4l2_buf.m.planes[0].length  = 1920*1088;
-                    v4l2_buf.m.planes[1].length  = 1920*1088/4;
-                    v4l2_buf.m.planes[2].length  = 1920*1088/4;
-
-                    if (exynos_v4l2_qbuf(m_camera_info.preview.fd, &v4l2_buf) < 0) {
-                        ALOGE("ERR(%s):preview exynos_v4l2_qbuf() fail", __func__);
-                        return false;
-                    }
+                    v4l2_buf.m.planes[2].m.fd = priv_handle->u_fd;
+                    v4l2_buf.m.planes[1].m.fd = priv_handle->v_fd;
+                for (plane_index=0 ; plane_index < v4l2_buf.length ; plane_index++) {
+//                    v4l2_buf.m.planes[plane_index].m.userptr = (unsigned long)(virtAddr[plane_index]);
+                    currentBuf.virt.extP[plane_index] = (char *)virtAddr[plane_index];
+                    v4l2_buf.m.planes[plane_index].length  = currentBuf.size.extS[plane_index];
+                    ALOGV("DEBUG(%s): plane(%d): fd(%d) addr(%x), length(%d)",
+                         __FUNCTION__, plane_index, v4l2_buf.m.planes[plane_index].m.fd,
+                         (unsigned int)currentBuf.virt.extP[plane_index],
+                         v4l2_buf.m.planes[plane_index].length);
+                }
 
+                if (targetStreamParms->streamType == 0) {
+                    if (i < currentNode->buffers) {
+                        if (exynos_v4l2_qbuf(currentNode->fd, &v4l2_buf) < 0) {
+                            ALOGE("ERR(%s): stream id(%d) exynos_v4l2_qbuf() fail",
+                                __FUNCTION__, stream_id);
+                            return false;
+                        }
+                        targetStreamParms->svcBufStatus[i]  = REQUIRES_DQ_FROM_SVC;
+                    }
+                    else {
+                        targetStreamParms->svcBufStatus[i]  = ON_SERVICE;
+                    }
+                }
+                else if (targetStreamParms->streamType == 1) {
+                    targetStreamParms->svcBufStatus[i]  = ON_SERVICE;
                 }
-            }              
-            
-            
+                targetStreamParms->svcBuffers[i]       = currentBuf;
+                targetStreamParms->svcBufHandle[i]     = registeringBuffers[i];
+            }
         }
-        ALOGV("DEBUG(%s): preview initial QBUF done", __func__);
-
-
-
     }
-      ALOGV("DEBUG(%s): END registerStreamBuffers", __func__);  
+
+    ALOGV("DEBUG(%s): END registerStreamBuffers", __FUNCTION__);
     return 0;
 }
 
 int ExynosCameraHWInterface2::releaseStream(uint32_t stream_id)
 {
-    ALOGV("DEBUG(%s):", __func__);
+    StreamThread *targetStream;
+    ALOGV("DEBUG(%s):", __FUNCTION__);
+
+    if (stream_id==0) {
+        targetStream = (StreamThread*)(m_streamThreads[0].get());
+    }
+    else if (stream_id==1) {
+        targetStream = (StreamThread*)(m_streamThreads[1].get());
+    }
+    else {
+        ALOGE("ERR:(%s): wrong stream id (%d)", __FUNCTION__, stream_id);
+        return 1; // TODO : proper error code?
+    }
+
+    targetStream->release();
+    ALOGV("DEBUG(%s): DONE", __FUNCTION__);
     return 0;
 }
 
 int ExynosCameraHWInterface2::allocateReprocessStream(
-    uint32_t width, uint32_t height, uint32_t format, const camera2_stream_in_ops_t *reprocess_stream_ops,
+    uint32_t width, uint32_t height, uint32_t format,
+    const camera2_stream_in_ops_t *reprocess_stream_ops,
     uint32_t *stream_id, uint32_t *consumer_usage, uint32_t *max_buffers)
 {
-    ALOGV("DEBUG(%s):", __func__);
+    ALOGV("DEBUG(%s):", __FUNCTION__);
     return 0;
 }
 
 int ExynosCameraHWInterface2::releaseReprocessStream(uint32_t stream_id)
 {
-    ALOGV("DEBUG(%s):", __func__);
+    ALOGV("DEBUG(%s):", __FUNCTION__);
     return 0;
 }
 
 int ExynosCameraHWInterface2::triggerAction(uint32_t trigger_id, int ext1, int ext2)
 {
-    ALOGV("DEBUG(%s):", __func__);
+    ALOGV("DEBUG(%s):", __FUNCTION__);
     return 0;
 }
 
 int ExynosCameraHWInterface2::setNotifyCallback(camera2_notify_callback notify_cb, void *user)
 {
-    ALOGV("DEBUG(%s):", __func__);
+    ALOGV("DEBUG(%s):", __FUNCTION__);
     m_notifyCb = notify_cb;
     m_callbackCookie = user;
     return 0;
@@ -652,167 +1104,915 @@ int ExynosCameraHWInterface2::setNotifyCallback(camera2_notify_callback notify_c
 
 int ExynosCameraHWInterface2::getMetadataVendorTagOps(vendor_tag_query_ops_t **ops)
 {
-    ALOGV("DEBUG(%s):", __func__);
+    ALOGV("DEBUG(%s):", __FUNCTION__);
     return 0;
 }
 
 int ExynosCameraHWInterface2::dump(int fd)
 {
-    ALOGV("DEBUG(%s):", __func__);
+    ALOGV("DEBUG(%s):", __FUNCTION__);
     return 0;
 }
 
+void ExynosCameraHWInterface2::m_getAlignedYUVSize(int colorFormat, int w, int h, ExynosBuffer *buf)
+{
+    switch (colorFormat) {
+    // 1p
+    case V4L2_PIX_FMT_RGB565 :
+    case V4L2_PIX_FMT_YUYV :
+    case V4L2_PIX_FMT_UYVY :
+    case V4L2_PIX_FMT_VYUY :
+    case V4L2_PIX_FMT_YVYU :
+        buf->size.extS[0] = FRAME_SIZE(V4L2_PIX_2_HAL_PIXEL_FORMAT(colorFormat), w, h);
+        buf->size.extS[1] = 0;
+        buf->size.extS[2] = 0;
+        break;
+    // 2p
+    case V4L2_PIX_FMT_NV12 :
+    case V4L2_PIX_FMT_NV12T :
+    case V4L2_PIX_FMT_NV21 :
+        buf->size.extS[0] = ALIGN(w,   16) * ALIGN(h,   16);
+        buf->size.extS[1] = ALIGN(w/2, 16) * ALIGN(h/2, 16);
+        buf->size.extS[2] = 0;
+        break;
+    case V4L2_PIX_FMT_NV12M :
+    case V4L2_PIX_FMT_NV12MT_16X16 :
+        buf->size.extS[0] = ALIGN(w, 16) * ALIGN(h,     16);
+        buf->size.extS[1] = ALIGN(buf->size.extS[0] / 2, 256);
+        buf->size.extS[2] = 0;
+        break;
+    case V4L2_PIX_FMT_NV16 :
+    case V4L2_PIX_FMT_NV61 :
+        buf->size.extS[0] = ALIGN(w, 16) * ALIGN(h, 16);
+        buf->size.extS[1] = ALIGN(w, 16) * ALIGN(h,  16);
+        buf->size.extS[2] = 0;
+        break;
+     // 3p
+    case V4L2_PIX_FMT_YUV420 :
+    case V4L2_PIX_FMT_YVU420 :
+        buf->size.extS[0] = (w * h);
+        buf->size.extS[1] = (w * h) >> 2;
+        buf->size.extS[2] = (w * h) >> 2;
+        break;
+    case V4L2_PIX_FMT_YUV420M:
+    case V4L2_PIX_FMT_YVU420M :
+    case V4L2_PIX_FMT_YUV422P :
+        buf->size.extS[0] = ALIGN(w,  32) * ALIGN(h,  16);
+        buf->size.extS[1] = ALIGN(w/2, 16) * ALIGN(h/2, 8);
+        buf->size.extS[2] = ALIGN(w/2, 16) * ALIGN(h/2, 8);
+        break;
+    default:
+        ALOGE("ERR(%s):unmatched colorFormat(%d)", __FUNCTION__, colorFormat);
+        return;
+        break;
+    }
+}
 
-void ExynosCameraHWInterface2::m_mainThreadFunc(SignalDrivenThread * self)
+bool ExynosCameraHWInterface2::m_getRatioSize(int  src_w,  int   src_h,
+                                             int  dst_w,  int   dst_h,
+                                             int *crop_x, int *crop_y,
+                                             int *crop_w, int *crop_h,
+                                             int zoom)
 {
-    camera_metadata_t *currentRequest = NULL;
-    camera_metadata_t *currentFrame = NULL;
-    size_t numEntries = 0;
-    size_t frameSize = 0; 
-    camera_metadata_t * preparedFrame = NULL;
-    camera_metadata_t *deregisteredRequest = NULL;
-    uint32_t currentSignal = self->GetProcessingSignal();
-    int res = 0;
-    
-    ALOGV("DEBUG(%s): m_mainThreadFunc (%x)", __func__, currentSignal);
-    
-    if (currentSignal & SIGNAL_MAIN_REQ_Q_NOT_EMPTY) {
-        ALOGV("DEBUG(%s): MainThread processing SIGNAL_MAIN_REQ_Q_NOT_EMPTY", __func__);
-        if (m_requestManager->IsRequestQueueFull()==false
-                && m_requestManager->GetNumEntries()<NUM_MAX_DEQUEUED_REQUEST) {
-            m_requestQueueOps->dequeue_request(m_requestQueueOps, &currentRequest);
-            if (NULL == currentRequest) {
-                ALOGV("DEBUG(%s): dequeue_request returned NULL ", __func__);    
+    *crop_w = src_w;
+    *crop_h = src_h;
+
+    if (   src_w != dst_w
+        || src_h != dst_h) {
+        float src_ratio = 1.0f;
+        float dst_ratio = 1.0f;
+
+        // ex : 1024 / 768
+        src_ratio = (float)src_w / (float)src_h;
+
+        // ex : 352  / 288
+        dst_ratio = (float)dst_w / (float)dst_h;
+
+        if (dst_w * dst_h < src_w * src_h) {
+            if (dst_ratio <= src_ratio) {
+                // shrink w
+                *crop_w = src_h * dst_ratio;
+                *crop_h = src_h;
+            } else {
+                // shrink h
+                *crop_w = src_w;
+                *crop_h = src_w / dst_ratio;
             }
-            else {
-                m_requestManager->RegisterRequest(currentRequest);
-                
-                m_numOfRemainingReqInSvc = m_requestQueueOps->request_count(m_requestQueueOps);
-                ALOGV("DEBUG(%s): remaining req cnt (%d)", __func__, m_numOfRemainingReqInSvc);
-                if (m_requestManager->IsRequestQueueFull()==false
-                    && m_requestManager->GetNumEntries()<NUM_MAX_DEQUEUED_REQUEST)
-                    self->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY); // dequeue repeatedly
-                m_sensorThread->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING);
+        } else {
+            if (dst_ratio <= src_ratio) {
+                // shrink w
+                *crop_w = src_h * dst_ratio;
+                *crop_h = src_h;
+            } else {
+                // shrink h
+                *crop_w = src_w;
+                *crop_h = src_w / dst_ratio;
             }
         }
-        else {
-            m_isRequestQueuePending = true;
-        }
     }
 
-    if (currentSignal & SIGNAL_MAIN_STREAM_OUTPUT_DONE) {
-        ALOGV("DEBUG(%s): MainThread processing SIGNAL_MAIN_STREAM_OUTPUT_DONE", __func__);
-        m_requestManager->PrepareFrame(&numEntries, &frameSize, &preparedFrame);
-        m_requestManager->DeregisterRequest(&deregisteredRequest);
-        m_requestQueueOps->free_request(m_requestQueueOps, deregisteredRequest);
-        m_frameQueueOps->dequeue_frame(m_frameQueueOps, numEntries, frameSize, &currentFrame);
-        if (currentFrame==NULL) {
-            ALOGE("%s: frame dequeue returned NULL",__func__ );
+    if (zoom != 0) {
+        float zoomLevel = ((float)zoom + 10.0) / 10.0;
+        *crop_w = (int)((float)*crop_w / zoomLevel);
+        *crop_h = (int)((float)*crop_h / zoomLevel);
+    }
+
+    #define CAMERA_CROP_WIDTH_RESTRAIN_NUM  (0x2)
+    unsigned int w_align = (*crop_w & (CAMERA_CROP_WIDTH_RESTRAIN_NUM - 1));
+    if (w_align != 0) {
+        if (  (CAMERA_CROP_WIDTH_RESTRAIN_NUM >> 1) <= w_align
+            && *crop_w + (CAMERA_CROP_WIDTH_RESTRAIN_NUM - w_align) <= dst_w) {
+            *crop_w += (CAMERA_CROP_WIDTH_RESTRAIN_NUM - w_align);
+        }
+        else
+            *crop_w -= w_align;
+    }
+
+    #define CAMERA_CROP_HEIGHT_RESTRAIN_NUM  (0x2)
+    unsigned int h_align = (*crop_h & (CAMERA_CROP_HEIGHT_RESTRAIN_NUM - 1));
+    if (h_align != 0) {
+        if (  (CAMERA_CROP_HEIGHT_RESTRAIN_NUM >> 1) <= h_align
+            && *crop_h + (CAMERA_CROP_HEIGHT_RESTRAIN_NUM - h_align) <= dst_h) {
+            *crop_h += (CAMERA_CROP_HEIGHT_RESTRAIN_NUM - h_align);
+        }
+        else
+            *crop_h -= h_align;
+    }
+
+    *crop_x = (src_w - *crop_w) >> 1;
+    *crop_y = (src_h - *crop_h) >> 1;
+
+    if (*crop_x & (CAMERA_CROP_WIDTH_RESTRAIN_NUM >> 1))
+        *crop_x -= 1;
+
+    if (*crop_y & (CAMERA_CROP_HEIGHT_RESTRAIN_NUM >> 1))
+        *crop_y -= 1;
+
+    return true;
+}
+
+void ExynosCameraHWInterface2::RegisterBayerQueueList(int bufIndex, int requestIndex)
+{
+    if (m_bayerQueueList[m_numBayerQueueList+m_numBayerQueueListRemainder]!=-1) {
+        ALOGD("DBG(%s): entry(%d) not empty (%d, %d)", __FUNCTION__,
+            m_numBayerQueueList, m_bayerQueueList[m_numBayerQueueList+m_numBayerQueueListRemainder],
+            m_bayerQueueRequestList[m_numBayerQueueList+m_numBayerQueueListRemainder]);
+        return;
+    }
+    m_bayerQueueList[m_numBayerQueueList+m_numBayerQueueListRemainder] = bufIndex;
+    m_bayerQueueRequestList[m_numBayerQueueList+m_numBayerQueueListRemainder] = requestIndex;
+    m_numBayerQueueList++;
+    ALOGV("DEBUG(%s) END: bufIndex(%d) requestIndex(%d) - # of current entry(%d)",
+        __FUNCTION__, bufIndex, requestIndex, m_numBayerQueueList);
+#if 0
+    for (int i=0 ; i < NUM_BAYER_BUFFERS+SHOT_FRAME_DELAY ; i++) {
+        ALOGV("DEBUG(%s): QueuedEntry[%d] <bufIndex(%d) Request(%d)>", __FUNCTION__,
+            i, m_bayerQueueList[i], m_bayerQueueRequestList[i]);
+    }
+#endif
+}
+
+void ExynosCameraHWInterface2::DeregisterBayerQueueList(int bufIndex)
+{
+    ALOGV("DEBUG(%s): deregistering bufIndex(%d)", __FUNCTION__, bufIndex);
+    int i, j;
+    for (int i=0 ; i<NUM_BAYER_BUFFERS ; i++) {
+        if (m_bayerQueueList[i]==-1) {
+            if (m_bayerQueueRequestList[i]==-1) {
+                //ALOGE("ERR(%s): abnormal - entry(%d) should not empty", __FUNCTION__, i);
+            }
+            else {
+                ALOGV("DEBUG(%s): entry(%d) has remainder request(%d)",
+                    __FUNCTION__, i, m_bayerQueueRequestList[i]);
+                continue;
+            }
+        }
+        if (m_bayerQueueList[i]==bufIndex) {
+            if (m_bayerQueueRequestList[i]==-1 && i==0) {
+                ALOGV("DEBUG(%s): removing entry(%d)", __FUNCTION__, i);
+                for (j=i ; j < NUM_BAYER_BUFFERS+SHOT_FRAME_DELAY-1 ; j++) {
+                    m_bayerQueueList[j] = m_bayerQueueList[j+1];
+                    m_bayerQueueRequestList[j] = m_bayerQueueRequestList[j+1];
+                }
+                m_bayerQueueList[j] = -1;
+                m_bayerQueueRequestList[j] = -1;
+            }
+            else {
+                ALOGV("DEBUG(%s): entry(%d) is now remainder request(%d)",
+                    __FUNCTION__, i, m_bayerQueueRequestList[i]);
+                m_bayerQueueList[i] = -1;
+                m_numBayerQueueListRemainder++;
+            }
+            m_numBayerQueueList--;
+            break;
+        }
+    }
+    ALOGV("DEBUG(%s): numQueueList(%d), remainder(%d)", __FUNCTION__,
+                m_numBayerQueueList,m_numBayerQueueListRemainder);
+#if 0
+    for (int i=0 ; i < NUM_BAYER_BUFFERS+SHOT_FRAME_DELAY ; i++) {
+        ALOGV("DEBUG(%s): QueuedEntry[%d] <bufIndex(%d) Request(%d)>", __FUNCTION__,
+            i, m_bayerQueueList[i], m_bayerQueueRequestList[i]);
+    }
+#endif
+}
+
+
+void ExynosCameraHWInterface2::RegisterBayerDequeueList(int bufIndex)
+{
+    if (m_bayerDequeueList[m_numBayerDequeueList]!=-1) {
+        ALOGD("DBG(%s): entry(%d) not empty (%d)", __FUNCTION__,
+            m_numBayerDequeueList, m_bayerDequeueList[m_numBayerDequeueList]);
+        return;
+    }
+    m_bayerDequeueList[m_numBayerDequeueList] = bufIndex;
+    m_numBayerDequeueList++;
+    ALOGV("DEBUG(%s) END: bufIndex(%d) - # of current entry(%d)",
+        __FUNCTION__, bufIndex, m_numBayerDequeueList);
+}
+
+
+int ExynosCameraHWInterface2::DeregisterBayerDequeueList(void)
+{
+    ALOGV("DEBUG(%s): deregistering a buf, curr num(%d)", __FUNCTION__, m_numBayerDequeueList);
+    int ret = m_bayerDequeueList[0];
+    int i = 0;
+    if (m_numBayerDequeueList == 0) {
+        ALOGV("DEBUG(%s): no bayer buffer to deregister", __FUNCTION__);
+        return -1;
+    }
+
+    for (i=0; i < NUM_BAYER_BUFFERS-1 ; i++) {
+        m_bayerDequeueList[i] = m_bayerDequeueList[i+1];
+    }
+    m_bayerDequeueList[i] = -1;
+    m_numBayerDequeueList--;
+    ALOGV("DEBUG(%s) END: deregistered buf(%d), curr num(%d)", __FUNCTION__,
+        ret, m_numBayerDequeueList);
+
+#if 0
+    for (i=0 ; i < NUM_BAYER_BUFFERS ; i++) {
+        ALOGV("DEBUG(%s): QueuedEntry[%d] <bufIndex(%d)>", __FUNCTION__,
+            i, m_bayerDequeueList[i]);
+    }
+#endif
+    return ret;
+}
+
+
+int ExynosCameraHWInterface2::FindRequestEntryNumber(int bufIndex)
+{
+    bool found = false;
+    ALOGV("DEBUG(%s): finding entry# for bufindex(%d)", __FUNCTION__, bufIndex);
+    int i, j, ret;
+    // if the driver supports shot number matching, just compare shot numbers
+#if 1
+    if (SHOT_FRAME_DELAY>m_numBayerQueueList+m_numBayerQueueListRemainder) {
+        ALOGE("ERR(%s): abnormal # of entry (%d) + (%d)", __FUNCTION__,
+            m_numBayerQueueList, m_numBayerQueueListRemainder);
+        return -1;
+    }
+
+    ALOGV("DEBUG(%s): numQueueList(%d), remainder(%d)", __FUNCTION__,
+                m_numBayerQueueList,m_numBayerQueueListRemainder);
+    for (i=0 ; i < NUM_BAYER_BUFFERS+SHOT_FRAME_DELAY ; i++) {
+        ALOGV("DEBUG(%s): QueuedEntry[%2d] <bufIndex(%3d) Request(%3d)>", __FUNCTION__,
+            i, m_bayerQueueList[i], m_bayerQueueRequestList[i]);
+    }
+
+    for (i=0 ; i<=(m_numBayerQueueList+m_numBayerQueueListRemainder); i++) {
+        if (m_bayerQueueList[i]==bufIndex) {
+            found = true;
+            break;
+        }
+    }
+    if (found) {
+        ALOGV("DEBUG(%s): found (%d) at Queue entry [%d]",
+        __FUNCTION__, bufIndex, i);
+        if (i != SHOT_FRAME_DELAY-1) {
+            ALOGV("DEBUG(%s):no match ?? ", __FUNCTION__);
+            return -1;
         }
         else {
-            ALOGV("%s: frame dequeue done. numEntries(%d) frameSize(%d)",__func__ , numEntries,frameSize);
+            ret = m_bayerQueueRequestList[0];
+            ALOGV("DEBUG(%s): removing entry[%d]", __FUNCTION__, i);
+            for (j=0 ; j < NUM_BAYER_BUFFERS+SHOT_FRAME_DELAY-1 ; j++) {
+                m_bayerQueueList[j] = m_bayerQueueList[j+1];
+                m_bayerQueueRequestList[j] = m_bayerQueueRequestList[j+1];
+            }
+            m_bayerQueueList[j] = -1;
+            m_bayerQueueRequestList[j] = -1;
+            m_numBayerQueueListRemainder--;
+            return ret;
+        }
+    }
+    return -1;
+#else
+    if (SHOT_FRAME_DELAY>m_numBayerQueueList+m_numBayerQueueListRemainder) {
+        ALOGE("ERR(%s): abnormal # of entry (%d) + (%d)", __FUNCTION__,
+            m_numBayerQueueList, m_numBayerQueueListRemainder);
+        return -1;
+    }
+
+    for (int i=SHOT_FRAME_DELAY ; i<=(m_numBayerQueueList+m_numBayerQueueListRemainder); i--) {
+        if (m_bayerQueueList[i]==bufIndex) {
+            ALOGV("DEBUG(%s): found entry number(%d)", __FUNCTION__, m_bayerQueueRequestList[i-SHOT_FRAME_DELAY]);
+            ret = m_bayerQueueRequestList[i-SHOT_FRAME_DELAY];
+            m_bayerQueueRequestList[i-SHOT_FRAME_DELAY] = -1;
+            return ret;
         }
-            
-        res = append_camera_metadata(currentFrame, preparedFrame);
-        if (res==0) {
-            ALOGV("%s: frame metadata append success",__func__);
-            m_frameQueueOps->enqueue_frame(m_frameQueueOps, currentFrame);            
+    }
+    return -1;
+
+#endif
+}
+
+void ExynosCameraHWInterface2::m_mainThreadFunc(SignalDrivenThread * self)
+{
+    camera_metadata_t *currentRequest = NULL;
+    camera_metadata_t *currentFrame = NULL;
+    size_t numEntries = 0;
+    size_t frameSize = 0;
+    camera_metadata_t * preparedFrame = NULL;
+    camera_metadata_t *deregisteredRequest = NULL;
+    uint32_t currentSignal = self->GetProcessingSignal();
+    MainThread *  selfThread      = ((MainThread*)self);
+    int res = 0;
+
+    ALOGV("DEBUG(%s): m_mainThreadFunc (%x)", __FUNCTION__, currentSignal);
+
+    if (currentSignal & SIGNAL_THREAD_RELEASE) {
+        ALOGV("DEBUG(%s): processing SIGNAL_THREAD_RELEASE", __FUNCTION__);
+
+        ALOGV("DEBUG(%s): processing SIGNAL_THREAD_RELEASE DONE", __FUNCTION__);
+        selfThread->SetSignal(SIGNAL_THREAD_TERMINATE);
+        return;
+    }
+
+    if (currentSignal & SIGNAL_MAIN_REQ_Q_NOT_EMPTY) {
+        ALOGV("DEBUG(%s): MainThread processing SIGNAL_MAIN_REQ_Q_NOT_EMPTY", __FUNCTION__);
+        if (m_requestManager->IsRequestQueueFull()==false
+                && m_requestManager->GetNumEntries()<NUM_MAX_DEQUEUED_REQUEST) {
+            m_requestQueueOps->dequeue_request(m_requestQueueOps, &currentRequest);
+            if (NULL == currentRequest) {
+                ALOGV("DEBUG(%s): dequeue_request returned NULL ", __FUNCTION__);
+                m_isRequestQueueNull = true;
+            }
+            else {
+                m_requestManager->RegisterRequest(currentRequest);
+
+                m_numOfRemainingReqInSvc = m_requestQueueOps->request_count(m_requestQueueOps);
+                ALOGV("DEBUG(%s): remaining req cnt (%d)", __FUNCTION__, m_numOfRemainingReqInSvc);
+                if (m_requestManager->IsRequestQueueFull()==false
+                    && m_requestManager->GetNumEntries()<NUM_MAX_DEQUEUED_REQUEST)
+                    selfThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY); // dequeue repeatedly
+                m_sensorThread->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING);
+            }
         }
         else {
-            ALOGE("%s: frame metadata append fail (%d)",__func__, res);
+            m_isRequestQueuePending = true;
+        }
+    }
+
+    if (currentSignal & SIGNAL_MAIN_STREAM_OUTPUT_DONE) {
+        ALOGV("DEBUG(%s): MainThread processing SIGNAL_MAIN_STREAM_OUTPUT_DONE", __FUNCTION__);
+        /*while (1)*/ {
+            m_lastTimeStamp = 0;
+            m_requestManager->PrepareFrame(&numEntries, &frameSize, &preparedFrame);
+            m_requestManager->DeregisterRequest(&deregisteredRequest);
+            m_requestQueueOps->free_request(m_requestQueueOps, deregisteredRequest);
+            m_frameQueueOps->dequeue_frame(m_frameQueueOps, numEntries, frameSize, &currentFrame);
+            if (currentFrame==NULL) {
+                ALOGD("DBG(%s): frame dequeue returned NULL",__FUNCTION__ );
+            }
+            else {
+                ALOGV("DEBUG(%s): frame dequeue done. numEntries(%d) frameSize(%d)",__FUNCTION__ , numEntries,frameSize);
+            }
+            res = append_camera_metadata(currentFrame, preparedFrame);
+            if (res==0) {
+                ALOGV("DEBUG(%s): frame metadata append success",__FUNCTION__);
+                m_frameQueueOps->enqueue_frame(m_frameQueueOps, currentFrame);
+            }
+            else {
+                ALOGE("ERR(%s): frame metadata append fail (%d)",__FUNCTION__, res);
+            }
+        }
+        if (!m_isRequestQueueNull) {
+            selfThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY);
+        }
+        // temporary code; remove once auto mode is removed
+        if (getInProgressCount()>0) {
+            ALOGV("DEBUG(%s): STREAM_OUTPUT_DONE and signalling REQ_PROCESSING",__FUNCTION__);
+            m_sensorThread->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING);
         }
-        self->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY);
     }
+    ALOGV("DEBUG(%s): MainThread Exit", __FUNCTION__);
     return;
 }
+
 void ExynosCameraHWInterface2::m_sensorThreadInitialize(SignalDrivenThread * self)
 {
-    ALOGV("DEBUG(%s): ", __func__ );
-    SensorThread * selfSensor = ((SensorThread*)self);
+    ALOGV("DEBUG(%s): ", __FUNCTION__ );
+    SensorThread * selfThread = ((SensorThread*)self);
     char node_name[30];
     int fd = 0;
     int i =0, j=0;
 
-
     m_camera_info.sensor_id = SENSOR_NAME_S5K4E5;
-    
-    memset(&m_camera_info.current_shot, 0x00, sizeof(camera2_shot_t));
-    m_camera_info.current_shot.ctl.request.metadataMode = METADATA_MODE_FULL;
-    m_camera_info.current_shot.magicNumber = 0x23456789;
+    memset(&m_camera_info.dummy_shot, 0x00, sizeof(struct camera2_shot_ext));
+       m_camera_info.dummy_shot.shot.ctl.request.metadataMode = METADATA_MODE_FULL;
+       m_camera_info.dummy_shot.shot.magicNumber = 0x23456789;
+
+    /*sensor setting*/
+       m_camera_info.dummy_shot.shot.ctl.sensor.exposureTime = 0;
+       m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 0;
+       m_camera_info.dummy_shot.shot.ctl.sensor.sensitivity = 0;
 
-    m_camera_info.current_shot.ctl.scaler.cropRegion[0] = 0;
-    m_camera_info.current_shot.ctl.scaler.cropRegion[1] = 0;
-    m_camera_info.current_shot.ctl.scaler.cropRegion[2] = 1920;
+    m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[0] = 0;
+    m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[1] = 0;
+    m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[2] = 1920;
 
-    /*sensor init*/ 
+       /*request setting*/
+       m_camera_info.dummy_shot.request_sensor = 1;
+       m_camera_info.dummy_shot.request_scc = 0;
+       m_camera_info.dummy_shot.request_scp = 0;
+
+    /*sensor init*/
     memset(&node_name, 0x00, sizeof(char[30]));
     sprintf(node_name, "%s%d", NODE_PREFIX, 40);
     fd = exynos_v4l2_open(node_name, O_RDWR, 0);
-    
+
     if (fd < 0) {
-        ALOGV("DEBUG(%s): failed to open sensor video node (%s) fd (%d)", __func__,node_name, fd);
+        ALOGE("ERR(%s): failed to open sensor video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
     }
     else {
-        ALOGV("DEBUG(%s): sensor video node opened(%s) fd (%d)", __func__,node_name, fd);
+        ALOGV("DEBUG(%s): sensor video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
     }
     m_camera_info.sensor.fd = fd;
     m_camera_info.sensor.width = 2560 + 16;
     m_camera_info.sensor.height = 1920 + 10;
     m_camera_info.sensor.format = V4L2_PIX_FMT_SBGGR16;
     m_camera_info.sensor.planes = 2;
-    m_camera_info.sensor.buffers = 8;
+    m_camera_info.sensor.buffers = NUM_BAYER_BUFFERS;
     m_camera_info.sensor.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
     m_camera_info.sensor.memory = V4L2_MEMORY_DMABUF;
     m_camera_info.sensor.ionClient = m_ionCameraClient;
 
     for(i = 0; i < m_camera_info.sensor.buffers; i++){
         initCameraMemory(&m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes);
-        m_camera_info.sensor.buffer[i].size[0] = m_camera_info.sensor.width*m_camera_info.sensor.height*2;
-        m_camera_info.sensor.buffer[i].size[1] = 5*1024; // HACK, driver use 5*1024, should be use predefined value
+        m_camera_info.sensor.buffer[i].size.extS[0] = m_camera_info.sensor.width*m_camera_info.sensor.height*2;
+        m_camera_info.sensor.buffer[i].size.extS[1] = 8*1024; // HACK: driver uses 8*1024; should use a predefined constant instead
         allocCameraMemory(m_camera_info.sensor.ionClient, &m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes);
     }
 
+    m_initFlag1 = true;
+
+#if 0
     /*isp init*/
     memset(&node_name, 0x00, sizeof(char[30]));
     sprintf(node_name, "%s%d", NODE_PREFIX, 41);
     fd = exynos_v4l2_open(node_name, O_RDWR, 0);
-    
+
     if (fd < 0) {
-        ALOGV("DEBUG(%s): failed to open isp video node (%s) fd (%d)", __func__,node_name, fd);
+        ALOGE("ERR(%s): failed to open isp video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
     }
     else {
-        ALOGV("DEBUG(%s): isp video node opened(%s) fd (%d)", __func__,node_name, fd);
+        ALOGV("DEBUG(%s): isp video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
+    }
+    m_camera_info.isp.fd = fd;
+
+    m_camera_info.isp.width = m_camera_info.sensor.width;
+    m_camera_info.isp.height = m_camera_info.sensor.height;
+    m_camera_info.isp.format = m_camera_info.sensor.format;
+    m_camera_info.isp.planes = m_camera_info.sensor.planes;
+    m_camera_info.isp.buffers = m_camera_info.sensor.buffers;
+    m_camera_info.isp.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
+    m_camera_info.isp.memory = V4L2_MEMORY_DMABUF;
+    //m_camera_info.isp.ionClient = m_ionCameraClient;
+
+    for(i = 0; i < m_camera_info.isp.buffers; i++){
+        initCameraMemory(&m_camera_info.isp.buffer[i], m_camera_info.isp.planes);
+        m_camera_info.isp.buffer[i].size.extS[0]    = m_camera_info.sensor.buffer[i].size.extS[0];
+        m_camera_info.isp.buffer[i].size.extS[1]    = m_camera_info.sensor.buffer[i].size.extS[1];
+        m_camera_info.isp.buffer[i].fd.extFd[0]     = m_camera_info.sensor.buffer[i].fd.extFd[0];
+        m_camera_info.isp.buffer[i].fd.extFd[1]     = m_camera_info.sensor.buffer[i].fd.extFd[1];
+        m_camera_info.isp.buffer[i].virt.extP[0]    = m_camera_info.sensor.buffer[i].virt.extP[0];
+        m_camera_info.isp.buffer[i].virt.extP[1]    = m_camera_info.sensor.buffer[i].virt.extP[1];
+    };
+    ALOGV("DEBUG(%s): isp mem alloc done",  __FUNCTION__);
+
+#endif
+#if 0
+    cam_int_s_input(&(m_camera_info.sensor), m_camera_info.sensor_id);
+    ALOGV("DEBUG(%s): sensor s_input done",  __FUNCTION__);
+
+    if (cam_int_s_fmt(&(m_camera_info.sensor))< 0) {
+        ALOGE("ERR(%s): sensor s_fmt fail",  __FUNCTION__);
+    }
+    ALOGV("DEBUG(%s): sensor s_fmt done",  __FUNCTION__);
+    cam_int_reqbufs(&(m_camera_info.sensor));
+    ALOGV("DEBUG(%s): sensor reqbuf done",  __FUNCTION__);
+    for (i = 0; i < m_camera_info.sensor.buffers; i++) {
+        ALOGV("DEBUG(%s): sensor initial QBUF [%d]",  __FUNCTION__, i);
+        memcpy( m_camera_info.sensor.buffer[i].virt.extP[1], &(m_camera_info.current_shot),
+                sizeof(camera2_shot_ext));
+        cam_int_qbuf(&(m_camera_info.sensor), i);
+    }
+    cam_int_streamon(&(m_camera_info.sensor));
+    m_camera_info.sensor.currentBufferIndex = 0;
+#endif
+#if 0
+    cam_int_s_input(&(m_camera_info.isp), m_camera_info.sensor_id);
+    cam_int_s_fmt(&(m_camera_info.isp));
+    ALOGV("DEBUG(%s): isp calling reqbuf", __FUNCTION__);
+    cam_int_reqbufs(&(m_camera_info.isp));
+    ALOGV("DEBUG(%s): isp calling querybuf", __FUNCTION__);
+
+    for (i = 0; i < m_camera_info.isp.buffers; i++) {
+        ALOGV("DEBUG(%s): isp initial QBUF [%d]",  __FUNCTION__, i);
+        cam_int_qbuf(&(m_camera_info.isp), i);
+    }
+    cam_int_streamon(&(m_camera_info.isp));
+
+    for (i = 0; i < m_camera_info.isp.buffers; i++) {
+        ALOGV("DEBUG(%s): isp initial DQBUF [%d]",  __FUNCTION__, i);
+        cam_int_dqbuf(&(m_camera_info.isp));
+    }
+#endif
+
+
+    while (!m_initFlag2) // temp
+        usleep(100000);
+    ALOGV("DEBUG(%s): END of SensorThreadInitialize ", __FUNCTION__);
+    return;
+}
+
+
+
+void ExynosCameraHWInterface2::DumpFrameinfoWithBufIndex(int bufIndex)
+{
+    bool found = false;
+    int i;
+    struct camera2_shot_ext *shot_ext;
+    for (i=0 ; i < NUM_BAYER_BUFFERS+SHOT_FRAME_DELAY ; i++) {
+        if (m_bayerQueueList[i] == bufIndex) {
+            found = true;
+            break;
+        }
+    }
+    if (!found) {
+        ALOGD("DEBUG(%s): dumping bufIndex[%d] not found", __FUNCTION__, bufIndex);
+    }
+    else {
+         ALOGD("DEBUG(%s): bufIndex[%d] found at [%d]. reqIndex=[%d]",
+            __FUNCTION__, bufIndex, i, m_bayerQueueRequestList[i]);
+         ALOGD("#### info : reqManager ####");
+            m_requestManager->DumpInfoWithIndex(m_bayerQueueRequestList[i]);
+    }
+
+    ALOGD("#### info : shot on sensorBuffer ####");
+       shot_ext = (struct camera2_shot_ext *)(m_camera_info.sensor.buffer[bufIndex].virt.extP[1]);
+       DumpInfoWithShot(shot_ext);
+
+}
+
+void ExynosCameraHWInterface2::DumpInfoWithShot(struct camera2_shot_ext * shot_ext)
+{
+    ALOGV("####  common Section");
+    ALOGV("####                 magic(%x) ",
+        shot_ext->shot.magicNumber);
+    ALOGV("####  ctl Section");
+    ALOGV("####     metamode(%d) exposureTime(%lld) duration(%lld) ISO(%d) ",
+        shot_ext->shot.ctl.request.metadataMode,
+        shot_ext->shot.ctl.sensor.exposureTime,
+        shot_ext->shot.ctl.sensor.frameDuration,
+        shot_ext->shot.ctl.sensor.sensitivity);
+
+    ALOGV("####                 OutputStream Sensor(%d) SCP(%d) SCC(%d)",shot_ext->request_sensor,
+        shot_ext->request_scp, shot_ext->request_scc);
+
+    ALOGV("####  DM Section");
+    ALOGV("####     metamode(%d) exposureTime(%lld) duration(%lld) ISO(%d) frameCnt(%d) timestamp(%lld)",
+        shot_ext->shot.dm.request.metadataMode,
+        shot_ext->shot.dm.sensor.exposureTime,
+        shot_ext->shot.dm.sensor.frameDuration,
+        shot_ext->shot.dm.sensor.sensitivity,
+        shot_ext->shot.dm.sensor.frameCount,
+        shot_ext->shot.dm.sensor.timeStamp);
+}
+
+void ExynosCameraHWInterface2::m_sensorThreadFunc(SignalDrivenThread * self)
+{
+    uint32_t        currentSignal = self->GetProcessingSignal();
+    SensorThread *  selfThread      = ((SensorThread*)self);
+    int index;
+    status_t res;
+    nsecs_t frameTime;
+    int bayersOnSensor = 0, bayersOnIsp = 0;
+    ALOGV("DEBUG(%s): m_sensorThreadFunc (%x)", __FUNCTION__, currentSignal);
+
+    if (currentSignal & SIGNAL_THREAD_RELEASE) {
+        ALOGV("DEBUG(%s): processing SIGNAL_THREAD_RELEASE", __FUNCTION__);
+
+        for (int i = 0 ; i < NUM_BAYER_BUFFERS ;  i++) {
+            ALOGV("DEBUG(%s):###  Bayer Buf[%d] Status (%d)", __FUNCTION__, i, m_bayerBufStatus[i]);
+            if (m_bayerBufStatus[i]==BAYER_ON_SENSOR) {
+                bayersOnSensor++;
+            }
+            else if (m_bayerBufStatus[i]==BAYER_ON_ISP) {
+                bayersOnIsp++;
+            }
+        }
+        for (int i = 0 ; i < bayersOnSensor ; i++) {
+            index = cam_int_dqbuf(&(m_camera_info.sensor));
+            ALOGV("DEBUG(%s):###  sensor dqbuf done index(%d)", __FUNCTION__, index);
+            m_bayerBufStatus[index] = BAYER_ON_HAL_EMPTY;
+        }
+        for (int i = 0 ; i < bayersOnIsp ; i++) {
+            index = cam_int_dqbuf(&(m_camera_info.isp));
+            ALOGV("DEBUG(%s):###  isp dqbuf done index(%d)", __FUNCTION__, index);
+            m_bayerBufStatus[index] = BAYER_ON_HAL_EMPTY;
+        }
+
+        for (int i = 0 ; i < NUM_BAYER_BUFFERS ;  i++) {
+            ALOGV("DEBUG(%s):###  Bayer Buf[%d] Status (%d)", __FUNCTION__, i, m_bayerBufStatus[i]);
+        }
+        exynos_v4l2_s_ctrl(m_camera_info.sensor.fd, V4L2_CID_IS_S_STREAM, IS_DISABLE_STREAM);
+        ALOGV("DEBUG(%s): calling sensor streamoff", __FUNCTION__);
+        cam_int_streamoff(&(m_camera_info.sensor));
+        ALOGV("DEBUG(%s): calling sensor streamoff done", __FUNCTION__);
+        exynos_v4l2_s_ctrl(m_camera_info.sensor.fd, V4L2_CID_IS_S_STREAM, IS_DISABLE_STREAM);
+        /*
+        ALOGV("DEBUG(%s): calling sensor s_ctrl done", __FUNCTION__);
+        m_camera_info.sensor.buffers = 0;
+        cam_int_reqbufs(&(m_camera_info.sensor));
+        ALOGV("DEBUG(%s): calling sensor reqbuf 0 done", __FUNCTION__);
+        */
+/*
+        ALOGV("DEBUG(%s): calling exynos_v4l2_close - sensor", __FUNCTION__);
+        res = exynos_v4l2_close(m_camera_info.sensor.fd);
+        if (res != NO_ERROR ) {
+            ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
+        }
+  */
+        ALOGV("DEBUG(%s): processing SIGNAL_THREAD_RELEASE DONE", __FUNCTION__);
+
+        selfThread->SetSignal(SIGNAL_THREAD_TERMINATE);
+        return;
+    }
+
+    if (currentSignal & SIGNAL_SENSOR_START_REQ_PROCESSING)
+    {
+        ALOGV("DEBUG(%s): SensorThread processing SIGNAL_SENSOR_START_REQ_PROCESSING", __FUNCTION__);
+        int targetStreamIndex = 0;
+        int matchedEntryNumber, processingReqIndex;
+        struct camera2_shot_ext *shot_ext;
+        if (!m_isSensorStarted)
+        {
+            m_isSensorStarted = true;
+            ALOGV("DEBUG(%s): calling preview streamon", __FUNCTION__);
+            cam_int_streamon(&(m_streamThreads[0]->m_parameters.node));
+            ALOGV("DEBUG(%s): calling preview streamon done", __FUNCTION__);
+            exynos_v4l2_s_ctrl(m_camera_info.isp.fd, V4L2_CID_IS_S_STREAM, IS_ENABLE_STREAM);
+            ALOGV("DEBUG(%s): calling isp sctrl done", __FUNCTION__);
+            exynos_v4l2_s_ctrl(m_camera_info.sensor.fd, V4L2_CID_IS_S_STREAM, IS_ENABLE_STREAM);
+            ALOGV("DEBUG(%s): calling sensor sctrl done", __FUNCTION__);
+
+        }
+        else
+        {
+            ALOGV("DEBUG(%s): sensor started already", __FUNCTION__);
+        }
+
+        ALOGV("### Sensor DQBUF start");
+        index = cam_int_dqbuf(&(m_camera_info.sensor));
+        frameTime = systemTime();
+        ALOGV("### Sensor DQBUF done index(%d)", index);
+
+        if (m_lastTimeStamp!=0 && (frameTime-m_lastTimeStamp)>100000000) {
+            ALOGV("########## lost frame detected ########");
+            m_lastTimeStamp = 0;
+        }
+        if (m_bayerBufStatus[index]!=BAYER_ON_SENSOR)
+            ALOGD("DBG(%s): bayer buf status abnormal index[%d] status(%d)",
+                __FUNCTION__, index, m_bayerBufStatus[index]);
+
+        matchedEntryNumber = FindRequestEntryNumber(index);
+        DeregisterBayerQueueList(index);
+
+        if (m_ispInputIndex != -1) {
+            ALOGV("####### sensor delay sleep");
+            usleep(5000);
+        }
+        if (matchedEntryNumber != -1) {
+            m_bayerBufStatus[index] = BAYER_ON_HAL_FILLED;
+            m_ispInputIndex = index;
+            m_processingRequest = matchedEntryNumber;
+            m_requestManager->RegisterTimestamp(m_processingRequest, &frameTime);
+            ALOGD("### Sensor DQed buf index(%d) passing to ISP. req(%d) timestamp(%lld)", index,matchedEntryNumber, frameTime);
+            if (!(m_ispThread.get())) return;
+            m_ispThread->SetSignal(SIGNAL_ISP_START_BAYER_INPUT);
+            //RegisterBayerDequeueList(index);  // deferred: done in the ISP thread instead
+        }
+        else {
+            m_bayerBufStatus[index] = BAYER_ON_HAL_FILLED;
+            m_ispInputIndex = index;
+            m_processingRequest = -1;
+            ALOGV("### Sensor DQed buf index(%d) passing to ISP. BUBBLE", index);
+            if (!(m_ispThread.get())) return;
+            m_ispThread->SetSignal(SIGNAL_ISP_START_BAYER_INPUT);
+            //RegisterBayerDequeueList(index);
+        }
+
+        while (m_numBayerQueueList<SHOT_FRAME_DELAY) {
+
+            index = DeregisterBayerDequeueList();
+            if (index == -1) {
+                ALOGE("ERR(%s) No free Bayer buffer", __FUNCTION__);
+                break;
+            }
+            processingReqIndex = m_requestManager->MarkProcessingRequest(&(m_camera_info.sensor.buffer[index]));
+
+            if (processingReqIndex == -1) {
+                ALOGV("DEBUG(%s) req underrun => inserting bubble to index(%d)", __FUNCTION__, index);
+                       shot_ext = (struct camera2_shot_ext *)(m_camera_info.sensor.buffer[index].virt.extP[1]);
+                memcpy(shot_ext, &(m_camera_info.dummy_shot), sizeof(struct camera2_shot_ext));
+            }
+
+            RegisterBayerQueueList(index, processingReqIndex);
+
+            ALOGV("### Sensor QBUF start index(%d)", index);
+            /* if (processingReqIndex != -1)
+                DumpFrameinfoWithBufIndex(index); */
+            cam_int_qbuf(&(m_camera_info.sensor), index);
+            m_bayerBufStatus[index] = BAYER_ON_SENSOR;
+            ALOGV("### Sensor QBUF done");
+        }
+        if (!m_closing) selfThread->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING);
+        return;
+#if 0
+        if (m_numBayerQueueList==3) {
+            selfThread->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING);
+            ALOGV("### Sensor will not QBUF num(%d) [%d] [%d] [%d] ", bufsOnHal,indexToQueue[0],indexToQueue[1],indexToQueue[2] );
+            return;
+        }
+
+
+
+        while (m_bayerBufStatus[index] != BAYER_ON_HAL_EMPTY) // TODO : use signal
+            usleep(5000);
+
+        // TODO : instead of re-using 'index', query reqManager about free entry
+
+        if (m_requestManager->MarkProcessingRequest(&(m_camera_info.sensor.buffer[index]))!=NO_ERROR) {
+            ALOGV("DEBUG(%s) inserting bubble to index(%d)", __FUNCTION__, index);
+               shot_ext = (struct camera2_shot_ext *)(m_camera_info.sensor.buffer[index].virt.extP[1]);
+            memcpy(shot_ext, &(m_camera_info.dummy_shot), sizeof(camera2_shot_ext));
+        }
+        m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 33*1000*1000;
+/*
+               shot_ext = (struct camera2_shot_ext *)(m_camera_info.sensor.buffer[index].virt.extP[1]);
+               shot_ext->request_sensor = m_camera_info.current_shot.request_sensor;
+               shot_ext->request_scc = m_camera_info.current_shot.request_scc;
+               shot_ext->request_scp = m_camera_info.current_shot.request_scp;
+               shot_ext->shot.magicNumber = m_camera_info.current_shot.shot.magicNumber;
+               memcpy(&shot_ext->shot.ctl, &m_camera_info.current_shot.shot.ctl,
+                               sizeof(struct camera2_ctl));
+*/
+        // FOR DEBUG
+        //shot_ext->shot.ctl.request.id = m_camera_info.sensor_frame_count;
+
+        //ALOGV("### isp QBUF start index(%d)", index);
+        //cam_int_qbuf(&(m_camera_info.isp), index);
+        //ALOGV("### isp QBUF done and calling DQBUF");
+        //index = cam_int_dqbuf(&(m_camera_info.isp));
+        //ALOGV("### isp DQBUF done index(%d)", index);
+
+               {
+//                     m_camera_info.current_shot.shot.ctl.sensor.frameDuration = 33*1000*1000;
+                       //m_camera_info.current_shot.shot.ctl.sensor.frameDuration = 66*1000*1000;
+//                     m_camera_info.current_shot.request_scp = 1;
+                       //m_camera_info.sensor_frame_count++;
+               }
+/*             memcpy(&shot_ext->shot.ctl.sensor,
+               &m_camera_info.current_shot.shot.ctl.sensor,
+               sizeof(struct camera2_sensor_ctl));*/
+        ALOGV("### Sensor QBUF start index(%d)", index);
+        cam_int_qbuf(&(m_camera_info.sensor), index);
+        m_bayerBufStatus[index] = BAYER_ON_SENSOR;
+        ALOGV("### Sensor QBUF done");
+
+
+        selfThread->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING);
+#endif
+        }
+    return;
+}
+
+
+void ExynosCameraHWInterface2::m_ispThreadInitialize(SignalDrivenThread * self)
+{
+    ALOGV("DEBUG(%s): ", __FUNCTION__ );
+    IspThread * selfThread = ((IspThread*)self);
+    char node_name[30];
+    int fd = 0;
+    int i =0, j=0;
+
+
+    while (!m_initFlag1) //temp
+        usleep(100000);
+
+    /*isp init*/
+    memset(&node_name, 0x00, sizeof(char[30]));
+    sprintf(node_name, "%s%d", NODE_PREFIX, 41);
+    fd = exynos_v4l2_open(node_name, O_RDWR, 0);
+
+    if (fd < 0) {
+        ALOGE("ERR(%s): failed to open isp video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
+    }
+    else {
+        ALOGV("DEBUG(%s): isp video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
+    }
+    m_camera_info.isp.fd = fd;
+
+    m_camera_info.isp.width = m_camera_info.sensor.width;
+    m_camera_info.isp.height = m_camera_info.sensor.height;
+    m_camera_info.isp.format = m_camera_info.sensor.format;
+    m_camera_info.isp.planes = m_camera_info.sensor.planes;
+    m_camera_info.isp.buffers = m_camera_info.sensor.buffers;
+    m_camera_info.isp.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
+    m_camera_info.isp.memory = V4L2_MEMORY_DMABUF;
+    //m_camera_info.isp.ionClient = m_ionCameraClient;
+/*
+    for(i = 0; i < m_camera_info.isp.buffers; i++){
+        initCameraMemory(&m_camera_info.isp.buffer[i], m_camera_info.isp.planes);
+        m_camera_info.isp.buffer[i].size.extS[0] = m_camera_info.isp.width*m_camera_info.isp.height*2;
+        allocCameraMemory(m_camera_info.isp.ionClient, &m_camera_info.isp.buffer[i], m_camera_info.isp.planes);
+    };
+*/
+
+    for(i = 0; i < m_camera_info.isp.buffers; i++){
+        initCameraMemory(&m_camera_info.isp.buffer[i], m_camera_info.isp.planes);
+        m_camera_info.isp.buffer[i].size.extS[0]    = m_camera_info.sensor.buffer[i].size.extS[0];
+        m_camera_info.isp.buffer[i].size.extS[1]    = m_camera_info.sensor.buffer[i].size.extS[1];
+        m_camera_info.isp.buffer[i].fd.extFd[0]     = m_camera_info.sensor.buffer[i].fd.extFd[0];
+        m_camera_info.isp.buffer[i].fd.extFd[1]     = m_camera_info.sensor.buffer[i].fd.extFd[1];
+        m_camera_info.isp.buffer[i].virt.extP[0]    = m_camera_info.sensor.buffer[i].virt.extP[0];
+        m_camera_info.isp.buffer[i].virt.extP[1]    = m_camera_info.sensor.buffer[i].virt.extP[1];
+    };
+
+    ALOGV("DEBUG(%s): isp mem alloc done",  __FUNCTION__);
+    cam_int_s_input(&(m_camera_info.sensor), m_camera_info.sensor_id);
+    ALOGV("DEBUG(%s): sensor s_input done",  __FUNCTION__);
+
+    if (cam_int_s_fmt(&(m_camera_info.sensor))< 0) {
+        ALOGE("ERR(%s): sensor s_fmt fail",  __FUNCTION__);
+    }
+    ALOGV("DEBUG(%s): sensor s_fmt done",  __FUNCTION__);
+    cam_int_reqbufs(&(m_camera_info.sensor));
+    ALOGV("DEBUG(%s): sensor reqbuf done",  __FUNCTION__);
+    for (i = 0; i < m_camera_info.sensor.buffers; i++) {
+        ALOGV("DEBUG(%s): sensor initial QBUF [%d]",  __FUNCTION__, i);
+        memcpy( m_camera_info.sensor.buffer[i].virt.extP[1], &(m_camera_info.dummy_shot),
+                sizeof(struct camera2_shot_ext));
+        m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 33*1000*1000; // apply from frame #1
+
+        cam_int_qbuf(&(m_camera_info.sensor), i);
+        m_bayerBufStatus[i] = BAYER_ON_SENSOR;
+        RegisterBayerQueueList(i, -1);
     }
+    cam_int_streamon(&(m_camera_info.sensor));
 
-    m_camera_info.isp.fd = fd;
-    m_camera_info.isp.width = 2560;
-    m_camera_info.isp.height = 1920;
-    m_camera_info.isp.format = V4L2_PIX_FMT_SBGGR10;
-    m_camera_info.isp.planes = 1;
-    m_camera_info.isp.buffers = 1;
-    m_camera_info.isp.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
-    m_camera_info.isp.memory = V4L2_MEMORY_DMABUF;
-    m_camera_info.isp.ionClient = m_ionCameraClient;
+//    m_camera_info.sensor.currentBufferIndex = 0;
 
-    for(i = 0; i < m_camera_info.isp.buffers; i++){
-        initCameraMemory(&m_camera_info.isp.buffer[i], m_camera_info.isp.planes);
-        m_camera_info.isp.buffer[i].size[0] = m_camera_info.isp.width*m_camera_info.isp.height*2;
-        allocCameraMemory(m_camera_info.isp.ionClient, &m_camera_info.isp.buffer[i], m_camera_info.isp.planes);
-    };
+    cam_int_s_input(&(m_camera_info.isp), m_camera_info.sensor_id);
+    cam_int_s_fmt(&(m_camera_info.isp));
+    ALOGV("DEBUG(%s): isp calling reqbuf", __FUNCTION__);
+    cam_int_reqbufs(&(m_camera_info.isp));
+    ALOGV("DEBUG(%s): isp calling querybuf", __FUNCTION__);
+
+    for (i = 0; i < m_camera_info.isp.buffers; i++) {
+        ALOGV("DEBUG(%s): isp initial QBUF [%d]",  __FUNCTION__, i);
+        cam_int_qbuf(&(m_camera_info.isp), i);
+    }
+    cam_int_streamon(&(m_camera_info.isp));
 
+    for (i = 0; i < m_camera_info.isp.buffers; i++) {
+        ALOGV("DEBUG(%s): isp initial DQBUF [%d]",  __FUNCTION__, i);
+        cam_int_dqbuf(&(m_camera_info.isp));
+    }
 
-    /*capture init*/
+/*capture init*/
     memset(&node_name, 0x00, sizeof(char[30]));
     sprintf(node_name, "%s%d", NODE_PREFIX, 42);
     fd = exynos_v4l2_open(node_name, O_RDWR, 0);
+
     if (fd < 0) {
-        ALOGV("DEBUG(%s): failed to open capture video node (%s) fd (%d)", __func__,node_name, fd);
+        ALOGE("ERR(%s): failed to open capture video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
     }
     else {
-        ALOGV("DEBUG(%s): capture video node opened(%s) fd (%d)", __func__,node_name, fd);
+        ALOGV("DEBUG(%s): capture video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
     }
-
-    
     m_camera_info.capture.fd = fd;
     m_camera_info.capture.width = 2560;
     m_camera_info.capture.height = 1920;
@@ -825,254 +2025,756 @@ void ExynosCameraHWInterface2::m_sensorThreadInitialize(SignalDrivenThread * sel
 
     for(i = 0; i < m_camera_info.capture.buffers; i++){
         initCameraMemory(&m_camera_info.capture.buffer[i], m_camera_info.capture.planes);
-        m_camera_info.capture.buffer[i].size[0] = m_camera_info.capture.width*m_camera_info.capture.height*2;
+        m_camera_info.capture.buffer[i].size.extS[0] = m_camera_info.capture.width*m_camera_info.capture.height*2;
         allocCameraMemory(m_camera_info.capture.ionClient, &m_camera_info.capture.buffer[i], m_camera_info.capture.planes);
     }
 
-    cam_int_s_input(&(m_camera_info.sensor), m_camera_info.sensor_id);
-    
-    if (cam_int_s_fmt(&(m_camera_info.sensor))< 0) {
-        ALOGE("DEBUG(%s): sensor s_fmt fail",  __func__);
-    }
-    cam_int_reqbufs(&(m_camera_info.sensor));
-
-    for (i = 0; i < m_camera_info.sensor.buffers; i++) {
-        ALOGV("DEBUG(%s): sensor initial QBUF [%d]",  __func__, i);
-        memcpy( m_camera_info.sensor.buffer[i].virBuffer[1], &(m_camera_info.current_shot),
-                sizeof(camera2_shot_t));
-        
-        cam_int_qbuf(&(m_camera_info.sensor), i);
-    }
-    cam_int_streamon(&(m_camera_info.sensor));
-    
-    m_camera_info.sensor.currentBufferIndex = 0;
-
-    cam_int_s_input(&(m_camera_info.isp), m_camera_info.sensor_id); 
-    cam_int_s_fmt(&(m_camera_info.isp));
-    ALOGV("DEBUG(%s): isp calling reqbuf", __func__);
-    cam_int_reqbufs(&(m_camera_info.isp));
-    ALOGV("DEBUG(%s): isp calling querybuf", __func__);
+    cam_int_s_input(&(m_camera_info.capture), m_camera_info.sensor_id);
+    cam_int_s_fmt(&(m_camera_info.capture));
+    ALOGV("DEBUG(%s): capture calling reqbuf", __FUNCTION__);
+    cam_int_reqbufs(&(m_camera_info.capture));
+    ALOGV("DEBUG(%s): capture calling querybuf", __FUNCTION__);
 
-    for (i = 0; i < m_camera_info.isp.buffers; i++) {
-        ALOGV("DEBUG(%s): isp initial QBUF [%d]",  __func__, i);    
-        cam_int_qbuf(&(m_camera_info.isp), i);
+    for (i = 0; i < m_camera_info.capture.buffers; i++) {
+        ALOGV("DEBUG(%s): capture initial QBUF [%d]",  __FUNCTION__, i);
+        cam_int_qbuf(&(m_camera_info.capture), i);
     }
+    cam_int_streamon(&(m_camera_info.capture));
 
-    cam_int_streamon(&(m_camera_info.isp));
-
-
-    ALOGV("DEBUG(%s): END of SensorThreadInitialize ", __func__);
+    m_initFlag2 = true;
+    ALOGV("DEBUG(%s): END of IspThreadInitialize ", __FUNCTION__);
     return;
 }
 
-void ExynosCameraHWInterface2::m_sensorThreadFunc(SignalDrivenThread * self)
+
+void ExynosCameraHWInterface2::m_ispThreadFunc(SignalDrivenThread * self)
 {
-    uint32_t currentSignal = self->GetProcessingSignal();
+    uint32_t        currentSignal = self->GetProcessingSignal();
+    IspThread *  selfThread      = ((IspThread*)self);
     int index;
-    ALOGV("DEBUG(%s): m_sensorThreadFunc (%x)", __func__, currentSignal);
-    
-    if (currentSignal & SIGNAL_SENSOR_START_REQ_PROCESSING)
+    status_t res;
+    ALOGV("DEBUG(%s): m_ispThreadFunc (%x)", __FUNCTION__, currentSignal);
+
+    if (currentSignal & SIGNAL_THREAD_RELEASE) {
+        ALOGV("DEBUG(%s): processing SIGNAL_THREAD_RELEASE", __FUNCTION__);
+
+        ALOGV("DEBUG(%s): calling capture streamoff", __FUNCTION__);
+        cam_int_streamoff(&(m_camera_info.capture));
+        ALOGV("DEBUG(%s): calling capture streamoff done", __FUNCTION__);
+        /*
+        ALOGV("DEBUG(%s): calling capture s_ctrl done", __FUNCTION__);
+        m_camera_info.capture.buffers = 0;
+        cam_int_reqbufs(&(m_camera_info.capture));
+        ALOGV("DEBUG(%s): calling capture reqbuf 0 done", __FUNCTION__);
+*/
+        ALOGV("DEBUG(%s): processing SIGNAL_THREAD_RELEASE DONE", __FUNCTION__);
+        selfThread->SetSignal(SIGNAL_THREAD_TERMINATE);
+        return;
+    }
+    if (currentSignal & SIGNAL_ISP_START_BAYER_INPUT)
     {
-        ALOGV("DEBUG(%s): SensorThread processing SIGNAL_SENSOR_START_REQ_PROCESSING", __func__);
-
-        m_requestManager->MarkProcessingRequest(&(m_camera_info.sensor.buffer[m_camera_info.sensor.currentBufferIndex]));
-        if (!m_isStreamStarted)
-        {
-            m_isStreamStarted = true;
-            ALOGV("DEBUG(%s):  calling preview streamon", __func__);
-            cam_int_streamon(&(m_camera_info.preview));   
-            ALOGV("DEBUG(%s):  calling preview streamon done", __func__);
-            exynos_v4l2_s_ctrl(m_camera_info.isp.fd, V4L2_CID_IS_S_STREAM, IS_ENABLE_STREAM);
-            ALOGV("DEBUG(%s):  calling isp sctrl done", __func__);
-            exynos_v4l2_s_ctrl(m_camera_info.sensor.fd, V4L2_CID_IS_S_STREAM, IS_ENABLE_STREAM);
-            ALOGV("DEBUG(%s):  calling sensor sctrl done", __func__);
-            //sleep(3);
+        struct camera2_shot_ext *shot_ext =
+            (struct camera2_shot_ext *)(m_camera_info.sensor.buffer[m_ispInputIndex].virt.extP[1]);
+/*
+//        int targetStreamIndex = 0;
+        struct camera2_shot_ext *shot_ext;
+
+        shot_ext = (struct camera2_shot_ext *)(m_camera_info.sensor.buffer[m_ispInputIndex].virt.extP[1]);
+               shot_ext->request_sensor = m_camera_info.current_shot.request_sensor;
+               shot_ext->request_scc = m_camera_info.current_shot.request_scc;
+               shot_ext->request_scp = m_camera_info.current_shot.request_scp;
+               shot_ext->shot.magicNumber = m_camera_info.current_shot.shot.magicNumber;
+               memcpy(&shot_ext->shot.ctl, &m_camera_info.current_shot.shot.ctl,
+                               sizeof(struct camera2_ctl));
+        ALOGV("DEBUG(%s): IspThread processing SIGNAL_ISP_START_BAYER_INPUT id-dm(%d) id-ctl(%d) frameCnt-dm(%d) scp(%d) scc(%d) magic(%x)",
+                __FUNCTION__, shot_ext->shot.dm.request.id, shot_ext->shot.ctl.request.id, shot_ext->shot.dm.sensor.frameCount,
+                 shot_ext->request_scp, shot_ext->request_scc, shot_ext->shot.magicNumber);
+        ALOGV("DEBUG(%s): m_numExpRemainingOutScp = %d  m_numExpRemainingOutScc = %d", __FUNCTION__, m_numExpRemainingOutScp, m_numExpRemainingOutScc);
+       */
+           ALOGV("DEBUG(%s): IspThread processing SIGNAL_ISP_START_BAYER_INPUT", __FUNCTION__);
+        m_ispProcessingIndex = m_ispInputIndex;
+        m_ispThreadProcessingReq = m_processingRequest;
+        m_ispInputIndex = -1;
+        ALOGV("### isp QBUF start index(%d) => for request(%d)", m_ispProcessingIndex, m_ispThreadProcessingReq);
+
+        if (m_ispThreadProcessingReq != -1) {
+            // HACK : re-write request info here
+            ALOGV("### Re-writing output stream info");
+            m_requestManager->UpdateOutputStreamInfo(shot_ext, m_ispThreadProcessingReq);
+               DumpInfoWithShot(shot_ext);
         }
-        else
+        if (m_scp_flushing) {
+            shot_ext->request_scp = 1;
+        }
+        cam_int_qbuf(&(m_camera_info.isp), m_ispProcessingIndex);
+        m_bayerBufStatus[m_ispProcessingIndex] = BAYER_ON_ISP;
+        ALOGV("### isp QBUF done and calling DQBUF");
+        if (m_ispThreadProcessingReq != -1) // bubble
         {
-            ALOGV("DEBUG(%s):  streaming started already", __func__);
+            if (shot_ext->request_scc) {
+                m_numExpRemainingOutScc++;
+                m_streamThreads[1]->SetSignal(SIGNAL_STREAM_DATA_COMING);
+            }
+            if (shot_ext->request_scp) {
+                m_numExpRemainingOutScp++;
+                m_streamThreads[0]->SetSignal(SIGNAL_STREAM_DATA_COMING);
+            }
+            m_lastTimeStamp = systemTime();
+
         }
-        ALOGV("### Sensor DQBUF start");
-        index = cam_int_dqbuf(&(m_camera_info.sensor));
-        ALOGV("### Sensor DQBUF done index(%d), calling QBUF", index);
-        cam_int_qbuf(&(m_camera_info.sensor), index);
-        ALOGV("### Sensor QBUF done index(%d)", index);
-        m_streamThread->SetSignal(SIGNAL_STREAM_DATA_COMING);
-      
-        
+/*
+#if 1 // for test
+
+        if (shot_ext->request_scp) {
+            m_numExpRemainingOutScp++;
+            m_streamThreads[0]->SetSignal(SIGNAL_STREAM_DATA_COMING);
+        }
+        if (shot_ext->request_scc) {
+            m_numExpRemainingOutScc++;
+            m_streamThreads[1]->SetSignal(SIGNAL_STREAM_DATA_COMING);
+        }
+
+#else
+        if (currentEntry) {
+            for (int i = 0; i < currentEntry->output_stream_count; i++) {
+                targetStreamIndex = currentEntry->internal_shot.ctl.request.outputStreams[i];
+                    // TODO : match with actual stream index;
+                 ALOGV("### outputstream(%d) sending data signal to stream [%d]", i, targetStreamIndex);
+                m_streamThreads[targetStreamIndex]->SetSignal(SIGNAL_STREAM_DATA_COMING);
+            }
+        }
+#endif
+
+shot_ext = (struct camera2_shot_ext *)(m_camera_info.sensor.buffer[index].virt.extP[1]);
+        ALOGV("DEBUG(%s): information of DQed buffer id-dm(%d) id-ctl(%d) frameCnt-dm(%d) scp(%d) magic(%x)",
+                __FUNCTION__, shot_ext->shot.dm.request.id, shot_ext->shot.ctl.request.id, shot_ext->shot.dm.sensor.frameCount,
+                    shot_ext->request_scp, shot_ext->shot.magicNumber);
+*/
+        index = cam_int_dqbuf(&(m_camera_info.isp));
+        ALOGD("### isp DQBUF done index(%d) => for request(%d)", index, m_ispThreadProcessingReq);
+        if (m_ispThreadProcessingReq != -1) { // bubble
+            //DumpFrameinfoWithBufIndex(index);
+            shot_ext = (struct camera2_shot_ext *)(m_camera_info.sensor.buffer[index].virt.extP[1]);
+               DumpInfoWithShot(shot_ext);
+            m_requestManager->ApplyDynamicMetadata(m_ispThreadProcessingReq);
+        }
+        m_bayerBufStatus[index] = BAYER_ON_HAL_EMPTY;
+        RegisterBayerDequeueList(index);
     }
     return;
 }
 
 void ExynosCameraHWInterface2::m_streamThreadInitialize(SignalDrivenThread * self)
 {
-    ALOGV("DEBUG(%s): ", __func__ );
-    memset(&(((StreamThread*)self)->m_parameters), 0, sizeof(stream_parameters_t));
-    memset(&(((StreamThread*)self)->m_tempParameters), 0, sizeof(stream_parameters_t));
+    StreamThread *          selfThread      = ((StreamThread*)self);
+    ALOGV("DEBUG(%s): ", __FUNCTION__ );
+    memset(&(selfThread->m_parameters), 0, sizeof(stream_parameters_t));
+    selfThread->m_isBufferInit = false;
+
     return;
 }
 
-int ExynosCameraHWInterface2::matchBuffer(void * bufAddr)
-{
-    int j;
-    for (j=0 ; j < 8 ; j++) {
-        if (m_streamThread->m_parameters.grallocVirtAddr[j]== bufAddr)
-            return j;
-    }
-    
-    return -1;
-}
+
 void ExynosCameraHWInterface2::m_streamThreadFunc(SignalDrivenThread * self)
 {
-    uint32_t currentSignal = self->GetProcessingSignal();
-    ALOGV("DEBUG(%s): m_streamThreadFunc[%d] (%x)", __func__, ((StreamThread*)self)->m_index, currentSignal);
+    uint32_t                currentSignal   = self->GetProcessingSignal();
+    StreamThread *          selfThread      = ((StreamThread*)self);
+    stream_parameters_t     *selfStreamParms =  &(selfThread->m_parameters);
+    node_info_t             *currentNode    = &(selfStreamParms->node);
 
-    if (currentSignal & SIGNAL_STREAM_CHANGE_PARAMETER)
-    {
-        ALOGV("DEBUG(%s): processing SIGNAL_STREAM_CHANGE_PARAMETER", __func__);
-        ALOGV("DEBUG(%s): [1] node width(%d), height(%d), fd(%d), buffers(%d)", __func__, 
-            m_camera_info.preview.width, m_camera_info.preview.height, m_camera_info.preview.fd, m_camera_info.preview.buffers); 
+    ALOGV("DEBUG(%s): m_streamThreadFunc[%d] (%x)", __FUNCTION__, selfThread->m_index, currentSignal);
 
+    if (currentSignal & SIGNAL_STREAM_CHANGE_PARAMETER) {
+        ALOGV("DEBUG(%s): processing SIGNAL_STREAM_CHANGE_PARAMETER", __FUNCTION__);
+        selfThread->applyChange();
+        if (selfStreamParms->streamType==1) {
+            m_resizeBuf.size.extS[0] = ALIGN(selfStreamParms->outputWidth, 16) * ALIGN(selfStreamParms->outputHeight, 16) * 2;
+            m_resizeBuf.size.extS[1] = 0;
+            m_resizeBuf.size.extS[2] = 0;
 
-        ALOGV("DEBUG(%s): [2] node width(%d), height(%d), fd(%d), buffers(%d)", __func__, 
-            m_camera_info.preview.width, m_camera_info.preview.height, m_camera_info.preview.fd, m_camera_info.preview.buffers);            
-        ((StreamThread*)self)->ApplyChange();
+            if (allocCameraMemory(selfStreamParms->ionClient, &m_resizeBuf, 1) == -1) {
+                ALOGE("ERR(%s): Failed to allocate resize buf", __FUNCTION__);
+            }
+        }
+        ALOGV("DEBUG(%s): processing SIGNAL_STREAM_CHANGE_PARAMETER DONE", __FUNCTION__);
     }
-    if (currentSignal & SIGNAL_STREAM_DATA_COMING)
-    {
+
+    if (currentSignal & SIGNAL_THREAD_RELEASE) {
+        int i, index = -1, cnt_to_dq=0;
+        status_t res;
+        ALOGV("DEBUG(%s): processing SIGNAL_THREAD_RELEASE", __FUNCTION__);
+
+
+
+        if (selfThread->m_isBufferInit) {
+            for ( i=0 ; i < selfStreamParms->numSvcBuffers; i++) {
+                ALOGV("DEBUG(%s): checking buffer index[%d] - status(%d)",
+                    __FUNCTION__, i, selfStreamParms->svcBufStatus[i]);
+                if (selfStreamParms->svcBufStatus[i] ==ON_DRIVER) cnt_to_dq++;
+            }
+            m_scp_flushing = true;
+            for ( i=0 ; i < cnt_to_dq ; i++) {
+                ALOGV("@@@@@@ dq start");
+                index = cam_int_dqbuf(&(selfStreamParms->node));
+                ALOGV("@@@@@@ dq done, index(%d)", index);
+                if (index >=0 && index < selfStreamParms->numSvcBuffers) {
+                    selfStreamParms->svcBufStatus[index] = ON_HAL;
+                }
+            }
+            m_scp_flushing = false;
+            ALOGV("DEBUG(%s): calling stream(%d) streamoff (fd:%d)", __FUNCTION__,
+            selfThread->m_index, selfStreamParms->fd);
+            cam_int_streamoff(&(selfStreamParms->node));
+            ALOGV("DEBUG(%s): calling stream(%d) streamoff done", __FUNCTION__, selfThread->m_index);
+
+            for ( i=0 ; i < selfStreamParms->numSvcBuffers; i++) {
+                ALOGV("DEBUG(%s): releasing buffer index[%d] - status(%d)",
+                    __FUNCTION__, i, selfStreamParms->svcBufStatus[i]);
+
+                switch (selfStreamParms->svcBufStatus[i]) {
+
+                case ON_DRIVER:
+                    ALOGV("@@@@@@ this should not happen");
+                case ON_HAL:
+                    res = selfStreamParms->streamOps->cancel_buffer(selfStreamParms->streamOps,
+                            &(selfStreamParms->svcBufHandle[i]));
+                    if (res != NO_ERROR ) {
+                        ALOGE("ERR(%s): unable to cancel buffer : %d",__FUNCTION__ , res);
+                         // TODO : verify after service is ready
+                         // return;
+                    }
+                    break;
+                case ON_SERVICE:
+                default:
+                    break;
+
+                }
+            }
+        }
+        if (selfStreamParms->streamType==1) {
+            if (m_resizeBuf.size.s != 0) {
+                freeCameraMemory(&m_resizeBuf, 1);
+            }
+        }
+
+        selfThread->m_index = 255;
+        ALOGV("DEBUG(%s): processing SIGNAL_THREAD_RELEASE DONE", __FUNCTION__);
+        selfThread->SetSignal(SIGNAL_THREAD_TERMINATE);
+        return;
+    }
+
+    if (currentSignal & SIGNAL_STREAM_DATA_COMING) {
         buffer_handle_t * buf = NULL;
         status_t res;
         void *virtAddr[3];
         int i, j;
         int index;
-        int ret;
-        StreamThread * selfStream = ((StreamThread*)self);
-        ALOGV("DEBUG(%s): processing SIGNAL_STREAM_DATA_COMING", __func__);
-        if (!m_isBufferInit) {
-            for ( i=0 ; i < 8 ; i++)
-            {
-                res = selfStream->m_parameters.streamOps->dequeue_buffer(selfStream->m_parameters.streamOps, &buf);
+        ALOGV("DEBUG(%s): stream(%d) processing SIGNAL_STREAM_DATA_COMING",
+            __FUNCTION__,selfThread->m_index);
+        if (!(selfThread->m_isBufferInit)) {
+            for ( i=0 ; i < selfStreamParms->numSvcBuffers; i++) {
+                res = selfStreamParms->streamOps->dequeue_buffer(selfStreamParms->streamOps, &buf);
                 if (res != NO_ERROR || buf == NULL) {
-                    ALOGE("%s: Unable to dequeue buffer : %d",__func__ , res);
+                    ALOGE("ERR(%s): Init: unable to dequeue buffer : %d",__FUNCTION__ , res);
                     return;
                 }
-                
-                ALOGV("DEBUG(%s):got buf(%x) version(%d), numFds(%d), numInts(%d)", __func__, (uint32_t)(*buf),
+                ALOGV("DEBUG(%s): got buf(%x) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf),
                    ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
-                   
+
                 if (m_grallocHal->lock(m_grallocHal, *buf,
-                           selfStream->m_parameters.usage,
-                           0, 0, selfStream->m_parameters.width, selfStream->m_parameters.height, virtAddr) != 0) {
-                           
-                    ALOGE("ERR(%s):could not obtain gralloc buffer", __func__);
+                           selfStreamParms->usage,
+                           0, 0, selfStreamParms->outputWidth, selfStreamParms->outputHeight, virtAddr) != 0) {
+                    ALOGE("ERR(%s): could not obtain gralloc buffer", __FUNCTION__);
+                    return;
                 }
-                ALOGV("DEBUG(%s) locked img buf plane0(%x) plane1(%x) plane2(%x)", __func__, (unsigned int)virtAddr[0], (unsigned int)virtAddr[1], (unsigned int)virtAddr[2]); 
-                ret = matchBuffer(virtAddr[0]);
-                if (ret==-1) {
-                    ALOGE("##### could not find matched buffer");
+                ALOGV("DEBUG(%s): locked img buf plane0(%x) plane1(%x) plane2(%x)",
+                __FUNCTION__, (unsigned int)virtAddr[0], (unsigned int)virtAddr[1], (unsigned int)virtAddr[2]);
+
+                index = selfThread->findBufferIndex(virtAddr[0]);
+                if (index == -1) {
+                    ALOGE("ERR(%s): could not find buffer index", __FUNCTION__);
                 }
                 else {
-                    ALOGV("##### found matched buffer[%d]", ret);
-                    m_streamThread->m_parameters.bufHandle[i] = buf;
+                    ALOGV("DEBUG(%s): found buffer index[%d] - status(%d)",
+                        __FUNCTION__, index, selfStreamParms->svcBufStatus[index]);
+                    if (selfStreamParms->svcBufStatus[index]== REQUIRES_DQ_FROM_SVC)
+                        selfStreamParms->svcBufStatus[index] = ON_DRIVER;
+                    else if (selfStreamParms->svcBufStatus[index]== ON_SERVICE)
+                        selfStreamParms->svcBufStatus[index] = ON_HAL;
+                    else {
+                        ALOGD("DBG(%s): buffer status abnormal (%d) "
+                            , __FUNCTION__, selfStreamParms->svcBufStatus[index]);
+                    }
+                    if (*buf != selfStreamParms->svcBufHandle[index])
+                        ALOGD("DBG(%s): different buf_handle index ", __FUNCTION__);
+                    else
+                        ALOGV("DEBUG(%s): same buf_handle index", __FUNCTION__);
                 }
-                
+                m_svcBufIndex = 0;
             }
-            m_isBufferInit = true;
+            selfThread->m_isBufferInit = true;
         }
-        ALOGV("##### buffer init done[1]");
 
-        ALOGV("### preview DQBUF start");
-        index = cam_int_dqbuf(&(m_camera_info.preview));
-        ALOGV("### preview DQBUF done index(%d)", index);
 
-        res = selfStream->m_parameters.streamOps->enqueue_buffer(selfStream->m_parameters.streamOps, systemTime(), m_streamThread->m_parameters.bufHandle[index]);
-        ALOGV("### preview enqueue_buffer to svc done res(%d)", res);
-        
-         res = selfStream->m_parameters.streamOps->dequeue_buffer(selfStream->m_parameters.streamOps, &buf);
-        if (res != NO_ERROR || buf == NULL) {
-            ALOGE("%s: Unable to dequeue buffer : %d",__func__ , res);
-            return;
-        }
-        
-        ALOGV("DEBUG(%s):got buf(%x) version(%d), numFds(%d), numInts(%d)", __func__, (uint32_t)(*buf),
-           ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
-           
-        if (m_grallocHal->lock(m_grallocHal, *buf,
-                   selfStream->m_parameters.usage,
-                   0, 0, selfStream->m_parameters.width, selfStream->m_parameters.height, virtAddr) != 0) {
-                   
-            ALOGE("ERR(%s):could not obtain gralloc buffer", __func__);
-        }
-        ALOGV("DEBUG(%s) locked img buf plane0(%x) plane1(%x) plane2(%x)", __func__, (unsigned int)virtAddr[0], (unsigned int)virtAddr[1], (unsigned int)virtAddr[2]); 
-        ret = matchBuffer(virtAddr[0]);
-        if (ret==-1) {
-            ALOGE("##### could not find matched buffer");
+        if (selfStreamParms->streamType==0) {
+            ALOGV("DEBUG(%s): stream(%d) type(%d) DQBUF START ",__FUNCTION__,
+                selfThread->m_index, selfStreamParms->streamType);
+/*
+            for ( i=0 ; i < selfStreamParms->numSvcBuffers; i++) {
+                ALOGV("DEBUG(%s): STREAM BUF status index[%d] - status(%d)",
+                    __FUNCTION__, i, selfStreamParms->svcBufStatus[i]);
+            }
+*/
+            index = cam_int_dqbuf(&(selfStreamParms->node));
+            ALOGV("DEBUG(%s): stream(%d) type(%d) DQBUF done index(%d)",__FUNCTION__,
+                selfThread->m_index, selfStreamParms->streamType, index);
+
+            m_numExpRemainingOutScp--;
+
+            if (selfStreamParms->svcBufStatus[index] !=  ON_DRIVER)
+                ALOGD("DBG(%s): DQed buffer status abnormal (%d) ",
+                       __FUNCTION__, selfStreamParms->svcBufStatus[index]);
+            selfStreamParms->svcBufStatus[index] = ON_HAL;
+            res = selfStreamParms->streamOps->enqueue_buffer(selfStreamParms->streamOps,
+                    m_requestManager->GetTimestamp(m_ispThreadProcessingReq), &(selfStreamParms->svcBufHandle[index]));
+            ALOGV("DEBUG(%s): stream(%d) enqueue_buffer to svc done res(%d)", __FUNCTION__, selfThread->m_index, res);
+            if (res == 0) {
+                selfStreamParms->svcBufStatus[index] = ON_SERVICE;
+            }
+            else {
+                selfStreamParms->svcBufStatus[index] = ON_HAL;
+            }
+            m_requestManager->NotifyStreamOutput(m_ispThreadProcessingReq, selfThread->m_index);
         }
-        else {
-            ALOGV("##### found matched buffer[%d]", ret);
-            int plane_index;
-            struct v4l2_buffer v4l2_buf;
-            struct v4l2_plane  planes[VIDEO_MAX_PLANES];
+        else if (selfStreamParms->streamType==1) {
+            ALOGV("DEBUG(%s): stream(%d) type(%d) DQBUF START ",__FUNCTION__,
+                selfThread->m_index, selfStreamParms->streamType);
+            index = cam_int_dqbuf(&(selfStreamParms->node));
+            ALOGV("DEBUG(%s): stream(%d) type(%d) DQBUF done index(%d)",__FUNCTION__,
+                selfThread->m_index, selfStreamParms->streamType, index);
+
+            m_numExpRemainingOutScc--;
+            m_jpegEncodingRequestIndex = m_ispThreadProcessingReq;
+
+            bool ret = false;
+            int pictureW, pictureH, pictureFramesize = 0;
+            int pictureFormat;
+            int cropX, cropY, cropW, cropH = 0;
+
+
+            ExynosBuffer jpegBuf;
+
+            ExynosRect   m_orgPictureRect;
+
+            m_orgPictureRect.w = selfStreamParms->outputWidth;
+            m_orgPictureRect.h = selfStreamParms->outputHeight;
+
+            ExynosBuffer* m_pictureBuf = &(m_camera_info.capture.buffer[index]);
+
+            pictureW = 2560;
+            pictureH = 1920;
+
+            pictureFormat = V4L2_PIX_FMT_YUYV;
+            pictureFramesize = FRAME_SIZE(V4L2_PIX_2_HAL_PIXEL_FORMAT(pictureFormat), pictureW, pictureH);
+
+            // resize from pictureBuf(max size) to rawHeap(user's set size)
+            if (m_exynosPictureCSC) {
+                m_getRatioSize(pictureW, pictureH,
+                               m_orgPictureRect.w, m_orgPictureRect.h,
+                               &cropX, &cropY,
+                               &cropW, &cropH,
+                               0); //m_secCamera->getZoom());
+
+                ALOGV("DEBUG(%s):cropX = %d, cropY = %d, cropW = %d, cropH = %d",
+                      __FUNCTION__, cropX, cropY, cropW, cropH);
+
+                csc_set_src_format(m_exynosPictureCSC,
+                                   ALIGN(pictureW, 16), ALIGN(pictureH, 16),
+                                   cropX, cropY, cropW, cropH,
+                                   V4L2_PIX_2_HAL_PIXEL_FORMAT(pictureFormat),
+                                   0);
+
+                csc_set_dst_format(m_exynosPictureCSC,
+                                   m_orgPictureRect.w, m_orgPictureRect.h,
+                                   0, 0, m_orgPictureRect.w, m_orgPictureRect.h,
+                                   V4L2_PIX_2_HAL_PIXEL_FORMAT(V4L2_PIX_FMT_NV16),
+                                   0);
+                csc_set_src_buffer(m_exynosPictureCSC,
+                                   (void **)&m_pictureBuf->fd.fd);
+
+                csc_set_dst_buffer(m_exynosPictureCSC,
+                                   (void **)&m_resizeBuf.fd.fd);
+                for (int i=0 ; i < 3 ; i++)
+                    ALOGV("DEBUG(%s): m_resizeBuf.virt.extP[%d]=%x m_resizeBuf.size.extS[%d]=%d",
+                        __FUNCTION__, i, m_resizeBuf.fd.extFd[i], i, m_resizeBuf.size.extS[i]);
+
+                if (csc_convert(m_exynosPictureCSC) != 0)
+                    ALOGE("ERR(%s): csc_convert() fail", __FUNCTION__);
+
+                for (int i=0 ; i < 3 ; i++)
+                    ALOGV("DEBUG(%s): m_resizeBuf.virt.extP[%d]=%x m_resizeBuf.size.extS[%d]=%d",
+                        __FUNCTION__, i, m_resizeBuf.fd.extFd[i], i, m_resizeBuf.size.extS[i]);
+            }
+            else {
+                ALOGE("ERR(%s): m_exynosPictureCSC == NULL", __FUNCTION__);
+            }
+
+            m_getAlignedYUVSize(V4L2_PIX_FMT_NV16, m_orgPictureRect.w, m_orgPictureRect.h, &m_resizeBuf);
+
+            for (int i=0 ; i < 3 ; i++) {
+                ALOGV("DEBUG(%s): m_resizeBuf.virt.extP[%d]=%x m_resizeBuf.size.extS[%d]=%d",
+                            __FUNCTION__, i, m_resizeBuf.fd.extFd[i], i, m_resizeBuf.size.extS[i]);
+            }
+
+            for (int i = 1; i < 3; i++) {
+                if (m_resizeBuf.size.extS[i] != 0)
+                    m_resizeBuf.fd.extFd[i] = m_resizeBuf.fd.extFd[i-1] + m_resizeBuf.size.extS[i-1];
+
+                ALOGV("(%s): m_resizeBuf.size.extS[%d] = %d", __FUNCTION__, i, m_resizeBuf.size.extS[i]);
+            }
 
 
+            ExynosRect jpegRect;
+            bool found = false;
+            jpegRect.w = m_orgPictureRect.w;
+            jpegRect.h = m_orgPictureRect.h;
+            jpegRect.colorFormat = V4L2_PIX_FMT_NV16;
 
-            v4l2_buf.m.planes = planes;
-            v4l2_buf.type       = m_camera_info.preview.type;
-            v4l2_buf.memory     = m_camera_info.preview.memory;
-            v4l2_buf.index      = ret;
-            v4l2_buf.length     = 3;
+            jpegBuf.size.extS[0] = 5*1024*1024;
+            jpegBuf.size.extS[1] = 0;
+            jpegBuf.size.extS[2] = 0;
 
-            const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf);
+            allocCameraMemory(currentNode->ionClient, &jpegBuf, 1);
 
-            v4l2_buf.m.planes[0].m.fd = priv_handle->fd;
-            v4l2_buf.m.planes[1].m.fd = priv_handle->fd1;
-            v4l2_buf.m.planes[2].m.fd = priv_handle->fd2;
+            ALOGV("DEBUG(%s): jpegBuf.size.s = %d , jpegBuf.virt.p = %x", __FUNCTION__,
+                jpegBuf.size.s, jpegBuf.virt.p);
 
-            // HACK
-            v4l2_buf.m.planes[0].length  = 1920*1088;
-            v4l2_buf.m.planes[1].length  = 1920*1088/4;
-            v4l2_buf.m.planes[2].length  = 1920*1088/4;
 
+            if (yuv2Jpeg(&m_resizeBuf, &jpegBuf, &jpegRect) == false)
+                ALOGE("ERR(%s):yuv2Jpeg() fail", __FUNCTION__);
+            cam_int_qbuf(&(selfStreamParms->node), index);
+            ALOGV("DEBUG(%s): stream(%d) type(%d) QBUF DONE ",__FUNCTION__,
+                selfThread->m_index, selfStreamParms->streamType);
 
-            if (exynos_v4l2_qbuf(m_camera_info.preview.fd, &v4l2_buf) < 0) {
-                ALOGE("ERR(%s):preview exynos_v4l2_qbuf() fail", __func__);
-                return;
+            for (int i = 0; i < selfStreamParms->numSvcBuffers ; i++) {
+                if (selfStreamParms->svcBufStatus[m_svcBufIndex] == ON_HAL) {
+                    found = true;
+                    break;
+                }
+                m_svcBufIndex++;
+                if (m_svcBufIndex >= selfStreamParms->numSvcBuffers) m_svcBufIndex = 0;
+            }
+            if (!found) {
+                ALOGE("ERR(%s): NO free SVC buffer for JPEG", __FUNCTION__);
+            }
+            else {
+                memcpy(selfStreamParms->svcBuffers[m_svcBufIndex].virt.extP[0], jpegBuf.virt.extP[0], 5*1024*1024);
+
+                res = selfStreamParms->streamOps->enqueue_buffer(selfStreamParms->streamOps,
+                        m_requestManager->GetTimestamp(m_jpegEncodingRequestIndex), &(selfStreamParms->svcBufHandle[m_svcBufIndex]));
+
+                freeCameraMemory(&jpegBuf, 1);
+                ALOGV("DEBUG(%s): stream(%d) enqueue_buffer index(%d) to svc done res(%d)",
+                        __FUNCTION__, selfThread->m_index, m_svcBufIndex, res);
+                if (res == 0) {
+                    selfStreamParms->svcBufStatus[m_svcBufIndex] = ON_SERVICE;
+                }
+                else {
+                    selfStreamParms->svcBufStatus[m_svcBufIndex] = ON_HAL;
+                }
+                m_requestManager->NotifyStreamOutput(m_jpegEncodingRequestIndex, selfThread->m_index);
             }
 
-            
-            ALOGV("### preview QBUF done index(%d)", index);
         }
+        while(1) {
+            res = selfStreamParms->streamOps->dequeue_buffer(selfStreamParms->streamOps, &buf);
+            if (res != NO_ERROR || buf == NULL) {
+                ALOGV("DEBUG(%s): stream(%d) dequeue_buffer fail res(%d)",__FUNCTION__ , selfThread->m_index,  res);
+                break;
+            }
+
+            ALOGV("DEBUG(%s): got buf(%x) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf),
+               ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
+
+            if (m_grallocHal->lock(m_grallocHal, *buf,
+                       selfStreamParms->usage,
+                       0, 0, selfStreamParms->outputWidth, selfStreamParms->outputHeight, virtAddr) != 0) {
+
+                ALOGE("ERR(%s):could not obtain gralloc buffer", __FUNCTION__);
+            }
+            ALOGV("DEBUG(%s): locked img buf plane0(%x) plane1(%x) plane2(%x)", __FUNCTION__,
+                (unsigned int)virtAddr[0], (unsigned int)virtAddr[1], (unsigned int)virtAddr[2]);
+
+            index = selfThread->findBufferIndex(virtAddr[0]);
+            if (index == -1) {
+                ALOGD("DBG(%s): could not find buffer index", __FUNCTION__);
+            }
+            else {
+                ALOGV("DEBUG(%s): found buffer index[%d]", __FUNCTION__, index);
+
+                if (selfStreamParms->svcBufStatus[index] != ON_SERVICE)
+                    ALOGD("DBG(%s): dequeued buf status abnormal (%d)", __FUNCTION__, selfStreamParms->svcBufStatus[index]);
+                else {
+                    selfStreamParms->svcBufStatus[index] = ON_HAL;
+                    if (index < selfStreamParms->numHwBuffers) {
+
+                        uint32_t    plane_index = 0;
+                        ExynosBuffer*  currentBuf = &(selfStreamParms->svcBuffers[index]);
+                        const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf);
+                        struct v4l2_buffer v4l2_buf;
+                        struct v4l2_plane  planes[VIDEO_MAX_PLANES];
+
+                        v4l2_buf.m.planes   = planes;
+                        v4l2_buf.type       = currentNode->type;
+                        v4l2_buf.memory     = currentNode->memory;
+                        v4l2_buf.index      = index;
+                        v4l2_buf.length     = currentNode->planes;
+
+                        v4l2_buf.m.planes[0].m.fd = priv_handle->fd;
+                        v4l2_buf.m.planes[1].m.fd = priv_handle->u_fd;
+                        v4l2_buf.m.planes[2].m.fd = priv_handle->v_fd;
+                        for (plane_index=0 ; plane_index < v4l2_buf.length ; plane_index++) {
+                            v4l2_buf.m.planes[plane_index].length  = currentBuf->size.extS[plane_index];
+                            ALOGV("DEBUG(%s): plane(%d): fd(%d) addr(%x), length(%d)",
+                                 __FUNCTION__, plane_index, v4l2_buf.m.planes[plane_index].m.fd,
+                                (unsigned int)(virtAddr[plane_index]), v4l2_buf.m.planes[plane_index].length);
+                        }
+
+                        if (selfStreamParms->streamType == 0) {
+                            if (exynos_v4l2_qbuf(currentNode->fd, &v4l2_buf) < 0) {
+                                ALOGE("ERR(%s): stream id(%d) exynos_v4l2_qbuf() fail",
+                                    __FUNCTION__, selfThread->m_index);
+                                return;
+                            }
+                            selfStreamParms->svcBufStatus[index] = ON_DRIVER;
+                            ALOGV("DEBUG(%s): stream id(%d) type0 QBUF done index(%d)",
+                                __FUNCTION__, selfThread->m_index, index);
+                        }
+                        else if (selfStreamParms->streamType == 1) {
+                            selfStreamParms->svcBufStatus[index]  = ON_HAL;
+                            ALOGV("DEBUG(%s): stream id(%d) type1 DQBUF done index(%d)",
+                                __FUNCTION__, selfThread->m_index, index);
+                        }
 
-        m_requestManager->NotifyStreamOutput(selfStream->m_parameters.id);
-        
+                    }
+                }
+            }
+        }
+        ALOGV("DEBUG(%s): stream(%d) processing SIGNAL_STREAM_DATA_COMING DONE",
+            __FUNCTION__,selfThread->m_index);
     }
     return;
 }
 
-void ExynosCameraHWInterface2::StreamThread::SetParameter(uint32_t id, uint32_t width, uint32_t height,
-        int format, const camera2_stream_ops_t* stream_ops, uint32_t usage, int fd, node_info_t * node)
-{
-    ALOGV("DEBUG(%s): id(%d) width(%d) height(%d) format(%x) fd(%d)", __func__, id, width, height, format, fd); 
-    m_tempParameters.id = id;
-    m_tempParameters.width = width;
-    m_tempParameters.height = height;
-    m_tempParameters.format = format;
-    m_tempParameters.streamOps = stream_ops;
-    m_tempParameters.usage = usage;
-    m_tempParameters.fd = fd;
-    m_tempParameters.node = node;
-    ALOGV("DEBUG(%s): node width(%d), height(%d), fd(%d), buffers(%d)", __func__, 
-     (m_tempParameters.node)->width, (m_tempParameters.node)->height, (m_tempParameters.node)->fd, (m_tempParameters.node)->buffers); 
-   
+bool ExynosCameraHWInterface2::yuv2Jpeg(ExynosBuffer *yuvBuf,
+                            ExynosBuffer *jpegBuf,
+                            ExynosRect *rect)
+{
+    unsigned char *addr;
+
+    ExynosJpegEncoderForCamera jpegEnc;
+    bool ret = false;
+    int res = 0;
+
+    unsigned int *yuvSize = yuvBuf->size.extS;
+
+    if (jpegEnc.create()) {
+        ALOGE("ERR(%s):jpegEnc.create() fail", __func__);
+        goto jpeg_encode_done;
+    }
+
+    if (jpegEnc.setQuality(100)) {
+        ALOGE("ERR(%s):jpegEnc.setQuality() fail", __func__);
+        goto jpeg_encode_done;
+    }
+
+    if (jpegEnc.setSize(rect->w, rect->h)) {
+        ALOGE("ERR(%s):jpegEnc.setSize() fail", __func__);
+        goto jpeg_encode_done;
+    }
+    ALOGV("%s : width = %d , height = %d\n", __FUNCTION__, rect->w, rect->h);
+
+    if (jpegEnc.setColorFormat(rect->colorFormat)) {
+        ALOGE("ERR(%s):jpegEnc.setColorFormat() fail", __func__);
+        goto jpeg_encode_done;
+    }
+    ALOGV("%s : color = %d\n", __FUNCTION__, rect->colorFormat);
+
+    if (jpegEnc.setJpegFormat(V4L2_PIX_FMT_JPEG_422)) {
+        ALOGE("ERR(%s):jpegEnc.setJpegFormat() fail", __func__);
+        goto jpeg_encode_done;
+    }
+#if 0
+    if (m_curCameraInfo->thumbnailW != 0 && m_curCameraInfo->thumbnailH != 0) {
+        int thumbW = 0, thumbH = 0;
+        mExifInfo.enableThumb = true;
+        if (rect->w < 320 || rect->h < 240) {
+            thumbW = 160;
+            thumbH = 120;
+        } else {
+            thumbW = m_curCameraInfo->thumbnailW;
+            thumbH = m_curCameraInfo->thumbnailH;
+        }
+        if (jpegEnc.setThumbnailSize(thumbW, thumbH)) {
+            ALOGE("ERR(%s):jpegEnc.setThumbnailSize(%d, %d) fail", __func__, thumbW, thumbH);
+            goto jpeg_encode_done;
+        }
+
+        if (0 < m_jpegThumbnailQuality && m_jpegThumbnailQuality <= 100) {
+            if (jpegEnc.setThumbnailQuality(m_jpegThumbnailQuality)) {
+                ALOGE("ERR(%s):jpegEnc.setThumbnailQuality(%d) fail", __func__, m_jpegThumbnailQuality);
+                goto jpeg_encode_done;
+            }
+        }
+
+        m_setExifChangedAttribute(&mExifInfo, rect);
+    } else
+#endif
+    {
+        mExifInfo.enableThumb = false;
+    }
+    ALOGV("DEBUG(%s):calling jpegEnc.setInBuf() yuvSize(%d)", __func__, *yuvSize);
+    for (int i=0 ; i < 3 ; i++)
+            ALOGV("DEBUG(%s):calling jpegEnc.setInBuf() fd.extFd[%d]=%d extS[%d]=%d",
+                __FUNCTION__, i, yuvBuf->fd.extFd[i], i, yuvBuf->size.extS[i]);
+    if (jpegEnc.setInBuf((int *)&(yuvBuf->fd.fd), (int *)yuvSize)) {
+        ALOGE("ERR(%s):jpegEnc.setInBuf() fail", __func__);
+        goto jpeg_encode_done;
+    }
+
+    if (jpegEnc.setOutBuf(jpegBuf->fd.fd, jpegBuf->size.extS[0] + jpegBuf->size.extS[1] + jpegBuf->size.extS[2])) {
+        ALOGE("ERR(%s):jpegEnc.setOutBuf() fail", __func__);
+        goto jpeg_encode_done;
+    }
+    for (int i=0 ; i < 3 ; i++)
+        ALOGV("DEBUG(%s): jpegBuf->fd.extFd[%d]=%d   jpegBuf->size.extS[%d]=%d",
+                __func__, i, jpegBuf->fd.extFd[i], i, jpegBuf->size.extS[i]);
+    memset(jpegBuf->virt.p,0,jpegBuf->size.extS[0] + jpegBuf->size.extS[1] + jpegBuf->size.extS[2]);
+
+    if (jpegEnc.updateConfig()) {
+        ALOGE("ERR(%s):jpegEnc.updateConfig() fail", __func__);
+        goto jpeg_encode_done;
+    }
+
+    if ((res = jpegEnc.encode((int *)&jpegBuf->size.s, NULL)) != 0) {
+        ALOGE("ERR(%s):jpegEnc.encode() fail ret(%d)", __func__, res);
+        goto jpeg_encode_done;
+    }
+
+    ret = true;
+
+jpeg_encode_done:
+
+    if (jpegEnc.flagCreate() == true)
+        jpegEnc.destroy();
+
+    return ret;
+}
+
+
+ExynosCameraHWInterface2::MainThread::~MainThread()
+{
+    ALOGV("DEBUG(%s):", __func__);
+}
+
+void ExynosCameraHWInterface2::MainThread::release()
+{
+    ALOGV("DEBUG(%s):", __func__);
+
+    SetSignal(SIGNAL_THREAD_RELEASE);
+
+    // TODO : return synchronously (after releasing asynchronously)
+    usleep(400000);
+    //while (m_index != 255)  // temp.. To make smarter..
+    //    usleep(200000);
+    SetSignal(SIGNAL_THREAD_TERMINATE);
+    ALOGV("DEBUG(%s): DONE", __func__);
+}
+
+ExynosCameraHWInterface2::SensorThread::~SensorThread()
+{
+    ALOGV("DEBUG(%s):", __FUNCTION__);
+}
+
+void ExynosCameraHWInterface2::SensorThread::release()
+{
+    ALOGV("DEBUG(%s):", __func__);
+
+    SetSignal(SIGNAL_THREAD_RELEASE);
+
+    // TODO : return synchronously (after releasing asynchronously)
+    usleep(400000);
+    //while (m_index != 255)  // temp.. To make smarter..
+    //    usleep(200000);
+    SetSignal(SIGNAL_THREAD_TERMINATE);
+    ALOGV("DEBUG(%s): DONE", __func__);
+}
+
+ExynosCameraHWInterface2::IspThread::~IspThread()
+{
+    ALOGV("DEBUG(%s):", __FUNCTION__);
+}
+
+void ExynosCameraHWInterface2::IspThread::release()
+{
+    ALOGV("DEBUG(%s):", __func__);
+
+    SetSignal(SIGNAL_THREAD_RELEASE);
+
+    // TODO : return synchronously (after releasing asynchronously)
+    usleep(400000);
+    //while (m_index != 255)  // temp.. To make smarter..
+    //    usleep(200000);
+    SetSignal(SIGNAL_THREAD_TERMINATE);
+    ALOGV("DEBUG(%s): DONE", __func__);
+}
+
+ExynosCameraHWInterface2::StreamThread::~StreamThread()
+{
+    ALOGV("DEBUG(%s):", __FUNCTION__);
+}
+
+void ExynosCameraHWInterface2::StreamThread::setParameter(stream_parameters_t * new_parameters)
+{
+    ALOGV("DEBUG(%s):", __FUNCTION__);
+
+    m_tempParameters = new_parameters;
+
     SetSignal(SIGNAL_STREAM_CHANGE_PARAMETER);
+
+    // TODO : return synchronously (after setting parameters asynchronously)
+    usleep(50000);
+}
+
+void ExynosCameraHWInterface2::StreamThread::applyChange()
+{
+    memcpy(&m_parameters, m_tempParameters, sizeof(stream_parameters_t));
+
+    ALOGD("DEBUG(%s):  Applying Stream paremeters  width(%d), height(%d)",
+            __FUNCTION__, m_parameters.outputWidth, m_parameters.outputHeight);
 }
 
-void ExynosCameraHWInterface2::StreamThread::ApplyChange()
+void ExynosCameraHWInterface2::StreamThread::release()
 {
     ALOGV("DEBUG(%s):", __func__);
-    memcpy(&m_parameters, &m_tempParameters, sizeof(stream_parameters_t));
+
+    SetSignal(SIGNAL_THREAD_RELEASE);
+
+    // TODO : return synchronously (after releasing asynchronously)
+    usleep(200000);
+    while (m_index != 255)  // temp.. To make smarter..
+        usleep(200000);
+    ALOGV("DEBUG(%s): DONE", __func__);
+}
+
+int ExynosCameraHWInterface2::StreamThread::findBufferIndex(void * bufAddr)
+{
+    int index;
+    for (index = 0 ; index < m_parameters.numSvcBuffers ; index++) {
+        if (m_parameters.svcBuffers[index].virt.extP[0] == bufAddr)
+            return index;
+    }
+    return -1;
 }
 
 int ExynosCameraHWInterface2::createIonClient(ion_client ionClient)
@@ -1080,7 +2782,7 @@ int ExynosCameraHWInterface2::createIonClient(ion_client ionClient)
     if (ionClient == 0) {
         ionClient = ion_client_create();
         if (ionClient < 0) {
-            ALOGE("[%s]src ion client create failed, value = %d\n", __func__, ionClient);
+            ALOGE("[%s]src ion client create failed, value = %d\n", __FUNCTION__, ionClient);
             return 0;
         }
     }
@@ -1100,77 +2802,80 @@ int ExynosCameraHWInterface2::deleteIonClient(ion_client ionClient)
     return ionClient;
 }
 
-int ExynosCameraHWInterface2::allocCameraMemory(ion_client ionClient, exynos_camera_memory_t *buf, int iMemoryNum)
+int ExynosCameraHWInterface2::allocCameraMemory(ion_client ionClient, ExynosBuffer *buf, int iMemoryNum)
 {
     int ret = 0;
     int i = 0;
 
     if (ionClient == 0) {
-        ALOGE("[%s] ionClient is zero (%d)\n", __func__, ionClient);
+        ALOGE("[%s] ionClient is zero (%d)\n", __FUNCTION__, ionClient);
         return -1;
     }
 
     for (i=0;i<iMemoryNum;i++) {
-        if (buf->size[i] == 0) {
+        if (buf->size.extS[i] == 0) {
             break;
         }
 
-        buf->ionBuffer[i] = ion_alloc(ionClient, \
-                                      buf->size[i], 0, ION_HEAP_EXYNOS_MASK,0);
-        if ((buf->ionBuffer[i] == -1) ||(buf->ionBuffer[i] == 0)) {
-            ALOGE("[%s]ion_alloc(%d) failed\n", __func__, buf->size[i]);
-            buf->ionBuffer[i] = -1;
+        buf->fd.extFd[i] = ion_alloc(ionClient, \
+                                      buf->size.extS[i], 0, ION_HEAP_EXYNOS_MASK,0);
+        if ((buf->fd.extFd[i] == -1) ||(buf->fd.extFd[i] == 0)) {
+            ALOGE("[%s]ion_alloc(%d) failed\n", __FUNCTION__, buf->size.extS[i]);
+            buf->fd.extFd[i] = -1;
             freeCameraMemory(buf, iMemoryNum);
             return -1;
         }
 
-        buf->virBuffer[i] = (char *)ion_map(buf->ionBuffer[i], \
-                                        buf->size[i], 0);
-        if ((buf->virBuffer[i] == (char *)MAP_FAILED) || (buf->virBuffer[i] == NULL)) {
-            ALOGE("[%s]src ion map failed(%d)\n", __func__, buf->size[i]);
-            buf->virBuffer[i] = (char *)MAP_FAILED;
+        buf->virt.extP[i] = (char *)ion_map(buf->fd.extFd[i], \
+                                        buf->size.extS[i], 0);
+        if ((buf->virt.extP[i] == (char *)MAP_FAILED) || (buf->virt.extP[i] == NULL)) {
+            ALOGE("[%s]src ion map failed(%d)\n", __FUNCTION__, buf->size.extS[i]);
+            buf->virt.extP[i] = (char *)MAP_FAILED;
             freeCameraMemory(buf, iMemoryNum);
             return -1;
         }
-        ALOGV("allocCameraMem : [%d][0x%08x]", i, buf->virBuffer[i]);
+        ALOGV("allocCameraMem : [%d][0x%08x] size(%d)", i, (unsigned int)(buf->virt.extP[i]), buf->size.extS[i]);
     }
 
     return ret;
 }
 
-void ExynosCameraHWInterface2::freeCameraMemory(exynos_camera_memory_t *buf, int iMemoryNum)
+void ExynosCameraHWInterface2::freeCameraMemory(ExynosBuffer *buf, int iMemoryNum)
 {
+
     int i =0 ;
 
     for (i=0;i<iMemoryNum;i++) {
-        if (buf->ionBuffer[i] != -1) {
-            if (buf->virBuffer[i] != (char *)MAP_FAILED) {
-                ion_unmap(buf->virBuffer[i], buf->size[i]);
+        if (buf->fd.extFd[i] != -1) {
+            if (buf->virt.extP[i] != (char *)MAP_FAILED) {
+                ion_unmap(buf->virt.extP[i], buf->size.extS[i]);
             }
-            ion_free(buf->ionBuffer[i]);
+            ion_free(buf->fd.extFd[i]);
         }
-        buf->ionBuffer[i] = -1;
-        buf->virBuffer[i] = (char *)MAP_FAILED;
-        buf->size[i] = 0;
+        buf->fd.extFd[i] = -1;
+        buf->virt.extP[i] = (char *)MAP_FAILED;
+        buf->size.extS[i] = 0;
     }
 }
 
-void ExynosCameraHWInterface2::initCameraMemory(exynos_camera_memory_t *buf, int iMemoryNum)
+void ExynosCameraHWInterface2::initCameraMemory(ExynosBuffer *buf, int iMemoryNum)
 {
     int i =0 ;
     for (i=0;i<iMemoryNum;i++) {
-        buf->virBuffer[i] = (char *)MAP_FAILED;
-        buf->ionBuffer[i] = -1;
-        buf->size[i] = 0;
+        buf->virt.extP[i] = (char *)MAP_FAILED;
+        buf->fd.extFd[i] = -1;
+        buf->size.extS[i] = 0;
     }
 }
 
 
+
+
 static camera2_device_t *g_cam2_device;
 
 static int HAL2_camera_device_close(struct hw_device_t* device)
 {
-    ALOGV("DEBUG(%s):", __func__);
+    ALOGV("DEBUG(%s):", __FUNCTION__);
     if (device) {
         camera2_device_t *cam_device = (camera2_device_t *)device;
         delete static_cast<ExynosCameraHWInterface2 *>(cam_device->priv);
@@ -1188,39 +2893,39 @@ static inline ExynosCameraHWInterface2 *obj(const struct camera2_device *dev)
 static int HAL2_device_set_request_queue_src_ops(const struct camera2_device *dev,
             const camera2_request_queue_src_ops_t *request_src_ops)
 {
-    ALOGV("DEBUG(%s):", __func__);
+    ALOGV("DEBUG(%s):", __FUNCTION__);
     return obj(dev)->setRequestQueueSrcOps(request_src_ops);
 }
 
 static int HAL2_device_notify_request_queue_not_empty(const struct camera2_device *dev)
 {
-    ALOGV("DEBUG(%s):", __func__);
+    ALOGV("DEBUG(%s):", __FUNCTION__);
     return obj(dev)->notifyRequestQueueNotEmpty();
 }
 
 static int HAL2_device_set_frame_queue_dst_ops(const struct camera2_device *dev,
             const camera2_frame_queue_dst_ops_t *frame_dst_ops)
 {
-    ALOGV("DEBUG(%s):", __func__);
+    ALOGV("DEBUG(%s):", __FUNCTION__);
     return obj(dev)->setFrameQueueDstOps(frame_dst_ops);
 }
 
 static int HAL2_device_get_in_progress_count(const struct camera2_device *dev)
 {
-    ALOGV("DEBUG(%s):", __func__);
+    ALOGV("DEBUG(%s):", __FUNCTION__);
     return obj(dev)->getInProgressCount();
 }
 
 static int HAL2_device_flush_captures_in_progress(const struct camera2_device *dev)
 {
-    ALOGV("DEBUG(%s):", __func__);
+    ALOGV("DEBUG(%s):", __FUNCTION__);
     return obj(dev)->flushCapturesInProgress();
 }
 
 static int HAL2_device_construct_default_request(const struct camera2_device *dev,
             int request_template, camera_metadata_t **request)
 {
-    ALOGV("DEBUG(%s):", __func__);
+    ALOGV("DEBUG(%s):", __FUNCTION__);
     return obj(dev)->constructDefaultRequest(request_template, request);
 }
 
@@ -1237,7 +2942,7 @@ static int HAL2_device_allocate_stream(
             uint32_t *usage,
             uint32_t *max_buffers)
 {
-    ALOGV("DEBUG(%s):", __func__);
+    ALOGV("DEBUG(%s):", __FUNCTION__);
     return obj(dev)->allocateStream(width, height, format, stream_ops,
                                     stream_id, format_actual, usage, max_buffers);
 }
@@ -1248,7 +2953,7 @@ static int HAL2_device_register_stream_buffers(const struct camera2_device *dev,
             int num_buffers,
             buffer_handle_t *buffers)
 {
-    ALOGV("DEBUG(%s):", __func__);
+    ALOGV("DEBUG(%s):", __FUNCTION__);
     return obj(dev)->registerStreamBuffers(stream_id, num_buffers, buffers);
 }
 
@@ -1256,7 +2961,7 @@ static int HAL2_device_release_stream(
         const struct camera2_device *dev,
             uint32_t stream_id)
 {
-    ALOGV("DEBUG(%s):", __func__);
+    ALOGV("DEBUG(%s):", __FUNCTION__);
     return obj(dev)->releaseStream(stream_id);
 }
 
@@ -1271,7 +2976,7 @@ static int HAL2_device_allocate_reprocess_stream(
             uint32_t *consumer_usage,
             uint32_t *max_buffers)
 {
-    ALOGV("DEBUG(%s):", __func__);
+    ALOGV("DEBUG(%s):", __FUNCTION__);
     return obj(dev)->allocateReprocessStream(width, height, format, reprocess_stream_ops,
                                     stream_id, consumer_usage, max_buffers);
 }
@@ -1280,7 +2985,7 @@ static int HAL2_device_release_reprocess_stream(
         const struct camera2_device *dev,
             uint32_t stream_id)
 {
-    ALOGV("DEBUG(%s):", __func__);
+    ALOGV("DEBUG(%s):", __FUNCTION__);
     return obj(dev)->releaseReprocessStream(stream_id);
 }
 
@@ -1289,7 +2994,7 @@ static int HAL2_device_trigger_action(const struct camera2_device *dev,
             int ext1,
             int ext2)
 {
-    ALOGV("DEBUG(%s):", __func__);
+    ALOGV("DEBUG(%s):", __FUNCTION__);
     return obj(dev)->triggerAction(trigger_id, ext1, ext2);
 }
 
@@ -1297,20 +3002,20 @@ static int HAL2_device_set_notify_callback(const struct camera2_device *dev,
             camera2_notify_callback notify_cb,
             void *user)
 {
-    ALOGV("DEBUG(%s):", __func__);
+    ALOGV("DEBUG(%s):", __FUNCTION__);
     return obj(dev)->setNotifyCallback(notify_cb, user);
 }
 
 static int HAL2_device_get_metadata_vendor_tag_ops(const struct camera2_device*dev,
             vendor_tag_query_ops_t **ops)
 {
-    ALOGV("DEBUG(%s):", __func__);
+    ALOGV("DEBUG(%s):", __FUNCTION__);
     return obj(dev)->getMetadataVendorTagOps(ops);
 }
 
 static int HAL2_device_dump(const struct camera2_device *dev, int fd)
 {
-    ALOGV("DEBUG(%s):", __func__);
+    ALOGV("DEBUG(%s):", __FUNCTION__);
     return obj(dev)->dump(fd);
 }
 
@@ -1320,23 +3025,17 @@ static int HAL2_device_dump(const struct camera2_device *dev, int fd)
 
 static int HAL2_getNumberOfCameras()
 {
-    ALOGV("DEBUG(%s):", __func__);
+    ALOGV("DEBUG(%s):", __FUNCTION__);
     return 1;
 }
 
 
-// temporarily copied from EmulatedFakeCamera2
-// TODO : implement our own codes
-status_t constructStaticInfo(
-        camera_metadata_t **info,
-        bool sizeRequest);
-
 static int HAL2_getCameraInfo(int cameraId, struct camera_info *info)
 {
-    ALOGV("DEBUG(%s):", __func__);
+    ALOGV("DEBUG(%s):", __FUNCTION__);
     static camera_metadata_t * mCameraInfo = NULL;
     status_t res;
-    
+
     info->facing = CAMERA_FACING_BACK;
     info->orientation = 0;
     info->device_version = HARDWARE_DEVICE_API_VERSION(2, 0);
@@ -1344,18 +3043,18 @@ static int HAL2_getCameraInfo(int cameraId, struct camera_info *info)
         res = constructStaticInfo(&mCameraInfo, true);
         if (res != OK) {
             ALOGE("%s: Unable to allocate static info: %s (%d)",
-                    __func__, strerror(-res), res);
+                    __FUNCTION__, strerror(-res), res);
             return res;
         }
         res = constructStaticInfo(&mCameraInfo, false);
         if (res != OK) {
             ALOGE("%s: Unable to fill in static info: %s (%d)",
-                    __func__, strerror(-res), res);
+                    __FUNCTION__, strerror(-res), res);
             return res;
         }
     }
     info->static_camera_characteristics = mCameraInfo;
-    return NO_ERROR;    
+    return NO_ERROR;
 }
 
 #define SET_METHOD(m) m : HAL2_device_##m
@@ -1385,21 +3084,21 @@ static int HAL2_camera_device_open(const struct hw_module_t* module,
                                   const char *id,
                                   struct hw_device_t** device)
 {
-    ALOGD(">>> I'm Samsung's CameraHAL_2 <<<");
+    ALOGE(">>> I'm Samsung's CameraHAL_2 <<<");
 
     int cameraId = atoi(id);
     if (cameraId < 0 || cameraId >= HAL2_getNumberOfCameras()) {
-        ALOGE("ERR(%s):Invalid camera ID %s", __func__, id);
+        ALOGE("ERR(%s):Invalid camera ID %s", __FUNCTION__, id);
         return -EINVAL;
     }
 
     if (g_cam2_device) {
         if (obj(g_cam2_device)->getCameraId() == cameraId) {
-            ALOGV("DEBUG(%s):returning existing camera ID %s", __func__, id);
+            ALOGV("DEBUG(%s):returning existing camera ID %s", __FUNCTION__, id);
             goto done;
         } else {
             ALOGE("ERR(%s):Cannot open camera %d. camera %d is already running!",
-                    __func__, cameraId, obj(g_cam2_device)->getCameraId());
+                    __FUNCTION__, cameraId, obj(g_cam2_device)->getCameraId());
             return -ENOSYS;
         }
     }
@@ -1415,13 +3114,13 @@ static int HAL2_camera_device_open(const struct hw_module_t* module,
 
     g_cam2_device->ops = &camera2_device_ops;
 
-    ALOGV("DEBUG(%s):open camera2 %s", __func__, id);
+    ALOGV("DEBUG(%s):open camera2 %s", __FUNCTION__, id);
 
     g_cam2_device->priv = new ExynosCameraHWInterface2(cameraId, g_cam2_device);
 
 done:
     *device = (hw_device_t *)g_cam2_device;
-    ALOGV("DEBUG(%s):opened camera2 %s (%p)", __func__, id, *device);
+    ALOGV("DEBUG(%s):opened camera2 %s (%p)", __FUNCTION__, id, *device);
 
     return 0;
 }
index 0e7097515adece1def30a55ea95e4400848ef789..8a9c5a3ea250013073d6b8fc2f235307f36d49c4 100644 (file)
  * \file      ExynosCameraHWInterface2.h
  * \brief     header file for Android Camera API 2.0 HAL
  * \author    Sungjoong Kang(sj3.kang@samsung.com)
- * \date      2012/05/31
+ * \date      2012/07/10
  *
  * <b>Revision History: </b>
  * - 2012/05/31 : Sungjoong Kang(sj3.kang@samsung.com) \n
  *   Initial Release
+ *
+ * - 2012/07/10 : Sungjoong Kang(sj3.kang@samsung.com) \n
+ *   2nd Release
+ *
  */
+
 #ifndef EXYNOS_CAMERA_HW_INTERFACE_2_H
 #define EXYNOS_CAMERA_HW_INTERFACE_2_H
 
 #include "SignalDrivenThread.h"
 #include "MetadataConverter.h"
 #include "exynos_v4l2.h"
+#include "ExynosRect.h"
+#include "ExynosBuffer.h"
 #include "videodev2_exynos_camera.h"
 #include "gralloc_priv.h"
-
+#include "ExynosJpegEncoderForCamera.h"
 #include <fcntl.h>
 #include "fimc-is-metadata.h"
 #include "ion.h"
+#include "ExynosExif.h"
+#include "csc.h"
 
 namespace android {
 
@@ -49,24 +57,31 @@ namespace android {
 #define NODE_PREFIX     "/dev/video"
 
 #define NUM_MAX_STREAM_THREAD       (5)
-#define NUM_MAX_DEQUEUED_REQUEST    (4)
-#define NUM_MAX_REQUEST_MGR_ENTRY   NUM_MAX_DEQUEUED_REQUEST
-#define NUM_OF_STREAM_BUF           (15)
+#define NUM_MAX_DEQUEUED_REQUEST    (8)
+/* #define NUM_MAX_REQUEST_MGR_ENTRY   NUM_MAX_DEQUEUED_REQUEST */
+#define NUM_MAX_REQUEST_MGR_ENTRY   (10)
+/* #define NUM_OF_STREAM_BUF           (15) */
 #define MAX_CAMERA_MEMORY_PLANE_NUM    (4)
+#define NUM_MAX_CAMERA_BUFFERS      (16)
+#define NUM_BAYER_BUFFERS           (8)
+#define SHOT_FRAME_DELAY            (3)
+
+#define PICTURE_GSC_NODE_NUM (2)
 
-#define SIGNAL_MAIN_REQ_Q_NOT_EMPTY             (SIGNAL_THREAD_COMMON_LAST<<1) 
-#define SIGNAL_MAIN_REPROCESS_Q_NOT_EMPTY       (SIGNAL_THREAD_COMMON_LAST<<2) 
-#define SIGNAL_MAIN_STREAM_OUTPUT_DONE          (SIGNAL_THREAD_COMMON_LAST<<3) 
-#define SIGNAL_SENSOR_START_REQ_PROCESSING      (SIGNAL_THREAD_COMMON_LAST<<4) 
+#define SIGNAL_MAIN_REQ_Q_NOT_EMPTY             (SIGNAL_THREAD_COMMON_LAST<<1)
+#define SIGNAL_MAIN_REPROCESS_Q_NOT_EMPTY       (SIGNAL_THREAD_COMMON_LAST<<2)
+#define SIGNAL_MAIN_STREAM_OUTPUT_DONE          (SIGNAL_THREAD_COMMON_LAST<<3)
+#define SIGNAL_SENSOR_START_REQ_PROCESSING      (SIGNAL_THREAD_COMMON_LAST<<4)
 #define SIGNAL_STREAM_GET_BUFFER                (SIGNAL_THREAD_COMMON_LAST<<5)
 #define SIGNAL_STREAM_PUT_BUFFER                (SIGNAL_THREAD_COMMON_LAST<<6)
 #define SIGNAL_STREAM_CHANGE_PARAMETER          (SIGNAL_THREAD_COMMON_LAST<<7)
-
+#define SIGNAL_THREAD_RELEASE                   (SIGNAL_THREAD_COMMON_LAST<<8)
+#define SIGNAL_ISP_START_BAYER_INPUT            (SIGNAL_THREAD_COMMON_LAST<<9)
 
 #define SIGNAL_STREAM_DATA_COMING               (SIGNAL_THREAD_COMMON_LAST<<15)
 
 
-#define MAX_NUM_CAMERA_BUFFERS (16)
+
 enum sensor_name {
     SENSOR_NAME_S5K3H2  = 1,
     SENSOR_NAME_S5K6A3  = 2,
@@ -76,11 +91,13 @@ enum sensor_name {
     SENSOR_NAME_END
 };
 
+/*
 typedef struct exynos_camera_memory {
        ion_buffer ionBuffer[MAX_CAMERA_MEMORY_PLANE_NUM];
        char *virBuffer[MAX_CAMERA_MEMORY_PLANE_NUM];
        int size[MAX_CAMERA_MEMORY_PLANE_NUM];
 } exynos_camera_memory_t;
+*/
 
 typedef struct node_info {
     int fd;
@@ -89,42 +106,43 @@ typedef struct node_info {
     int format;
     int planes;
     int buffers;
-    int currentBufferIndex;
+    //int currentBufferIndex;
     enum v4l2_memory memory;
     enum v4l2_buf_type type;
        ion_client ionClient;
-       exynos_camera_memory_t  buffer[MAX_NUM_CAMERA_BUFFERS];
+       ExynosBuffer    buffer[NUM_MAX_CAMERA_BUFFERS];
 } node_info_t;
 
 
 typedef struct camera_hw_info {
     int sensor_id;
+    //int sensor_frame_count; // includes bubble
 
     node_info_t sensor;
-    node_info_t isp;   
-    node_info_t capture;   
-    node_info_t preview; 
-    
+    node_info_t isp;
+    node_info_t capture;
+
+    /*shot*/  // temp
+    struct camera2_shot_ext dummy_shot;
 
-    /*shot*/
-    camera2_shot_t current_shot;
 } camera_hw_info_t;
 
-typedef enum request_entry_status
-{
+typedef enum request_entry_status {
     EMPTY,
     REGISTERED,
     PROCESSING
 } request_entry_status_t;
 
 typedef struct request_manager_entry {
-    request_entry_status_t  status;
-    int                     id;
-    camera_metadata_t       *original_request;
+    request_entry_status_t      status;
+    //int                         id;
+    camera_metadata_t           *original_request;
     // TODO : allocate memory dynamically
     // camera2_ctl_metadata_t  *internal_request;
-    camera2_ctl_metadata_NEW_t  internal_request;
-    int                     output_stream_count;
+    camera2_ctl_metadata_NEW_t  internal_shot;
+    int                         output_stream_count;
+    bool                         dynamic_meta_vaild;
+    //int                         request_serial_number;
 } request_manager_entry_t;
 
 class RequestManager {
@@ -133,14 +151,25 @@ public:
     ~RequestManager();
     int     GetNumEntries();
     bool    IsRequestQueueFull();
-    
-    void    RegisterRequest(camera_metadata_t * new_request);
-    void    DeregisterRequest(camera_metadata_t ** deregistered_request);
-    void    PrepareFrame(size_t* num_entries, size_t* frame_size, 
-                camera_metadata_t ** prepared_frame);
-    void    MarkProcessingRequest(exynos_camera_memory_t* buf);
-    void    NotifyStreamOutput(uint32_t stream_id);
-    
+
+    void    RegisterRequest(camera_metadata_t *new_request);
+    void    DeregisterRequest(camera_metadata_t **deregistered_request);
+    bool    PrepareFrame(size_t *num_entries, size_t *frame_size,
+                camera_metadata_t **prepared_frame);
+    //void    MarkProcessingRequest(exynos_camera_memory_t* buf);
+    //void    MarkProcessingRequest(ExynosBuffer* buf);
+    int   MarkProcessingRequest(ExynosBuffer *buf);
+    //void    NotifyStreamOutput(uint32_t stream_id, int isp_processing_index);
+    //void      NotifyStreamOutput(ExynosBuffer* buf, uint32_t stream_id);
+    void      NotifyStreamOutput(int index, int stream_id);
+    //int     FindEntryIndexByRequestSerialNumber(int serial_num);
+    void    DumpInfoWithIndex(int index);
+    void    ApplyDynamicMetadata(int index);
+    void    CheckCompleted(int index);
+    void    UpdateOutputStreamInfo(struct camera2_shot_ext *shot_ext, int index);
+    void    RegisterTimestamp(int index, nsecs_t *frameTime);
+    uint64_t  GetTimestamp(int index);
+    void  Dump(void);
 private:
 
     MetadataConverter               *m_metadataConverter;
@@ -150,39 +179,66 @@ private:
     int                             m_entryProcessingIndex;
     int                             m_entryFrameOutputIndex;
     request_manager_entry_t         entries[NUM_MAX_REQUEST_MGR_ENTRY];
+    int                             m_completedIndex;
 
     Mutex                           m_requestMutex;
 
     //TODO : alloc dynamically
     char                            m_tempFrameMetadataBuf[2000];
     camera_metadata_t               *m_tempFrameMetadata;
-    int32_t             frame_seq_number;
+    //int32_t                         m_request_serial_number;
 };
 
-typedef struct stream_parameters
-{
-            uint32_t                id;
-            uint32_t                width;
-            uint32_t                height;
-            int                     format;            
+#define NOT_AVAILABLE           (0)
+#define REQUIRES_DQ_FROM_SVC    (1)
+#define ON_DRIVER               (2)
+#define ON_HAL                  (3)
+#define ON_SERVICE              (4)
+
+#define BAYER_NOT_AVAILABLE     (0)
+#define BAYER_ON_SENSOR         (1)
+#define BAYER_ON_HAL_FILLED     (2)
+#define BAYER_ON_ISP            (3)
+#define BAYER_ON_SERVICE        (4)
+#define BAYER_ON_HAL_EMPTY      (5)
+
+typedef struct stream_parameters {
+            int                     streamType;
+            uint32_t                outputWidth;
+            uint32_t                outputHeight;
+            uint32_t                nodeWidth;
+            uint32_t                nodeHeight;
+            int                     outputFormat;
+            int                     nodeFormat;
     const   camera2_stream_ops_t*   streamOps;
             uint32_t                usage;
-            uint32_t                max_buffers;
+            int                     numHwBuffers;
+            int                     numSvcBuffers;
             int                     fd;
-            void                    *grallocVirtAddr[NUM_OF_STREAM_BUF];
-            bool                    availableBufHandle[NUM_OF_STREAM_BUF];
-            buffer_handle_t         *bufHandle[NUM_OF_STREAM_BUF];
-            node_info_t             *node;
+            int                     svcPlanes;
+            int                     nodePlanes;
+    enum v4l2_memory                memory;
+    enum v4l2_buf_type              halBuftype;
+
+            buffer_handle_t         svcBufHandle[NUM_MAX_CAMERA_BUFFERS];
+            ExynosBuffer            svcBuffers[NUM_MAX_CAMERA_BUFFERS];
+            int                     svcBufStatus[NUM_MAX_CAMERA_BUFFERS];
+
+            //buffer_handle_t         halBufHandle[NUM_MAX_CAMERA_BUFFERS];
+            //ExynosBuffer            halBuffers[NUM_MAX_CAMERA_BUFFERS];
+            //int                     halBufStatus[NUM_MAX_CAMERA_BUFFERS];
+               ion_client              ionClient;
+            node_info_t             node;
 } stream_parameters_t;
 
-            
+
 class ExynosCameraHWInterface2 : public virtual RefBase {
 public:
     ExynosCameraHWInterface2(int cameraId, camera2_device_t *dev);
     virtual             ~ExynosCameraHWInterface2();
 
     virtual void        release();
-    
+
     inline  int         getCameraId() const;
 
     virtual int         setRequestQueueSrcOps(const camera2_request_queue_src_ops_t *request_src_ops);
@@ -191,7 +247,7 @@ public:
     virtual int         getInProgressCount();
     virtual int         flushCapturesInProgress();
     virtual int         constructDefaultRequest(int request_template, camera_metadata_t **request);
-    virtual int         allocateStream(uint32_t width, uint32_t height, 
+    virtual int         allocateStream(uint32_t width, uint32_t height,
                                     int format, const camera2_stream_ops_t *stream_ops,
                                     uint32_t *stream_id, uint32_t *format_actual, uint32_t *usage, uint32_t *max_buffers);
     virtual int         registerStreamBuffers(uint32_t stream_id, int num_buffers, buffer_handle_t *buffers);
@@ -205,24 +261,44 @@ public:
     virtual int         getMetadataVendorTagOps(vendor_tag_query_ops_t **ops);
     virtual int         dump(int fd);
 private:
-
+// Worker thread for the HAL's main state machine.  Each signal delivered by
+// SignalDrivenThread is forwarded to ExynosCameraHWInterface2::m_mainThreadFunc().
+// NOTE(review): unlike the other thread wrappers below, this one calls the
+// default SignalDrivenThread() ctor and leaves the Start(...) call commented
+// out -- presumably the owner starts it explicitly; confirm against the .cpp.
+class MainThread : public SignalDrivenThread {
+        ExynosCameraHWInterface2 *mHardware;
+    public:
+        MainThread(ExynosCameraHWInterface2 *hw):
+            SignalDrivenThread(),
+            mHardware(hw) {
+//            Start("MainThread", PRIORITY_DEFAULT, 0);
+        }
+        ~MainThread();
+        // No per-thread setup needed before the loop runs.
+        status_t readyToRunInternal()
+       {
+            return NO_ERROR;
+        }
+        // Delegate every wakeup to the owning HAL object.
+        void threadFunctionInternal()
+       {
+            mHardware->m_mainThreadFunc(this);
+            return;
+        }
+        void        release(void);
+    };
+/*
     class MainThread : public SignalDrivenThread {
         ExynosCameraHWInterface2 *mHardware;
     public:
         MainThread(ExynosCameraHWInterface2 *hw):
             SignalDrivenThread("MainThread", PRIORITY_DEFAULT, 0),
             mHardware(hw) { }
-        virtual void onFirstRef() {
-        }
+        ~MainThread();
         status_t readyToRunInternal() {
             return NO_ERROR;
         }
-        void threadLoopInternal() {
+        void threadFunctionInternal() {
             mHardware->m_mainThreadFunc(this);
             return;
         }
+        void        release(void);
     };
-
+*/
     class SensorThread : public SignalDrivenThread {
         ExynosCameraHWInterface2 *mHardware;
     public:
@@ -230,20 +306,38 @@ private:
             SignalDrivenThread("SensorThread", PRIORITY_DEFAULT, 0),
             mHardware(hw),
             m_isBayerOutputEnabled(false) { }
-        virtual void onFirstRef() {
-            mHardware->m_sensorThreadInitialize(this);
-        }
+        ~SensorThread();
         status_t readyToRunInternal() {
-            mHardware->m_sensorThreadInitialize(this);            
+            mHardware->m_sensorThreadInitialize(this);
             return NO_ERROR;
         }
-        void threadLoopInternal() {
+        void threadFunctionInternal() {
             mHardware->m_sensorThreadFunc(this);
             return;
         }
+        void            release(void); 
     //private:
         bool            m_isBayerOutputEnabled;
         int             m_sensorFd;
+    };
+
+    // Worker thread driving the ISP: initialization and per-signal processing
+    // are both delegated to the owning ExynosCameraHWInterface2 instance.
+    class IspThread : public SignalDrivenThread {
+        ExynosCameraHWInterface2 *mHardware;
+    public:
+        IspThread(ExynosCameraHWInterface2 *hw):
+            SignalDrivenThread("IspThread", PRIORITY_DEFAULT, 0),
+            mHardware(hw) { }
+        ~IspThread();
+        // One-time ISP setup, run on this thread before the signal loop.
+        status_t readyToRunInternal() {
+            mHardware->m_ispThreadInitialize(this);
+            return NO_ERROR;
+        }
+        // Handle one ISP signal/iteration.
+        void threadFunctionInternal() {
+            mHardware->m_ispThreadFunc(this);
+            return;
+        }
+        void            release(void);
+    //private:
+        // V4L2 fd of the ISP node -- left public (commented private) so the
+        // owner can poke it; see the "//private:" marker above.
+        int             m_ispFd;
+    };
 
@@ -254,47 +348,84 @@ private:
             SignalDrivenThread("StreamThread", PRIORITY_DEFAULT, 0),
             mHardware(hw),
             m_index(new_index) { }
-        virtual void onFirstRef() {
-            mHardware->m_streamThreadInitialize(this);            
-        }
+        ~StreamThread();
         status_t readyToRunInternal() {
-            mHardware->m_streamThreadInitialize(this);             
+            mHardware->m_streamThreadInitialize(this);
             return NO_ERROR;
         }
-        void threadLoopInternal() {
+        void threadFunctionInternal() {
             mHardware->m_streamThreadFunc(this);
             return;
         }
-        void SetParameter(uint32_t id, uint32_t width, uint32_t height, int format,
-                    const camera2_stream_ops_t* stream_ops, uint32_t usage, int fd, node_info_t * node);
-        void ApplyChange(void);
+        void        setParameter(stream_parameters_t * new_parameters);
+        void        applyChange(void);
+        void        release(void);
+        int         findBufferIndex(void * bufAddr);
 
-                uint8_t                 m_index;
-    //private:                
-        stream_parameters_t             m_parameters;
-        stream_parameters_t             m_tempParameters; 
 
-    };    
+        uint8_t                         m_index;
+    //private:
+        stream_parameters_t             m_parameters;
+        stream_parameters_t             *m_tempParameters; 
+        bool                            m_isBufferInit;
+     };
 
     sp<MainThread>      m_mainThread;
     sp<SensorThread>    m_sensorThread;
-    sp<StreamThread>    m_streamThread;
-
-
-
+    sp<IspThread>       m_ispThread;
+    sp<StreamThread>    m_streamThreads[NUM_MAX_STREAM_THREAD];
+
+
+    int                 m_bayerBufStatus[NUM_BAYER_BUFFERS];
+    int                 m_bayerQueueList[NUM_BAYER_BUFFERS+SHOT_FRAME_DELAY];
+    int                 m_bayerQueueRequestList[NUM_BAYER_BUFFERS+SHOT_FRAME_DELAY];
+    int                 m_bayerDequeueList[NUM_BAYER_BUFFERS];
+    int                 m_numBayerQueueList;
+    int                 m_numBayerQueueListRemainder;
+    int                 m_numBayerDequeueList;
+
+    void                RegisterBayerQueueList(int bufIndex, int requestIndex);
+    void                DeregisterBayerQueueList(int bufIndex);
+    void                RegisterBayerDequeueList(int bufIndex);
+    int                 DeregisterBayerDequeueList(void);
+    int                 FindRequestEntryNumber(int bufIndex);
+    void                DumpFrameinfoWithBufIndex(int bufIndex);
+    
     RequestManager      *m_requestManager;
 
     void                m_mainThreadFunc(SignalDrivenThread * self);
     void                m_sensorThreadFunc(SignalDrivenThread * self);
-    void                m_sensorThreadInitialize(SignalDrivenThread * self);    
+    void                m_sensorThreadInitialize(SignalDrivenThread * self);
+    void                m_ispThreadFunc(SignalDrivenThread * self);
+    void                m_ispThreadInitialize(SignalDrivenThread * self);
     void                m_streamThreadFunc(SignalDrivenThread * self);
-    void                m_streamThreadInitialize(SignalDrivenThread * self);      
-
-       int                 createIonClient(ion_client ionClient);
-       int                 deleteIonClient(ion_client ionClient);
-    int                 allocCameraMemory(ion_client ionClient, exynos_camera_memory_t *buf, int iMemoryNum);
-       void                freeCameraMemory(exynos_camera_memory_t *buf, int iMemoryNum);
-       void                initCameraMemory(exynos_camera_memory_t *buf, int iMemoryNum);
+    void                m_streamThreadInitialize(SignalDrivenThread * self);
+
+    void                m_getAlignedYUVSize(int colorFormat, int w, int h,
+                                                ExynosBuffer *buf);
+    bool                m_getRatioSize(int  src_w,  int   src_h,
+                                             int  dst_w,  int   dst_h,
+                                             int *crop_x, int *crop_y,
+                                             int *crop_w, int *crop_h,
+                                             int zoom);
+       int                             createIonClient(ion_client ionClient);
+       int                                     deleteIonClient(ion_client ionClient);
+    //int                              allocCameraMemory(ion_client ionClient, exynos_camera_memory_t *buf, int iMemoryNum);
+       //void                          freeCameraMemory(exynos_camera_memory_t *buf, int iMemoryNum);
+       //void                          initCameraMemory(exynos_camera_memory_t *buf, int iMemoryNum);
+
+    int                                allocCameraMemory(ion_client ionClient, ExynosBuffer *buf, int iMemoryNum);
+       void                            freeCameraMemory(ExynosBuffer *buf, int iMemoryNum);
+       void                            initCameraMemory(ExynosBuffer *buf, int iMemoryNum);
+
+    void            DumpInfoWithShot(struct camera2_shot_ext * shot_ext);
+    bool            yuv2Jpeg(ExynosBuffer *yuvBuf,
+                            ExynosBuffer *jpegBuf,
+                            ExynosRect *rect);
+    exif_attribute_t    mExifInfo;
+    void               *m_exynosPictureCSC;
+
+    int                 m_jpegEncodingRequestIndex;
 
     camera2_request_queue_src_ops_t     *m_requestQueueOps;
     camera2_frame_queue_dst_ops_t       *m_frameQueueOps;
@@ -303,19 +434,38 @@ private:
 
     int                                 m_numOfRemainingReqInSvc;
     bool                                m_isRequestQueuePending;
+    bool                                m_isRequestQueueNull;
+    camera2_device_t                    *m_halDevice;
+    static gralloc_module_t const*      m_grallocHal;
 
-    camera2_device_t *m_halDevice;
-    static gralloc_module_t const* m_grallocHal;
 
-
-    camera_hw_info_t m_camera_info;
+    camera_hw_info_t                     m_camera_info;
 
        ion_client m_ionCameraClient;
 
-    bool        m_isSensorThreadOn;
-    bool        m_isStreamStarted;
-    int         matchBuffer(void * bufAddr);
-    bool        m_isBufferInit;
+    bool                                m_isSensorThreadOn;
+    bool                                m_isSensorStarted;
+
+
+
+    bool                                m_initFlag1;
+    bool                                m_initFlag2;
+    int                                 m_ispInputIndex;
+    int                                 m_ispProcessingIndex;
+    int                                 m_ispThreadProcessingReq;
+    int                                 m_processingRequest;
+
+    int                                 m_numExpRemainingOutScp;
+    int                                 m_numExpRemainingOutScc;
+
+    int                                 indexToQueue[3+1];
+    int                                 m_fd_scp;
+
+    bool                                m_scp_flushing;
+    bool                                m_closing;
+    ExynosBuffer                        m_resizeBuf;
+    int                                 m_svcBufIndex;
+    nsecs_t                             m_lastTimeStamp;
 };
 
 }; // namespace android
diff --git a/libcamera2/ExynosExif.h b/libcamera2/ExynosExif.h
new file mode 100644 (file)
index 0000000..5437b68
--- /dev/null
@@ -0,0 +1,231 @@
+/*
+ * Copyright Samsung Electronics Co.,LTD.
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+#ifndef EXYNOS_EXIF_H_
+#define EXYNOS_EXIF_H_
+
+#include <math.h>
+
+#define EXIF_LOG2(x)                    (log((double)(x)) / log(2.0))
+#define APEX_FNUM_TO_APERTURE(x)        ((int)(EXIF_LOG2((double)(x)) * 2 + 0.5))
+#define APEX_EXPOSURE_TO_SHUTTER(x)     ((x) >= 1 ?                                 \
+                                        (int)(-(EXIF_LOG2((double)(x)) + 0.5)) :    \
+                                        (int)(-(EXIF_LOG2((double)(x)) - 0.5)))
+#define APEX_ISO_TO_FILMSENSITIVITY(x)  ((int)(EXIF_LOG2((x) / 3.125) + 0.5))
+
+#define NUM_SIZE                    2
+#define IFD_SIZE                    12
+#define OFFSET_SIZE                 4
+
+#define NUM_0TH_IFD_TIFF            10
+#define NUM_0TH_IFD_EXIF            22
+#define NUM_0TH_IFD_GPS             10
+#define NUM_1TH_IFD_TIFF            9
+
+/* Type */
+#define EXIF_TYPE_BYTE              1
+#define EXIF_TYPE_ASCII             2
+#define EXIF_TYPE_SHORT             3
+#define EXIF_TYPE_LONG              4
+#define EXIF_TYPE_RATIONAL          5
+#define EXIF_TYPE_UNDEFINED         7
+#define EXIF_TYPE_SLONG             9
+#define EXIF_TYPE_SRATIONAL         10
+
+#define EXIF_FILE_SIZE              28800
+
+/* 0th IFD TIFF Tags */
+#define EXIF_TAG_IMAGE_WIDTH                    0x0100
+#define EXIF_TAG_IMAGE_HEIGHT                   0x0101
+#define EXIF_TAG_MAKE                           0x010f
+#define EXIF_TAG_MODEL                          0x0110
+#define EXIF_TAG_ORIENTATION                    0x0112
+#define EXIF_TAG_SOFTWARE                       0x0131
+#define EXIF_TAG_DATE_TIME                      0x0132
+#define EXIF_TAG_YCBCR_POSITIONING              0x0213
+#define EXIF_TAG_EXIF_IFD_POINTER               0x8769
+#define EXIF_TAG_GPS_IFD_POINTER                0x8825
+
+/* 0th IFD Exif Private Tags */
+#define EXIF_TAG_EXPOSURE_TIME                  0x829A
+#define EXIF_TAG_FNUMBER                        0x829D
+#define EXIF_TAG_EXPOSURE_PROGRAM               0x8822
+#define EXIF_TAG_ISO_SPEED_RATING               0x8827
+#define EXIF_TAG_EXIF_VERSION                   0x9000
+#define EXIF_TAG_DATE_TIME_ORG                  0x9003
+#define EXIF_TAG_DATE_TIME_DIGITIZE             0x9004
+#define EXIF_TAG_SHUTTER_SPEED                  0x9201
+#define EXIF_TAG_APERTURE                       0x9202
+#define EXIF_TAG_BRIGHTNESS                     0x9203
+#define EXIF_TAG_EXPOSURE_BIAS                  0x9204
+#define EXIF_TAG_MAX_APERTURE                   0x9205
+#define EXIF_TAG_METERING_MODE                  0x9207
+#define EXIF_TAG_FLASH                          0x9209
+#define EXIF_TAG_FOCAL_LENGTH                   0x920A
+#define EXIF_TAG_USER_COMMENT                   0x9286
+#define EXIF_TAG_COLOR_SPACE                    0xA001
+#define EXIF_TAG_PIXEL_X_DIMENSION              0xA002
+#define EXIF_TAG_PIXEL_Y_DIMENSION              0xA003
+#define EXIF_TAG_EXPOSURE_MODE                  0xA402
+#define EXIF_TAG_WHITE_BALANCE                  0xA403
+#define EXIF_TAG_SCENCE_CAPTURE_TYPE            0xA406
+
+/* 0th IFD GPS Info Tags */
+#define EXIF_TAG_GPS_VERSION_ID                 0x0000
+#define EXIF_TAG_GPS_LATITUDE_REF               0x0001
+#define EXIF_TAG_GPS_LATITUDE                   0x0002
+#define EXIF_TAG_GPS_LONGITUDE_REF              0x0003
+#define EXIF_TAG_GPS_LONGITUDE                  0x0004
+#define EXIF_TAG_GPS_ALTITUDE_REF               0x0005
+#define EXIF_TAG_GPS_ALTITUDE                   0x0006
+#define EXIF_TAG_GPS_TIMESTAMP                  0x0007
+#define EXIF_TAG_GPS_PROCESSING_METHOD          0x001B
+#define EXIF_TAG_GPS_DATESTAMP                  0x001D
+
+/* 1th IFD TIFF Tags */
+#define EXIF_TAG_COMPRESSION_SCHEME             0x0103
+#define EXIF_TAG_X_RESOLUTION                   0x011A
+#define EXIF_TAG_Y_RESOLUTION                   0x011B
+#define EXIF_TAG_RESOLUTION_UNIT                0x0128
+#define EXIF_TAG_JPEG_INTERCHANGE_FORMAT        0x0201
+#define EXIF_TAG_JPEG_INTERCHANGE_FORMAT_LEN    0x0202
+
+typedef enum {
+    EXIF_ORIENTATION_UP     = 1,
+    EXIF_ORIENTATION_90     = 6,
+    EXIF_ORIENTATION_180    = 3,
+    EXIF_ORIENTATION_270    = 8,
+} ExifOrientationType;
+
+typedef enum {
+    EXIF_SCENE_STANDARD,
+    EXIF_SCENE_LANDSCAPE,
+    EXIF_SCENE_PORTRAIT,
+    EXIF_SCENE_NIGHT,
+} CamExifSceneCaptureType;
+
+typedef enum {
+    EXIF_METERING_UNKNOWN,
+    EXIF_METERING_AVERAGE,
+    EXIF_METERING_CENTER,
+    EXIF_METERING_SPOT,
+    EXIF_METERING_MULTISPOT,
+    EXIF_METERING_PATTERN,
+    EXIF_METERING_PARTIAL,
+    EXIF_METERING_OTHER     = 255,
+} CamExifMeteringModeType;
+
+typedef enum {
+    EXIF_EXPOSURE_AUTO,
+    EXIF_EXPOSURE_MANUAL,
+    EXIF_EXPOSURE_AUTO_BRACKET,
+} CamExifExposureModeType;
+
+typedef enum {
+    EXIF_WB_AUTO,
+    EXIF_WB_MANUAL,
+} CamExifWhiteBalanceType;
+
+/* Values */
+#define EXIF_DEF_MAKER          "SAMSUNG"
+#define EXIF_DEF_MODEL          "SAMSUNG"
+#define EXIF_DEF_SOFTWARE       "SAMSUNG"
+#define EXIF_DEF_EXIF_VERSION   "0220"
+#define EXIF_DEF_USERCOMMENTS   "User comments"
+
+#define EXIF_DEF_YCBCR_POSITIONING  1   /* centered */
+#define EXIF_DEF_FNUMBER_NUM        26  /* 2.6 */
+#define EXIF_DEF_FNUMBER_DEN        10
+#define EXIF_DEF_EXPOSURE_PROGRAM   3   /* aperture priority */
+#define EXIF_DEF_FOCAL_LEN_NUM      278 /* 2.78mm */
+#define EXIF_DEF_FOCAL_LEN_DEN      100
+#define EXIF_DEF_FLASH              0   /* O: off, 1: on*/
+#define EXIF_DEF_COLOR_SPACE        1
+#define EXIF_DEF_EXPOSURE_MODE      EXIF_EXPOSURE_AUTO
+#define EXIF_DEF_APEX_DEN           10
+
+#define EXIF_DEF_COMPRESSION        6
+#define EXIF_DEF_RESOLUTION_NUM     72
+#define EXIF_DEF_RESOLUTION_DEN     1
+#define EXIF_DEF_RESOLUTION_UNIT    2   /* inches */
+
+/* EXIF unsigned RATIONAL value: num/den. */
+typedef struct {
+    uint32_t num;
+    uint32_t den;
+} rational_t;
+
+/* EXIF signed SRATIONAL value: num/den. */
+typedef struct {
+    int32_t num;
+    int32_t den;
+} srational_t;
+
+/*
+ * All EXIF fields gathered by the HAL for one capture; consumed by
+ * ExynosJpegEncoderForCamera::makeExif() when building the APP1 segment.
+ * The GPS block is emitted only when enableGps is set; the 1st IFD
+ * (thumbnail) only when enableThumb is set.
+ */
+typedef struct {
+    bool enableGps;
+    bool enableThumb;
+
+    /* NUL-terminated ASCII strings written verbatim into the IFDs. */
+    unsigned char maker[32];
+    unsigned char model[32];
+    unsigned char software[32];
+    unsigned char exif_version[4];
+    unsigned char date_time[20];
+    unsigned char user_comment[150];
+
+    /* Main image and thumbnail pixel dimensions. */
+    uint32_t width;
+    uint32_t height;
+    uint32_t widthThumb;
+    uint32_t heightThumb;
+
+    /* SHORT-typed tags; values follow the EXIF_* enums/defines above. */
+    uint16_t orientation;
+    uint16_t ycbcr_positioning;
+    uint16_t exposure_program;
+    uint16_t iso_speed_rating;
+    uint16_t metering_mode;
+    uint16_t flash;
+    uint16_t color_space;
+    uint16_t exposure_mode;
+    uint16_t white_balance;
+    uint16_t scene_capture_type;
+
+    /* RATIONAL-typed exposure/optics tags. */
+    rational_t exposure_time;
+    rational_t fnumber;
+    rational_t aperture;
+    rational_t max_aperture;
+    rational_t focal_length;
+
+    /* SRATIONAL-typed (APEX) tags. */
+    srational_t shutter_speed;
+    srational_t brightness;
+    srational_t exposure_bias;
+
+    /* GPS IFD data ("N"/"S", "E"/"W" refs; lat/long/timestamp as deg,min,sec). */
+    unsigned char gps_latitude_ref[2];
+    unsigned char gps_longitude_ref[2];
+
+    uint8_t gps_version_id[4];
+    uint8_t gps_altitude_ref;
+
+    rational_t gps_latitude[3];
+    rational_t gps_longitude[3];
+    rational_t gps_altitude;
+    rational_t gps_timestamp[3];
+    unsigned char gps_datestamp[11];
+    unsigned char gps_processing_method[100];
+
+    /* 1st IFD (thumbnail) tags. */
+    rational_t x_resolution;
+    rational_t y_resolution;
+    uint16_t resolution_unit;
+    uint16_t compression_scheme;
+} exif_attribute_t;
+
+#endif /* EXYNOS_EXIF_H_ */
diff --git a/libcamera2/ExynosJpegEncoderForCamera.cpp b/libcamera2/ExynosJpegEncoderForCamera.cpp
new file mode 100644 (file)
index 0000000..7da9967
--- /dev/null
@@ -0,0 +1,1058 @@
+/*
+ * Copyright Samsung Electronics Co.,LTD.
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#include <utils/Log.h>
+
+#include "ExynosJpegEncoderForCamera.h"
+
+static const char ExifAsciiPrefix[] = { 0x41, 0x53, 0x43, 0x49, 0x49, 0x0, 0x0, 0x0 };
+
+#define JPEG_ERROR_LOG(fmt,...)
+
+#define JPEG_THUMBNAIL_QUALITY (60)
+#define EXIF_LIMIT_SIZE (64*1024)
+#define THUMBNAIL_IMAGE_PIXEL_SIZE (4)
+#define MAX_JPG_WIDTH (8192)
+#define MAX_JPG_HEIGHT (8192)
+
+#define MAX_INPUT_BUFFER_PLANE_NUM (1)
+#define MAX_OUTPUT_BUFFER_PLANE_NUM (1)
+
+// Constructor: zero all members.  Real resources (the encoder HAL instance
+// and the ion client) are acquired lazily in create(), not here.
+ExynosJpegEncoderForCamera::ExynosJpegEncoderForCamera()
+{
+    m_flagCreate = false;
+    m_jpegMain = NULL;
+    m_jpegThumb = NULL;
+    m_thumbnailW = 0;
+    m_thumbnailH = 0;
+    m_thumbnailQuality = JPEG_THUMBNAIL_QUALITY;
+    m_ionJpegClient = 0;
+    // Thumbnail in/out buffers start out empty; filled on demand later.
+    initJpegMemory(&m_stThumbInBuf, MAX_IMAGE_PLANE_NUM);
+    initJpegMemory(&m_stThumbOutBuf, MAX_IMAGE_PLANE_NUM);
+}
+
+// Destructor: tear everything down if create() succeeded and the caller
+// never invoked destroy() explicitly.
+ExynosJpegEncoderForCamera::~ExynosJpegEncoderForCamera()
+{
+    if (m_flagCreate)
+        destroy();
+}
+
+// Reports whether create() has completed successfully (and destroy() has
+// not been called since).
+bool ExynosJpegEncoderForCamera::flagCreate(void)
+{
+    return m_flagCreate;
+}
+
+int ExynosJpegEncoderForCamera::create(void)
+{
+    int ret = ERROR_NONE;
+    if (m_flagCreate == true) {
+        return ERROR_ALREADY_CREATE;
+    }
+
+    if (m_jpegMain == NULL) {
+        m_jpegMain = new ExynosJpegEncoder;
+
+        if (m_jpegMain == NULL) {
+            JPEG_ERROR_LOG("ERR(%s):Cannot create ExynosJpegEncoder class\n", __func__);
+            return ERROR_CANNOT_CREATE_EXYNOS_JPEG_ENC_HAL;
+        }
+
+        ret = m_jpegMain->create();
+        if (ret) {
+            return ret;
+        }
+
+        ret = m_jpegMain->setCache(JPEG_CACHE_ON);
+
+        if (ret) {
+            m_jpegMain->destroy();
+            return ret;
+        }
+    }
+
+    m_ionJpegClient = createIonClient(m_ionJpegClient);
+    if(m_ionJpegClient == 0) {
+        return ERROR_CANNOT_CREATE_EXYNOS_JPEG_ENC_HAL;
+    }
+    m_stThumbInBuf.ionClient = m_stThumbOutBuf.ionClient = m_ionJpegClient;
+
+    m_flagCreate = true;
+
+    return ERROR_NONE;
+}
+
+int ExynosJpegEncoderForCamera::destroy(void)
+{
+    if (m_flagCreate == false) {
+        return ERROR_ALREADY_DESTROY;
+    }
+
+    if (m_jpegMain != NULL) {
+        m_jpegMain->destroy();
+        delete m_jpegMain;
+        m_jpegMain = NULL;
+    }
+
+    if (m_jpegThumb != NULL) {
+        int iSize = sizeof(char)*m_thumbnailW*m_thumbnailH*4;
+
+        freeJpegMemory(&m_stThumbInBuf, MAX_IMAGE_PLANE_NUM);
+        freeJpegMemory(&m_stThumbOutBuf, MAX_IMAGE_PLANE_NUM);
+        m_ionJpegClient = deleteIonClient(m_ionJpegClient);
+        m_stThumbInBuf.ionClient = m_stThumbOutBuf.ionClient = m_ionJpegClient;
+        m_jpegThumb->destroy();
+        delete m_jpegThumb;
+        m_jpegThumb = NULL;
+    }
+
+    m_flagCreate = false;
+    m_thumbnailW = 0;
+    m_thumbnailH = 0;
+    m_thumbnailQuality = JPEG_THUMBNAIL_QUALITY;
+    return ERROR_NONE;
+}
+
+// Forward the main-image dimensions to the encoder HAL.
+// Returns ERROR_NOT_YET_CREATED if create() has not run.
+int ExynosJpegEncoderForCamera::setSize(int w, int h)
+{
+    if (m_flagCreate == false) {
+        return ERROR_NOT_YET_CREATED;
+    }
+
+    return m_jpegMain->setSize(w, h);
+}
+
+
+// Forward the main-image JPEG quality to the encoder HAL.
+// Returns ERROR_NOT_YET_CREATED if create() has not run.
+int ExynosJpegEncoderForCamera::setQuality(int quality)
+{
+    if (m_flagCreate == false) {
+        return ERROR_NOT_YET_CREATED;
+    }
+
+    return m_jpegMain->setQuality(quality);
+}
+
+// Forward the input (YUV) color format to the encoder HAL.
+// Returns ERROR_NOT_YET_CREATED if create() has not run.
+int ExynosJpegEncoderForCamera::setColorFormat(int colorFormat)
+{
+    if (m_flagCreate == false) {
+        return ERROR_NOT_YET_CREATED;
+    }
+
+    return m_jpegMain->setColorFormat(colorFormat);
+}
+
+// Forward the output JPEG (subsampling) format to the encoder HAL.
+// Returns ERROR_NOT_YET_CREATED if create() has not run.
+int ExynosJpegEncoderForCamera::setJpegFormat(int jpegFormat)
+{
+    if (m_flagCreate == false) {
+        return ERROR_NOT_YET_CREATED;
+    }
+
+    return m_jpegMain->setJpegFormat(jpegFormat);
+}
+
+// Push the accumulated size/quality/format settings down to the driver.
+// Returns ERROR_NOT_YET_CREATED if create() has not run.
+int ExynosJpegEncoderForCamera::updateConfig(void)
+{
+    if (m_flagCreate == false) {
+        return ERROR_NOT_YET_CREATED;
+    }
+
+    return m_jpegMain->updateConfig();
+}
+
+int  ExynosJpegEncoderForCamera::setInBuf(int *buf, int *size)
+{
+    if (m_flagCreate == false) {
+        return ERROR_NOT_YET_CREATED;
+    }
+
+    if (buf == NULL) {
+        return ERROR_BUFFR_IS_NULL;
+    }
+
+    if (size == NULL) {
+        return ERROR_BUFFR_IS_NULL;
+    }
+
+    int ret = ERROR_NONE;
+
+    ret = m_jpegMain->setInBuf(buf, size);
+    if (ret) {
+        JPEG_ERROR_LOG("%s::Fail to JPEG input buffer!!\n", __func__);
+        return ret;
+    }
+
+    return ERROR_NONE;
+}
+
+int  ExynosJpegEncoderForCamera::setOutBuf(int buf, int size)
+{
+    if (m_flagCreate == false) {
+        return ERROR_NOT_YET_CREATED;
+    }
+
+    if (buf == NULL) {
+        return ERROR_BUFFR_IS_NULL;
+    }
+
+    if (size<=0) {
+        return ERROR_BUFFER_TOO_SMALL;
+    }
+
+    int ret = ERROR_NONE;
+    ret = m_jpegMain->setOutBuf(buf, size);
+    if (ret) {
+        JPEG_ERROR_LOG("%s::Fail to JPEG output buffer!!\n", __func__);
+        return ret;
+    }
+
+    return ERROR_NONE;
+}
+
+int ExynosJpegEncoderForCamera::encode(int *size, exif_attribute_t *exifInfo)
+{
+    int ret = ERROR_NONE;
+    unsigned char *exifOut = NULL;
+
+    if (m_flagCreate == false) {
+        return ERROR_NOT_YET_CREATED;
+    }
+
+
+    ret = m_jpegMain->encode();
+    if (ret) {
+        JPEG_ERROR_LOG("encode failed\n");
+        return ret;
+    }
+
+    int iJpegSize = m_jpegMain->getJpegSize();
+
+    if (iJpegSize<=0) {
+        JPEG_ERROR_LOG("%s:: output_size is too small(%d)!!\n", __func__, iJpegSize);
+        return ERROR_OUT_BUFFER_SIZE_TOO_SMALL;
+    }
+
+    int iOutputSize = 0;
+    int iJpegBuffer = 0;
+    char *pcJpegBuffer = NULL;
+    ret = m_jpegMain->getOutBuf((int *)&iJpegBuffer, &iOutputSize);
+    if (ret != ERROR_NONE) {
+        return ret;
+    }
+
+    mmapJpegMemory(&iJpegBuffer, &pcJpegBuffer, &iOutputSize, MAX_OUTPUT_BUFFER_PLANE_NUM);
+
+    if (pcJpegBuffer[0] == NULL) {
+        JPEG_ERROR_LOG("%s::pcJpegBuffer[0] is null!!\n", __func__);
+        return ERROR_OUT_BUFFER_CREATE_FAIL;
+    }
+
+    if (exifInfo != NULL) {
+        unsigned int thumbLen, exifLen;
+
+        unsigned int bufSize = 0;
+        if (exifInfo->enableThumb) {
+            if (encodeThumbnail(&thumbLen)) {
+                bufSize = EXIF_FILE_SIZE;
+                exifInfo->enableThumb = false;
+            } else {
+                if (thumbLen > EXIF_LIMIT_SIZE) {
+                    bufSize = EXIF_FILE_SIZE;
+                    exifInfo->enableThumb = false;
+                }
+                else {
+                    bufSize = EXIF_FILE_SIZE + thumbLen;
+                }
+            }
+        } else {
+            bufSize = EXIF_FILE_SIZE;
+            exifInfo->enableThumb = false;
+        }
+
+        exifOut = new unsigned char[bufSize];
+        if (exifOut == NULL) {
+            JPEG_ERROR_LOG("%s::Failed to allocate for exifOut\n", __func__);
+            delete[] exifOut;
+            return ERROR_EXIFOUT_ALLOC_FAIL;
+        }
+        memset(exifOut, 0, bufSize);
+
+        if (makeExif (exifOut, exifInfo, &exifLen)) {
+            JPEG_ERROR_LOG("%s::Failed to make EXIF\n", __func__);
+            delete[] exifOut;
+            return ERROR_MAKE_EXIF_FAIL;
+        }
+
+        if (exifLen <= EXIF_LIMIT_SIZE) {
+            memmove(pcJpegBuffer+exifLen+2, pcJpegBuffer+2, iJpegSize - 2);
+            memcpy(pcJpegBuffer+2, exifOut, exifLen);
+            iJpegSize += exifLen;
+        }
+
+        delete[] exifOut;
+    }
+
+    unmapJpegMemory(&iJpegBuffer, &pcJpegBuffer, &iOutputSize, MAX_OUTPUT_BUFFER_PLANE_NUM);
+
+    *size = iJpegSize;
+
+    return ERROR_NONE;
+}
+
+int ExynosJpegEncoderForCamera::makeExif (unsigned char *exifOut,
+                              exif_attribute_t *exifInfo,
+                              unsigned int *size,
+                              bool useMainbufForThumb)
+{
+    unsigned char *pCur, *pApp1Start, *pIfdStart, *pGpsIfdPtr, *pNextIfdOffset;
+    unsigned int tmp, LongerTagOffest = 0, exifSizeExceptThumb;
+    pApp1Start = pCur = exifOut;
+
+    //2 Exif Identifier Code & TIFF Header
+    pCur += 4;  // Skip 4 Byte for APP1 marker and length
+    unsigned char ExifIdentifierCode[6] = { 0x45, 0x78, 0x69, 0x66, 0x00, 0x00 };
+    memcpy(pCur, ExifIdentifierCode, 6);
+    pCur += 6;
+
+    /* Byte Order - little endian, Offset of IFD - 0x00000008.H */
+    unsigned char TiffHeader[8] = { 0x49, 0x49, 0x2A, 0x00, 0x08, 0x00, 0x00, 0x00 };
+    memcpy(pCur, TiffHeader, 8);
+    pIfdStart = pCur;
+    pCur += 8;
+
+    //2 0th IFD TIFF Tags
+    if (exifInfo->enableGps)
+        tmp = NUM_0TH_IFD_TIFF;
+    else
+        tmp = NUM_0TH_IFD_TIFF - 1;
+
+    memcpy(pCur, &tmp, NUM_SIZE);
+    pCur += NUM_SIZE;
+
+    LongerTagOffest += 8 + NUM_SIZE + tmp*IFD_SIZE + OFFSET_SIZE;
+
+    writeExifIfd(&pCur, EXIF_TAG_IMAGE_WIDTH, EXIF_TYPE_LONG,
+                 1, exifInfo->width);
+    writeExifIfd(&pCur, EXIF_TAG_IMAGE_HEIGHT, EXIF_TYPE_LONG,
+                 1, exifInfo->height);
+    writeExifIfd(&pCur, EXIF_TAG_MAKE, EXIF_TYPE_ASCII,
+                 strlen((char *)exifInfo->maker) + 1, exifInfo->maker, &LongerTagOffest, pIfdStart);
+    writeExifIfd(&pCur, EXIF_TAG_MODEL, EXIF_TYPE_ASCII,
+                 strlen((char *)exifInfo->model) + 1, exifInfo->model, &LongerTagOffest, pIfdStart);
+    writeExifIfd(&pCur, EXIF_TAG_ORIENTATION, EXIF_TYPE_SHORT,
+                 1, exifInfo->orientation);
+    writeExifIfd(&pCur, EXIF_TAG_SOFTWARE, EXIF_TYPE_ASCII,
+                 strlen((char *)exifInfo->software) + 1, exifInfo->software, &LongerTagOffest, pIfdStart);
+    writeExifIfd(&pCur, EXIF_TAG_DATE_TIME, EXIF_TYPE_ASCII,
+                 20, exifInfo->date_time, &LongerTagOffest, pIfdStart);
+    writeExifIfd(&pCur, EXIF_TAG_YCBCR_POSITIONING, EXIF_TYPE_SHORT,
+                 1, exifInfo->ycbcr_positioning);
+    writeExifIfd(&pCur, EXIF_TAG_EXIF_IFD_POINTER, EXIF_TYPE_LONG,
+                 1, LongerTagOffest);
+    if (exifInfo->enableGps) {
+        pGpsIfdPtr = pCur;
+        pCur += IFD_SIZE;   // Skip a ifd size for gps IFD pointer
+    }
+
+    pNextIfdOffset = pCur;  // Skip a offset size for next IFD offset
+    pCur += OFFSET_SIZE;
+
+    //2 0th IFD Exif Private Tags
+    pCur = pIfdStart + LongerTagOffest;
+
+    tmp = NUM_0TH_IFD_EXIF;
+    memcpy(pCur, &tmp , NUM_SIZE);
+    pCur += NUM_SIZE;
+
+    LongerTagOffest += NUM_SIZE + NUM_0TH_IFD_EXIF*IFD_SIZE + OFFSET_SIZE;
+
+    writeExifIfd(&pCur, EXIF_TAG_EXPOSURE_TIME, EXIF_TYPE_RATIONAL,
+                 1, &exifInfo->exposure_time, &LongerTagOffest, pIfdStart);
+    writeExifIfd(&pCur, EXIF_TAG_FNUMBER, EXIF_TYPE_RATIONAL,
+                 1, &exifInfo->fnumber, &LongerTagOffest, pIfdStart);
+    writeExifIfd(&pCur, EXIF_TAG_EXPOSURE_PROGRAM, EXIF_TYPE_SHORT,
+                 1, exifInfo->exposure_program);
+    writeExifIfd(&pCur, EXIF_TAG_ISO_SPEED_RATING, EXIF_TYPE_SHORT,
+                 1, exifInfo->iso_speed_rating);
+    writeExifIfd(&pCur, EXIF_TAG_EXIF_VERSION, EXIF_TYPE_UNDEFINED,
+                 4, exifInfo->exif_version);
+    writeExifIfd(&pCur, EXIF_TAG_DATE_TIME_ORG, EXIF_TYPE_ASCII,
+                 20, exifInfo->date_time, &LongerTagOffest, pIfdStart);
+    writeExifIfd(&pCur, EXIF_TAG_DATE_TIME_DIGITIZE, EXIF_TYPE_ASCII,
+                 20, exifInfo->date_time, &LongerTagOffest, pIfdStart);
+    writeExifIfd(&pCur, EXIF_TAG_SHUTTER_SPEED, EXIF_TYPE_SRATIONAL,
+                 1, (rational_t *)&exifInfo->shutter_speed, &LongerTagOffest, pIfdStart);
+    writeExifIfd(&pCur, EXIF_TAG_APERTURE, EXIF_TYPE_RATIONAL,
+                 1, &exifInfo->aperture, &LongerTagOffest, pIfdStart);
+    writeExifIfd(&pCur, EXIF_TAG_BRIGHTNESS, EXIF_TYPE_SRATIONAL,
+                 1, (rational_t *)&exifInfo->brightness, &LongerTagOffest, pIfdStart);
+    writeExifIfd(&pCur, EXIF_TAG_EXPOSURE_BIAS, EXIF_TYPE_SRATIONAL,
+                 1, (rational_t *)&exifInfo->exposure_bias, &LongerTagOffest, pIfdStart);
+    writeExifIfd(&pCur, EXIF_TAG_MAX_APERTURE, EXIF_TYPE_RATIONAL,
+                 1, &exifInfo->max_aperture, &LongerTagOffest, pIfdStart);
+    writeExifIfd(&pCur, EXIF_TAG_METERING_MODE, EXIF_TYPE_SHORT,
+                 1, exifInfo->metering_mode);
+    writeExifIfd(&pCur, EXIF_TAG_FLASH, EXIF_TYPE_SHORT,
+                 1, exifInfo->flash);
+    writeExifIfd(&pCur, EXIF_TAG_FOCAL_LENGTH, EXIF_TYPE_RATIONAL,
+                 1, &exifInfo->focal_length, &LongerTagOffest, pIfdStart);
+    char code[8] = { 0x00, 0x00, 0x00, 0x49, 0x49, 0x43, 0x53, 0x41 };
+    int commentsLen = strlen((char *)exifInfo->user_comment) + 1;
+    memmove(exifInfo->user_comment + sizeof(code), exifInfo->user_comment, commentsLen);
+    memcpy(exifInfo->user_comment, code, sizeof(code));
+    writeExifIfd(&pCur, EXIF_TAG_USER_COMMENT, EXIF_TYPE_UNDEFINED,
+                 commentsLen + sizeof(code), exifInfo->user_comment, &LongerTagOffest, pIfdStart);
+    writeExifIfd(&pCur, EXIF_TAG_COLOR_SPACE, EXIF_TYPE_SHORT,
+                 1, exifInfo->color_space);
+    writeExifIfd(&pCur, EXIF_TAG_PIXEL_X_DIMENSION, EXIF_TYPE_LONG,
+                 1, exifInfo->width);
+    writeExifIfd(&pCur, EXIF_TAG_PIXEL_Y_DIMENSION, EXIF_TYPE_LONG,
+                 1, exifInfo->height);
+    writeExifIfd(&pCur, EXIF_TAG_EXPOSURE_MODE, EXIF_TYPE_LONG,
+                 1, exifInfo->exposure_mode);
+    writeExifIfd(&pCur, EXIF_TAG_WHITE_BALANCE, EXIF_TYPE_LONG,
+                 1, exifInfo->white_balance);
+    writeExifIfd(&pCur, EXIF_TAG_SCENCE_CAPTURE_TYPE, EXIF_TYPE_LONG,
+                 1, exifInfo->scene_capture_type);
+    tmp = 0;
+    memcpy(pCur, &tmp, OFFSET_SIZE); // next IFD offset
+    pCur += OFFSET_SIZE;
+
+    //2 0th IFD GPS Info Tags
+    if (exifInfo->enableGps) {
+        writeExifIfd(&pGpsIfdPtr, EXIF_TAG_GPS_IFD_POINTER, EXIF_TYPE_LONG,
+                     1, LongerTagOffest); // GPS IFD pointer skipped on 0th IFD
+
+        pCur = pIfdStart + LongerTagOffest;
+
+        if (exifInfo->gps_processing_method[0] == 0) {
+            // don't create GPS_PROCESSING_METHOD tag if there isn't any
+            tmp = NUM_0TH_IFD_GPS - 1;
+        } else {
+            tmp = NUM_0TH_IFD_GPS;
+        }
+        memcpy(pCur, &tmp, NUM_SIZE);
+        pCur += NUM_SIZE;
+
+        LongerTagOffest += NUM_SIZE + tmp*IFD_SIZE + OFFSET_SIZE;
+
+        writeExifIfd(&pCur, EXIF_TAG_GPS_VERSION_ID, EXIF_TYPE_BYTE,
+                     4, exifInfo->gps_version_id);
+        writeExifIfd(&pCur, EXIF_TAG_GPS_LATITUDE_REF, EXIF_TYPE_ASCII,
+                     2, exifInfo->gps_latitude_ref);
+        writeExifIfd(&pCur, EXIF_TAG_GPS_LATITUDE, EXIF_TYPE_RATIONAL,
+                     3, exifInfo->gps_latitude, &LongerTagOffest, pIfdStart);
+        writeExifIfd(&pCur, EXIF_TAG_GPS_LONGITUDE_REF, EXIF_TYPE_ASCII,
+                     2, exifInfo->gps_longitude_ref);
+        writeExifIfd(&pCur, EXIF_TAG_GPS_LONGITUDE, EXIF_TYPE_RATIONAL,
+                     3, exifInfo->gps_longitude, &LongerTagOffest, pIfdStart);
+        writeExifIfd(&pCur, EXIF_TAG_GPS_ALTITUDE_REF, EXIF_TYPE_BYTE,
+                     1, exifInfo->gps_altitude_ref);
+        writeExifIfd(&pCur, EXIF_TAG_GPS_ALTITUDE, EXIF_TYPE_RATIONAL,
+                     1, &exifInfo->gps_altitude, &LongerTagOffest, pIfdStart);
+        writeExifIfd(&pCur, EXIF_TAG_GPS_TIMESTAMP, EXIF_TYPE_RATIONAL,
+                     3, exifInfo->gps_timestamp, &LongerTagOffest, pIfdStart);
+        tmp = strlen((char*)exifInfo->gps_processing_method);
+        if (tmp > 0) {
+            if (tmp > 100) {
+                tmp = 100;
+            }
+            unsigned char tmp_buf[100+sizeof(ExifAsciiPrefix)];
+            memcpy(tmp_buf, ExifAsciiPrefix, sizeof(ExifAsciiPrefix));
+            memcpy(&tmp_buf[sizeof(ExifAsciiPrefix)], exifInfo->gps_processing_method, tmp);
+            writeExifIfd(&pCur, EXIF_TAG_GPS_PROCESSING_METHOD, EXIF_TYPE_UNDEFINED,
+                         tmp+sizeof(ExifAsciiPrefix), tmp_buf, &LongerTagOffest, pIfdStart);
+        }
+        writeExifIfd(&pCur, EXIF_TAG_GPS_DATESTAMP, EXIF_TYPE_ASCII,
+                     11, exifInfo->gps_datestamp, &LongerTagOffest, pIfdStart);
+        tmp = 0;
+        memcpy(pCur, &tmp, OFFSET_SIZE); // next IFD offset
+        pCur += OFFSET_SIZE;
+    }
+
+    //2 1th IFD TIFF Tags
+    int iThumbFd = 0;
+    char *thumbBuf = NULL;
+    unsigned int thumbSize = 0;
+    int thumbBufSize = 0;
+    int ret = ERROR_NONE;
+
+    if (useMainbufForThumb) {
+        if (m_jpegMain) {
+            ret = m_jpegMain->getOutBuf((int *)&iThumbFd, (int *)&thumbBufSize);
+            if (ret != ERROR_NONE) {
+                iThumbFd = -1;
+            }
+            thumbSize = (unsigned int)m_jpegMain->getJpegSize();
+        }
+    } else {
+        if (m_jpegThumb) {
+            ret = m_jpegThumb->getOutBuf((int *)&iThumbFd, (int *)&thumbBufSize);
+            if (ret != ERROR_NONE) {
+                iThumbFd = -1;
+            }
+            thumbSize = (unsigned int)m_jpegThumb->getJpegSize();
+        }
+    }
+
+    mmapJpegMemory(&iThumbFd, &thumbBuf, &thumbBufSize, MAX_OUTPUT_BUFFER_PLANE_NUM);
+
+    if (exifInfo->enableThumb && (thumbBuf != NULL) && (thumbSize != 0)) {
+        exifSizeExceptThumb = tmp = LongerTagOffest;
+        memcpy(pNextIfdOffset, &tmp, OFFSET_SIZE);  // NEXT IFD offset skipped on 0th IFD
+
+        pCur = pIfdStart + LongerTagOffest;
+
+        tmp = NUM_1TH_IFD_TIFF;
+        memcpy(pCur, &tmp, NUM_SIZE);
+        pCur += NUM_SIZE;
+
+        LongerTagOffest += NUM_SIZE + NUM_1TH_IFD_TIFF*IFD_SIZE + OFFSET_SIZE;
+
+        writeExifIfd(&pCur, EXIF_TAG_IMAGE_WIDTH, EXIF_TYPE_LONG,
+                     1, exifInfo->widthThumb);
+        writeExifIfd(&pCur, EXIF_TAG_IMAGE_HEIGHT, EXIF_TYPE_LONG,
+                     1, exifInfo->heightThumb);
+        writeExifIfd(&pCur, EXIF_TAG_COMPRESSION_SCHEME, EXIF_TYPE_SHORT,
+                     1, exifInfo->compression_scheme);
+        writeExifIfd(&pCur, EXIF_TAG_ORIENTATION, EXIF_TYPE_SHORT,
+                     1, exifInfo->orientation);
+        writeExifIfd(&pCur, EXIF_TAG_X_RESOLUTION, EXIF_TYPE_RATIONAL,
+                     1, &exifInfo->x_resolution, &LongerTagOffest, pIfdStart);
+        writeExifIfd(&pCur, EXIF_TAG_Y_RESOLUTION, EXIF_TYPE_RATIONAL,
+                     1, &exifInfo->y_resolution, &LongerTagOffest, pIfdStart);
+        writeExifIfd(&pCur, EXIF_TAG_RESOLUTION_UNIT, EXIF_TYPE_SHORT,
+                     1, exifInfo->resolution_unit);
+        writeExifIfd(&pCur, EXIF_TAG_JPEG_INTERCHANGE_FORMAT, EXIF_TYPE_LONG,
+                     1, LongerTagOffest);
+        writeExifIfd(&pCur, EXIF_TAG_JPEG_INTERCHANGE_FORMAT_LEN, EXIF_TYPE_LONG,
+                     1, thumbSize);
+
+        tmp = 0;
+        memcpy(pCur, &tmp, OFFSET_SIZE); // next IFD offset
+        pCur += OFFSET_SIZE;
+
+        memcpy(pIfdStart + LongerTagOffest,
+               thumbBuf, thumbSize);
+        LongerTagOffest += thumbSize;
+        if (LongerTagOffest > EXIF_LIMIT_SIZE) {
+            LongerTagOffest = exifSizeExceptThumb;
+            tmp = 0;
+            memcpy(pNextIfdOffset, &tmp, OFFSET_SIZE);  // NEXT IFD offset skipped on 0th IFD
+        }
+    } else {
+        tmp = 0;
+        memcpy(pNextIfdOffset, &tmp, OFFSET_SIZE);  // NEXT IFD offset skipped on 0th IFD
+    }
+
+    unsigned char App1Marker[2] = { 0xff, 0xe1 };
+    memcpy(pApp1Start, App1Marker, 2);
+    pApp1Start += 2;
+
+    *size = 10 + LongerTagOffest;
+    tmp = *size - 2;    // APP1 Maker isn't counted
+    unsigned char size_mm[2] = {(tmp >> 8) & 0xFF, tmp & 0xFF};
+    memcpy(pApp1Start, size_mm, 2);
+
+    unmapJpegMemory(&iThumbFd, &thumbBuf, &thumbBufSize, MAX_OUTPUT_BUFFER_PLANE_NUM);
+
+    return ERROR_NONE;
+}
+
+/*
+ * private member functions
+ */
+// Write one 12-byte IFD entry (tag, type, count, value) whose scalar value
+// fits inline in the 4-byte value field (SHORT/LONG tags).
+// Advances *pCur past the entry.
+// NOTE(review): fields are memcpy'd in host byte order; the EXIF header this
+// file emits uses Intel ("II") order, so this assumes a little-endian host
+// -- confirm for any big-endian target.
+inline void ExynosJpegEncoderForCamera::writeExifIfd(unsigned char **pCur,
+                                             unsigned short tag,
+                                             unsigned short type,
+                                             unsigned int count,
+                                             unsigned int value)
+{
+    memcpy(*pCur, &tag, 2);
+    *pCur += 2;
+    memcpy(*pCur, &type, 2);
+    *pCur += 2;
+    memcpy(*pCur, &count, 4);
+    *pCur += 4;
+    memcpy(*pCur, &value, 4);
+    *pCur += 4;
+}
+
+// Write one 12-byte IFD entry whose raw byte value (at most 4 bytes, e.g.
+// version strings, GPS refs) is stored inline in the value field, zero-padded.
+// Advances *pCur past the entry.
+// NOTE(review): buf is 4 bytes but count is not range-checked; callers must
+// pass count <= 4 or the local buffer overflows -- confirm all call sites.
+inline void ExynosJpegEncoderForCamera::writeExifIfd(unsigned char **pCur,
+                                             unsigned short tag,
+                                             unsigned short type,
+                                             unsigned int count,
+                                             unsigned char *pValue)
+{
+    char buf[4] = { 0,};
+
+    memcpy(buf, pValue, count);
+    memcpy(*pCur, &tag, 2);
+    *pCur += 2;
+    memcpy(*pCur, &type, 2);
+    *pCur += 2;
+    memcpy(*pCur, &count, 4);
+    *pCur += 4;
+    memcpy(*pCur, buf, 4);
+    *pCur += 4;
+}
+
+// Write one 12-byte IFD entry whose data does not fit inline: the value
+// field holds *offset (relative to the TIFF header at 'start'), the payload
+// bytes are copied to start + *offset, and *offset is bumped by count.
+// The caller is responsible for the data area having room at *offset.
+inline void ExynosJpegEncoderForCamera::writeExifIfd(unsigned char **pCur,
+                                             unsigned short tag,
+                                             unsigned short type,
+                                             unsigned int count,
+                                             unsigned char *pValue,
+                                             unsigned int *offset,
+                                             unsigned char *start)
+{
+    memcpy(*pCur, &tag, 2);
+    *pCur += 2;
+    memcpy(*pCur, &type, 2);
+    *pCur += 2;
+    memcpy(*pCur, &count, 4);
+    *pCur += 4;
+    memcpy(*pCur, offset, 4);
+    *pCur += 4;
+    memcpy(start + *offset, pValue, count);
+    *offset += count;
+}
+
+// Write one 12-byte IFD entry for RATIONAL/SRATIONAL data: each rational_t
+// is 8 bytes (numerator + denominator), always larger than the inline value
+// field, so the payload goes to start + *offset and the entry stores the
+// offset. *offset advances by 8 * count.
+inline void ExynosJpegEncoderForCamera::writeExifIfd(unsigned char **pCur,
+                                             unsigned short tag,
+                                             unsigned short type,
+                                             unsigned int count,
+                                             rational_t *pValue,
+                                             unsigned int *offset,
+                                             unsigned char *start)
+{
+    memcpy(*pCur, &tag, 2);
+    *pCur += 2;
+    memcpy(*pCur, &type, 2);
+    *pCur += 2;
+    memcpy(*pCur, &count, 4);
+    *pCur += 4;
+    memcpy(*pCur, offset, 4);
+    *pCur += 4;
+    memcpy(start + *offset, pValue, 8 * count);
+    *offset += 8 * count;
+}
+
+// Nearest-neighbor downscale of a single-plane packed YUYV (YUV 4:2:2)
+// image. Output dimensions must be even (a YUYV macropixel covers two
+// horizontal pixels). Only plane 0 of srcBuf/dstBuf is used.
+// Returns ERROR_NONE or ERROR_INVALID_SCALING_WIDTH_HEIGHT.
+int ExynosJpegEncoderForCamera::scaleDownYuv422(char **srcBuf, unsigned int srcW, unsigned int srcH,  char **dstBuf, unsigned int dstW, unsigned int dstH)
+{
+    int step_x, step_y;
+    int src_y_start_pos, dst_pos, src_pos;
+    char *src_buf = srcBuf[0];
+    char *dst_buf = dstBuf[0];
+
+    if (dstW & 0x01 || dstH & 0x01) {
+        return ERROR_INVALID_SCALING_WIDTH_HEIGHT;
+    }
+
+    // Integer subsampling factors; assumes srcW >= dstW and srcH >= dstH
+    // (truncation, no interpolation).
+    step_x = srcW / dstW;
+    step_y = srcH / dstH;
+
+    unsigned int srcWStride = srcW * 2;      // bytes per source row (2 bytes/pixel)
+    unsigned int stepXStride = step_x * 2;   // bytes skipped per sampled pixel
+
+    dst_pos = 0;
+    for (unsigned int y = 0; y < dstH; y++) {
+        src_y_start_pos = srcWStride * step_y * y;
+
+        // Each iteration emits one 4-byte YUYV macropixel (two output pixels)
+        // copied from two adjacent source pixels at column x * step_x.
+        for (unsigned int x = 0; x < dstW; x += 2) {
+            src_pos = src_y_start_pos + (stepXStride * x);
+
+            dst_buf[dst_pos++] = src_buf[src_pos    ];
+            dst_buf[dst_pos++] = src_buf[src_pos + 1];
+            dst_buf[dst_pos++] = src_buf[src_pos + 2];
+            dst_buf[dst_pos++] = src_buf[src_pos + 3];
+        }
+    }
+
+    return ERROR_NONE;
+}
+
+// Nearest-neighbor downscale of a two-plane YUV 4:2:2 image (NV16-style:
+// Y plane followed by interleaved CbCr). The source UV plane is assumed to
+// start at srcBuf[0] + srcW*srcH, and the destination UV data is written
+// contiguously after the destination Y data in dstBuf[0] (dst_pos carries
+// over between the two loops). Output dimensions must be even.
+int ExynosJpegEncoderForCamera::scaleDownYuv422_2p(char **srcBuf, unsigned int srcW, unsigned int srcH, char **dstBuf, unsigned int dstW, unsigned int dstH)
+{
+    int32_t step_x, step_y;
+    int32_t src_y_start_pos, dst_pos, src_pos;
+    int32_t src_Y_offset;
+    char *src_buf;
+    char *dst_buf;
+
+    if (dstW % 2 != 0 || dstH % 2 != 0) {
+        return ERROR_INVALID_SCALING_WIDTH_HEIGHT;
+    }
+
+    step_x = srcW / dstW;
+    step_y = srcH / dstH;
+
+    // Y scale down
+    src_buf = srcBuf[0];
+    dst_buf = dstBuf[0];
+    dst_pos = 0;
+    for (uint32_t y = 0; y < dstH; y++) {
+        src_y_start_pos = y * step_y * srcW;
+
+        for (uint32_t x = 0; x < dstW; x++) {
+            src_pos = src_y_start_pos + (x * step_x);
+
+            dst_buf[dst_pos++] = src_buf[src_pos];
+        }
+    }
+
+    // UV scale down
+    // NOTE(review): src_pos = j * step_x can land on an odd byte when
+    // step_x is odd, which would swap/misalign the Cb/Cr pair -- confirm
+    // step_x is always even for the supported resolutions.
+    for (uint32_t i = 0; i < dstH; i++) {
+        src_y_start_pos = i * step_y * srcW + (srcW*srcH);
+
+        for (uint32_t j = 0; j < dstW; j += 2) {
+            src_pos = src_y_start_pos + (j * step_x);
+
+            dst_buf[dst_pos++] = src_buf[src_pos    ];
+            dst_buf[dst_pos++] = src_buf[src_pos + 1];
+        }
+    }
+
+    return ERROR_NONE;
+}
+
+// thumbnail
+int ExynosJpegEncoderForCamera::setThumbnailSize(int w, int h)
+{
+    if (m_flagCreate == false) {
+        return ERROR_CANNOT_CREATE_EXYNOS_JPEG_ENC_HAL;
+    }
+
+    if (w < 0 || MAX_JPG_WIDTH < w) {
+        return false;
+    }
+
+    if (h < 0 || MAX_JPG_HEIGHT < h) {
+        return false;
+    }
+
+    m_thumbnailW = w;
+    m_thumbnailH = h;
+    return ERROR_NONE;
+}
+
+// Set the JPEG quality (1..100) used for the thumbnail encode.
+// Returns ERROR_NONE on success or a negative ERROR_* code on failure.
+int ExynosJpegEncoderForCamera::setThumbnailQuality(int quality)
+{
+    if (m_flagCreate == false) {
+        return ERROR_CANNOT_CREATE_EXYNOS_JPEG_ENC_HAL;
+    }
+
+    // Reject out-of-range quality with a real error code. The previous
+    // "return false" was 0 == ERROR_NONE, i.e. it reported success while
+    // silently discarding the requested quality.
+    if (quality < 1 || 100 < quality) {
+        return ERROR_FAIL;
+    }
+
+    m_thumbnailQuality = quality;
+    return ERROR_NONE;
+}
+
+// Encode the EXIF thumbnail into m_stThumbOutBuf and report its byte size
+// through *size. The thumbnail encoder inherits the main encoder's config
+// (notably the color format), then overrides quality and dimensions.
+// Only useMain == true is implemented: the main-image input buffer is
+// scaled down in software (YUYV or NV16) to feed the thumbnail encoder.
+int ExynosJpegEncoderForCamera::encodeThumbnail(unsigned int *size, bool useMain)
+{
+    int ret = ERROR_NONE;
+
+    if (m_flagCreate == false) {
+        return ERROR_CANNOT_CREATE_EXYNOS_JPEG_ENC_HAL;
+    }
+
+    // create jpeg thumbnail class (lazily; the object is reused afterwards)
+    if (m_jpegThumb == NULL) {
+        m_jpegThumb = new ExynosJpegEncoder;
+
+        if (m_jpegThumb == NULL) {
+            JPEG_ERROR_LOG("ERR(%s):Cannot open a jpeg device file\n", __func__);
+            return ERROR_CANNOT_CREATE_SEC_THUMB;
+        }
+    }
+
+    // NOTE(review): create() runs on every call, even when m_jpegThumb
+    // already existed -- confirm the encoder/driver tolerates re-create.
+    ret = m_jpegThumb->create();
+    if (ret) {
+        JPEG_ERROR_LOG("ERR(%s):Fail create\n", __func__);
+        return ret;
+    }
+
+        ret = m_jpegThumb->setCache(JPEG_CACHE_ON);
+    if (ret) {
+        JPEG_ERROR_LOG("ERR(%s):Fail cache set\n", __func__);
+        return ret;
+    }
+
+    // NOTE(review): m_jpegMain is dereferenced here without a NULL check,
+    // unlike makeExif() -- confirm encodeThumbnail is only reached after
+    // the main encoder was created.
+    void *pConfig = m_jpegMain->getJpegConfig();
+    if (pConfig == NULL) {
+        JPEG_ERROR_LOG("ERR(%s):Fail getJpegConfig\n", __func__);
+        return ERROR_BUFFR_IS_NULL;
+    }
+
+    ret = m_jpegThumb->setJpegConfig(pConfig);
+    if (ret) {
+        JPEG_ERROR_LOG("ERR(%s):Fail setJpegConfig\n", __func__);
+        return ret;
+    }
+
+    /* TODO: Currently we fix the thumbnail quality */
+    ret = m_jpegThumb->setQuality(JPEG_THUMBNAIL_QUALITY);
+    if (ret) {
+        JPEG_ERROR_LOG("ERR(%s):Fail setQuality\n", __func__);
+        return ret;
+    }
+
+    ret = m_jpegThumb->setSize(m_thumbnailW, m_thumbnailH);
+    if (ret) {
+        JPEG_ERROR_LOG("ERR(%s):Fail setSize\n", __func__);
+        return ret;
+    }
+
+    // Release any buffers from a previous encode before sizing fresh ones.
+    freeJpegMemory(&m_stThumbInBuf, MAX_IMAGE_PLANE_NUM);
+    freeJpegMemory(&m_stThumbOutBuf, MAX_IMAGE_PLANE_NUM);
+
+    // Ask the encoder for per-plane input sizes matching the color format.
+    if (m_jpegThumb->setColorBufSize(m_stThumbInBuf.iSize, MAX_IMAGE_PLANE_NUM) != ERROR_NONE) {
+        return ERROR_INVALID_COLOR_FORMAT;
+    }
+    m_stThumbOutBuf.iSize[0] = sizeof(char)*m_thumbnailW*m_thumbnailH*THUMBNAIL_IMAGE_PIXEL_SIZE;
+
+
+    if (allocJpegMemory(&m_stThumbInBuf, MAX_IMAGE_PLANE_NUM) != ERROR_NONE) {
+        return ERROR_MEM_ALLOC_FAIL;
+    }
+
+    if (allocJpegMemory(&m_stThumbOutBuf, MAX_IMAGE_PLANE_NUM) != ERROR_NONE) {
+        return ERROR_MEM_ALLOC_FAIL;
+    }
+
+    ret = m_jpegThumb->setInBuf(m_stThumbInBuf.ionBuffer, m_stThumbInBuf.iSize);
+    if (ret) {
+        JPEG_ERROR_LOG("ERR(%s):Fail setInBuf\n", __func__);
+        return ret;
+    }
+
+    ret = m_jpegThumb->setOutBuf(m_stThumbOutBuf.ionBuffer[0], m_stThumbOutBuf.iSize[0]);
+    if (ret) {
+        JPEG_ERROR_LOG("ERR(%s):Fail setOutBuf\n", __func__);
+        return ret;
+    }
+
+    ret = m_jpegThumb->updateConfig();
+    if (ret) {
+        JPEG_ERROR_LOG("update config failed\n");
+        return ret;
+    }
+
+    if (useMain) {
+        int iTempWidth=0;
+        int iTempHeight=0;
+        int iMainInputBuf[MAX_INPUT_BUFFER_PLANE_NUM];
+        char *pcMainInputBuf[2];
+        int iMainInputSize[MAX_INPUT_BUFFER_PLANE_NUM];
+        char *pcThumbInputBuf[2];
+        int iThumbInputSize[MAX_INPUT_BUFFER_PLANE_NUM];
+        int iTempColorformat = 0;
+
+        iTempColorformat = m_jpegMain->getColorFormat();
+
+        ret = m_jpegMain->getSize(&iTempWidth, &iTempHeight);
+        if (ret) {
+            JPEG_ERROR_LOG("ERR(%s):Fail getSize\n", __func__);
+            return ret;
+        }
+
+        ret = m_jpegMain->getInBuf(iMainInputBuf, iMainInputSize, MAX_INPUT_BUFFER_PLANE_NUM);
+        if (ret) {
+            JPEG_ERROR_LOG("ERR(%s):Fail getInBuf\n", __func__);
+            return ret;
+        }
+
+        // NOTE(review): pcThumbInputBuf is initialized/adjusted below but the
+        // scale-down calls pass m_stThumbInBuf.pcBuf directly -- presumably
+        // leftover scaffolding; verify before relying on it.
+        pcThumbInputBuf[0] = m_stThumbInBuf.pcBuf[0];
+        pcThumbInputBuf[1] = (char *)(MAP_FAILED);
+
+        // Map the main-image input planes so the CPU can read them.
+        mmapJpegMemory(iMainInputBuf, pcMainInputBuf, iMainInputSize, MAX_INPUT_BUFFER_PLANE_NUM);
+
+        switch (iTempColorformat) {
+        case V4L2_PIX_FMT_YUYV:
+            ret = scaleDownYuv422(pcMainInputBuf,
+                              iTempWidth,
+                              iTempHeight,
+                              m_stThumbInBuf.pcBuf,
+                              m_thumbnailW,
+                              m_thumbnailH);
+            break;
+        case V4L2_PIX_FMT_NV16:
+            // Derive the UV plane pointers from the Y planes (2-plane layout).
+            pcMainInputBuf[1] = pcMainInputBuf[0] + (iTempWidth*iTempHeight);
+            pcThumbInputBuf[1] = pcThumbInputBuf[0] + (m_thumbnailW*m_thumbnailH);
+            ret = scaleDownYuv422_2p(pcMainInputBuf,
+                              iTempWidth,
+                              iTempHeight,
+                              m_stThumbInBuf.pcBuf,
+                              m_thumbnailW,
+                              m_thumbnailH);
+            break;
+        default:
+            return ERROR_INVALID_COLOR_FORMAT;
+            break;
+        }
+
+        // Plane 1 was derived from plane 0's mapping, not mapped separately;
+        // reset it so unmapJpegMemory does not try to unmap it.
+        pcMainInputBuf[1] = (char *)(MAP_FAILED);
+        unmapJpegMemory(iMainInputBuf, pcMainInputBuf, iMainInputSize, MAX_INPUT_BUFFER_PLANE_NUM);
+
+        if (ret) {
+            JPEG_ERROR_LOG("%s::scaleDown(%d, %d, %d, %d) fail", __func__, iTempWidth, iTempHeight, m_thumbnailW, m_thumbnailH);
+            return ret;
+        }
+    }
+    else {
+        return ERROR_IMPLEMENT_NOT_YET;
+    }
+
+    int iOutSizeThumb;
+
+    ret = m_jpegThumb->encode();
+    if (ret) {
+        JPEG_ERROR_LOG("encode failed\n");
+        return ret;
+    }
+
+    iOutSizeThumb = m_jpegThumb->getJpegSize();
+    if (iOutSizeThumb<=0) {
+        JPEG_ERROR_LOG("jpeg size is too small\n");
+        return ERROR_THUMB_JPEG_SIZE_TOO_SMALL;
+    }
+
+    *size = (unsigned int)iOutSizeThumb;
+
+    return ERROR_NONE;
+
+}
+
+// Return an ion client handle: if the passed-in handle is already nonzero it
+// is returned unchanged, otherwise a new client is created.
+// Returns 0 on creation failure (0 doubles as the "no client" sentinel used
+// throughout this file).
+int ExynosJpegEncoderForCamera::createIonClient(ion_client ionClient)
+{
+    if (ionClient == 0) {
+        ionClient = ion_client_create();
+        if (ionClient < 0) {
+            JPEG_ERROR_LOG("[%s]src ion client create failed, value = %d\n", __func__, ionClient);
+            return 0;
+        }
+    }
+
+    return ionClient;
+}
+
+// Destroy a valid (positive) ion client handle and return 0 so the caller
+// can store the cleared handle. A zero handle is a no-op.
+int ExynosJpegEncoderForCamera::deleteIonClient(ion_client ionClient)
+{
+    if (ionClient != 0) {
+        if (ionClient > 0) {
+            ion_client_destroy(ionClient);
+        }
+        ionClient = 0;
+    }
+
+    return ionClient;
+}
+
+// Allocate and map up to iMemoryNum ion buffers described by pstMem->iSize[].
+// Stops at the first zero-sized plane. On any failure every buffer allocated
+// so far is released via freeJpegMemory and ERROR_MEM_ALLOC_FAIL is returned.
+// Requires pstMem->ionClient to be set (nonzero).
+int ExynosJpegEncoderForCamera::allocJpegMemory(struct stJpegMem *pstMem, int iMemoryNum)
+{
+    int ret = ERROR_NONE;
+    int i = 0;
+
+    if (pstMem->ionClient == 0) {
+        JPEG_ERROR_LOG("[%s] i = %d , ionClient is zero (%d)\n", __func__, i, pstMem->ionClient);
+        return ERROR_BUFFR_IS_NULL;
+    }
+
+    for (i=0;i<iMemoryNum;i++) {
+        if (pstMem->iSize[i] == 0) {
+            break;
+        }
+
+        pstMem->ionBuffer[i] = ion_alloc(pstMem->ionClient, \
+                                                        pstMem->iSize[i], 0, ION_HEAP_SYSTEM_MASK, 0);
+        // -1 and 0 are both treated as invalid buffer handles.
+        if ((pstMem->ionBuffer[i] == -1) ||(pstMem->ionBuffer[i] == 0)) {
+            JPEG_ERROR_LOG("[%s]ion_alloc(%d) failed\n", __func__, pstMem->iSize[i]);
+            pstMem->ionBuffer[i] = -1;
+            freeJpegMemory(pstMem, iMemoryNum);
+            return ERROR_MEM_ALLOC_FAIL;
+        }
+
+        pstMem->pcBuf[i] = (char *)ion_map(pstMem->ionBuffer[i], \
+                                                            pstMem->iSize[i], 0);
+        if ((pstMem->pcBuf[i] == (char *)MAP_FAILED) || (pstMem->pcBuf[i] == NULL)) {
+            JPEG_ERROR_LOG("[%s]src ion map failed(%d)\n", __func__, pstMem->iSize[i]);
+            pstMem->pcBuf[i] = (char *)MAP_FAILED;
+            freeJpegMemory(pstMem, iMemoryNum);
+            return ERROR_MEM_ALLOC_FAIL;
+        }
+    }
+
+    return ERROR_NONE;
+}
+
+// Unmap and free every valid ion buffer in pstMem, then reset each plane to
+// its sentinel state (MAP_FAILED / -1 / size 0) so the struct is safe to
+// re-alloc or free again. No-op when no ion client is attached.
+void ExynosJpegEncoderForCamera::freeJpegMemory(struct stJpegMem *pstMem, int iMemoryNum)
+{
+    int i =0 ;
+    if (pstMem->ionClient == 0) {
+        return;
+    }
+
+
+    for (i=0;i<iMemoryNum;i++) {
+        if (pstMem->ionBuffer[i] != -1) {
+            if (pstMem->pcBuf[i] != (char *)MAP_FAILED) {
+                ion_unmap(pstMem->pcBuf[i], pstMem->iSize[i]);
+            }
+            ion_free(pstMem->ionBuffer[i]);
+        }
+        pstMem->ionBuffer[i] = -1;
+        pstMem->pcBuf[i] = (char *)MAP_FAILED;
+        pstMem->iSize[i] = 0;
+    }
+}
+
+// Put pstMem into its pristine state: every plane marked unmapped/unallocated
+// (MAP_FAILED / -1 / size 0) and no ion client attached. Must be called
+// before the struct is first used with alloc/freeJpegMemory.
+void ExynosJpegEncoderForCamera::initJpegMemory(struct stJpegMem *pstMem, int iMemoryNum)
+{
+    int i =0 ;
+    for (i=0;i<iMemoryNum;i++) {
+        pstMem->pcBuf[i] = (char *)MAP_FAILED;
+        pstMem->ionBuffer[i] = -1;
+        pstMem->iSize[i] = 0;
+    }
+    pstMem->ionClient = 0;
+}
+
+void ExynosJpegEncoderForCamera::mmapJpegMemory(int *iFd, char **ppcBuf, int *piSize, int iMemoryNum)
+{
+    for (int i=0;i<iMemoryNum;i++) {
+        if (piSize[i] != 0) {
+            ppcBuf[i] = (char *)ion_map(iFd[i], piSize[i], 0);
+            if ((ppcBuf[i] == (char *)MAP_FAILED) || (ppcBuf[i] == NULL)) {
+                JPEG_ERROR_LOG("[%s]ion map failed(0x%x)\n", __func__, ppcBuf[i]);
+                ppcBuf[i] = (char *)MAP_FAILED;
+            }
+        } else {
+            ppcBuf[i] = (char *)MAP_FAILED;
+        }
+    }
+}
+
+// Unmap every plane previously mapped by mmapJpegMemory, then reset the
+// pointer/size slots to their sentinels. Planes left at MAP_FAILED are
+// skipped. (iFd is unused here; kept for symmetry with mmapJpegMemory.)
+void ExynosJpegEncoderForCamera::unmapJpegMemory(int *iFd, char **ppcBuf, int *piSize, int iMemoryNum)
+{
+    for (int i=0;i<iMemoryNum;i++) {
+        if (ppcBuf[i] != (char *)MAP_FAILED) {
+            ion_unmap(ppcBuf[i], piSize[i]);
+        }
+        ppcBuf[i] = (char *)MAP_FAILED;
+        piSize[i] = 0;
+    }
+}
diff --git a/libcamera2/ExynosJpegEncoderForCamera.h b/libcamera2/ExynosJpegEncoderForCamera.h
new file mode 100644 (file)
index 0000000..d63c4df
--- /dev/null
@@ -0,0 +1,161 @@
+/*
+ * Copyright Samsung Electronics Co.,LTD.
+ * Copyright (C) 2010 The Android Open Source Project
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef EXYNOS_JPEG_ENCODER_FOR_CAMERA_H_
+#define EXYNOS_JPEG_ENCODER_FOR_CAMERA_H_
+
+#include "ExynosExif.h"
+
+#include "ExynosJpegApi.h"
+
+#include <sys/mman.h>
+#include "ion.h"
+
+#define MAX_IMAGE_PLANE_NUM (3)
+
+class ExynosJpegEncoderForCamera {
+public :
+    ;
+    enum ERROR {
+        ERROR_ALREADY_CREATE = -0x200,
+        ERROR_CANNOT_CREATE_EXYNOS_JPEG_ENC_HAL,
+        ERROR_NOT_YET_CREATED,
+        ERROR_ALREADY_DESTROY,
+        ERROR_INPUT_DATA_SIZE_TOO_LARGE,
+        ERROR_OUT_BUFFER_SIZE_TOO_SMALL,
+        ERROR_EXIFOUT_ALLOC_FAIL,
+        ERROR_MAKE_EXIF_FAIL,
+        ERROR_INVALID_SCALING_WIDTH_HEIGHT,
+        ERROR_CANNOT_CREATE_SEC_THUMB,
+        ERROR_THUMB_JPEG_SIZE_TOO_SMALL,
+        ERROR_IMPLEMENT_NOT_YET,
+        ERROR_MEM_ALLOC_FAIL,
+        ERROR_JPEG_DEVICE_ALREADY_CREATE = -0x100,
+        ERROR_CANNOT_OPEN_JPEG_DEVICE,
+        ERROR_JPEG_DEVICE_ALREADY_CLOSED,
+        ERROR_JPEG_DEVICE_ALREADY_DESTROY,
+        ERROR_JPEG_DEVICE_NOT_CREATE_YET,
+        ERROR_INVALID_COLOR_FORMAT,
+        ERROR_INVALID_JPEG_FORMAT,
+        ERROR_JPEG_CONFIG_POINTER_NULL,
+        ERROR_INVALID_JPEG_CONFIG,
+        ERROR_IN_BUFFER_CREATE_FAIL,
+        ERROR_OUT_BUFFER_CREATE_FAIL,
+        ERROR_EXCUTE_FAIL,
+        ERROR_JPEG_SIZE_TOO_SMALL,
+        ERROR_CANNOT_CHANGE_CACHE_SETTING,
+        ERROR_SIZE_NOT_SET_YET,
+        ERROR_BUFFR_IS_NULL,
+        ERROR_BUFFER_TOO_SMALL,
+        ERROR_GET_SIZE_FAIL,
+        ERROR_REQBUF_FAIL,
+        ERROR_INVALID_V4l2_BUF_TYPE = -0x80,
+        ERROR_MMAP_FAILED,
+        ERROR_FAIL,
+        ERROR_NONE = 0
+    };
+
+    ExynosJpegEncoderForCamera();
+    virtual ~ExynosJpegEncoderForCamera();
+
+    bool   flagCreate();
+    int     create(void);
+    int     destroy(void);
+
+    int     setSize(int w, int h);
+    int     setQuality(int quality);
+    int     setColorFormat(int colorFormat);
+    int     setJpegFormat(int jpegFormat);
+
+    int     updateConfig(void);
+
+    int     setInBuf(int *buf, int *size);
+    int     setOutBuf(int buf, int size);
+
+    int     encode(int *size, exif_attribute_t *exifInfo);
+
+    int     setThumbnailSize(int w, int h);
+    int     setThumbnailQuality(int quality);
+
+    int     makeExif(unsigned char *exifOut,
+                               exif_attribute_t *exifIn,
+                               unsigned int *size,
+                               bool useMainbufForThumb = false);
+
+private:
+    inline void writeExifIfd(unsigned char **pCur,
+                                         unsigned short tag,
+                                         unsigned short type,
+                                         unsigned int count,
+                                         uint32_t value);
+    inline void writeExifIfd(unsigned char **pCur,
+                                        unsigned short tag,
+                                        unsigned short type,
+                                        unsigned int count,
+                                        unsigned char *pValue);
+    inline void writeExifIfd(unsigned char **pCur,
+                                         unsigned short tag,
+                                         unsigned short type,
+                                         unsigned int count,
+                                         rational_t *pValue,
+                                         unsigned int *offset,
+                                         unsigned char *start);
+    inline void writeExifIfd(unsigned char **pCur,
+                                         unsigned short tag,
+                                         unsigned short type,
+                                         unsigned int count,
+                                         unsigned char *pValue,
+                                         unsigned int *offset,
+                                         unsigned char *start);
+    int     scaleDownYuv422(char **srcBuf, unsigned int srcW, unsigned int srcH,
+                                                char **dstBuf, unsigned int dstW, unsigned int dstH);
+    int     scaleDownYuv422_2p(char **srcBuf, unsigned int srcW, unsigned int srcH,
+                                                        char **dstBuf, unsigned int dstW, unsigned int dstH);
+    // thumbnail
+    int     encodeThumbnail(unsigned int *size, bool useMain = true);
+
+    struct stJpegMem {
+        ion_client ionClient;
+        ion_buffer ionBuffer[MAX_IMAGE_PLANE_NUM];
+        char *pcBuf[MAX_IMAGE_PLANE_NUM];
+        int iSize[MAX_IMAGE_PLANE_NUM];
+    };
+
+    int     createIonClient(ion_client ionClient);
+    int     deleteIonClient(ion_client ionClient);
+    int     allocJpegMemory(struct stJpegMem *pstMem, int iMemoryNum);
+    void    freeJpegMemory(struct stJpegMem *pstMem, int iMemoryNum);
+    void    initJpegMemory(struct stJpegMem *pstMem, int iMemoryNum);
+
+    void    mmapJpegMemory(int *iFd, char **ppcBuf, int *piSize, int iMemoryNum);
+    void    unmapJpegMemory(int *iFd, char **ppcBuf, int *piSize, int iMemoryNum);
+
+    bool     m_flagCreate;
+
+    ExynosJpegEncoder *m_jpegMain;
+    ExynosJpegEncoder *m_jpegThumb;
+
+    ion_client m_ionJpegClient;
+    struct stJpegMem m_stThumbInBuf;
+    struct stJpegMem m_stThumbOutBuf;
+
+    int m_thumbnailW;
+    int m_thumbnailH;
+    int m_thumbnailQuality;
+};
+
+#endif /* EXYNOS_JPEG_ENCODER_FOR_CAMERA_H_ */
index b0948bcfb0a6e8f07f3bc0da5a92f8836f4feb44..8557e678f4c41e314cb50dc7b96fbb2680a5f360 100644 (file)
@@ -27,7 +27,7 @@
  *   Initial Release
  */
 
-//#define LOG_NDEBUG 0
+//#define LOG_NDEBUG 1
 #define LOG_TAG "MetadataConverter"
 #include <utils/Log.h>
 
@@ -44,7 +44,7 @@ MetadataConverter::MetadataConverter()
 
 MetadataConverter::~MetadataConverter()
 {
-    ALOGV("DEBUG(%s):", __func__);
+    ALOGV("DEBUG(%s):", __FUNCTION__);
     return;
 }
 
@@ -54,7 +54,7 @@ status_t MetadataConverter::CheckEntryTypeMismatch(camera_metadata_entry_t * ent
     if (!(entry->type==type))
     {
         ALOGV("DEBUG(%s):Metadata Missmatch tag(%s) type (%d) count(%d)",
-            __func__, get_camera_metadata_tag_name(entry->tag), entry->type, entry->count);
+            __FUNCTION__, get_camera_metadata_tag_name(entry->tag), entry->type, entry->count);
         return BAD_VALUE;
     }
     return NO_ERROR;
@@ -66,20 +66,20 @@ status_t MetadataConverter::CheckEntryTypeMismatch(camera_metadata_entry_t * ent
     if (!((entry->type==type)&&(entry->count==count)))
     {
         ALOGV("DEBUG(%s):Metadata Missmatch tag(%s) type (%d) count(%d)",
-            __func__, get_camera_metadata_tag_name(entry->tag), entry->type, entry->count);
+            __FUNCTION__, get_camera_metadata_tag_name(entry->tag), entry->type, entry->count);
         return BAD_VALUE;
     }
     return NO_ERROR;
 }
 
-status_t MetadataConverter::ToInternalCtl(camera_metadata_t * request, camera2_ctl_metadata_NEW_t * dst)
+status_t MetadataConverter::ToInternalShot(camera_metadata_t * request, camera2_ctl_metadata_NEW_t * dst)
 {
     uint32_t    num_entry = 0;
     uint32_t    index = 0;
     uint32_t    i = 0;
     camera_metadata_entry_t curr_entry;
 
-    ALOGV("DEBUG(%s):", __func__);
+    ALOGV("DEBUG(%s):", __FUNCTION__);
     if (request == NULL || dst == NULL)
         return BAD_VALUE;
 
@@ -347,7 +347,7 @@ status_t MetadataConverter::ToInternalCtl(camera_metadata_t * request, camera2_c
                 if (NO_ERROR != CheckEntryTypeMismatch(&curr_entry, TYPE_BYTE, 1))
                     break;
                 dst->ctl.aa.captureIntent = (aa_captureintent_NEW_t)curr_entry.data.u8[0];
-                break;\r
+                break;
 
             case ANDROID_CONTROL_MODE:
                 if (NO_ERROR != CheckEntryTypeMismatch(&curr_entry, TYPE_BYTE, 1))
@@ -437,21 +437,23 @@ status_t MetadataConverter::ToInternalCtl(camera_metadata_t * request, camera2_c
                 if (NO_ERROR != CheckEntryTypeMismatch(&curr_entry, TYPE_INT32, 1))
                     break;
                 dst->ctl.request.id = curr_entry.data.i32[0];
-                ALOGV("DEBUG(%s): ANDROID_REQUEST_ID (%d)",  __func__,  dst->ctl.request.id);
+                ALOGV("DEBUG(%s): ANDROID_REQUEST_ID (%d)",  __FUNCTION__,  dst->ctl.request.id);
                 break;
 
             case ANDROID_REQUEST_METADATA_MODE:
                 if (NO_ERROR != CheckEntryTypeMismatch(&curr_entry, TYPE_BYTE, 1))
                     break;
                 dst->ctl.request.metadataMode = (metadata_mode_NEW_t)curr_entry.data.u8[0];
-                ALOGV("DEBUG(%s): ANDROID_REQUEST_METADATA_MODE (%d)",  __func__, (int)( dst->ctl.request.metadataMode));
+                ALOGV("DEBUG(%s): ANDROID_REQUEST_METADATA_MODE (%d)",  __FUNCTION__, (int)( dst->ctl.request.metadataMode));
                 break;
 
             case ANDROID_REQUEST_OUTPUT_STREAMS:
                 if (NO_ERROR != CheckEntryTypeMismatch(&curr_entry, TYPE_BYTE))
                     break;
-                for (i=0 ; i<curr_entry.count ; i++)
+                for (i=0 ; i<curr_entry.count ; i++) {
                     dst->ctl.request.outputStreams[i] = curr_entry.data.u8[i];
+                    ALOGV("DEBUG(%s): OUTPUT_STREAM[%d] = %d ",  __FUNCTION__, i, (int)(dst->ctl.request.outputStreams[i]));
+                }
                 dst->ctl.request.numOutputStream = curr_entry.count;
                 break;
 
@@ -459,11 +461,11 @@ status_t MetadataConverter::ToInternalCtl(camera_metadata_t * request, camera2_c
                 if (NO_ERROR != CheckEntryTypeMismatch(&curr_entry, TYPE_INT32, 1))
                     break;
                 dst->ctl.request.frameCount = curr_entry.data.i32[0];
-                ALOGV("DEBUG(%s): ANDROID_REQUEST_FRAME_COUNT (%d)",  __func__, dst->ctl.request.frameCount);
+                ALOGV("DEBUG(%s): ANDROID_REQUEST_FRAME_COUNT (%d)",  __FUNCTION__, dst->ctl.request.frameCount);
                 break;
 
             default:
-                ALOGD("DEBUG(%s):Bad Metadata tag (%d)",  __func__, curr_entry.tag);
+                ALOGD("DEBUG(%s):Bad Metadata tag (%d)",  __FUNCTION__, curr_entry.tag);
                 break;
             }
         }
@@ -479,7 +481,7 @@ status_t MetadataConverter::ToDynamicMetadata(camera2_ctl_metadata_NEW_t * metad
 {
     status_t    res;
 
-    ALOGV("DEBUG(%s): TEMP version using original request METADATA", __func__);
+    ALOGV("DEBUG(%s): TEMP version using original request METADATA", __FUNCTION__);
     if (0 != add_camera_metadata_entry(dst, ANDROID_REQUEST_ID,
                 &(metadata->ctl.request.id), 1))
         return NO_MEMORY;
@@ -495,13 +497,16 @@ status_t MetadataConverter::ToDynamicMetadata(camera2_ctl_metadata_NEW_t * metad
 
 
     if (metadata->ctl.request.metadataMode == METADATA_MODE_NONE_NEW) {
-        ALOGV("DEBUG(%s): METADATA_MODE_NONE", __func__);
+        ALOGV("DEBUG(%s): METADATA_MODE_NONE", __FUNCTION__);
         return NO_ERROR;
     }
 
-     ALOGV("DEBUG(%s): METADATA_MODE_FULL", __func__);
-
+    ALOGV("DEBUG(%s): METADATA_MODE_FULL", __FUNCTION__);
 
+    if (0 != add_camera_metadata_entry(dst, ANDROID_SENSOR_TIMESTAMP,
+                &(metadata->dm.sensor.timeStamp), 1))
+        return NO_MEMORY;
+    ALOGV("DEBUG(%s): Timestamp: %llu", __FUNCTION__, (unsigned long long)metadata->dm.sensor.timeStamp);
     return NO_ERROR;
 
 
index fd602442c86a822871e1b54515b5d095f97b7d78..3b9f976c7b131eb15eaa56cc44e4f3fdd49f3b2a 100644 (file)
@@ -46,7 +46,7 @@ public:
     MetadataConverter();
     ~MetadataConverter();
 
-    status_t ToInternalCtl(camera_metadata_t * request, camera2_ctl_metadata_NEW_t * dst);
+    status_t ToInternalShot(camera_metadata_t * request, camera2_ctl_metadata_NEW_t * dst);
        status_t ToDynamicMetadata(camera2_ctl_metadata_NEW_t * metadata, camera_metadata_t * dst);
 
 private:
index 219f43bc00d5689995107aec65918c96d8a3f052..751f570cbd6f0da8d8fcf4febfe6ad5c494ab35a 100644 (file)
  * <b>Revision History: </b>
  * - 2012/05/31 : Sungjoong Kang(sj3.kang@samsung.com) \n
  *   Initial Release
+ *
+ * - 2012/07/10 : Sungjoong Kang(sj3.kang@samsung.com) \n
+ *   2nd Release
+ *
  */
+
 //#define LOG_NDEBUG 0
 #define LOG_TAG "SignalDrivenThread"
 #include <utils/Log.h>
@@ -36,14 +41,24 @@ namespace android {
 
 
 SignalDrivenThread::SignalDrivenThread()
+    :Thread(false)
 {
+    ALOGV("DEBUG(SignalDrivenThread() ):");
+    m_processingSignal = 0;
+    m_receivedSignal = 0;
 }
 
+void SignalDrivenThread::Start(const char* name,
+                            int32_t priority, size_t stack)
+{
+    ALOGV("DEBUG(SignalDrivenThread::Start() ):");
+    run(name, priority, stack);
+}
 SignalDrivenThread::SignalDrivenThread(const char* name,
                             int32_t priority, size_t stack)
     :Thread(false)
 {
-    ALOGV("DEBUG(%s):", __func__);
+    ALOGV("DEBUG(SignalDrivenThread( , , )):");
     m_processingSignal = 0;
     m_receivedSignal = 0;
     run(name, priority, stack);
@@ -52,16 +67,16 @@ SignalDrivenThread::SignalDrivenThread(const char* name,
 
 SignalDrivenThread::~SignalDrivenThread()
 {
-    ALOGV("DEBUG(%s):", __func__);
+    ALOGV("DEBUG(%s):", __FUNCTION__);
     return;
 }
 
 status_t SignalDrivenThread::SetSignal(uint32_t signal)
 {
-    ALOGV("DEBUG(%s):Setting Signal (%x)", __func__, signal);
+    ALOGV("DEBUG(%s):Setting Signal (%x)", __FUNCTION__, signal);
 
     Mutex::Autolock lock(m_signalMutex);
-    ALOGV("DEBUG(%s):Signal Set     (%x) - prev(%x)", __func__, signal, m_receivedSignal);
+    ALOGV("DEBUG(%s):Signal Set     (%x) - prev(%x)", __FUNCTION__, signal, m_receivedSignal);
     m_receivedSignal |= signal;
     m_threadCondition.signal();
     return NO_ERROR;
@@ -69,7 +84,7 @@ status_t SignalDrivenThread::SetSignal(uint32_t signal)
 
 uint32_t SignalDrivenThread::GetProcessingSignal()
 {
-    ALOGV("DEBUG(%s): Signal (%x)", __func__, m_processingSignal);
+    ALOGV("DEBUG(%s): Signal (%x)", __FUNCTION__, m_processingSignal);
 
     return m_processingSignal;
 }
@@ -95,7 +110,7 @@ bool SignalDrivenThread::threadLoop()
 {
     {
         Mutex::Autolock lock(m_signalMutex);
-        ALOGV("DEBUG(%s):Waiting Signal", __func__);
+        ALOGV("DEBUG(%s):Waiting Signal", __FUNCTION__);
         while (!m_receivedSignal)
         {
             m_threadCondition.wait(m_signalMutex);
@@ -103,21 +118,21 @@ bool SignalDrivenThread::threadLoop()
         m_processingSignal = m_receivedSignal;
         m_receivedSignal = 0;
     }
-    ALOGV("DEBUG(%s):Got Signal (%x)", __func__, m_processingSignal);
+    ALOGV("DEBUG(%s):Got Signal (%x)", __FUNCTION__, m_processingSignal);
 
     if (m_processingSignal & SIGNAL_THREAD_TERMINATE)
     {
-        ALOGV("DEBUG(%s):Thread Terminating", __func__);
+        ALOGV("DEBUG(%s):Thread Terminating", __FUNCTION__);
         return (false);
     }
     else if (m_processingSignal & SIGNAL_THREAD_PAUSE)
     {
-        ALOGV("DEBUG(%s):Thread Paused", __func__);
+        ALOGV("DEBUG(%s):Thread Paused", __FUNCTION__);
         return (true);
     }
 
-    threadLoopInternal();
-    return (true);
+    threadFunctionInternal();
+    return true;
 }
 
 
index 00b85b1ce484df6d851a2185789dd721209bbe6b..ea95d4baef6e8ead49e6912096f60be4ff854485 100644 (file)
@@ -1,77 +1,80 @@
-/*
-**
-** Copyright 2008, The Android Open Source Project
-** Copyright 2012, Samsung Electronics Co. LTD
-**
-** Licensed under the Apache License, Version 2.0 (the "License");
-** you may not use this file except in compliance with the License.
-** You may obtain a copy of the License at
-**
-**     http://www.apache.org/licenses/LICENSE-2.0
-**
-** Unless required by applicable law or agreed to in writing, software
-** distributed under the License is distributed on an "AS IS" BASIS,
-** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-** See the License for the specific language governing permissions and
-** limitations under the License.
-*/
-
-/*!
- * \file      SignalDrivenThread.h
- * \brief     header file for general thread ( for camera hal2 implementation )
- * \author    Sungjoong Kang(sj3.kang@samsung.com)
- * \date      2012/05/31
- *
- * <b>Revision History: </b>
- * - 2012/05/31 : Sungjoong Kang(sj3.kang@samsung.com) \n
- *   Initial Release
- */
-
-
-#ifndef SIGNAL_DRIVEN_THREAD_H
-#define SIGNAL_DRIVEN_THREAD_H
-
-#include <utils/threads.h>
-
-namespace android {
-
-#define SIGNAL_THREAD_TERMINATE     (1<<0)
-#define SIGNAL_THREAD_PAUSE         (1<<1)
-
-#define SIGNAL_THREAD_COMMON_LAST   (1<<3)
-
-class SignalDrivenThread : public Thread {
-public:
-                        SignalDrivenThread();
-                        SignalDrivenThread(const char* name, 
-                            int32_t priority, size_t stack);
-    virtual             ~SignalDrivenThread();
-
-            status_t    SetSignal(uint32_t signal);
-            
-
-            uint32_t    GetProcessingSignal();
-            //void        ClearProcessingSignal(uint32_t signal);
-               
-
-private:
-            status_t    readyToRun();
-    virtual status_t    readyToRunInternal() = 0;
-    
-            bool        threadLoop();
-    virtual void        threadLoopInternal() = 0;
-
-            void        ClearSignal();
-    
-            uint32_t    m_receivedSignal;
-            uint32_t    m_processingSignal;  
-
-            Mutex       m_signalMutex;
-            Condition   m_threadCondition;
-};
-
-}; // namespace android
-
-#endif
-
+/*
+**
+** Copyright 2008, The Android Open Source Project
+** Copyright 2012, Samsung Electronics Co. LTD
+**
+** Licensed under the Apache License, Version 2.0 (the "License");
+** you may not use this file except in compliance with the License.
+** You may obtain a copy of the License at
+**
+**     http://www.apache.org/licenses/LICENSE-2.0
+**
+** Unless required by applicable law or agreed to in writing, software
+** distributed under the License is distributed on an "AS IS" BASIS,
+** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+** See the License for the specific language governing permissions and
+** limitations under the License.
+*/
+
+/*!
+ * \file      SignalDrivenThread.h
+ * \brief     header file for general thread ( for camera hal2 implementation )
+ * \author    Sungjoong Kang(sj3.kang@samsung.com)
+ * \date      2012/05/31
+ *
+ * <b>Revision History: </b>
+ * - 2012/05/31 : Sungjoong Kang(sj3.kang@samsung.com) \n
+ *   Initial Release
+ *
+ * - 2012/07/10 : Sungjoong Kang(sj3.kang@samsung.com) \n
+ *   2nd Release
+ *
+ */
+
+
+
+#ifndef SIGNAL_DRIVEN_THREAD_H
+#define SIGNAL_DRIVEN_THREAD_H
+
+#include <utils/threads.h>
+
+namespace android {
+
+#define SIGNAL_THREAD_TERMINATE     (1<<0)
+#define SIGNAL_THREAD_PAUSE         (1<<1)
+
+#define SIGNAL_THREAD_COMMON_LAST   (1<<3)
+
+class SignalDrivenThread:public Thread {
+public:
+                        SignalDrivenThread();
+                        SignalDrivenThread(const char *name,
+                            int32_t priority, size_t stack);
+    virtual             ~SignalDrivenThread();
+
+            status_t    SetSignal(uint32_t signal);
+
+            uint32_t    GetProcessingSignal();
+            //void        ClearProcessingSignal(uint32_t signal);
+            void        Start(const char *name,
+                            int32_t priority, size_t stack);
+
+private:
+            status_t    readyToRun();
+    virtual status_t    readyToRunInternal() = 0;
+
+            bool        threadLoop();
+    virtual void        threadFunctionInternal() = 0;
+
+            void        ClearSignal();
+
+            uint32_t    m_receivedSignal;
+            uint32_t    m_processingSignal;
+
+            Mutex       m_signalMutex;
+            Condition   m_threadCondition;
+};
+
+}; // namespace android
+
+#endif
index 4624ee9e87dc3397daa8135923c0a545bad4d1f5..7c4462d838a50007abb7e19a5464131d7e919f26 100644 (file)
@@ -99,12 +99,15 @@ const uint32_t Sensor::kAvailableSensitivities[5] =
 const uint32_t Sensor::kDefaultSensitivity = 100;
 
 
-const uint32_t kAvailableFormats[3] = {
+const uint32_t kAvailableFormats[5] = {
         HAL_PIXEL_FORMAT_RAW_SENSOR,
+        HAL_PIXEL_FORMAT_BLOB,
+        HAL_PIXEL_FORMAT_RGBA_8888,
         HAL_PIXEL_FORMAT_YV12,
         HAL_PIXEL_FORMAT_YCrCb_420_SP
 };
 
+
 const uint32_t kAvailableRawSizes[2] = {
     //640, 480
     Sensor::kResolution[0], Sensor::kResolution[1]
@@ -124,8 +127,12 @@ const uint64_t kAvailableProcessedMinDurations[1] = {
 };
 
 const uint32_t kAvailableJpegSizes[2] = {
-    //640, 480
-    Sensor::kResolution[0], Sensor::kResolution[1]
+    1280, 960,
+//    1280, 1080,
+//    2560, 1920,
+//    1280, 720,
+//    640, 480
+    //Sensor::kResolution[0], Sensor::kResolution[1]
 };
 
 const uint64_t kAvailableJpegMinDurations[1] = {
@@ -325,6 +332,9 @@ status_t constructStaticInfo(
     ADD_OR_SIZE(ANDROID_JPEG_AVAILABLE_THUMBNAIL_SIZES,
             jpegThumbnailSizes, sizeof(jpegThumbnailSizes)/sizeof(int32_t));
 
+    static const int32_t jpegMaxSize = 5*1024*1024;
+    ADD_OR_SIZE(ANDROID_JPEG_MAX_SIZE, &jpegMaxSize, 1);
+
     // android.stats
 
     static const uint8_t availableFaceDetectModes[] = {
index 3dd0de233cc1ff29208ccf6f7b3c8f255ac3df22..f2d2f299a03d5ed5bad6f97b3599331227a11f8b 100644 (file)
@@ -531,7 +531,7 @@ typedef struct camera2_ctl_NEW {
     camera2_sensor_ctl_NEW_t            sensor;
     camera2_flash_ctl_NEW_t             flash;
     camera2_hotpixel_ctl_NEW_t          hotpixel;
-    camera2_demosaic_ctl_NEW_t          demosaic;\r
+    camera2_demosaic_ctl_NEW_t          demosaic;
     camera2_noisereduction_ctl_NEW_t    noise;
     camera2_shading_ctl_NEW_t           shading;
     camera2_geometric_ctl_NEW_t         geometric;
@@ -551,7 +551,7 @@ typedef struct camera2_dm_NEW {
     camera2_sensor_dm_NEW_t             sensor;
     camera2_flash_dm_NEW_t              flash;
     camera2_hotpixel_dm_NEW_t           hotpixel;
-    camera2_demosaic_dm_NEW_t           demosaic;\r
+    camera2_demosaic_dm_NEW_t           demosaic;
     camera2_noisereduction_dm_NEW_t     noise;
     camera2_shading_dm_NEW_t            shading;
     camera2_geometric_dm_NEW_t          geometric;
@@ -697,7 +697,7 @@ typedef enum scaler_availableformats_NEW {
 } scaler_availableformats_NEW_t;
 
 typedef struct camera2_scaler_sm_NEW {
-    scaler_availableformats_NEW_t availableFormats[4]; // assuming \r
+    scaler_availableformats_NEW_t availableFormats[4]; // assuming 
                                                    // # of availableFormats = 4
     uint32_t    availableSizesPerFormat[4];  
     uint32_t    availableSizes[4][8][2]; // assuning availableSizesPerFormat=8
index 576f7051308db82dddbda8a6d78271c79725d60e..71056569b5278103d39daa48b6aa02d0dbdc64cc 100644 (file)
  * published by the Free Software Foundation.
  */
 
-/* 2012.04.18  Version 0.1  Initial Release */
-/* 2012.04.23  Version 0.2  Added static metadata (draft) */
+/*2012.04.18 Version 0.1 Initial Release*/
+/*2012.04.23 Version 0.2 Added static metadata (draft)*/
 
 
 #ifndef FIMC_IS_METADATA_H_
 #define FIMC_IS_METADATA_H_
 
-//#include "camera_common.h"
-#include <stdint.h>
-#include <sys/cdefs.h>
-#include <sys/types.h>
-
-
-typedef struct rational {
+struct rational {
        uint32_t num;
        uint32_t den;
-} rational_t;
+};
+
+#define CAMERA2_MAX_AVAILABLE_MODE 21
 
 
 /*
- *   controls/dynamic metadata 
- */
+ *controls/dynamic metadata
+*/
 
 /* android.request */
 
-typedef enum metadata_mode
-{
+enum metadata_mode {
        METADATA_MODE_NONE,
        METADATA_MODE_FULL
-} metadata_mode_t;
+};
 
-typedef struct camera2_request_ctl {
-       uint32_t                        id;
-       metadata_mode_t         metadataMode;
-       uint8_t                         outputStreams[16];
-} camera2_request_ctl_t;
+struct camera2_request_ctl {
+       uint32_t                id;
+       enum metadata_mode      metadataMode;
+       uint8_t                 outputStreams[16];
+};
 
-typedef struct camera2_request_dm {
-       uint32_t                        id;
-       metadata_mode_t         metadataMode;
-       uint32_t                        frameCount; 
-} camera2_request_dm_t;
+struct camera2_request_dm {
+       uint32_t                id;
+       enum metadata_mode      metadataMode;
+       uint32_t                frameCount;
+};
 
 
 
 /* android.lens */
 
-typedef enum optical_stabilization_mode
-{
+enum optical_stabilization_mode {
        OPTICAL_STABILIZATION_MODE_OFF,
        OPTICAL_STABILIZATION_MODE_ON
-} optical_stabilization_mode_t;
-       
-typedef struct camera2_lens_ctl {
-       float                                                   focusDistance;
-       float                                                   aperture;
-       float                                                   focalLength;
-       float                                                   filterDensity;
-       optical_stabilization_mode_t    opticalStabilizationMode;
-} camera2_lens_ctl_t;
-
-typedef struct camera2_lens_dm {
-       float                                                   focusDistance;
-       float                                                   aperture;
-       float                                                   focalLength;
-       float                                                   filterDensity;
-       optical_stabilization_mode_t    opticalStabilizationMode;
-       float                                                   focusRange[2];
-} camera2_lens_dm_t;
-
+};
 
+enum lens_facing {
+       LENS_FACING_FRONT,
+       LENS_FACING_BACK
+};
+
+struct camera2_lens_ctl {
+       float                                   focusDistance;
+       float                                   aperture;
+       float                                   focalLength;
+       float                                   filterDensity;
+       enum optical_stabilization_mode         opticalStabilizationMode;
+};
+
+struct camera2_lens_dm {
+       float                                   focusDistance;
+       float                                   aperture;
+       float                                   focalLength;
+       float                                   filterDensity;
+       enum optical_stabilization_mode         opticalStabilizationMode;
+       float                                   focusRange[2];
+};
+
+struct camera2_lens_sm {
+       float                           minimumFocusDistance;
+       float                           availableFocalLength[2];
+       float                           availableApertures;
+       /*assuming 1 aperture*/
+       float                           availableFilterDensities;
+       /*assuming 1 ND filter value*/
+       enum optical_stabilization_mode availableOpticalStabilization;
+       /*assuming 1*/
+       float                           shadingMap[3][40][30];
+       float                           geometricCorrectionMap[2][3][40][30];
+       enum lens_facing                facing;
+       float                           position[2];
+};
 
 /* android.sensor */
 
-typedef struct camera2_sensor_ctl {
+enum sensor_colorfilterarrangement {
+       SENSOR_COLORFILTERARRANGEMENT_RGGB,
+       SENSOR_COLORFILTERARRANGEMENT_GRBG,
+       SENSOR_COLORFILTERARRANGEMENT_GBRG,
+       SENSOR_COLORFILTERARRANGEMENT_BGGR,
+       SENSOR_COLORFILTERARRANGEMENT_RGB
+};
+
+enum sensor_ref_illuminant {
+       SENSOR_ILLUMINANT_DAYLIGHT = 1,
+       SENSOR_ILLUMINANT_FLUORESCENT = 2,
+       SENSOR_ILLUMINANT_TUNGSTEN = 3,
+       SENSOR_ILLUMINANT_FLASH = 4,
+       SENSOR_ILLUMINANT_FINE_WEATHER = 9,
+       SENSOR_ILLUMINANT_CLOUDY_WEATHER = 10,
+       SENSOR_ILLUMINANT_SHADE = 11,
+       SENSOR_ILLUMINANT_DAYLIGHT_FLUORESCENT = 12,
+       SENSOR_ILLUMINANT_DAY_WHITE_FLUORESCENT = 13,
+       SENSOR_ILLUMINANT_COOL_WHITE_FLUORESCENT = 14,
+       SENSOR_ILLUMINANT_WHITE_FLUORESCENT = 15,
+       SENSOR_ILLUMINANT_STANDARD_A = 17,
+       SENSOR_ILLUMINANT_STANDARD_B = 18,
+       SENSOR_ILLUMINANT_STANDARD_C = 19,
+       SENSOR_ILLUMINANT_D55 = 20,
+       SENSOR_ILLUMINANT_D65 = 21,
+       SENSOR_ILLUMINANT_D75 = 22,
+       SENSOR_ILLUMINANT_D50 = 23,
+       SENSOR_ILLUMINANT_ISO_STUDIO_TUNGSTEN = 24
+};
+
+struct camera2_sensor_ctl {
+       /* unit : nano */
        uint64_t        exposureTime;
+       /* unit : nano (it's the min frame duration) */
        uint64_t        frameDuration;
+       /* unit : percent(need to change ISO value?) */
        uint32_t        sensitivity;
-} camera2_sensor_ctl_t;
+};
 
-typedef struct camera2_sensor_dm {
+struct camera2_sensor_dm {
        uint64_t        exposureTime;
        uint64_t        frameDuration;
        uint32_t        sensitivity;
        uint64_t        timeStamp;
        uint32_t        frameCount;
-} camera2_sensor_dm_t;
+};
+
+struct camera2_sensor_sm {
+       uint32_t        exposureTimeRange[2];
+       uint32_t        maxFrameDuration;
+       uint32_t        sensitivityRange[2];
+       enum sensor_colorfilterarrangement colorFilterArrangement;
+       uint32_t        pixelArraySize[2];
+       uint32_t        activeArraySize[4];
+       uint32_t        whiteLevel;
+       uint32_t        blackLevelPattern[4];
+       struct rational colorTransform1[9];
+       struct rational colorTransform2[9];
+       enum sensor_ref_illuminant      referenceIlluminant1;
+       enum sensor_ref_illuminant      referenceIlluminant2;
+       struct rational forwardMatrix1[9];
+       struct rational forwardMatrix2[9];
+       struct rational calibrationTransform1[9];
+       struct rational calibrationTransform2[9];
+       struct rational baseGainFactor;
+       uint32_t        maxAnalogSensitivity;
+       float           noiseModelCoefficients[2];
+       uint32_t        orientation;
+};
 
 
 
 /* android.flash */
 
-typedef enum flash_mode
-{
-       CAM2_FLASH_MODE_OFF,
+enum flash_mode {
+       CAM2_FLASH_MODE_OFF = 1,
        CAM2_FLASH_MODE_SINGLE,
        CAM2_FLASH_MODE_TORCH
-} flash_mode_t;
+};
 
-typedef struct camera2_flash_ctl {
-       flash_mode_t    flashMode;
+struct camera2_flash_ctl {
+       enum flash_mode flashMode;
        uint8_t                 firingPower;
        uint64_t                firingTime;
-} camera2_flash_ctl_t;
+};
 
-typedef struct camera2_flash_dm {
-       flash_mode_t    flashMode;
+struct camera2_flash_dm {
+       enum flash_mode flashMode;
        uint8_t                 firingPower;
+       /*10 is max power*/
        uint64_t                firingTime;
-} camera2_flash_dm_t;
+       /*unit : microseconds*/
+};
 
+struct camera2_flash_sm {
+       uint8_t         available;
+       uint64_t        chargeDuration;
+};
 
 
 /* android.flash */
 
-typedef enum hotpixel_mode
-{
-       HOTPIXEL_MODE_OFF,
+enum hotpixel_mode {
+       HOTPIXEL_MODE_OFF = 1,
        HOTPIXEL_MODE_FAST,
        HOTPIXEL_MODE_HIGH_QUALITY
-} hotpixel_mode_t;
+};
 
 
-typedef struct camera2_hotpixel_ctl {
-       hotpixel_mode_t mode;
-} camera2_hotpixel_ctl_t;
+struct camera2_hotpixel_ctl {
+       enum hotpixel_mode      mode;
+};
 
-typedef struct camera2_hotpixel_dm {
-       hotpixel_mode_t mode;
-} camera2_hotpixel_dm_t;
+struct camera2_hotpixel_dm {
+       enum hotpixel_mode      mode;
+};
 
 
 
 /* android.demosaic */
 
-typedef enum demosaic_mode
-{
-       DEMOSAIC_MODE_OFF,
+enum demosaic_mode {
+       DEMOSAIC_MODE_OFF = 1,
        DEMOSAIC_MODE_FAST,
        DEMOSAIC_MODE_HIGH_QUALITY
-} demosaic_mode_t;
+};
 
-typedef struct camera2_demosaic_ctl {
-       demosaic_mode_t mode;
-} camera2_demosaic_ctl_t;
+struct camera2_demosaic_ctl {
+       enum demosaic_mode      mode;
+};
 
-typedef struct camera2_demosaic_dm {
-       demosaic_mode_t mode;
-} camera2_demosaic_dm_t;
+struct camera2_demosaic_dm {
+       enum demosaic_mode      mode;
+};
 
 
 
 /* android.noiseReduction */
 
-typedef enum noise_mode
-{
-       NOISEREDUCTION_MODE_OFF,
+enum noise_mode {
+       NOISEREDUCTION_MODE_OFF = 1,
        NOISEREDUCTION_MODE_FAST,
        NOISEREDUCTION_MODE_HIGH_QUALITY
-} noise_mode_t;
+};
 
-typedef struct camera2_noisereduction_ctl {
-       noise_mode_t    mode;
+struct camera2_noisereduction_ctl {
+       enum noise_mode mode;
        uint8_t                 strength;
-} camera2_noisereduction_ctl_t;
+};
 
-typedef struct camera2_noisereduction_dm {
-       noise_mode_t    mode;
+struct camera2_noisereduction_dm {
+       enum noise_mode mode;
        uint8_t                 strength;
-} camera2_noisereduction_dm_t;
+};
 
 
 
 /* android.shading */
 
-typedef enum shading_mode
-{
-       SHADING_MODE_OFF,
+enum shading_mode {
+       SHADING_MODE_OFF = 1,
        SHADING_MODE_FAST,
        SHADING_MODE_HIGH_QUALITY
-} shading_mode_t;
+};
 
-typedef struct camera2_shading_ctl {
-       shading_mode_t  mode;
-} camera2_shading_ctl_t;
+struct camera2_shading_ctl {
+       enum shading_mode       mode;
+};
 
-typedef struct camera2_shading_dm {
-       shading_mode_t  mode;
-} camera2_shading_dm_t;
+struct camera2_shading_dm {
+       enum shading_mode       mode;
+};
 
 
 
 /* android.geometric */
 
-typedef enum geometric_mode
-{
-       GEOMETRIC_MODE_OFF,
+enum geometric_mode {
+       GEOMETRIC_MODE_OFF = 1,
        GEOMETRIC_MODE_FAST,
        GEOMETRIC_MODE_HIGH_QUALITY
-} geometric_mode_t;
+};
 
-typedef struct camera2_geometric_ctl {
-       geometric_mode_t        mode;
-} camera2_geometric_ctl_t;
+struct camera2_geometric_ctl {
+       enum geometric_mode     mode;
+};
 
-typedef struct camera2_geometric_dm {
-       geometric_mode_t        mode;
-} camera2_geometric_dm_t;
+struct camera2_geometric_dm {
+       enum geometric_mode     mode;
+};
 
 
 
 /* android.colorCorrection */
 
-typedef enum colorcorrection_mode
-{
-       COLORCORRECTION_MODE_TRANSFORM_MATRIX,
+enum colorcorrection_mode {
+       COLORCORRECTION_MODE_TRANSFORM_MATRIX = 1,
        COLORCORRECTION_MODE_FAST,
-       COLORCORRECTION_MODE_HIGH_QUALITY, 
-       COLORCORRECTION_MODE_EFFECT_MONO, 
+       COLORCORRECTION_MODE_HIGH_QUALITY,
+       COLORCORRECTION_MODE_EFFECT_MONO,
        COLORCORRECTION_MODE_EFFECT_NEGATIVE,
        COLORCORRECTION_MODE_EFFECT_SOLARIZE,
        COLORCORRECTION_MODE_EFFECT_SEPIA,
@@ -238,86 +304,110 @@ typedef enum colorcorrection_mode
        COLORCORRECTION_MODE_EFFECT_WHITEBOARD,
        COLORCORRECTION_MODE_EFFECT_BLACKBOARD,
        COLORCORRECTION_MODE_EFFECT_AQUA
-} colorcorrection_mode_t;
+};
+
 
+struct camera2_colorcorrection_ctl {
+       enum colorcorrection_mode       mode;
+       float                           transform[9];
+};
 
-typedef struct camera2_colorcorrection_ctl {
-       colorcorrection_mode_t  mode;
-       float                                   transform[9];
-} camera2_colorcorrection_ctl_t;
+struct camera2_colorcorrection_dm {
+       enum colorcorrection_mode       mode;
+       float                           transform[9];
+};
 
-typedef struct camera2_colorcorrection_dm {
-       colorcorrection_mode_t  mode;
-       float                                   transform[9];
-} camera2_colorcorrection_dm_t;
+struct camera2_colorcorrection_sm {
+       uint8_t         availableModes[CAMERA2_MAX_AVAILABLE_MODE];
+       /*assuming 10 supported modes*/
+};
 
 
 
 /* android.tonemap */
 
-typedef enum tonemap_mode
-{
-       TONEMAP_MODE_CONTRAST_CURVE,
+enum tonemap_mode {
+       TONEMAP_MODE_CONTRAST_CURVE = 1,
        TONEMAP_MODE_FAST,
        TONEMAP_MODE_HIGH_QUALITY
-} tonemap_mode_t;
-
-typedef struct camera2_tonemap_ctl {
-       tonemap_mode_t  mode;
-       float                   curveRed[32]; // assuming maxCurvePoints = 32
-       float                   curveGreen[32];
-       float                   curveBlue[32];
-} camera2_tonemap_ctl_t;
-
-typedef struct camera2_tonemap_dm {
-       tonemap_mode_t  mode;
-       float                   curveRed[32]; // assuming maxCurvePoints = 32
-       float                   curveGreen[32];
-       float                   curveBlue[32];
-} camera2_tonemap_dm_t;
-
-
+};
+
+struct camera2_tonemap_ctl {
+       enum tonemap_mode               mode;
+       /* assuming maxCurvePoints = 64 */
+       float                           curveRed[64];
+       float                           curveGreen[64];
+       float                           curveBlue[64];
+};
+
+struct camera2_tonemap_dm {
+       enum tonemap_mode               mode;
+       /* assuming maxCurvePoints = 64 */
+       float                           curveRed[64];
+       float                           curveGreen[64];
+       float                           curveBlue[64];
+};
+
+struct camera2_tonemap_sm {
+       uint32_t        maxCurvePoints;
+};
 
 /* android.edge */
 
-typedef enum edge_mode
-{
-       EDGE_MODE_OFF,
+enum edge_mode {
+       EDGE_MODE_OFF = 1,
        EDGE_MODE_FAST,
        EDGE_MODE_HIGH_QUALITY
-} edge_mode_t;
+};
 
-typedef struct camera2_edge_ctl {
-       edge_mode_t     mode;
+struct camera2_edge_ctl {
+       enum edge_mode          mode;
        uint8_t                 strength;
-} camera2_edge_ctl_t;
+};
 
-typedef struct camera2_edge_dm {
-       edge_mode_t     mode;
+struct camera2_edge_dm {
+       enum edge_mode          mode;
        uint8_t                 strength;
-} camera2_edge_dm_t;
+};
 
 
 
 /* android.scaler */
 
-typedef struct camera2_scaler_ctl {
+enum scaler_availableformats {
+       SCALER_FORMAT_BAYER_RAW,
+       SCALER_FORMAT_YV12,
+       SCALER_FORMAT_NV21,
+       SCALER_FORMAT_JPEG,
+       SCALER_FORMAT_UNKNOWN
+};
+
+struct camera2_scaler_ctl {
        uint32_t        cropRegion[3];
        uint32_t        rotation;
-} camera2_scaler_ctl_t;
+};
 
-typedef struct camera2_scaler_dm {
+struct camera2_scaler_dm {
        uint32_t        size[2];
        uint8_t         format;
        uint32_t        cropRegion[3];
        uint32_t        rotation;
-} camera2_scaler_dm_t;
+};
+
+struct camera2_scaler_sm {
+       enum scaler_availableformats availableFormats[4];
+       /*assuming # of availableFormats = 4*/
+       uint32_t        availableSizesPerFormat[4];
+       uint32_t        availableSizes[4][8][2];
+       /*assuming availableSizesPerFormat=8*/
+       uint64_t        availableMinFrameDurations[4][8];
+       float           maxDigitalZoom;
+};
 
 
 
 /* android.jpeg */
-
-typedef struct camera2_jpeg_ctl {
+struct camera2_jpeg_ctl {
        uint8_t         quality;
        uint32_t        thumbnailSize[2];
        uint8_t         thumbnailQuality;
@@ -325,9 +415,9 @@ typedef struct camera2_jpeg_ctl {
        uint8_t         gpsProcessingMethod;
        uint64_t        gpsTimestamp;
        uint32_t        orientation;
-} camera2_jpeg_ctl_t;
+};
 
-typedef struct camera2_jpeg_dm {
+struct camera2_jpeg_dm {
        uint8_t         quality;
        uint32_t        thumbnailSize[2];
        uint8_t         thumbnailQuality;
@@ -335,57 +425,65 @@ typedef struct camera2_jpeg_dm {
        uint8_t         gpsProcessingMethod;
        uint64_t        gpsTimestamp;
        uint32_t        orientation;
-} camera2_jpeg_dm_t;
+};
+
+struct camera2_jpeg_sm {
+       uint32_t        availableThumbnailSizes[2][8];
+       /*assuming supported size=8*/
+};
 
 
 
 /* android.statistics */
 
-typedef enum facedetect_mode
-{
-       FACEDETECT_MODE_OFF,
+enum facedetect_mode {
+       FACEDETECT_MODE_OFF = 1,
        FACEDETECT_MODE_SIMPLE,
        FACEDETECT_MODE_FULL
-} facedetect_mode_t;
+};
 
-typedef enum histogram_mode
-{
-       HISTOGRAM_MODE_OFF,
+enum histogram_mode {
+       HISTOGRAM_MODE_OFF = 1,
        HISTOGRAM_MODE_ON
-} histogram_mode_t;
+};
 
-typedef enum sharpnessmap_mode
-{
-       SHARPNESSMAP_MODE_OFF,
+enum sharpnessmap_mode {
+       SHARPNESSMAP_MODE_OFF = 1,
        SHARPNESSMAP_MODE_ON
-} sharpnessmap_mode_t;
+};
 
-typedef struct camera2_stats_ctl {
-       facedetect_mode_t       faceDetectMode;
-       histogram_mode_t        histogramMode;
-       sharpnessmap_mode_t     sharpnessMapMode;
-} camera2_stats_ctl_t;
+struct camera2_stats_ctl {
+       enum facedetect_mode    faceDetectMode;
+       enum histogram_mode     histogramMode;
+       enum sharpnessmap_mode  sharpnessMapMode;
+};
 
 /* REMARKS : FD results are not included */
-typedef struct camera2_stats_dm {
-       facedetect_mode_t       faceDetectMode;
-       // faceRetangles
-       // faceScores
-       // faceLandmarks
-       // faceIds
-       histogram_mode_t        histogramMode;
-       // histogram
-       sharpnessmap_mode_t     sharpnessMapMode;
-       // sharpnessMap
-} camera2_stats_dm_t;
-
-
+struct camera2_stats_dm {
+       enum facedetect_mode    faceDetectMode;
+       /*faceRectangles
+       faceScores
+       faceLandmarks
+       faceIds*/
+       enum histogram_mode             histogramMode;
+       /*histogram*/
+       enum sharpnessmap_mode  sharpnessMapMode;
+       /*sharpnessMap*/
+};
+
+struct camera2_statistics_sm {
+       uint8_t         availableFaceDetectModes[CAMERA2_MAX_AVAILABLE_MODE];
+       /*assuming supported modes = 3*/
+       uint32_t        maxFaceCount;
+       uint32_t        histogramBucketCount;
+       uint32_t        maxHistogramCount;
+       uint32_t        sharpnessMapSize[2];
+       uint32_t        maxSharpnessMapValue;
+};
 
 /* android.control */
-
-typedef enum aa_mode
-{
-       AA_MODE_OFF,
+enum aa_mode {
+       AA_MODE_OFF = 1,
        AA_MODE_AUTO,
        AA_MODE_SCENE_MODE_FACE_PRIORITY,
        AA_MODE_SCENE_MODE_ACTION,
@@ -403,28 +501,25 @@ typedef enum aa_mode
        AA_MODE_SCENE_MODE_PARTY,
        AA_MODE_SCENE_MODE_CANDLELIGHT,
        AA_MODE_SCENE_MODE_BARCODE
-} aa_mode_t;
+};
 
-typedef enum aa_aemode
-{
-       AA_AEMODE_OFF,
+enum aa_aemode {
+       AA_AEMODE_OFF = 1,
        AA_AEMODE_ON,
        AA_AEMODE_ON_AUTO_FLASH,
        AA_AEMODE_ON_ALWAYS_FLASH,
        AA_AEMODE_ON_AUTO_FLASH_REDEYE
-} aa_aemode_t;
+};
 
-typedef enum aa_ae_antibanding_mode
-{
-       AA_AE_ANTIBANDING_OFF,
+enum aa_ae_antibanding_mode {
+       AA_AE_ANTIBANDING_OFF = 1,
        AA_AE_ANTIBANDING_50HZ,
        AA_AE_ANTIBANDING_60HZ,
        AA_AE_ANTIBANDING_AUTO
-} aa_ae_antibanding_mode_t;
+};
 
-typedef enum aa_awbmode
-{
-       AA_AWBMODE_OFF,
+enum aa_awbmode {
+       AA_AWBMODE_OFF = 1,
        AA_AWBMODE_WB_AUTO,
        AA_AWBMODE_WB_INCANDESCENT,
        AA_AWBMODE_WB_FLUORESCENT,
@@ -433,298 +528,244 @@ typedef enum aa_awbmode
        AA_AWBMODE_WB_CLOUDY_DAYLIGHT,
        AA_AWBMODE_WB_TWILIGHT,
        AA_AWBMODE_WB_SHADE
-} aa_awbmode_t;
+};
 
-typedef enum aa_afmode
-{
-       AA_AFMODE_OFF,
+enum aa_afmode {
+       AA_AFMODE_OFF = 1,
        AA_AFMODE_FOCUS_MODE_AUTO,
        AA_AFMODE_FOCUS_MODE_MACRO,
        AA_AFMODE_FOCUS_MODE_CONTINUOUS_VIDEO,
        AA_AFMODE_FOCUS_MODE_CONTINUOUS_PICTURE
-} aa_afmode_t;
+};
 
-typedef enum aa_afstate
-{
-       AA_AFSTATE_INACTIVE,
+enum aa_afstate {
+       AA_AFSTATE_INACTIVE = 1,
        AA_AFSTATE_PASSIVE_SCAN,
        AA_AFSTATE_ACTIVE_SCAN,
        AA_AFSTATE_AF_ACQUIRED_FOCUS,
        AA_AFSTATE_AF_FAILED_FOCUS
-} aa_afstate_t;
-
-typedef struct camera2_aa_ctl {
-       aa_mode_t                                       mode;
-       aa_aemode_t                                     aeMode;
-       uint32_t                                        aeRegions[5]; // 5 per region(x1,y1,x2,y2,weight). currently assuming 1 region.
-       int32_t                                         aeExpCompensation;
-       uint32_t                                        aeTargetFpsRange[2];
-       aa_ae_antibanding_mode_t        aeAntibandingMode;
-       aa_awbmode_t                            awbMode;
-       uint32_t                                        awbRegions[5]; // 5 per region(x1,y1,x2,y2,weight). currently assuming 1 region.
-       aa_afmode_t                             afMode;
-       uint32_t                                        afRegions[5]; // 5 per region(x1,y1,x2,y2,weight). currently assuming 1 region.
-       uint8_t                                         afTrigger;
-       uint8_t                                         videoStabilizationMode;
-} camera2_aa_ctl_t;
-
-typedef struct camera2_aa_dm {
-       aa_mode_t                                       mode;
-       aa_aemode_t                                     aeMode; // needs check
-       uint32_t                                        aeRegions[5]; // 5 per region(x1,y1,x2,y2,weight). currently assuming 1 region.
-       int32_t                                         aeExpCompensation; // needs check
-       aa_awbmode_t                            awbMode;
-       uint32_t                                        awbRegions[5]; // 5 per region(x1,y1,x2,y2,weight). currently assuming 1 region.
-       aa_afmode_t                             afMode;
-       uint32_t                                        afRegions[5]; // 5 per region(x1,y1,x2,y2,weight). currently assuming 1 region.
-       uint8_t                                         afTrigger;
-       aa_afstate_t                            afState;
-       uint8_t                                         videoStabilizationMode;
-} camera2_aa_dm_t;
-
-
-
-       
-// sizeof(camera2_ctl) = ?
-typedef struct camera2_ctl {
-       camera2_request_ctl_t                   request;
-       camera2_lens_ctl_t                              lens;
-       camera2_sensor_ctl_t                    sensor;
-       camera2_flash_ctl_t                             flash;
-       camera2_hotpixel_ctl_t                  hotpixel;
-       camera2_demosaic_ctl_t                  demosaic;\r
-       camera2_noisereduction_ctl_t    noise;
-       camera2_shading_ctl_t                   shading;
-       camera2_geometric_ctl_t                 geometric;
-       camera2_colorcorrection_ctl_t   color;
-       camera2_tonemap_ctl_t                   tonemap;
-       camera2_edge_ctl_t                              edge;
-       camera2_scaler_ctl_t                    scaler;
-       camera2_jpeg_ctl_t                              jpeg;
-       camera2_stats_ctl_t                             stats;
-       camera2_aa_ctl_t                                aa;
-} camera2_ctl_t;
-
-// sizeof(camera2_dm) = ?
-typedef struct camera2_dm {
-       camera2_request_dm_t                    request;
-       camera2_lens_dm_t                               lens;
-       camera2_sensor_dm_t                             sensor;
-       camera2_flash_dm_t                              flash;
-       camera2_hotpixel_dm_t                   hotpixel;
-       camera2_demosaic_dm_t                   demosaic;\r
-       camera2_noisereduction_dm_t             noise;
-       camera2_shading_dm_t                    shading;
-       camera2_geometric_dm_t                  geometric;
-       camera2_colorcorrection_dm_t    color;
-       camera2_tonemap_dm_t                    tonemap;
-       camera2_edge_dm_t                               edge;
-       camera2_scaler_dm_t                             scaler;
-       camera2_jpeg_dm_t                               jpeg;
-       camera2_stats_dm_t                              stats;
-       camera2_aa_dm_t                                 aa;
-} camera2_dm_t;
-
-typedef struct camera2_vs {
-    /** \brief
-            Set sensor, lens, flash control for next frame.
-        \remarks
-            This flag can be combined.
-                [0 bit] sensor
-                [1 bit] lens
-                [2 bit] flash
-    */
-    uint32_t updateFlag;
-
-    camera2_lens_ctl_t                         lens;
-    camera2_sensor_ctl_t                       sensor;
-    camera2_flash_ctl_t                                flash;
-} camera2_vs_t;
-
-typedef struct camera2_shot {
-       camera2_ctl_t   ctl;
-       camera2_dm_t    dm;
-       /*vendor specific area*/
-    camera2_vs_t       vender;
-       uint32_t                magicNumber;
-} camera2_shot_t;
-
-/*
- *   static metadata 
- */
-
-
-/* android.lens */
-
-typedef enum lens_facing
-{
-       LENS_FACING_FRONT,
-       LENS_FACING_BACK
-} lens_facing_t;
-
-typedef struct camera2_lens_sm {
-       float   minimumFocusDistance;
-       float   availableFocalLength[2];
-       float   availableApertures;  // assuming 1 aperture
-       float   availableFilterDensities; // assuming 1 ND filter value
-       uint8_t availableOpticalStabilization; // assuming 1
-       float   shadingMap[3][40][30];
-       float   geometricCorrectionMap[2][3][40][30];
-       lens_facing_t facing;
-       float   position[2];
-} camera2_lens_sm_t;
-
-/* android.sensor */
-
-typedef enum sensor_colorfilterarrangement
-{
-       SENSOR_COLORFILTERARRANGEMENT_RGGB,
-       SENSOR_COLORFILTERARRANGEMENT_GRBG,
-       SENSOR_COLORFILTERARRANGEMENT_GBRG,
-       SENSOR_COLORFILTERARRANGEMENT_BGGR,
-       SENSOR_COLORFILTERARRANGEMENT_RGB       
-} sensor_colorfilterarrangement_t;
-
-typedef enum sensor_ref_illuminant
-{
-       SENSOR_ILLUMINANT_DAYLIGHT = 1,
-       SENSOR_ILLUMINANT_FLUORESCENT = 2,
-       SENSOR_ILLUMINANT_TUNGSTEN = 3,
-       SENSOR_ILLUMINANT_FLASH = 4,
-       SENSOR_ILLUMINANT_FINE_WEATHER = 9,
-       SENSOR_ILLUMINANT_CLOUDY_WEATHER = 10,
-       SENSOR_ILLUMINANT_SHADE = 11,
-       SENSOR_ILLUMINANT_DAYLIGHT_FLUORESCENT = 12,
-       SENSOR_ILLUMINANT_DAY_WHITE_FLUORESCENT = 13,
-       SENSOR_ILLUMINANT_COOL_WHITE_FLUORESCENT = 14,
-       SENSOR_ILLUMINANT_WHITE_FLUORESCENT = 15,
-       SENSOR_ILLUMINANT_STANDARD_A = 17,
-       SENSOR_ILLUMINANT_STANDARD_B = 18,
-       SENSOR_ILLUMINANT_STANDARD_C = 19,
-       SENSOR_ILLUMINANT_D55 = 20,
-       SENSOR_ILLUMINANT_D65 = 21,
-       SENSOR_ILLUMINANT_D75 = 22,
-       SENSOR_ILLUMINANT_D50 = 23,
-       SENSOR_ILLUMINANT_ISO_STUDIO_TUNGSTEN = 24
-} sensor_ref_illuminant_t;
-
-typedef struct camera2_sensor_sm {
-       uint32_t        exposureTimeRange[2];
-       uint32_t        maxFrameDuration;
-       uint32_t        sensitivityRange[2];
-       sensor_colorfilterarrangement_t colorFilterArrangement;
-       uint32_t        pixelArraySize[2];
-       uint32_t        activeArraySize[4];
-       uint32_t        whiteLevel;
-       uint32_t        blackLevelPattern[4];
-       rational_t      colorTransform1[9];
-       rational_t      colorTransform2[9];
-       sensor_ref_illuminant_t referenceIlluminant1;
-       sensor_ref_illuminant_t referenceIlluminant2;
-       rational_t      forwardMatrix1[9];
-       rational_t      forwardMatrix2[9];
-       rational_t      calibrationTransform1[9];       
-       rational_t      calibrationTransform2[9];
-       rational_t      baseGainFactor;
-       uint32_t        maxAnalogSensitivity;
-       float   noiseModelCoefficients[2];
-       uint32_t        orientation;
-} camera2_sensor_sm_t;
-
-
-
-/* android.flash */
-
-typedef struct camera2_flash_sm {
-       uint8_t         available;
-       uint64_t        chargeDuration;
-} camera2_flash_sm_t;
-
-
-
-/* android.colorCorrection */
-
-typedef struct camera2_colorcorrection_sm {
-       colorcorrection_mode_t  availableModes[10]; // assuming 10 supported modes
-} camera2_colorcorrection_sm_t;
-
-
-
-/* android.tonemap */
-
-typedef struct camera2_tonemap_sm {
-       uint32_t        maxCurvePoints;
-} camera2_tonemap_sm_t;
-
-
-
-/* android.scaler */
-
-typedef enum scaler_availableformats {
-       SCALER_FORMAT_BAYER_RAW,
-       SCALER_FORMAT_YV12,
-       SCALER_FORMAT_NV21,
-       SCALER_FORMAT_JPEG,
-       SCALER_FORMAT_UNKNOWN
-} scaler_availableformats_t;
-
-typedef struct camera2_scaler_sm {
-       scaler_availableformats_t availableFormats[4]; // assuming \r
-                                                                                                  // # of availableFormats = 4
-       uint32_t        availableSizesPerFormat[4];  
-       uint32_t        availableSizes[4][8][2]; // assuning availableSizesPerFormat=8
-       uint64_t        availableMinFrameDurations[4][8];
-       float           maxDigitalZoom;
-} camera2_scaler_sm_t;
-
-
-
-/* android.jpeg */
-
-typedef struct camera2_jpeg_sm {
-       uint32_t        availableThumbnailSizes[2][8]; // assuming supported size=8
-} camera2_jpeg_sm_t;
-
-
-
-/* android.statistics */
-
-typedef struct camera2_statistics_sm {
-       uint8_t         availableFaceDetectModes[3]; // assuming supported modes = 3;
-       uint32_t        maxFaceCount;
-       uint32_t        histogramBucketCount;
-       uint32_t        maxHistogramCount;
-       uint32_t        sharpnessMapSize[2];
-       uint32_t        maxSharpnessMapValue;
-} camera2_statistics_sm_t;
-
-
-
-/* android.control */
-
-typedef struct camera2_aa_sm {
-       uint8_t         availableModes[10]; // assuming # of available scene modes = 10
+};
+
+struct camera2_aa_ctl {
+       enum aa_mode                    mode;
+       enum aa_aemode                  aeMode;
+       uint32_t                        aeRegions[5];
+       /*5 per region(x1,y1,x2,y2,weight). currently assuming 1 region.*/
+       int32_t                         aeExpCompensation;
+       uint32_t                        aeTargetFpsRange[2];
+       enum aa_ae_antibanding_mode     aeAntibandingMode;
+       enum aa_awbmode                 awbMode;
+       uint32_t                        awbRegions[5];
+       /*5 per region(x1,y1,x2,y2,weight). currently assuming 1 region.*/
+       enum aa_afmode                  afMode;
+       uint32_t                        afRegions[5];
+       /*5 per region(x1,y1,x2,y2,weight). currently assuming 1 region.*/
+       uint8_t                         afTrigger;
+       uint8_t                         videoStabilizationMode;
+};
+
+struct camera2_aa_dm {
+       enum aa_mode                            mode;
+       enum aa_aemode                          aeMode;
+       /*needs check*/
+       uint32_t                                aeRegions[5];
+       /*5 per region(x1,y1,x2,y2,weight). currently assuming 1 region.*/
+       int32_t                                 aeExpCompensation;
+       /*needs check*/
+       enum aa_awbmode                         awbMode;
+       uint32_t                                awbRegions[5];
+       /*5 per region(x1,y1,x2,y2,weight). currently assuming 1 region.*/
+       enum aa_afmode                          afMode;
+       uint32_t                                afRegions[5];
+       /*5 per region(x1,y1,x2,y2,weight). currently assuming 1 region*/
+       uint8_t                                 afTrigger;
+       enum aa_afstate                         afState;
+       uint8_t                                 videoStabilizationMode;
+};
+
+struct camera2_aa_sm {
+       uint8_t         availableModes[CAMERA2_MAX_AVAILABLE_MODE];
+       /*assuming # of available scene modes = 10*/
        uint32_t        maxRegions;
-       uint8_t         aeAvailableModes[8]; // assuming # of available ae modes = 8
-       rational_t      aeCompensationStep;
+       uint8_t         aeAvailableModes[CAMERA2_MAX_AVAILABLE_MODE];
+       /*assuming # of available ae modes = 8*/
+       struct rational aeCompensationStep;
        int32_t         aeCompensationRange[2];
-       uint32_t        aeAvailableTargetFpsRanges[2][8];
-       uint8_t         aeAvailableAntibandingModes[4];
-       uint8_t         awbAvailableModes[10]; // assuming # of awbAvailableModes = 10
-       uint8_t         afAvailableModes[4]; // assuming # of afAvailableModes = 4
-} camera2_aa_sm_t;
-
-typedef struct camera2_static_metadata {
-       camera2_lens_sm_t                               lens;
-       camera2_sensor_sm_t                     sensor;
-       camera2_flash_sm_t                              flash;
-       camera2_colorcorrection_sm_t    color;
-       camera2_tonemap_sm_t                    tonemap;
-       camera2_scaler_sm_t                             scaler;
-       camera2_jpeg_sm_t                               jpeg;
-       camera2_statistics_sm_t                 statistics;
-       camera2_aa_sm_t                                 aa;
-} camera2_static_metadata_t;
+       uint32_t aeAvailableTargetFpsRanges[CAMERA2_MAX_AVAILABLE_MODE][2];
+       uint8_t         aeAvailableAntibandingModes[CAMERA2_MAX_AVAILABLE_MODE];
+       uint8_t         awbAvailableModes[CAMERA2_MAX_AVAILABLE_MODE];
+       /*assuming # of awbAvailableModes = 10*/
+       uint8_t         afAvailableModes[CAMERA2_MAX_AVAILABLE_MODE];
+       /*assuming # of afAvailableModes = 4*/
+};
+
+struct camera2_lens_usm {
+       /** Frame delay between sending command and applying frame data */
+       uint32_t        focusDistanceFrameDelay;
+};
+
+struct camera2_sensor_usm {
+       /** Frame delay between sending command and applying frame data */
+       uint32_t        exposureTimeFrameDelay;
+       uint32_t        frameDurationFrameDelay;
+       uint32_t        sensitivityFrameDelay;
+};
+
+struct camera2_flash_usm {
+       /** Frame delay between sending command and applying frame data */
+       uint32_t        flashModeFrameDelay;
+       uint32_t        firingPowerFrameDelay;
+       uint64_t        firingTimeFrameDelay;
+};
+
+struct camera2_ctl {
+       struct camera2_request_ctl              request;
+       struct camera2_lens_ctl                 lens;
+       struct camera2_sensor_ctl               sensor;
+       struct camera2_flash_ctl                flash;
+       struct camera2_hotpixel_ctl             hotpixel;
+       struct camera2_demosaic_ctl             demosaic;
+       struct camera2_noisereduction_ctl       noise;
+       struct camera2_shading_ctl              shading;
+       struct camera2_geometric_ctl            geometric;
+       struct camera2_colorcorrection_ctl      color;
+       struct camera2_tonemap_ctl              tonemap;
+       struct camera2_edge_ctl                 edge;
+       struct camera2_scaler_ctl               scaler;
+       struct camera2_jpeg_ctl                 jpeg;
+       struct camera2_stats_ctl                stats;
+       struct camera2_aa_ctl                   aa;
+};
+
+struct camera2_dm {
+       struct camera2_request_dm               request;
+       struct camera2_lens_dm                  lens;
+       struct camera2_sensor_dm                sensor;
+       struct camera2_flash_dm                 flash;
+       struct camera2_hotpixel_dm              hotpixel;
+       struct camera2_demosaic_dm              demosaic;
+       struct camera2_noisereduction_dm        noise;
+       struct camera2_shading_dm               shading;
+       struct camera2_geometric_dm             geometric;
+       struct camera2_colorcorrection_dm       color;
+       struct camera2_tonemap_dm               tonemap;
+       struct camera2_edge_dm                  edge;
+       struct camera2_scaler_dm                scaler;
+       struct camera2_jpeg_dm                  jpeg;
+       struct camera2_stats_dm                 stats;
+       struct camera2_aa_dm                    aa;
+};
+
+struct camera2_sm {
+       struct camera2_lens_sm                  lens;
+       struct camera2_sensor_sm                sensor;
+       struct camera2_flash_sm                 flash;
+       struct camera2_colorcorrection_sm       color;
+       struct camera2_tonemap_sm               tonemap;
+       struct camera2_scaler_sm                scaler;
+       struct camera2_jpeg_sm                  jpeg;
+       struct camera2_statistics_sm            statistics;
+       struct camera2_aa_sm                    aa;
+
+       /** User-defined(ispfw specific) static metadata. */
+       struct camera2_lens_usm                 lensUd;
+       struct camera2_sensor_usm               sensorUd;
+       struct camera2_flash_usm                flashUd;
+};
+
+/** \brief
+       User-defined control for lens.
+*/
+struct camera2_lens_uctl {
+       /** It depends by af algorithm(normally 255 or 1023) */
+       uint32_t        maxPos;
+       /** Some actuator support slew rate control. */
+       uint32_t        slewRate;
+};
+
+/** \brief
+       User-defined metadata for lens.
+*/
+struct camera2_lens_udm {
+       /** It depends by af algorithm(normally 255 or 1023) */
+       uint32_t        maxPos;
+       /** Some actuator support slew rate control. */
+       uint32_t        slewRate;
+};
+
+/** \brief
+       User-defined control for sensor.
+*/
+struct camera2_sensor_uctl {
+       /** Dynamic frame duration.
+       This feature is decided to max. value between
+       'sensor.exposureTime'+alpha and 'sensor.frameDuration'.
+       */
+       uint64_t        dynamicFrameDuration;
+};
+
+/** \brief
+       Structure for SET_CAM_CONTROL command.
+*/
+struct camera2_uctl {
+       /**     \brief
+               Set sensor, lens, flash control for next frame.
+               \remarks
+               This flag can be combined.
+               [0 bit] sensor
+               [1 bit] lens
+               [2 bit] flash
+       */
+       uint32_t                        uUpdateBitMap;
+
+       /** For debugging */
+       uint32_t uFrameNumber;
+
+       struct camera2_lens_ctl         lens;
+       /** ispfw specific control(user-defined) of lens. */
+       struct camera2_lens_uctl        lensUd;
+
+       struct camera2_sensor_ctl       sensor;
+       /** ispfw specific control(user-defined) of sensor. */
+       struct camera2_sensor_uctl      sensorUd;
+
+       struct camera2_flash_ctl        flash;
+};
+
+struct camera2_udm {
+       struct camera2_lens_udm         lens;
+};
+
+struct camera2_shot {
+       /*google standard area*/
+       struct camera2_ctl      ctl;
+       struct camera2_dm       dm;
+       /*user defined area*/
+       struct camera2_uctl     uctl;
+       struct camera2_udm      udm;
+       /*magic : 23456789*/
+       uint32_t                magicNumber;
+};
+
+/** \brief
+       Structure for interfacing between HAL and driver.
+*/
+struct camera2_shot_ext {
+       /**     \brief
+               stream control
+               \remarks
+               [0] disable stream out
+               [1] enable stream out
+       */
+       uint32_t                request_sensor;
+       uint32_t                request_scc;
+       uint32_t                request_scp;
+       struct camera2_shot     shot;
+};
+
+#define CAM_LENS_CMD           (0x1 << 0x0)
+#define CAM_SENSOR_CMD         (0x1 << 0x1)
+#define CAM_FLASH_CMD          (0x1 << 0x2)
 
 #endif