/// Destructor: releases the metadata converter owned by this manager.
/// All other members are plain data and need no explicit teardown.
RequestManager::~RequestManager()
{
    ALOGV("%s", __FUNCTION__);
    // delete on a null pointer is a guaranteed no-op in C++,
    // so no NULL guard is needed before deleting.
    delete m_metadataConverter;
    m_metadataConverter = NULL;  // defend against accidental use-after-free
}
m_IsAfModeUpdateRequired(false),
m_IsAfTriggerRequired(false),
m_IsAfLockRequired(false),
+ m_wideAspect(false),
m_afTriggerId(0),
m_halDevice(dev),
m_need_streamoff(0),
m_exynosVideoCSC = csc_init(cscMethod);
if (m_exynosVideoCSC == NULL)
ALOGE("ERR(%s): csc_init() fail", __FUNCTION__);
- csc_set_hw_property(m_exynosVideoCSC, CSC_HW_PROPERTY_FIXED_NODE, PREVIEW_GSC_NODE_NUM);
+ csc_set_hw_property(m_exynosVideoCSC, CSC_HW_PROPERTY_FIXED_NODE, VIDEO_GSC_NODE_NUM);
+
ALOGV("DEBUG(%s): END", __FUNCTION__);
}
csc_deinit(m_exynosVideoCSC);
m_exynosVideoCSC = NULL;
+
if (m_streamThreads[1] != NULL) {
while (!m_streamThreads[1]->IsTerminated())
{
m_mainThread = NULL;
}
- for(i = 0; i < m_camera_info.sensor.buffers; i++)
+ if (m_requestManager != NULL) {
+ delete m_requestManager;
+ m_requestManager = NULL;
+ }
+
+ if (m_BayerManager != NULL) {
+ delete m_BayerManager;
+ m_BayerManager = NULL;
+ }
+// for(i = 0; i < m_camera_info.sensor.buffers; i++)
+ for (i = 0; i < NUM_BAYER_BUFFERS; i++)
freeCameraMemory(&m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes);
for(i = 0; i < m_camera_info.capture.buffers; i++)
}
ALOGV("== stream_on :: capture");
- cam_int_streamon(&(m_camera_info.capture));
+ if (cam_int_streamon(&(m_camera_info.capture)) < 0) {
+ ALOGE("ERR(%s): capture stream on fail", __FUNCTION__);
+ } else {
+ m_camera_info.capture.status = true;
+ }
}
void ExynosCameraHWInterface2::StartISP()
allocCase = 2;
}
}
+ if ((width == 1920 && height == 1080) || (width == 1280 && height == 720)) {
+ m_wideAspect = true;
+ }
+ else {
+ m_wideAspect = false;
+ }
+ ALOGV("DEBUG(%s): m_wideAspect (%d)", __FUNCTION__, m_wideAspect);
+
if (allocCase == 0 || allocCase == 2) {
*stream_id = 0;
*stream_id = 1;
- m_streamThreads[1] = new StreamThread(this, *stream_id);
- AllocatedStream = (StreamThread*)(m_streamThreads[1].get());
+ if (!(m_streamThreads[*stream_id].get())) {
+ ALOGV("DEBUG(%s): stream 0 not exist", __FUNCTION__);
+ m_streamThreads[1] = new StreamThread(this, *stream_id);
+ allocCase = 0;
+ }
+ else {
+ if ((m_streamThreads[*stream_id].get())->m_activated == true) {
+ ALOGV("DEBUG(%s): stream 0 exists and activated.", __FUNCTION__);
+ allocCase = 1;
+ }
+ else {
+ ALOGV("DEBUG(%s): stream 0 exists and deactivated.", __FUNCTION__);
+ allocCase = 2;
+ }
+ }
+
+ AllocatedStream = (StreamThread*)(m_streamThreads[*stream_id].get());
fd = m_camera_info.capture.fd;
usleep(100000); // TODO : guarantee the codes below will be run after readyToRunInternal()
}
else if (stream_id == 1) {
targetStreamParms = &(m_streamThreads[1]->m_parameters);
+ // TODO : make clear stream off case
+ m_need_streamoff = 0;
+
+ if (m_camera_info.capture.status == false) {
+ /* capture */
+ m_camera_info.capture.buffers = 8;
+ cam_int_s_fmt(&(m_camera_info.capture));
+ cam_int_reqbufs(&(m_camera_info.capture));
+ for (i = 0; i < m_camera_info.capture.buffers; i++) {
+ ALOGV("DEBUG(%s): capture initial QBUF [%d]", __FUNCTION__, i);
+ cam_int_qbuf(&(m_camera_info.capture), i);
+ }
+
+ if (cam_int_streamon(&(m_camera_info.capture)) < 0) {
+ ALOGE("ERR(%s): capture stream on fail", __FUNCTION__);
+ } else {
+ m_camera_info.capture.status = true;
+ }
+ }
}
else if (stream_id == 2) {
+ m_need_streamoff = 0;
targetRecordParms = &(m_streamThreads[0]->m_recordParameters);
targetRecordParms->numSvcBuffers = num_buffers;
if (targetStreamParms->streamType == STREAM_TYPE_DIRECT) {
if(m_need_streamoff == 1) {
+ if (m_sensorThread != NULL) {
+ m_sensorThread->release();
+ /* TODO */
+ usleep(500000);
+ } else {
+ ALOGE("+++++++ sensor thread is NULL %d", __LINE__);
+ }
+
ALOGV("(%s): calling capture streamoff", __FUNCTION__);
- cam_int_streamoff(&(m_camera_info.capture));
+ if (cam_int_streamoff(&(m_camera_info.capture)) < 0) {
+ ALOGE("ERR(%s): capture stream off fail", __FUNCTION__);
+ } else {
+ m_camera_info.capture.status = false;
+ }
+
ALOGV("(%s): calling capture streamoff done", __FUNCTION__);
m_camera_info.capture.buffers = 0;
cam_int_reqbufs(&(m_camera_info.capture));
ALOGV("DEBUG(%s): capture calling reqbuf 0 done", __FUNCTION__);
- if (m_sensorThread != NULL) {
- m_sensorThread->release();
- usleep(500000);
- } else {
- ALOGE("+++++++ sensor thread is NULL %d", __LINE__);
- }
m_isIspStarted = false;
}
- cam_int_s_input(currentNode, m_camera_info.sensor_id);
- cam_int_s_fmt(currentNode);
- cam_int_reqbufs(currentNode);
-
if (m_need_streamoff == 1) {
m_camera_info.sensor.buffers = NUM_BAYER_BUFFERS;
m_camera_info.isp.buffers = m_camera_info.sensor.buffers;
m_camera_info.capture.buffers = 8;
+ /* isp */
+ cam_int_s_fmt(&(m_camera_info.isp));
cam_int_reqbufs(&(m_camera_info.isp));
+ /* sensor */
+ cam_int_s_fmt(&(m_camera_info.sensor));
cam_int_reqbufs(&(m_camera_info.sensor));
for (i = 0; i < 8; i++) {
cam_int_qbuf(&(m_camera_info.sensor), i);
}
+ /* capture */
+ cam_int_s_fmt(&(m_camera_info.capture));
cam_int_reqbufs(&(m_camera_info.capture));
- cam_int_streamon(&(m_camera_info.capture));
- cam_int_streamon(&(m_camera_info.sensor));
+ for (i = 0; i < m_camera_info.capture.buffers; i++) {
+ ALOGV("DEBUG(%s): capture initial QBUF [%d]", __FUNCTION__, i);
+ cam_int_qbuf(&(m_camera_info.capture), i);
+ }
+
+ }
+
+ cam_int_s_input(currentNode, m_camera_info.sensor_id);
+ cam_int_s_fmt(currentNode);
+ cam_int_reqbufs(currentNode);
- m_need_streamoff = 0;
- m_requestManager->SetInitialSkip(2);
- m_sensorThread->Start("SensorThread", PRIORITY_DEFAULT, 0);
- m_mainThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY);
- }
}
else if (targetStreamParms->streamType == STREAM_TYPE_INDIRECT) {
for(i = 0; i < currentNode->buffers; i++){
}
ALOGV("DEBUG(%s): calling streamon", __FUNCTION__);
- if (targetStreamParms->streamType == 0) {
+ if (targetStreamParms->streamType == STREAM_TYPE_DIRECT) {
ALOGD("%s(%d), stream id = %d", __FUNCTION__, __LINE__, stream_id);
cam_int_streamon(&(targetStreamParms->node));
}
+ if (m_need_streamoff == 1) {
+ if (cam_int_streamon(&(m_camera_info.capture)) < 0) {
+ ALOGE("ERR(%s): capture stream on fail", __FUNCTION__);
+ } else {
+ m_camera_info.capture.status = true;
+ }
+
+ cam_int_streamon(&(m_camera_info.sensor));
+ }
+
ALOGV("DEBUG(%s): calling streamon END", __FUNCTION__);
ALOGV("DEBUG(%s): END registerStreamBuffers", __FUNCTION__);
if(!m_isIspStarted) {
m_isIspStarted = true;
StartISP();
- m_need_streamoff = 1;
}
+ if (m_need_streamoff == 1) {
+ m_requestManager->SetInitialSkip(8);
+ m_sensorThread->Start("SensorThread", PRIORITY_DEFAULT, 0);
+ m_mainThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY);
+ }
+ m_need_streamoff = 1;
+
return 0;
}
}
targetStream->m_releasing = true;
- targetStream->release();
- while (targetStream->m_releasing)
- {
- ALOGD("stream thread release fail %d", __LINE__);
- usleep(2000);
- }
+ do {
+ ALOGD("stream thread release %d", __LINE__);
+ targetStream->release();
+ usleep(33000);
+ } while (targetStream->m_releasing);
targetStream->m_activated = false;
ALOGV("DEBUG(%s): DONE", __FUNCTION__);
return 0;
numOnHalEmpty = NUM_BAYER_BUFFERS;
}
+BayerBufManager::~BayerBufManager()
+{
+ ALOGV("%s", __FUNCTION__);
+}
+
int BayerBufManager::GetIndexForSensorEnqueue()
{
int ret = 0;
else {
m_requestManager->UpdateIspParameters(shot_ext, matchedFrameCnt, false);
}
+ if (m_wideAspect) {
+// shot_ext->setfile = ISS_SUB_SCENARIO_VIDEO;
+ shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 30;
+ shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
+ }
+ else {
+// shot_ext->setfile = ISS_SUB_SCENARIO_STILL;
+ }
if (triggered)
shot_ext->shot.ctl.aa.afTrigger = 1;
m_streamThreads[0]->SetSignal(SIGNAL_STREAM_DATA_COMING);
}
+ if (shot_ext->request_scc) {
+ m_streamThreads[1]->SetSignal(SIGNAL_STREAM_DATA_COMING);
+ }
ALOGV("(%s): SCP_CLOSING check sensor(%d) scc(%d) scp(%d) ", __FUNCTION__,
shot_ext->request_sensor, shot_ext->request_scc, shot_ext->request_scp);
shot_ext->request_sensor = 0;
}
- ALOGV("### Sensor Qbuf start(%d) SCP(%d) SCC(%d) DIS(%d)", index, shot_ext->request_scp, shot_ext->request_scc, shot_ext->dis_bypass);
+// ALOGD("### Sensor Qbuf start(%d) SCP(%d) SCC(%d) DIS(%d)", index, shot_ext->request_scp, shot_ext->request_scc, shot_ext->dis_bypass);
cam_int_qbuf(&(m_camera_info.sensor), index);
ALOGV("### Sensor QBUF done");
if (selfStreamParms->svcBufStatus[i] ==ON_DRIVER) cnt_to_dq++;
}
- m_scp_closing = true;
ALOGV("DEBUG(%s): calling stream(%d) streamoff (fd:%d)", __FUNCTION__,
selfThread->m_index, selfStreamParms->fd);
- cam_int_streamoff(&(selfStreamParms->node));
- ALOGV("DEBUG(%s): calling stream(%d) streamoff done", __FUNCTION__, selfThread->m_index);
- if (selfStreamParms->streamType == STREAM_TYPE_DIRECT) {
- ALOGV("DEBUG(%s): calling stream(%d) reqbuf 0 (fd:%d)", __FUNCTION__,
- selfThread->m_index, selfStreamParms->fd);
- currentNode->buffers = 0;
- cam_int_reqbufs(currentNode);
- ALOGV("DEBUG(%s): calling stream(%d) reqbuf 0 DONE(fd:%d)", __FUNCTION__,
- selfThread->m_index, selfStreamParms->fd);
+ if (cam_int_streamoff(&(selfStreamParms->node)) < 0 ){
+ ALOGE("ERR(%s): stream off fail", __FUNCTION__);
+ } else {
+ if (selfStreamParms->streamType == STREAM_TYPE_DIRECT) {
+ m_scp_closing = true;
+ } else {
+ m_camera_info.capture.status = false;
+ }
}
+ ALOGV("DEBUG(%s): calling stream(%d) streamoff done", __FUNCTION__, selfThread->m_index);
+ ALOGV("DEBUG(%s): calling stream(%d) reqbuf 0 (fd:%d)", __FUNCTION__,
+ selfThread->m_index, selfStreamParms->fd);
+ currentNode->buffers = 0;
+ cam_int_reqbufs(currentNode);
+ ALOGV("DEBUG(%s): calling stream(%d) reqbuf 0 DONE(fd:%d)", __FUNCTION__,
+ selfThread->m_index, selfStreamParms->fd);
}
- selfThread->m_releasing = false;
- ALOGD("m_releasing set false");
if (selfThread->m_index == 1 && m_resizeBuf.size.s != 0) {
freeCameraMemory(&m_resizeBuf, 1);
}
selfThread->m_isBufferInit = false;
selfThread->m_index = 255;
+ selfThread->m_releasing = false;
+
ALOGV("DEBUG(%s): processing SIGNAL_THREAD_RELEASE DONE", __FUNCTION__);
return;
ALOGD("DEBUG(%s): stream(%d) type(%d) DQBUF done index(%d)",__FUNCTION__,
selfThread->m_index, selfStreamParms->streamType, index);
- m_jpegEncodingFrameCnt = m_ispProcessingFrameCnt;
for (int i = 0; i < selfStreamParms->numSvcBuffers ; i++) {
if (selfStreamParms->svcBufStatus[selfStreamParms->svcBufIndex] == ON_HAL) {
m_resizeBuf = resizeBufInfo;
- res = selfStreamParms->streamOps->enqueue_buffer(selfStreamParms->streamOps,
- m_requestManager->GetTimestamp(m_jpegEncodingFrameCnt), &(selfStreamParms->svcBufHandle[selfStreamParms->svcBufIndex]));
+ res = selfStreamParms->streamOps->enqueue_buffer(selfStreamParms->streamOps, systemTime(), &(selfStreamParms->svcBufHandle[selfStreamParms->svcBufIndex]));
ALOGV("DEBUG(%s): stream(%d) enqueue_buffer index(%d) to svc done res(%d)",
__FUNCTION__, selfThread->m_index, selfStreamParms->svcBufIndex, res);
{
int i =0 ;
+ int ret = 0;
for (i=0;i<iMemoryNum;i++) {
if (buf->fd.extFd[i] != -1) {
if (buf->virt.extP[i] != (char *)MAP_FAILED) {
- ion_unmap(buf->virt.extP[i], buf->size.extS[i]);
+ ret = ion_unmap(buf->virt.extP[i], buf->size.extS[i]);
+ if (ret < 0)
+ ALOGE("ERR(%s)", __FUNCTION__);
}
ion_free(buf->fd.extFd[i]);
}
free(cam_device);
g_camera_vaild = false;
}
+ if (g_camera2[0] != NULL) {
+ delete static_cast<ExynosCamera2 *>(g_camera2[0]);
+ g_camera2[0] = NULL;
+ }
+
+ if (g_camera2[1] != NULL) {
+ delete static_cast<ExynosCamera2 *>(g_camera2[1]);
+ g_camera2[1] = NULL;
+ }
+
ALOGV("%s: EXIT", __FUNCTION__);
return 0;
}
/// Destructor: no owned resources to release; logs teardown for tracing.
MetadataConverter::~MetadataConverter()
{
    // Normal destruction is not an error condition — log at verbose
    // level (ALOGV) like the other destructors in this HAL, instead of
    // polluting the error log with an ALOGE on every close.
    ALOGV("DEBUG(%s): destroy", __FUNCTION__);
    return;
}
- case ANDROID_HOT_PIXEL_MODE:
- if (NO_ERROR != CheckEntryTypeMismatch(&curr_entry, TYPE_BYTE, 1))
- break;
- dst->ctl.hotpixel.mode = (enum processing_mode)curr_entry.data.u8[0];
- break;
-
-
-
- case ANDROID_DEMOSAIC_MODE:
- if (NO_ERROR != CheckEntryTypeMismatch(&curr_entry, TYPE_BYTE, 1))
- break;
- dst->ctl.demosaic.mode = (enum processing_mode)curr_entry.data.u8[0];
- break;
-
-
-
- case ANDROID_SHADING_MODE:
- if (NO_ERROR != CheckEntryTypeMismatch(&curr_entry, TYPE_BYTE, 1))
- break;
- dst->ctl.shading.mode = (enum processing_mode)curr_entry.data.u8[0];
- break;
-
-
-
- case ANDROID_GEOMETRIC_MODE:
- if (NO_ERROR != CheckEntryTypeMismatch(&curr_entry, TYPE_BYTE, 1))
- break;
- dst->ctl.geometric.mode = (enum processing_mode)curr_entry.data.u8[0];
- break;
-
-
-
- case ANDROID_COLOR_MODE:
- if (NO_ERROR != CheckEntryTypeMismatch(&curr_entry, TYPE_BYTE, 1))
- break;
- dst->ctl.color.mode = (enum colorcorrection_mode)curr_entry.data.u8[0];
- break;
-
- case ANDROID_COLOR_TRANSFORM:
- if (NO_ERROR != CheckEntryTypeMismatch(&curr_entry, TYPE_FLOAT, 9))
- break;
- for (i=0 ; i<curr_entry.count ; i++)
- dst->ctl.color.transform[i] = curr_entry.data.f[i];
- break;
-
-
-
- case ANDROID_TONEMAP_MODE:
- if (NO_ERROR != CheckEntryTypeMismatch(&curr_entry, TYPE_BYTE, 1))
- break;
- dst->ctl.tonemap.mode = (enum tonemap_mode)curr_entry.data.u8[0];
- break;
-
- case ANDROID_TONEMAP_CURVE_RED:
- if (NO_ERROR != CheckEntryTypeMismatch(&curr_entry, TYPE_FLOAT, 32))
- break;
- for (i=0 ; i<curr_entry.count ; i++)
- dst->ctl.tonemap.curveRed[i] = curr_entry.data.f[i];
- break;
-
- case ANDROID_TONEMAP_CURVE_GREEN:
- if (NO_ERROR != CheckEntryTypeMismatch(&curr_entry, TYPE_FLOAT, 32))
- break;
- for (i=0 ; i<curr_entry.count ; i++)
- dst->ctl.tonemap.curveGreen[i] = curr_entry.data.f[i];
- break;
-
- case ANDROID_TONEMAP_CURVE_BLUE:
- if (NO_ERROR != CheckEntryTypeMismatch(&curr_entry, TYPE_FLOAT, 32))
- break;
- for (i=0 ; i<curr_entry.count ; i++)
- dst->ctl.tonemap.curveBlue[i] = curr_entry.data.f[i];
- break;
-
-
-
-
- case ANDROID_SCALER_CROP_REGION:
- if (NO_ERROR != CheckEntryTypeMismatch(&curr_entry, TYPE_INT32, 3))
- break;
- for (i=0 ; i<curr_entry.count ; i++)
- dst->ctl.scaler.cropRegion[i] = curr_entry.data.i32[i];
- break;
-
case ANDROID_JPEG_QUALITY:
case ANDROID_STATS_FACE_DETECT_MODE:
if (NO_ERROR != CheckEntryTypeMismatch(&curr_entry, TYPE_BYTE, 1))
break;
- dst->ctl.stats.faceDetectMode = (enum facedetect_mode)curr_entry.data.u8[0];
- break;
-
- case ANDROID_STATS_HISTOGRAM_MODE:
- if (NO_ERROR != CheckEntryTypeMismatch(&curr_entry, TYPE_BYTE, 1))
- break;
- dst->ctl.stats.histogramMode = (enum stats_mode)curr_entry.data.u8[0];
+ dst->ctl.stats.faceDetectMode = (enum facedetect_mode)(curr_entry.data.u8[0] + 1);
break;
- case ANDROID_STATS_SHARPNESS_MAP_MODE:
- if (NO_ERROR != CheckEntryTypeMismatch(&curr_entry, TYPE_BYTE, 1))
- break;
- dst->ctl.stats.sharpnessMapMode = (enum stats_mode)curr_entry.data.u8[0];
- break;
-
-
-
case ANDROID_CONTROL_CAPTURE_INTENT:
if (NO_ERROR != CheckEntryTypeMismatch(&curr_entry, TYPE_BYTE, 1))
break;
dst->ctl.aa.aeExpCompensation = curr_entry.data.i32[0] + 5;
break;
-
- case ANDROID_CONTROL_AE_ANTIBANDING_MODE:
- if (NO_ERROR != CheckEntryTypeMismatch(&curr_entry, TYPE_BYTE, 1))
- break;
- dst->ctl.aa.aeAntibandingMode = (enum aa_ae_antibanding_mode)curr_entry.data.u8[0];
- break;
-
case ANDROID_CONTROL_AWB_MODE:
if (NO_ERROR != CheckEntryTypeMismatch(&curr_entry, TYPE_BYTE, 1))
break;
&intData, 1))
return NO_MEMORY;
+ byteData = ANDROID_STATS_FACE_DETECTION_OFF;
+ if (0 != add_camera_metadata_entry(dst, ANDROID_STATS_FACE_DETECT_MODE,
+ &byteData, 1))
+ return NO_MEMORY;
ALOGV("(%s): AWB(%d) AE(%d) SCENE(%d) AEComp(%d)", __FUNCTION__,
metadata->dm.aa.awbMode - 1, metadata->dm.aa.aeMode - 1, metadata->ctl.aa.sceneMode - 1,