int cnt = 0;
uint32_t written = 0;
- ALOGD("opening file [%s], address[%x], size(%d)", fname, (unsigned int)buf, size);
+ ALOGV("opening file [%s], address[%x], size(%d)", fname, (unsigned int)buf, size);
int fd = open(fname, O_RDWR | O_CREAT, 0644);
if (fd < 0) {
ALOGE("failed to create file [%s]: %s", fname, strerror(errno));
return;
}
- ALOGD("writing %d bytes to file [%s]", size, fname);
+ ALOGV("writing %d bytes to file [%s]", size, fname);
while (written < size) {
nw = ::write(fd, buf + written, size - written);
if (nw < 0) {
ALOGE("failed to write to file [%s]: %s", fname, strerror(errno));
break;
}
written += nw;
cnt++;
}
- ALOGD("done writing %d bytes to file [%s] in %d passes",size, fname, cnt);
+ ALOGV("done writing %d bytes to file [%s] in %d passes",size, fname, cnt);
::close(fd);
}
if (ret < 0)
ALOGE("%s: exynos_v4l2_s_fmt fail (%d)",__FUNCTION__, ret);
- node->streamOn = false;
return ret;
}
enum v4l2_buf_type type = node->type;
int ret;
- if (node->streamOn)
- return 0;
ret = exynos_v4l2_streamon(node->fd, type);
if (ret < 0)
- ALOGE("%s: VIDIOC_STREAMON failed (%d)",__FUNCTION__, ret);
- else
- node->streamOn = true;
+ ALOGE("%s: VIDIOC_STREAMON failed [%d] (%d)",__FUNCTION__, node->fd,ret);
ALOGV("On streaming I/O... ... fd(%d)", node->fd);
int cam_int_streamoff(node_info_t *node)
{
- enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
- int ret;
+ enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+ int ret;
- if (!node->streamOn)
- return 0;
- ALOGV("Off streaming I/O... fd(%d)", node->fd);
- ret = exynos_v4l2_streamoff(node->fd, type);
+ ALOGV("Off streaming I/O... fd(%d)", node->fd);
+ ret = exynos_v4l2_streamoff(node->fd, type);
if (ret < 0)
ALOGE("%s: VIDIOC_STREAMOFF failed (%d)",__FUNCTION__, ret);
- else
- node->streamOn = false;
- return ret;
+ return ret;
}
int isp_int_streamoff(node_info_t *node)
{
- enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
- int ret;
+ enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
+ int ret;
- ALOGV("Off streaming I/O... fd(%d)", node->fd);
- ret = exynos_v4l2_streamoff(node->fd, type);
+ ALOGV("Off streaming I/O... fd(%d)", node->fd);
+ ret = exynos_v4l2_streamoff(node->fd, type);
if (ret < 0)
ALOGE("%s: VIDIOC_STREAMOFF failed (%d)",__FUNCTION__, ret);
- return ret;
+ return ret;
}
int cam_int_dqbuf(node_info_t *node)
RequestManager::RequestManager(SignalDrivenThread* main_thread):
m_numOfEntries(0),
- m_entryInsertionIndex(0),
- m_entryProcessingIndex(0),
- m_entryFrameOutputIndex(0)
+ m_entryInsertionIndex(-1),
+ m_entryProcessingIndex(-1),
+ m_entryFrameOutputIndex(-1),
+ m_frameIndex(-1)
{
m_metadataConverter = new MetadataConverter;
m_mainThread = main_thread;
memset(&(entries[i]), 0x00, sizeof(request_manager_entry_t));
entries[i].internal_shot.shot.ctl.request.frameCount = -1;
}
- m_sensorPipelineSkipCnt = 8;
+ m_sensorPipelineSkipCnt = 0;
return;
}
}
newEntry->status = REGISTERED;
newEntry->original_request = new_request;
- // TODO : allocate internal_request dynamically
+ memset(&(newEntry->internal_shot), 0, sizeof(struct camera2_shot_ext));
m_metadataConverter->ToInternalShot(new_request, &(newEntry->internal_shot));
newEntry->output_stream_count = newEntry->internal_shot.shot.ctl.request.outputStreams[15];
void RequestManager::DeregisterRequest(camera_metadata_t ** deregistered_request)
{
ALOGV("DEBUG(%s):", __FUNCTION__);
- Mutex::Autolock lock(m_requestMutex);
+ int frame_index;
+ request_manager_entry * currentEntry;
- request_manager_entry * currentEntry = &(entries[m_entryFrameOutputIndex]);
+ Mutex::Autolock lock(m_requestMutex);
+ frame_index = GetFrameIndex();
+ currentEntry = &(entries[frame_index]);
if (currentEntry->status != CAPTURED) {
- ALOGD("DBG(%s): Circular buffer abnormal. processing(%d), frame(%d), status(%d) ", __FUNCTION__
+ ALOGV("DBG(%s): Circular buffer abnormal. processing(%d), frame(%d), status(%d) ", __FUNCTION__
, m_entryProcessingIndex, m_entryFrameOutputIndex,(int)(currentEntry->status));
return;
}
currentEntry->output_stream_count = 0;
currentEntry->dynamic_meta_vaild = false;
m_numOfEntries--;
- // Dump();
ALOGV("## DeRegistReq DONE num(%d), insert(%d), processing(%d), frame(%d)",
m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex);
ALOGV("DEBUG(%s):", __FUNCTION__);
Mutex::Autolock lock(m_requestMutex);
status_t res = NO_ERROR;
- int tempFrameOutputIndex = GetNextIndex(m_entryFrameOutputIndex);
+ int tempFrameOutputIndex = GetFrameIndex();
request_manager_entry * currentEntry = &(entries[tempFrameOutputIndex]);
ALOGV("DEBUG(%s): processing(%d), frameOut(%d), insert(%d) recentlycompleted(%d)", __FUNCTION__,
m_entryProcessingIndex, m_entryFrameOutputIndex, m_entryInsertionIndex, m_completedIndex);
- if (m_completedIndex != tempFrameOutputIndex) {
- ALOGV("DEBUG(%s): frame left behind : completed(%d), preparing(%d)", __FUNCTION__, m_completedIndex,tempFrameOutputIndex);
-
- request_manager_entry * currentEntry2 = &(entries[tempFrameOutputIndex]);
- currentEntry2->status = EMPTY;
- currentEntry2->original_request = NULL;
- memset(&(currentEntry2->internal_shot), 0, sizeof(struct camera2_shot_ext));
- currentEntry2->internal_shot.shot.ctl.request.frameCount = -1;
- currentEntry2->output_stream_count = 0;
- currentEntry2->dynamic_meta_vaild = false;
- m_numOfEntries--;
- // Dump();
- tempFrameOutputIndex = m_completedIndex;
- currentEntry = &(entries[tempFrameOutputIndex]);
- }
-
- if (currentEntry->output_stream_count!=0) {
- ALOGD("DBG(%s): Circular buffer has remaining output : stream_count(%d)", __FUNCTION__, currentEntry->output_stream_count);
- return false;
- }
-
if (currentEntry->status != CAPTURED) {
- ALOGD("DBG(%s): Circular buffer abnormal status(%d)", __FUNCTION__, (int)(currentEntry->status));
+ ALOGV("DBG(%s): Circular buffer abnormal status(%d)", __FUNCTION__, (int)(currentEntry->status));
return false;
}
int RequestManager::MarkProcessingRequest(ExynosBuffer* buf)
{
- ALOGV("DEBUG(%s):", __FUNCTION__);
+
Mutex::Autolock lock(m_requestMutex);
struct camera2_shot_ext * shot_ext;
struct camera2_shot_ext * request_shot;
int targetStreamIndex = 0;
+ request_manager_entry * newEntry = NULL;
if (m_numOfEntries == 0) {
- ALOGV("DEBUG(%s): Request Manager Empty ", __FUNCTION__);
+ ALOGD("DEBUG(%s): Request Manager Empty ", __FUNCTION__);
return -1;
}
if ((m_entryProcessingIndex == m_entryInsertionIndex)
&& (entries[m_entryProcessingIndex].status == REQUESTED || entries[m_entryProcessingIndex].status == CAPTURED)) {
- ALOGV("## MarkProcReq skipping(request underrun) - num(%d), insert(%d), processing(%d), frame(%d)",
+ ALOGD("## MarkProcReq skipping(request underrun) - num(%d), insert(%d), processing(%d), frame(%d)",
m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex);
return -1;
}
- request_manager_entry * newEntry = NULL;
int newProcessingIndex = GetNextIndex(m_entryProcessingIndex);
+ ALOGV("DEBUG(%s): index(%d)", __FUNCTION__, newProcessingIndex);
newEntry = &(entries[newProcessingIndex]);
- request_shot = &newEntry->internal_shot;
+ request_shot = &(newEntry->internal_shot);
if (newEntry->status != REGISTERED) {
- ALOGV("DEBUG(%s): Circular buffer abnormal ", __FUNCTION__);
+ ALOGD("DEBUG(%s)(%d): Circular buffer abnormal ", __FUNCTION__, newProcessingIndex);
return -1;
}
+
newEntry->status = REQUESTED;
- shot_ext = (struct camera2_shot_ext *)(buf->virt.extP[1]);
- ALOGV("DEBUG(%s):Writing the info of Framecnt(%d)", __FUNCTION__, request_shot->shot.ctl.request.frameCount);
- memcpy(shot_ext, &newEntry->internal_shot, sizeof(struct camera2_shot_ext));
+ shot_ext = (struct camera2_shot_ext *)buf->virt.extP[1];
+ memset(shot_ext, 0x00, sizeof(struct camera2_shot_ext));
+ shot_ext->shot.ctl.request.frameCount = request_shot->shot.ctl.request.frameCount;
shot_ext->request_sensor = 1;
shot_ext->dis_bypass = 1;
shot_ext->dnr_bypass = 1;
+ shot_ext->fd_bypass = 1;
+ shot_ext->setfile = 0;
+
for (int i = 0; i < newEntry->output_stream_count; i++) {
targetStreamIndex = newEntry->internal_shot.shot.ctl.request.outputStreams[i];
if (targetStreamIndex==0) {
ALOGV("DEBUG(%s): outputstreams(%d) is for scalerP", __FUNCTION__, i);
shot_ext->request_scp = 1;
- shot_ext->shot.ctl.request.outputStreams[0] = 1;
}
else if (targetStreamIndex == 1) {
ALOGV("DEBUG(%s): outputstreams(%d) is for scalerC", __FUNCTION__, i);
shot_ext->request_scc = 1;
- shot_ext->shot.ctl.request.outputStreams[1] = 1;
}
else if (targetStreamIndex == 2) {
ALOGV("DEBUG(%s): outputstreams(%d) is for scalerP (record)", __FUNCTION__, i);
ALOGV("DEBUG(%s): outputstreams(%d) has abnormal value(%d)", __FUNCTION__, i, targetStreamIndex);
}
}
- if (shot_ext->shot.ctl.aa.aeMode == AA_AEMODE_ON) {
- ALOGV("(%s): AE_ON => ignoring some params", __FUNCTION__);
- shot_ext->shot.ctl.sensor.exposureTime = 0;
- shot_ext->shot.ctl.sensor.sensitivity = 0;
- shot_ext->shot.ctl.sensor.frameDuration = 33*1000*1000;
- // TODO : check frameDuration
- }
- m_entryProcessingIndex = newProcessingIndex;
- // Dump();
- ALOGV("## MarkProcReq DONE totalentry(%d), insert(%d), processing(%d), frame(%d) frameCnt(%d)",
- m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex, newEntry->internal_shot.shot.ctl.request.frameCount);
+ shot_ext->shot.ctl.aa.mode = AA_CONTROL_AUTO;
+ shot_ext->shot.ctl.request.metadataMode = METADATA_MODE_FULL;
+ shot_ext->shot.ctl.stats.faceDetectMode = FACEDETECT_MODE_FULL;
+ shot_ext->shot.magicNumber = 0x23456789;
+ shot_ext->shot.ctl.sensor.exposureTime = 0;
+ shot_ext->shot.ctl.sensor.frameDuration = 33*1000*1000;
+ shot_ext->shot.ctl.sensor.sensitivity = 0;
+
+ shot_ext->shot.ctl.scaler.cropRegion[0] = 0;
+ shot_ext->shot.ctl.scaler.cropRegion[1] = 0;
+ shot_ext->shot.ctl.scaler.cropRegion[2] = m_cropX;
- return m_entryProcessingIndex;
+ m_entryProcessingIndex = newProcessingIndex;
+ return newProcessingIndex;
}
void RequestManager::NotifyStreamOutput(int frameCnt, int stream_id)
void RequestManager::CheckCompleted(int index)
{
ALOGV("DEBUG(%s): reqIndex(%d) current Count(%d)", __FUNCTION__, index, entries[index].output_stream_count);
- if (entries[index].output_stream_count == 0 && entries[index].dynamic_meta_vaild) {
- ALOGV("DEBUG(%s): index[%d] completed and sending SIGNAL_MAIN_STREAM_OUTPUT_DONE", __FUNCTION__, index);
- // Dump();
- m_completedIndex = index;
- m_mainThread->SetSignal(SIGNAL_MAIN_STREAM_OUTPUT_DONE);
- }
+ SetFrameIndex(index);
+ m_mainThread->SetSignal(SIGNAL_MAIN_STREAM_OUTPUT_DONE);
return;
}
-void RequestManager::ApplyDynamicMetadata(struct camera2_shot_ext *shot_ext, int frameCnt)
+void RequestManager::SetFrameIndex(int index)
+{
+ Mutex::Autolock lock(m_requestMutex);
+ m_frameIndex = index;
+}
+
+int RequestManager::GetFrameIndex()
+{
+ return m_frameIndex;
+}
+
+void RequestManager::ApplyDynamicMetadata(struct camera2_shot_ext *shot_ext)
{
int index;
struct camera2_shot_ext * request_shot;
nsecs_t timeStamp;
+ int i;
- ALOGV("DEBUG(%s): frameCnt(%d)", __FUNCTION__, frameCnt);
+ ALOGV("DEBUG(%s): frameCnt(%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
- index = FindEntryIndexByFrameCnt(frameCnt);
- if (index == -1) {
- ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
+ for (i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
+ if((entries[i].internal_shot.shot.ctl.request.frameCount == shot_ext->shot.ctl.request.frameCount)
+ && (entries[i].status == CAPTURED))
+ break;
+ }
+
+ if (i == NUM_MAX_REQUEST_MGR_ENTRY){
+ ALOGE("[%s] no entry found(framecount:%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
return;
}
- request_manager_entry * newEntry = &(entries[index]);
+ request_manager_entry * newEntry = &(entries[i]);
request_shot = &(newEntry->internal_shot);
newEntry->dynamic_meta_vaild = true;
timeStamp = request_shot->shot.dm.sensor.timeStamp;
- memcpy(&request_shot->shot.dm, &shot_ext->shot.dm, sizeof(struct camera2_dm));
+ memcpy(&(request_shot->shot.dm), &(shot_ext->shot.dm), sizeof(struct camera2_dm));
request_shot->shot.dm.sensor.timeStamp = timeStamp;
- CheckCompleted(index);
+ CheckCompleted(i);
}
void RequestManager::DumpInfoWithIndex(int index)
}
request_manager_entry * newEntry = &(entries[index]);
- request_shot = &newEntry->internal_shot;
+ request_shot = &(newEntry->internal_shot);
shot_ext->request_sensor = 1;
+ shot_ext->dis_bypass = 1;
+ shot_ext->dnr_bypass = 1;
+ shot_ext->fd_bypass = 1;
+ shot_ext->setfile = 0;
+
shot_ext->request_scc = 0;
shot_ext->request_scp = 0;
+
shot_ext->shot.ctl.request.outputStreams[0] = 0;
shot_ext->shot.ctl.request.outputStreams[1] = 0;
shot_ext->shot.ctl.request.outputStreams[2] = 0;
- memcpy(&shot_ext->shot.ctl, &request_shot->shot.ctl, sizeof(struct camera2_ctl));
+
for (int i = 0; i < newEntry->output_stream_count; i++) {
- // TODO : match with actual stream index;
- targetStreamIndex = request_shot->shot.ctl.request.outputStreams[i];
+ targetStreamIndex = newEntry->internal_shot.shot.ctl.request.outputStreams[i];
if (targetStreamIndex==0) {
- ALOGV("DEBUG(%s): outputstreams item[%d] is for scalerP", __FUNCTION__, i);
+ ALOGV("DEBUG(%s): outputstreams(%d) is for scalerP", __FUNCTION__, i);
shot_ext->request_scp = 1;
- shot_ext->shot.ctl.request.outputStreams[0] = 1;
}
else if (targetStreamIndex == 1) {
- ALOGV("DEBUG(%s): outputstreams item[%d] is for scalerC", __FUNCTION__, i);
+ ALOGV("DEBUG(%s): outputstreams(%d) is for scalerC", __FUNCTION__, i);
shot_ext->request_scc = 1;
- shot_ext->shot.ctl.request.outputStreams[1] = 1;
}
else if (targetStreamIndex == 2) {
- ALOGV("DEBUG(%s): outputstreams item[%d] is for scalerP (record)", __FUNCTION__, i);
+ ALOGV("DEBUG(%s): outputstreams(%d) is for scalerP (record)", __FUNCTION__, i);
shot_ext->request_scp = 1;
shot_ext->shot.ctl.request.outputStreams[2] = 1;
}
else {
- ALOGV("DEBUG(%s): outputstreams item[%d] has abnormal value(%d)", __FUNCTION__, i, targetStreamIndex);
+ ALOGV("DEBUG(%s): outputstreams(%d) has abnormal value(%d)", __FUNCTION__, i, targetStreamIndex);
}
}
+
}
int RequestManager::FindEntryIndexByFrameCnt(int frameCnt)
index, frameCnt, currentEntry->internal_shot.shot.dm.sensor.timeStamp);
}
-uint64_t RequestManager::GetTimestamp(int frameCnt)
+uint64_t RequestManager::GetTimestamp(int index)
{
- int index = FindEntryIndexByFrameCnt(frameCnt);
+
if (index == -1) {
- ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
+ ALOGE("ERR(%s): Cannot find entry ", __FUNCTION__);
return 0;
}
int RequestManager::FindFrameCnt(struct camera2_shot_ext * shot_ext)
{
- int tempIndex, i;
- if (m_sensorPipelineSkipCnt > 0) {
- m_sensorPipelineSkipCnt--;
- return -1;
- }
+ int i;
+
if (m_numOfEntries == 0) {
- ALOGD("(%s): No Entry found", __FUNCTION__);
+ ALOGV("(%s): No Entry found", __FUNCTION__);
return -1;
}
- tempIndex = GetNextIndex(m_entryFrameOutputIndex);
+
for (i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
- if (entries[tempIndex].status == REQUESTED) {
- entries[tempIndex].status = CAPTURED;
- return entries[tempIndex].internal_shot.shot.ctl.request.frameCount;
- }
- else if (entries[tempIndex].status == CAPTURED) {
- tempIndex = GetNextIndex(tempIndex);
+ if(entries[i].internal_shot.shot.ctl.request.frameCount != shot_ext->shot.ctl.request.frameCount)
continue;
+
+ if (entries[i].status == REQUESTED) {
+ entries[i].status = CAPTURED;
+ return entries[i].internal_shot.shot.ctl.request.frameCount;
}
- else {
- ALOGE("(%s): enry state abnormal status(%d)", __FUNCTION__, entries[tempIndex].status);
- Dump();
- return -1;
- }
+
}
+
+ ALOGD("(%s): No Entry found", __FUNCTION__);
+
return -1;
}
m_sensorPipelineSkipCnt = count;
}
+int RequestManager::GetSkipCnt()
+{
+ ALOGV("(%s): skip cnt(%d)", __FUNCTION__, m_sensorPipelineSkipCnt);
+ if (m_sensorPipelineSkipCnt == 0)
+ return m_sensorPipelineSkipCnt;
+ else
+ return --m_sensorPipelineSkipCnt;
+}
+
void RequestManager::Dump(void)
{
int i = 0;
request_manager_entry * currentEntry;
- ALOGV("## Dump totalentry(%d), insert(%d), processing(%d), frame(%d)",
+ ALOGD("## Dump totalentry(%d), insert(%d), processing(%d), frame(%d)",
m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex);
for (i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
currentEntry = &(entries[i]);
- ALOGV("[%2d] status[%d] frameCnt[%3d] numOutput[%d]", i,
+ ALOGD("[%2d] status[%d] frameCnt[%3d] numOutput[%d] outstream[0]-%d outstream[1]-%d", i,
currentEntry->status, currentEntry->internal_shot.shot.ctl.request.frameCount,
- currentEntry->output_stream_count);
+ currentEntry->output_stream_count,
+ currentEntry->internal_shot.shot.ctl.request.outputStreams[0],
+ currentEntry->internal_shot.shot.ctl.request.outputStreams[1]);
}
}
m_isRequestQueueNull(true),
m_isSensorThreadOn(false),
m_isSensorStarted(false),
+ m_isIspStarted(false),
m_ionCameraClient(0),
m_initFlag1(false),
m_initFlag2(false),
m_scp_closing(false),
m_scp_closed(false),
m_halDevice(dev),
+ m_need_streamoff(0),
m_cameraId(cameraId)
{
ALOGV("DEBUG(%s):", __FUNCTION__);
m_BayerManager = new BayerBufManager();
m_mainThread = new MainThread(this);
+ InitializeISPChain();
m_sensorThread = new SensorThread(this);
- m_ispThread = new IspThread(this);
m_mainThread->Start("MainThread", PRIORITY_DEFAULT, 0);
ALOGV("DEBUG(%s): created sensorthread ################", __FUNCTION__);
- usleep(1600000);
m_requestManager = new RequestManager((SignalDrivenThread*)(m_mainThread.get()));
CSC_METHOD cscMethod = CSC_METHOD_HW;
ExynosCameraHWInterface2::~ExynosCameraHWInterface2()
{
- ALOGD("%s: ENTER", __FUNCTION__);
+ ALOGV("%s: ENTER", __FUNCTION__);
this->release();
- ALOGD("%s: EXIT", __FUNCTION__);
+ ALOGV("%s: EXIT", __FUNCTION__);
}
void ExynosCameraHWInterface2::release()
ALOGD("%s: ENTER", __func__);
m_closing = true;
- while (!m_scp_closed)
- usleep(1000);
+ if (m_streamThreads[1] != NULL) {
+ m_streamThreads[1]->release();
+ m_streamThreads[1]->SetSignal(SIGNAL_THREAD_TERMINATE);
+ }
+
+ if (m_streamThreads[0] != NULL) {
+ m_streamThreads[0]->release();
+ m_streamThreads[0]->SetSignal(SIGNAL_THREAD_TERMINATE);
+ }
+
if (m_ispThread != NULL) {
m_ispThread->release();
}
m_mainThread->release();
}
- if (m_streamThreads[0] != NULL) {
- m_streamThreads[0]->release();
- m_streamThreads[0]->SetSignal(SIGNAL_THREAD_TERMINATE);
- }
-
- if (m_streamThreads[1] != NULL) {
- m_streamThreads[1]->release();
- m_streamThreads[1]->SetSignal(SIGNAL_THREAD_TERMINATE);
- }
-
-
if (m_exynosPictureCSC)
csc_deinit(m_exynosPictureCSC);
m_exynosPictureCSC = NULL;
csc_deinit(m_exynosVideoCSC);
m_exynosVideoCSC = NULL;
+ if (m_streamThreads[1] != NULL) {
+ while (!m_streamThreads[1]->IsTerminated())
+ {
+ ALOGD("Waiting for ISP thread is tetminated");
+ usleep(100000);
+ }
+ m_streamThreads[1] = NULL;
+ }
+
+ if (m_streamThreads[0] != NULL) {
+ while (!m_streamThreads[0]->IsTerminated())
+ {
+ ALOGD("Waiting for sensor thread is tetminated");
+ usleep(100000);
+ }
+ m_streamThreads[0] = NULL;
+ }
+
if (m_ispThread != NULL) {
while (!m_ispThread->IsTerminated())
- usleep(1000);
+ {
+ ALOGD("Waiting for isp thread is tetminated");
+ usleep(100000);
+ }
m_ispThread = NULL;
}
if (m_sensorThread != NULL) {
while (!m_sensorThread->IsTerminated())
- usleep(1000);
+ {
+ ALOGD("Waiting for sensor thread is tetminated");
+ usleep(100000);
+ }
m_sensorThread = NULL;
}
- if (m_mainThread != NULL) {
+ if (m_mainThread != NULL) {
while (!m_mainThread->IsTerminated())
- usleep(1000);
+ {
+ ALOGD("Waiting for main thread is tetminated");
+ usleep(100000);
+ }
m_mainThread = NULL;
}
- if (m_streamThreads[0] != NULL) {
- while (!m_streamThreads[0]->IsTerminated())
- usleep(1000);
- m_streamThreads[0] = NULL;
- }
-
- if (m_streamThreads[1] != NULL) {
- while (!m_streamThreads[1]->IsTerminated())
- usleep(1000);
- m_streamThreads[1] = NULL;
- }
-
for(i = 0; i < m_camera_info.sensor.buffers; i++)
freeCameraMemory(&m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes);
}
ALOGV("DEBUG(%s): calling exynos_v4l2_close - scp", __FUNCTION__);
- res = exynos_v4l2_close(m_fd_scp);
+ res = exynos_v4l2_close(m_fd_scp);
if (res != NO_ERROR ) {
ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
}
ALOGV("DEBUG(%s): calling deleteIonClient", __FUNCTION__);
deleteIonClient(m_ionCameraClient);
-
- ALOGD("%s: EXIT", __func__);
+
+ ALOGV("%s: EXIT", __func__);
+}
+
+void ExynosCameraHWInterface2::InitializeISPChain()
+{
+ char node_name[30];
+ int fd = 0;
+ int i;
+
+ /* Open Sensor */
+ memset(&node_name, 0x00, sizeof(char[30]));
+ sprintf(node_name, "%s%d", NODE_PREFIX, 40);
+ fd = exynos_v4l2_open(node_name, O_RDWR, 0);
+
+ if (fd < 0) {
+ ALOGE("ERR(%s): failed to open sensor video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
+ }
+ else {
+ ALOGV("DEBUG(%s): sensor video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
+ }
+ m_camera_info.sensor.fd = fd;
+
+ /* Open ISP */
+ memset(&node_name, 0x00, sizeof(char[30]));
+ sprintf(node_name, "%s%d", NODE_PREFIX, 41);
+ fd = exynos_v4l2_open(node_name, O_RDWR, 0);
+
+ if (fd < 0) {
+ ALOGE("ERR(%s): failed to open isp video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
+ }
+ else {
+ ALOGV("DEBUG(%s): isp video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
+ }
+ m_camera_info.isp.fd = fd;
+
+ /* Open ScalerC */
+ memset(&node_name, 0x00, sizeof(char[30]));
+ sprintf(node_name, "%s%d", NODE_PREFIX, 42);
+ fd = exynos_v4l2_open(node_name, O_RDWR, 0);
+
+ if (fd < 0) {
+ ALOGE("ERR(%s): failed to open capture video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
+ }
+ else {
+ ALOGV("DEBUG(%s): capture video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
+ }
+ m_camera_info.capture.fd = fd;
+
+ /* Open ScalerP */
+ memset(&node_name, 0x00, sizeof(char[30]));
+ sprintf(node_name, "%s%d", NODE_PREFIX, 44);
+ fd = exynos_v4l2_open(node_name, O_RDWR, 0);
+ if (fd < 0) {
+ ALOGE("DEBUG(%s): failed to open preview video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
+ }
+ else {
+ ALOGV("DEBUG(%s): preview video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
+ }
+ m_fd_scp = fd;
+
+ if(m_cameraId == 0)
+ m_camera_info.sensor_id = SENSOR_NAME_S5K4E5;
+ else
+ m_camera_info.sensor_id = SENSOR_NAME_S5K6A3;
+
+ memset(&m_camera_info.dummy_shot, 0x00, sizeof(struct camera2_shot_ext));
+ m_camera_info.dummy_shot.shot.ctl.request.metadataMode = METADATA_MODE_FULL;
+ m_camera_info.dummy_shot.shot.magicNumber = 0x23456789;
+
+ m_camera_info.dummy_shot.dis_bypass = 1;
+ m_camera_info.dummy_shot.dnr_bypass = 1;
+ m_camera_info.dummy_shot.fd_bypass = 1;
+
+ /*sensor setting*/
+ m_camera_info.dummy_shot.shot.ctl.sensor.exposureTime = 0;
+ m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 0;
+ m_camera_info.dummy_shot.shot.ctl.sensor.sensitivity = 0;
+
+ m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[0] = 0;
+ m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[1] = 0;
+
+ /*request setting*/
+ m_camera_info.dummy_shot.request_sensor = 1;
+ m_camera_info.dummy_shot.request_scc = 0;
+ m_camera_info.dummy_shot.request_scp = 0;
+ m_camera_info.dummy_shot.shot.ctl.request.outputStreams[0] = 0;
+ m_camera_info.dummy_shot.shot.ctl.request.outputStreams[1] = 0;
+ m_camera_info.dummy_shot.shot.ctl.request.outputStreams[2] = 0;
+
+ m_camera_info.sensor.width = m_camera2->getSensorRawW();
+ m_camera_info.sensor.height = m_camera2->getSensorRawH();
+
+ m_camera_info.sensor.format = V4L2_PIX_FMT_SBGGR16;
+ m_camera_info.sensor.planes = 2;
+ m_camera_info.sensor.buffers = NUM_BAYER_BUFFERS;
+ m_camera_info.sensor.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+ m_camera_info.sensor.memory = V4L2_MEMORY_DMABUF;
+ m_camera_info.sensor.ionClient = m_ionCameraClient;
+
+ for(i = 0; i < m_camera_info.sensor.buffers; i++){
+ initCameraMemory(&m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes);
+ m_camera_info.sensor.buffer[i].size.extS[0] = m_camera_info.sensor.width*m_camera_info.sensor.height*2;
+ m_camera_info.sensor.buffer[i].size.extS[1] = 8*1024; // HACK: driver uses 8*1024, should use a predefined value
+ allocCameraMemory(m_camera_info.sensor.ionClient, &m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes);
+ }
+
+ m_camera_info.isp.width = m_camera_info.sensor.width;
+ m_camera_info.isp.height = m_camera_info.sensor.height;
+ m_camera_info.isp.format = m_camera_info.sensor.format;
+ m_camera_info.isp.planes = m_camera_info.sensor.planes;
+ m_camera_info.isp.buffers = m_camera_info.sensor.buffers;
+ m_camera_info.isp.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
+ m_camera_info.isp.memory = V4L2_MEMORY_DMABUF;
+ m_camera_info.isp.ionClient = m_ionCameraClient;
+
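+ /* the ISP output node reuses the sensor's bayer buffers: same sizes, dma-buf fds and mapped addresses */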
+ for(i = 0; i < m_camera_info.isp.buffers; i++){
+ initCameraMemory(&m_camera_info.isp.buffer[i], m_camera_info.isp.planes);
+ m_camera_info.isp.buffer[i].size.extS[0] = m_camera_info.sensor.buffer[i].size.extS[0];
+ m_camera_info.isp.buffer[i].size.extS[1] = m_camera_info.sensor.buffer[i].size.extS[1];
+ m_camera_info.isp.buffer[i].fd.extFd[0] = m_camera_info.sensor.buffer[i].fd.extFd[0];
+ m_camera_info.isp.buffer[i].fd.extFd[1] = m_camera_info.sensor.buffer[i].fd.extFd[1];
+ m_camera_info.isp.buffer[i].virt.extP[0] = m_camera_info.sensor.buffer[i].virt.extP[0];
+ m_camera_info.isp.buffer[i].virt.extP[1] = m_camera_info.sensor.buffer[i].virt.extP[1];
+ }
+
+ /* init ISP */
+ cam_int_s_input(&(m_camera_info.isp), m_camera_info.sensor_id);
+ cam_int_s_fmt(&(m_camera_info.isp));
+ ALOGV("DEBUG(%s): isp calling reqbuf", __FUNCTION__);
+ cam_int_reqbufs(&(m_camera_info.isp));
+ ALOGV("DEBUG(%s): isp calling querybuf", __FUNCTION__);
+ ALOGV("DEBUG(%s): isp mem alloc done", __FUNCTION__);
+
+ /* init Sensor */
+ cam_int_s_input(&(m_camera_info.sensor), m_camera_info.sensor_id);
+ ALOGV("DEBUG(%s): sensor s_input done", __FUNCTION__);
+ if (cam_int_s_fmt(&(m_camera_info.sensor))< 0) {
+ ALOGE("ERR(%s): sensor s_fmt fail", __FUNCTION__);
+ }
+ ALOGV("DEBUG(%s): sensor s_fmt done", __FUNCTION__);
+ cam_int_reqbufs(&(m_camera_info.sensor));
+ ALOGV("DEBUG(%s): sensor reqbuf done", __FUNCTION__);
+ for (i = 0; i < m_camera_info.sensor.buffers; i++) {
+ ALOGV("DEBUG(%s): sensor initial QBUF [%d]", __FUNCTION__, i);
+ memcpy( m_camera_info.sensor.buffer[i].virt.extP[1], &(m_camera_info.dummy_shot),
+ sizeof(struct camera2_shot_ext));
+ m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 33*1000*1000; // apply from frame #1
+ m_camera_info.dummy_shot.shot.ctl.request.frameCount = -1;
+ cam_int_qbuf(&(m_camera_info.sensor), i);
+ }
+ ALOGV("== stream_on :: .sensor");
+ cam_int_streamon(&(m_camera_info.sensor));
+
+ /* init Capture */
+ m_camera_info.capture.width = m_camera2->getSensorW();
+ m_camera_info.capture.height = m_camera2->getSensorH();
+ m_camera_info.capture.format = V4L2_PIX_FMT_YUYV;
+ m_camera_info.capture.planes = 1;
+ m_camera_info.capture.buffers = 8;
+ m_camera_info.capture.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
+ m_camera_info.capture.memory = V4L2_MEMORY_DMABUF;
+ m_camera_info.capture.ionClient = m_ionCameraClient;
+
+ for(i = 0; i < m_camera_info.capture.buffers; i++){
+ initCameraMemory(&m_camera_info.capture.buffer[i], m_camera_info.capture.planes);
+ m_camera_info.capture.buffer[i].size.extS[0] = m_camera_info.capture.width*m_camera_info.capture.height*2;
+ allocCameraMemory(m_camera_info.capture.ionClient, &m_camera_info.capture.buffer[i], m_camera_info.capture.planes);
+ }
+
+ cam_int_s_input(&(m_camera_info.capture), m_camera_info.sensor_id);
+ cam_int_s_fmt(&(m_camera_info.capture));
+ ALOGV("DEBUG(%s): capture calling reqbuf", __FUNCTION__);
+ cam_int_reqbufs(&(m_camera_info.capture));
+ ALOGV("DEBUG(%s): capture calling querybuf", __FUNCTION__);
+
+ for (i = 0; i < m_camera_info.capture.buffers; i++) {
+ ALOGV("DEBUG(%s): capture initial QBUF [%d]", __FUNCTION__, i);
+ cam_int_qbuf(&(m_camera_info.capture), i);
+ }
+
+ ALOGV("== stream_on :: capture");
+ cam_int_streamon(&(m_camera_info.capture));
+}
+
+void ExynosCameraHWInterface2::StartISP()
+{
+ int i;
+
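+ /* queue every ISP buffer, start streaming, then drain them once before enabling the sensor stream via V4L2_CID_IS_S_STREAM */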
+ for (i = 0; i < m_camera_info.isp.buffers; i++) {
+ ALOGV("DEBUG(%s): isp initial QBUF [%d]", __FUNCTION__, i);
+ cam_int_qbuf(&(m_camera_info.isp), i);
+ }
+
+ ALOGV("== stream_on :: isp");
+ cam_int_streamon(&(m_camera_info.isp));
+
+ for (i = 0; i < m_camera_info.isp.buffers; i++) {
+ ALOGV("DEBUG(%s): isp initial DQBUF [%d]", __FUNCTION__, i);
+ cam_int_dqbuf(&(m_camera_info.isp));
+ }
+ exynos_v4l2_s_ctrl(m_camera_info.sensor.fd, V4L2_CID_IS_S_STREAM, IS_ENABLE_STREAM);
}
int ExynosCameraHWInterface2::getCameraId() const
return 0;
}
m_isRequestQueueNull = false;
- if (m_requestManager->GetNumEntries() == 0)
- m_requestManager->SetInitialSkip(5);
m_mainThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY);
return 0;
}
int ExynosCameraHWInterface2::allocateStream(uint32_t width, uint32_t height, int format, const camera2_stream_ops_t *stream_ops,
uint32_t *stream_id, uint32_t *format_actual, uint32_t *usage, uint32_t *max_buffers)
{
- ALOGD("DEBUG(%s): allocate stream width(%d) height(%d) format(%x)", __FUNCTION__, width, height, format);
+ ALOGV("DEBUG(%s): allocate stream width(%d) height(%d) format(%x)", __FUNCTION__, width, height, format);
char node_name[30];
int fd = 0, allocCase = 0;
StreamThread *AllocatedStream;
if (allocCase == 0) {
m_streamThreads[0] = new StreamThread(this, *stream_id);
-
-
- memset(&node_name, 0x00, sizeof(char[30]));
- sprintf(node_name, "%s%d", NODE_PREFIX, 44);
- fd = exynos_v4l2_open(node_name, O_RDWR, 0);
- if (fd < 0) {
- ALOGE("DEBUG(%s): failed to open preview video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
- }
- else {
- ALOGV("DEBUG(%s): preview video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
- }
- m_fd_scp = fd;
}
AllocatedStream = (StreamThread*)(m_streamThreads[0].get());
m_scp_flushing = false;
m_scp_flushing = false;
m_scp_closing = false;
m_scp_closed = false;
- m_requestManager->SetDefaultParameters(width);
- m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[2] = width;
+ m_requestManager->SetDefaultParameters(m_camera2->getSensorW());
+ m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[2] = m_camera2->getSensorW();
return 0;
}
else if (allocCase == 1) {
return 0;
}
}
- else if (format == HAL_PIXEL_FORMAT_BLOB
+ else if (format == HAL_PIXEL_FORMAT_BLOB
&& m_camera2->isSupportedJpegResolution(width, height)) {
*stream_id = 1;
currentNode->ionClient = targetStreamParms->ionClient;
if (targetStreamParms->streamType == STREAM_TYPE_DIRECT) {
+ if(m_need_streamoff == 1) {
+ ALOGV("(%s): calling capture streamoff", __FUNCTION__);
+ cam_int_streamoff(&(m_camera_info.capture));
+ ALOGV("(%s): calling capture streamoff done", __FUNCTION__);
+
+ m_camera_info.capture.buffers = 0;
+ ALOGV("DEBUG(%s): capture calling reqbuf 0 ", __FUNCTION__);
+ cam_int_reqbufs(&(m_camera_info.capture));
+ ALOGV("DEBUG(%s): capture calling reqbuf 0 done", __FUNCTION__);
+
+ if (m_sensorThread != NULL) {
+ m_sensorThread->release();
+ usleep(500000);
+ } else {
+ ALOGE("+++++++ sensor thread is NULL %d", __LINE__);
+ }
+ m_isIspStarted = false;
+ }
+
cam_int_s_input(currentNode, m_camera_info.sensor_id);
cam_int_s_fmt(currentNode);
cam_int_reqbufs(currentNode);
+
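+ /* after a previous stream-off, re-request buffers, re-prime the sensor queue with dummy shots and restart capture/sensor streaming */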
+ if (m_need_streamoff == 1) {
+ m_camera_info.sensor.buffers = NUM_BAYER_BUFFERS;
+ m_camera_info.isp.buffers = m_camera_info.sensor.buffers;
+ m_camera_info.capture.buffers = 8;
+ cam_int_reqbufs(&(m_camera_info.isp));
+ cam_int_reqbufs(&(m_camera_info.sensor));
+
+ for (i = 0; i < 8; i++) {
+ ALOGV("DEBUG(%s): sensor initial QBUF [%d]", __FUNCTION__, i);
+ memcpy( m_camera_info.sensor.buffer[i].virt.extP[1], &(m_camera_info.dummy_shot),
+ sizeof(struct camera2_shot_ext));
+ m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 33*1000*1000; // apply from frame #1
+ m_camera_info.dummy_shot.shot.ctl.request.frameCount = -1;
+ cam_int_qbuf(&(m_camera_info.sensor), i);
+ }
+
+ cam_int_reqbufs(&(m_camera_info.capture));
+ cam_int_streamon(&(m_camera_info.capture));
+ cam_int_streamon(&(m_camera_info.sensor));
+
+ m_need_streamoff = 0;
+ m_requestManager->SetInitialSkip(2);
+ m_sensorThread->Start("SensorThread", PRIORITY_DEFAULT, 0);
+ m_mainThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY);
+ }
}
else if (targetStreamParms->streamType == STREAM_TYPE_INDIRECT) {
for(i = 0; i < currentNode->buffers; i++){
}
}
}
+
ALOGV("DEBUG(%s): calling streamon", __FUNCTION__);
- cam_int_streamon(&(targetStreamParms->node));
- ALOGV("DEBUG(%s): calling streamon END", __FUNCTION__);
+ if (targetStreamParms->streamType == 0) {
+ ALOGD("%s(%d), stream id = %d", __FUNCTION__, __LINE__, stream_id);
+ cam_int_streamon(&(targetStreamParms->node));
+ }
+
+ ALOGV("DEBUG(%s): calling streamon END", __FUNCTION__);
ALOGV("DEBUG(%s): END registerStreamBuffers", __FUNCTION__);
+
+ if(!m_isIspStarted) {
+ m_isIspStarted = true;
+ StartISP();
+ m_need_streamoff = 1;
+ }
+
return 0;
}
}
else if (stream_id == 2 && m_recordingEnabled) {
m_recordingEnabled = false;
+ m_needsRecordBufferInit = true;
return 0;
}
else {
targetStream->m_releasing = true;
targetStream->release();
while (targetStream->m_releasing)
+ {
+ ALOGD("stream thread release fail %d", __LINE__);
usleep(2000);
+ }
targetStream->m_activated = false;
ALOGV("DEBUG(%s): DONE", __FUNCTION__);
return 0;
{
ALOGV("DEBUG(%s) : BayerIndex[%d] reqFrameCnt(%d)", __FUNCTION__, index, reqFrameCnt);
- // sanity check
- if (index != sensorDequeueHead) {
- ALOGV("DEBUG(%s) : Abnormal BayerIndex[%d] - expected[%d]", __FUNCTION__, index, sensorDequeueHead);
- return -1;
- }
if (entries[index].status != BAYER_ON_SENSOR) {
- ALOGV("DEBUG(%s) : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
+ ALOGE("DEBUG(%s) : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
index, entries[index].status, BAYER_ON_SENSOR);
return -1;
}
entries[index].status = BAYER_ON_HAL_FILLED;
- entries[index].reqFrameCnt = reqFrameCnt;
- entries[index].timeStamp = *timeStamp;
numOnHalFilled++;
numOnSensor--;
- sensorDequeueHead = GetNextIndex(index);
- ALOGV("DEBUG(%s) END: HAL-e(%d) HAL-f(%d) Sensor(%d) ISP(%d) ",
- __FUNCTION__, numOnHalEmpty, numOnHalFilled, numOnSensor, numOnIsp);
+
return 0;
}
MainThread * selfThread = ((MainThread*)self);
int res = 0;
+ int ret;
+
ALOGV("DEBUG(%s): m_mainThreadFunc (%x)", __FUNCTION__, currentSignal);
if (currentSignal & SIGNAL_THREAD_RELEASE) {
if (currentSignal & SIGNAL_MAIN_REQ_Q_NOT_EMPTY) {
ALOGV("DEBUG(%s): MainThread processing SIGNAL_MAIN_REQ_Q_NOT_EMPTY", __FUNCTION__);
- if (m_requestManager->IsRequestQueueFull()==false
- && m_requestManager->GetNumEntries()<NUM_MAX_DEQUEUED_REQUEST) {
+ if (m_requestManager->IsRequestQueueFull()==false) {
m_requestQueueOps->dequeue_request(m_requestQueueOps, ¤tRequest);
if (NULL == currentRequest) {
- ALOGV("DEBUG(%s): dequeue_request returned NULL ", __FUNCTION__);
+ ALOGE("DEBUG(%s)(0x%x): dequeue_request returned NULL ", __FUNCTION__, currentSignal);
m_isRequestQueueNull = true;
}
else {
m_numOfRemainingReqInSvc = m_requestQueueOps->request_count(m_requestQueueOps);
ALOGV("DEBUG(%s): remaining req cnt (%d)", __FUNCTION__, m_numOfRemainingReqInSvc);
- if (m_requestManager->IsRequestQueueFull()==false
- && m_requestManager->GetNumEntries()<NUM_MAX_DEQUEUED_REQUEST)
+ if (m_requestManager->IsRequestQueueFull()==false)
selfThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY); // dequeue repeatedly
m_sensorThread->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING);
if (currentSignal & SIGNAL_MAIN_STREAM_OUTPUT_DONE) {
ALOGV("DEBUG(%s): MainThread processing SIGNAL_MAIN_STREAM_OUTPUT_DONE", __FUNCTION__);
/*while (1)*/ {
- m_requestManager->PrepareFrame(&numEntries, &frameSize, &preparedFrame);
+ ret = m_requestManager->PrepareFrame(&numEntries, &frameSize, &preparedFrame);
+ if (ret == false)
+ ALOGD("++++++ PrepareFrame ret = %d", ret);
+
m_requestManager->DeregisterRequest(&deregisteredRequest);
- m_requestQueueOps->free_request(m_requestQueueOps, deregisteredRequest);
- m_frameQueueOps->dequeue_frame(m_frameQueueOps, numEntries, frameSize, ¤tFrame);
+
+ ret = m_requestQueueOps->free_request(m_requestQueueOps, deregisteredRequest);
+ if (ret < 0)
+ ALOGD("++++++ free_request ret = %d", ret);
+
+ ret = m_frameQueueOps->dequeue_frame(m_frameQueueOps, numEntries, frameSize, ¤tFrame);
+ if (ret < 0)
+ ALOGD("++++++ dequeue_frame ret = %d", ret);
+
if (currentFrame==NULL) {
- ALOGD("DBG(%s): frame dequeue returned NULL",__FUNCTION__ );
+ ALOGV("DBG(%s): frame dequeue returned NULL",__FUNCTION__ );
}
else {
ALOGV("DEBUG(%s): frame dequeue done. numEntries(%d) frameSize(%d)",__FUNCTION__ , numEntries, frameSize);
if (res==0) {
ALOGV("DEBUG(%s): frame metadata append success",__FUNCTION__);
m_frameQueueOps->enqueue_frame(m_frameQueueOps, currentFrame);
- }
- else {
- ALOGE("ERR(%s): frame metadata append fail (%d)",__FUNCTION__, res);
- }
- }
- if (!m_isRequestQueueNull) {
- selfThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY);
- }
-
- if (getInProgressCount()>0) {
- ALOGV("DEBUG(%s): STREAM_OUTPUT_DONE and signalling REQ_PROCESSING",__FUNCTION__);
- m_sensorThread->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING);
- }
- }
- ALOGV("DEBUG(%s): MainThread Exit", __FUNCTION__);
- return;
-}
-
-void ExynosCameraHWInterface2::m_sensorThreadInitialize(SignalDrivenThread * self)
-{
- ALOGV("DEBUG(%s): ", __FUNCTION__ );
- SensorThread * selfThread = ((SensorThread*)self);
- char node_name[30];
- int fd = 0;
- int i =0, j=0;
-
- if(m_cameraId == 0)
- m_camera_info.sensor_id = SENSOR_NAME_S5K4E5;
- else
- m_camera_info.sensor_id = SENSOR_NAME_S5K6A3;
-
- memset(&m_camera_info.dummy_shot, 0x00, sizeof(struct camera2_shot_ext));
- m_camera_info.dummy_shot.shot.ctl.request.metadataMode = METADATA_MODE_FULL;
- m_camera_info.dummy_shot.shot.magicNumber = 0x23456789;
-
- m_camera_info.dummy_shot.dis_bypass = 1;
- m_camera_info.dummy_shot.dnr_bypass = 1;
-
- /*sensor setting*/
- m_camera_info.dummy_shot.shot.ctl.sensor.exposureTime = 0;
- m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 0;
- m_camera_info.dummy_shot.shot.ctl.sensor.sensitivity = 0;
-
- m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[0] = 0;
- m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[1] = 0;
- //m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[2] = 1920;
-
- /*request setting*/
- m_camera_info.dummy_shot.request_sensor = 1;
- m_camera_info.dummy_shot.request_scc = 0;
- m_camera_info.dummy_shot.request_scp = 0;
- m_camera_info.dummy_shot.shot.ctl.request.outputStreams[0] = 0;
- m_camera_info.dummy_shot.shot.ctl.request.outputStreams[1] = 0;
- m_camera_info.dummy_shot.shot.ctl.request.outputStreams[2] = 0;
-
- /*sensor init*/
- memset(&node_name, 0x00, sizeof(char[30]));
- sprintf(node_name, "%s%d", NODE_PREFIX, 40);
- fd = exynos_v4l2_open(node_name, O_RDWR, 0);
-
- if (fd < 0) {
- ALOGE("ERR(%s): failed to open sensor video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
- }
- else {
- ALOGV("DEBUG(%s): sensor video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
- }
- m_camera_info.sensor.fd = fd;
-
- m_camera_info.sensor.width = m_camera2->getSensorRawW();
- m_camera_info.sensor.height = m_camera2->getSensorRawH();
-
- m_camera_info.sensor.format = V4L2_PIX_FMT_SBGGR16;
- m_camera_info.sensor.planes = 2;
- m_camera_info.sensor.buffers = NUM_BAYER_BUFFERS;
- m_camera_info.sensor.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
- m_camera_info.sensor.memory = V4L2_MEMORY_DMABUF;
- m_camera_info.sensor.ionClient = m_ionCameraClient;
+ }
+ else {
+ ALOGE("ERR(%s): frame metadata append fail (%d)",__FUNCTION__, res);
+ }
+ }
+ if (!m_isRequestQueueNull) {
+ selfThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY);
+ }
- for(i = 0; i < m_camera_info.sensor.buffers; i++){
- initCameraMemory(&m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes);
- m_camera_info.sensor.buffer[i].size.extS[0] = m_camera_info.sensor.width*m_camera_info.sensor.height*2;
- m_camera_info.sensor.buffer[i].size.extS[1] = 8*1024; // HACK, driver use 8*1024, should be use predefined value
- allocCameraMemory(m_camera_info.sensor.ionClient, &m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes);
+ if (getInProgressCount()>0) {
+ ALOGV("DEBUG(%s): STREAM_OUTPUT_DONE and signalling REQ_PROCESSING",__FUNCTION__);
+ m_sensorThread->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING);
+ }
}
+ ALOGV("DEBUG(%s): MainThread Exit", __FUNCTION__);
+ return;
+}
- m_initFlag1 = true;
-
-
- while (!m_initFlag2) // temp
- usleep(100000);
- ALOGV("DEBUG(%s): END of SensorThreadInitialize ", __FUNCTION__);
+void ExynosCameraHWInterface2::m_sensorThreadInitialize(SignalDrivenThread * self)
+{
+ ALOGV("DEBUG(%s): ", __FUNCTION__ );
+ /* will add */
return;
}
void ExynosCameraHWInterface2::DumpInfoWithShot(struct camera2_shot_ext * shot_ext)
{
- ALOGV("#### common Section");
- ALOGV("#### magic(%x) ",
+ ALOGD("#### common Section");
+ ALOGD("#### magic(%x) ",
shot_ext->shot.magicNumber);
- ALOGV("#### ctl Section");
- ALOGV("#### meta(%d) aper(%f) exp(%lld) duration(%lld) ISO(%d) AWB(%d)",
+ ALOGD("#### ctl Section");
+ ALOGD("#### meta(%d) aper(%f) exp(%lld) duration(%lld) ISO(%d) AWB(%d)",
shot_ext->shot.ctl.request.metadataMode,
shot_ext->shot.ctl.lens.aperture,
shot_ext->shot.ctl.sensor.exposureTime,
shot_ext->shot.ctl.sensor.frameDuration,
shot_ext->shot.ctl.sensor.sensitivity,
shot_ext->shot.ctl.aa.awbMode);
- ALOGV("#### OutputStream Sensor(%d) SCP(%d) SCC(%d) pv(%d) rec(%d)",
+ ALOGD("#### OutputStream Sensor(%d) SCP(%d) SCC(%d) pv(%d) rec(%d)",
shot_ext->request_sensor, shot_ext->request_scp, shot_ext->request_scc,
shot_ext->shot.ctl.request.outputStreams[0],
shot_ext->shot.ctl.request.outputStreams[2]);
- ALOGV("#### DM Section");
- ALOGV("#### meta(%d) aper(%f) exp(%lld) duration(%lld) ISO(%d) timestamp(%lld) AWB(%d) cnt(%d)",
+ ALOGD("#### DM Section");
+ ALOGD("#### meta(%d) aper(%f) exp(%lld) duration(%lld) ISO(%d) timestamp(%lld) AWB(%d) cnt(%d)",
shot_ext->shot.dm.request.metadataMode,
shot_ext->shot.dm.lens.aperture,
shot_ext->shot.dm.sensor.exposureTime,
uint32_t currentSignal = self->GetProcessingSignal();
SensorThread * selfThread = ((SensorThread*)self);
int index;
+ int index_isp;
status_t res;
nsecs_t frameTime;
int bayersOnSensor = 0, bayersOnIsp = 0;
+ int j = 0;
+ bool isCapture = false;
ALOGV("DEBUG(%s): m_sensorThreadFunc (%x)", __FUNCTION__, currentSignal);
if (currentSignal & SIGNAL_THREAD_RELEASE) {
- ALOGD("(%s): ENTER processing SIGNAL_THREAD_RELEASE", __FUNCTION__);
+ ALOGV("(%s): ENTER processing SIGNAL_THREAD_RELEASE", __FUNCTION__);
ALOGV("(%s): calling sensor streamoff", __FUNCTION__);
cam_int_streamoff(&(m_camera_info.sensor));
ALOGV("DEBUG(%s): sensor calling reqbuf 0 ", __FUNCTION__);
cam_int_reqbufs(&(m_camera_info.sensor));
ALOGV("DEBUG(%s): sensor calling reqbuf 0 done", __FUNCTION__);
-
+
ALOGV("(%s): calling ISP streamoff", __FUNCTION__);
isp_int_streamoff(&(m_camera_info.isp));
ALOGV("(%s): calling ISP streamoff done", __FUNCTION__);
-
+
m_camera_info.isp.buffers = 0;
ALOGV("DEBUG(%s): isp calling reqbuf 0 ", __FUNCTION__);
cam_int_reqbufs(&(m_camera_info.isp));
exynos_v4l2_s_ctrl(m_camera_info.sensor.fd, V4L2_CID_IS_S_STREAM, IS_DISABLE_STREAM);
- ALOGD("(%s): EXIT processing SIGNAL_THREAD_RELEASE", __FUNCTION__);
+ ALOGV("(%s): EXIT processing SIGNAL_THREAD_RELEASE", __FUNCTION__);
selfThread->SetSignal(SIGNAL_THREAD_TERMINATE);
return;
}
{
ALOGV("DEBUG(%s): SensorThread processing SIGNAL_SENSOR_START_REQ_PROCESSING", __FUNCTION__);
int targetStreamIndex = 0, i=0;
- int matchedFrameCnt, processingReqIndex;
+ int matchedFrameCnt = -1, processingReqIndex;
struct camera2_shot_ext *shot_ext;
- if (!m_isSensorStarted)
- {
- m_isSensorStarted = true;
- ALOGD("(%s): calling preview streamon", __FUNCTION__);
- cam_int_streamon(&(m_streamThreads[0]->m_parameters.node));
- ALOGD("(%s): calling isp streamon done", __FUNCTION__);
- for (i = 0; i < m_camera_info.isp.buffers; i++) {
- ALOGV("DEBUG(%s): isp initial QBUF [%d]", __FUNCTION__, i);
- cam_int_qbuf(&(m_camera_info.isp), i);
- }
-
- cam_int_streamon(&(m_camera_info.isp));
-
- for (i = 0; i < m_camera_info.isp.buffers; i++) {
- ALOGV("DEBUG(%s): isp initial DQBUF [%d]", __FUNCTION__, i);
- cam_int_dqbuf(&(m_camera_info.isp));
- }
+ struct camera2_shot_ext *shot_ext_capture;
- ALOGV("DEBUG(%s): calling isp sctrl done", __FUNCTION__);
- exynos_v4l2_s_ctrl(m_camera_info.sensor.fd, V4L2_CID_IS_S_STREAM, IS_ENABLE_STREAM);
- ALOGV("DEBUG(%s): calling sensor sctrl done", __FUNCTION__);
-
- }
-
- ALOGV("### Sensor DQBUF start");
+ /* dqbuf from sensor */
+ ALOGV("Sensor DQbuf start");
index = cam_int_dqbuf(&(m_camera_info.sensor));
- frameTime = systemTime();
- ALOGV("### Sensor DQBUF done BayerIndex(%d)", index);
shot_ext = (struct camera2_shot_ext *)(m_camera_info.sensor.buffer[index].virt.extP[1]);
+
+ m_recordOutput = shot_ext->shot.ctl.request.outputStreams[2];
+
matchedFrameCnt = m_requestManager->FindFrameCnt(shot_ext);
- ALOGV("### Matched(%d) last(%d), dqbuf timestamp(%lld)", matchedFrameCnt, lastFrameCnt
- , shot_ext->shot.dm.sensor.timeStamp);
+
if (matchedFrameCnt != -1) {
- while (matchedFrameCnt == lastFrameCnt) {
- m_BayerManager->MarkSensorDequeue(index, -1, &frameTime);
- ALOGV("### Sensor DQBUF start");
- index = cam_int_dqbuf(&(m_camera_info.sensor));
frameTime = systemTime();
- ALOGV("### Sensor DQBUF done BayerIndex(%d)", index);
- shot_ext = (struct camera2_shot_ext *)(m_camera_info.sensor.buffer[index].virt.extP[1]);
- matchedFrameCnt = m_requestManager->FindFrameCnt(shot_ext);
- ALOGV("### Matched(%d) last(%d)", matchedFrameCnt, lastFrameCnt);
- }
- lastFrameCnt = matchedFrameCnt;
- m_scp_closing = false;
- m_scp_closed = false;
- }
- m_BayerManager->MarkSensorDequeue(index, matchedFrameCnt, &frameTime);
-
m_requestManager->RegisterTimestamp(matchedFrameCnt, &frameTime);
- ALOGV("### Sensor DQed BayerIndex[%d] passing to ISP. frameCnt(%d) timestamp(%lld)",
- index, matchedFrameCnt, frameTime);
-
- if (!(m_ispThread.get()))
- return;
-
- m_ispThread->SetSignal(SIGNAL_ISP_START_BAYER_INPUT);
-
- while (m_BayerManager->GetNumOnSensor() <= NUM_SENSOR_QBUF) {
-
- index = m_BayerManager->GetIndexForSensorEnqueue();
- if (index == -1) {
- ALOGE("ERR(%s) No free Bayer buffer", __FUNCTION__);
- break;
- }
- processingReqIndex = m_requestManager->MarkProcessingRequest(&(m_camera_info.sensor.buffer[index]));
-
- shot_ext = (struct camera2_shot_ext *)(m_camera_info.sensor.buffer[index].virt.extP[1]);
- if (processingReqIndex == -1) {
- ALOGV("DEBUG(%s) req underrun => inserting bubble to BayerIndex(%d)", __FUNCTION__, index);
- memcpy(shot_ext, &(m_camera_info.dummy_shot), sizeof(struct camera2_shot_ext));
+ m_requestManager->UpdateIspParameters(shot_ext, matchedFrameCnt);
+ ALOGD("### Isp Qbuf start(%d) count (%d), SCP(%d) SCC(%d) DIS(%d) shot_size(%d)",
+ index,
+ shot_ext->shot.ctl.request.frameCount,
+ shot_ext->request_scp,
+ shot_ext->request_scc,
+ shot_ext->dis_bypass, sizeof(camera2_shot));
+
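+ /* a shot that requests SCC output is handled as a still-capture frame below */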
+ if(shot_ext->request_scc == 1) {
+ isCapture = true;
}
- m_BayerManager->MarkSensorEnqueue(index);
- if (m_scp_closing || m_scp_closed) {
- ALOGV("(%s): SCP_CLOSING(%d) SCP_CLOSED(%d)", __FUNCTION__, m_scp_closing, m_scp_closed);
- shot_ext->request_scc = 0;
- shot_ext->request_scp = 0;
- shot_ext->request_sensor = 0;
+ if(isCapture)
+ {
+ for(j = 0; j < m_camera_info.isp.buffers; j++)
+ {
+ shot_ext_capture = (struct camera2_shot_ext *)(m_camera_info.isp.buffer[j].virt.extP[1]);
+ shot_ext_capture->request_scc = 1;
+ }
}
- ALOGV("### Sensor QBUF start BayerIndex[%d]", index);
- cam_int_qbuf(&(m_camera_info.sensor), index);
- ALOGV("### Sensor QBUF done");
- }
- if (!m_closing){
- selfThread->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING);
- }
- return;
- }
- return;
-}
-void ExynosCameraHWInterface2::m_ispThreadInitialize(SignalDrivenThread * self)
-{
- ALOGV("DEBUG(%s): ", __FUNCTION__ );
- IspThread * selfThread = ((IspThread*)self);
- char node_name[30];
- int fd = 0;
- int i =0, j=0;
+ cam_int_qbuf(&(m_camera_info.isp), index);
+ //m_ispThread->SetSignal(SIGNAL_ISP_START_BAYER_DEQUEUE);
+ usleep(10000);
+ if(isCapture)
+ {
+ for(j = 0; j < m_camera_info.isp.buffers; j++)
+ {
+ shot_ext_capture = (struct camera2_shot_ext *)(m_camera_info.isp.buffer[j].virt.extP[1]);
+ ALOGD("shot_ext_capture[%d] scp = %d, scc = %d", j, shot_ext_capture->request_scp, shot_ext_capture->request_scc);
+// DumpInfoWithShot(shot_ext_capture);
+ }
+ }
- while (!m_initFlag1) //temp
- usleep(100000);
- /*isp init*/
- memset(&node_name, 0x00, sizeof(char[30]));
- sprintf(node_name, "%s%d", NODE_PREFIX, 41);
- fd = exynos_v4l2_open(node_name, O_RDWR, 0);
+ ALOGV("### isp DQBUF start");
+ index_isp = cam_int_dqbuf(&(m_camera_info.isp));
+ //m_previewOutput = 0;
- if (fd < 0) {
- ALOGE("ERR(%s): failed to open isp video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
- }
- else {
- ALOGV("DEBUG(%s): isp video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
- }
- m_camera_info.isp.fd = fd;
+ if(isCapture)
+ {
+ for(j = 0; j < m_camera_info.isp.buffers; j++)
+ {
+ shot_ext_capture = (struct camera2_shot_ext *)(m_camera_info.isp.buffer[j].virt.extP[1]);
+ ALOGD("shot_ext_capture[%d] scp = %d, scc = %d", j, shot_ext_capture->request_scp, shot_ext_capture->request_scc);
+// DumpInfoWithShot(shot_ext_capture);
+ }
+ }
+ shot_ext = (struct camera2_shot_ext *)(m_camera_info.isp.buffer[index_isp].virt.extP[1]);
- m_camera_info.isp.width = m_camera_info.sensor.width;
- m_camera_info.isp.height = m_camera_info.sensor.height;
- m_camera_info.isp.format = m_camera_info.sensor.format;
- m_camera_info.isp.planes = m_camera_info.sensor.planes;
- m_camera_info.isp.buffers = m_camera_info.sensor.buffers;
- m_camera_info.isp.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
- m_camera_info.isp.memory = V4L2_MEMORY_DMABUF;
+ ALOGV("### Isp DQbuf done(%d) count (%d), SCP(%d) SCC(%d) shot_size(%d)",
+ index,
+ shot_ext->shot.ctl.request.frameCount,
+ shot_ext->request_scp,
+ shot_ext->request_scc,
+ shot_ext->dis_bypass, sizeof(camera2_shot));
- for(i = 0; i < m_camera_info.isp.buffers; i++){
- initCameraMemory(&m_camera_info.isp.buffer[i], m_camera_info.isp.planes);
- m_camera_info.isp.buffer[i].size.extS[0] = m_camera_info.sensor.buffer[i].size.extS[0];
- m_camera_info.isp.buffer[i].size.extS[1] = m_camera_info.sensor.buffer[i].size.extS[1];
- m_camera_info.isp.buffer[i].fd.extFd[0] = m_camera_info.sensor.buffer[i].fd.extFd[0];
- m_camera_info.isp.buffer[i].fd.extFd[1] = m_camera_info.sensor.buffer[i].fd.extFd[1];
- m_camera_info.isp.buffer[i].virt.extP[0] = m_camera_info.sensor.buffer[i].virt.extP[0];
- m_camera_info.isp.buffer[i].virt.extP[1] = m_camera_info.sensor.buffer[i].virt.extP[1];
- };
+ if(isCapture) {
+ ALOGD("======= request_scc is 1");
+ m_streamThreads[1]->SetSignal(SIGNAL_STREAM_DATA_COMING);
- cam_int_s_input(&(m_camera_info.isp), m_camera_info.sensor_id);
- cam_int_s_fmt(&(m_camera_info.isp));
- ALOGV("DEBUG(%s): isp calling reqbuf", __FUNCTION__);
- cam_int_reqbufs(&(m_camera_info.isp));
- ALOGV("DEBUG(%s): isp calling querybuf", __FUNCTION__);
- ALOGV("DEBUG(%s): isp mem alloc done", __FUNCTION__);
+ for(j = 0; j < m_camera_info.isp.buffers; j++)
+ {
+ shot_ext_capture = (struct camera2_shot_ext *)(m_camera_info.isp.buffer[j].virt.extP[1]);
+ shot_ext_capture->request_scc = 0;
+ }
- cam_int_s_input(&(m_camera_info.sensor), m_camera_info.sensor_id);
- ALOGV("DEBUG(%s): sensor s_input done", __FUNCTION__);
- if (cam_int_s_fmt(&(m_camera_info.sensor))< 0) {
- ALOGE("ERR(%s): sensor s_fmt fail", __FUNCTION__);
- }
- ALOGV("DEBUG(%s): sensor s_fmt done", __FUNCTION__);
- cam_int_reqbufs(&(m_camera_info.sensor));
- ALOGV("DEBUG(%s): sensor reqbuf done", __FUNCTION__);
- for (i = 0; i < m_camera_info.sensor.buffers; i++) {
- ALOGV("DEBUG(%s): sensor initial QBUF [%d]", __FUNCTION__, i);
- memcpy( m_camera_info.sensor.buffer[i].virt.extP[1], &(m_camera_info.dummy_shot),
- sizeof(struct camera2_shot_ext));
- m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 33*1000*1000; // apply from frame #1
+ isCapture = false;
+ }
- cam_int_qbuf(&(m_camera_info.sensor), i);
- m_BayerManager->MarkSensorEnqueue(i);
- }
- ALOGE("== stream_on :: m_camera_info.sensor");
- cam_int_streamon(&(m_camera_info.sensor));
+ if (shot_ext->request_scp) {
+ m_previewOutput = 1;
+ m_streamThreads[0]->SetSignal(SIGNAL_STREAM_DATA_COMING);
+ }
+ ALOGV("(%s): SCP_CLOSING check sensor(%d) scc(%d) scp(%d) ", __FUNCTION__,
+ shot_ext->request_sensor, shot_ext->request_scc, shot_ext->request_scp);
+ if (shot_ext->request_scc + shot_ext->request_scp + shot_ext->request_sensor == 0) {
+ ALOGV("(%s): SCP_CLOSING check OK ", __FUNCTION__);
+ m_scp_closed = true;
+ }
+ else
+ m_scp_closed = false;
-/*capture init*/
- memset(&node_name, 0x00, sizeof(char[30]));
- sprintf(node_name, "%s%d", NODE_PREFIX, 42);
- fd = exynos_v4l2_open(node_name, O_RDWR, 0);
+ m_requestManager->ApplyDynamicMetadata(shot_ext);
+ }
- if (fd < 0) {
- ALOGE("ERR(%s): failed to open capture video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
- }
- else {
- ALOGV("DEBUG(%s): capture video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
- }
- m_camera_info.capture.fd = fd;
+ processingReqIndex = m_requestManager->MarkProcessingRequest(&(m_camera_info.sensor.buffer[index]));
+ if (processingReqIndex == -1)
+ {
+ ALOGE("DEBUG(%s) req underrun => inserting bubble to BayerIndex(%d)", __FUNCTION__, index);
+ }
- m_camera_info.capture.width = m_camera2->getSensorW();
- m_camera_info.capture.height = m_camera2->getSensorH();
- m_camera_info.capture.format = V4L2_PIX_FMT_YUYV;
- m_camera_info.capture.planes = 1;
- m_camera_info.capture.buffers = 8;
- m_camera_info.capture.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
- m_camera_info.capture.memory = V4L2_MEMORY_DMABUF;
- m_camera_info.capture.ionClient = m_ionCameraClient;
+ shot_ext = (struct camera2_shot_ext *)(m_camera_info.sensor.buffer[index].virt.extP[1]);
+ if (m_scp_closing || m_scp_closed) {
+ ALOGD("(%s): SCP_CLOSING(%d) SCP_CLOSED(%d)", __FUNCTION__, m_scp_closing, m_scp_closed);
+ shot_ext->request_scc = 0;
+ shot_ext->request_scp = 0;
+ shot_ext->request_sensor = 0;
+ }
- for(i = 0; i < m_camera_info.capture.buffers; i++){
- initCameraMemory(&m_camera_info.capture.buffer[i], m_camera_info.capture.planes);
- m_camera_info.capture.buffer[i].size.extS[0] = m_camera_info.capture.width*m_camera_info.capture.height*2;
- allocCameraMemory(m_camera_info.capture.ionClient, &m_camera_info.capture.buffer[i], m_camera_info.capture.planes);
- }
+ ALOGD("### Sensor Qbuf start(%d) SCP(%d) SCC(%d) DIS(%d)", index, shot_ext->request_scp, shot_ext->request_scc, shot_ext->dis_bypass);
- cam_int_s_input(&(m_camera_info.capture), m_camera_info.sensor_id);
- cam_int_s_fmt(&(m_camera_info.capture));
- ALOGV("DEBUG(%s): capture calling reqbuf", __FUNCTION__);
- cam_int_reqbufs(&(m_camera_info.capture));
- ALOGV("DEBUG(%s): capture calling querybuf", __FUNCTION__);
+ cam_int_qbuf(&(m_camera_info.sensor), index);
+ ALOGV("### Sensor QBUF done");
- for (i = 0; i < m_camera_info.capture.buffers; i++) {
- ALOGV("DEBUG(%s): capture initial QBUF [%d]", __FUNCTION__, i);
- cam_int_qbuf(&(m_camera_info.capture), i);
+ if (!m_closing){
+ selfThread->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING);
+ }
+ return;
}
+ return;
+}
- ALOGE("== stream_on :: m_camera_info.capture");
- cam_int_streamon(&(m_camera_info.capture));
-
- m_initFlag2 = true;
- ALOGV("DEBUG(%s): END of IspThreadInitialize ", __FUNCTION__);
+void ExynosCameraHWInterface2::m_ispThreadInitialize(SignalDrivenThread * self)
+{
+ ALOGV("DEBUG(%s): ", __FUNCTION__ );
+ /* will add */
return;
}
void ExynosCameraHWInterface2::m_ispThreadFunc(SignalDrivenThread * self)
{
- uint32_t currentSignal = self->GetProcessingSignal();
- IspThread * selfThread = ((IspThread*)self);
- int index;
- status_t res;
- ALOGV("DEBUG(%s): m_ispThreadFunc (%x)", __FUNCTION__, currentSignal);
-
- if (currentSignal & SIGNAL_THREAD_RELEASE) {
- ALOGD("(%s): ENTER processing SIGNAL_THREAD_RELEASE", __FUNCTION__);
-
- ALOGV("(%s): calling capture streamoff", __FUNCTION__);
- cam_int_streamoff(&(m_camera_info.capture));
- ALOGV("(%s): calling capture streamoff done", __FUNCTION__);
-
- m_camera_info.capture.buffers = 0;
- ALOGV("DEBUG(%s): capture calling reqbuf 0 ", __FUNCTION__);
- cam_int_reqbufs(&(m_camera_info.capture));
- ALOGV("DEBUG(%s): capture calling reqbuf 0 done", __FUNCTION__);
-
- ALOGD("(%s): EXIT processing SIGNAL_THREAD_RELEASE ", __FUNCTION__);
- selfThread->SetSignal(SIGNAL_THREAD_TERMINATE);
- return;
- }
-
- if (currentSignal & SIGNAL_ISP_START_BAYER_INPUT)
- {
- struct camera2_shot_ext *shot_ext;
- int bayerIndexToEnqueue = 0;
- int processingFrameCnt = 0;
-
- ALOGV("DEBUG(%s): IspThread processing SIGNAL_ISP_START_BAYER_INPUT", __FUNCTION__);
-
- bayerIndexToEnqueue = m_BayerManager->GetIndexForIspEnqueue(&processingFrameCnt);
- shot_ext = (struct camera2_shot_ext *)(m_camera_info.sensor.buffer[bayerIndexToEnqueue].virt.extP[1]);
-
- ALOGV("### isp QBUF start bayerIndex[%d] for frameCnt(%d)", bayerIndexToEnqueue, processingFrameCnt);
-
- if (processingFrameCnt != -1) {
- ALOGV("### writing output stream info");
- m_requestManager->UpdateIspParameters(shot_ext, processingFrameCnt);
- }
- else {
- memcpy(shot_ext, &(m_camera_info.dummy_shot), sizeof(struct camera2_shot_ext));
- }
- if (m_scp_flushing) {
- shot_ext->request_scp = 1;
- }
- if (m_scp_closing || m_scp_closed) {
- ALOGV("(%s): SCP_CLOSING(%d) SCP_CLOSED(%d)", __FUNCTION__, m_scp_closing, m_scp_closed);
- shot_ext->request_scc = 0;
- shot_ext->request_scp = 0;
- shot_ext->request_sensor = 0;
- }
- cam_int_qbuf(&(m_camera_info.isp), bayerIndexToEnqueue);
- ALOGV("### isp QBUF done bayerIndex[%d] scp(%d)", bayerIndexToEnqueue, shot_ext->request_scp);
- m_BayerManager->MarkIspEnqueue(bayerIndexToEnqueue);
-
- if (m_BayerManager->GetNumOnHalFilled() != 0) {
- // input has priority
- selfThread->SetSignal(SIGNAL_ISP_START_BAYER_INPUT);
- return;
- }
- else {
- selfThread->SetSignal(SIGNAL_ISP_START_BAYER_DEQUEUE);
- }
- }
-
- if (currentSignal & SIGNAL_ISP_START_BAYER_DEQUEUE)
- {
- struct camera2_shot_ext *shot_ext;
- int bayerIndexToDequeue = 0;
- int processingFrameCnt = 0;
- ALOGV("DEBUG(%s): IspThread processing SIGNAL_ISP_START_BAYER_DEQUEUE", __FUNCTION__);
- bayerIndexToDequeue = m_BayerManager->GetIndexForIspDequeue(&processingFrameCnt);
- m_ispProcessingFrameCnt = processingFrameCnt;
- m_previewOutput = 0;
- m_recordOutput = 0;
- shot_ext = (struct camera2_shot_ext *)(m_camera_info.sensor.buffer[bayerIndexToDequeue].virt.extP[1]);
- if (processingFrameCnt != -1 || m_scp_flushing) // bubble
- {
- if (shot_ext->request_scc) {
- m_streamThreads[1]->SetSignal(SIGNAL_STREAM_DATA_COMING);
- }
- m_previewOutput = shot_ext->shot.ctl.request.outputStreams[0];
- m_recordOutput = shot_ext->shot.ctl.request.outputStreams[2];
- if (m_previewOutput || m_recordOutput) {
- m_streamThreads[0]->SetSignal(SIGNAL_STREAM_DATA_COMING);
- }
- }
- ALOGV("### isp DQBUF start");
- index = cam_int_dqbuf(&(m_camera_info.isp));
- ALOGV("### isp DQBUF done bayerIndex(%d) for frameCnt(%d)", index, processingFrameCnt);
- shot_ext = (struct camera2_shot_ext *)(m_camera_info.sensor.buffer[index].virt.extP[1]);
- ALOGV("(%s): SCP_CLOSING check sensor(%d) scc(%d) scp(%d) ", __FUNCTION__,
- shot_ext->request_sensor, shot_ext->request_scc, shot_ext->request_scp);
- if (shot_ext->request_scc + shot_ext->request_scp + shot_ext->request_sensor == 0) {
- ALOGV("(%s): SCP_CLOSING check OK ", __FUNCTION__);
- m_scp_closed = true;
- }
- else
- m_scp_closed = false;
- if (processingFrameCnt != -1) {
- m_requestManager->ApplyDynamicMetadata(shot_ext, processingFrameCnt);
- }
- m_BayerManager->MarkIspDequeue(index);
- if (m_BayerManager->GetNumOnIsp() != 0) {
- selfThread->SetSignal(SIGNAL_ISP_START_BAYER_DEQUEUE);
- }
- }
-
+ ALOGV("DEBUG(%s): ", __FUNCTION__ );
+ /* will add */
return;
}
int i, index = -1, cnt_to_dq = 0;
status_t res;
ALOGV("DEBUG(%s): processing SIGNAL_THREAD_RELEASE", __FUNCTION__);
-
-
+ ALOGD("(%s):(%d) SIGNAL_THREAD_RELEASE", __FUNCTION__, selfStreamParms->streamType);
if (selfThread->m_isBufferInit) {
for ( i=0 ; i < selfStreamParms->numSvcBuffers; i++) {
ALOGV("DEBUG(%s): calling stream(%d) reqbuf 0 DONE(fd:%d)", __FUNCTION__,
selfThread->m_index, selfStreamParms->fd);
}
- selfThread->m_releasing = false;
}
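+ /* Clear the release flag even when the stream buffers were never initialized. */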
+ selfThread->m_releasing = false;
+ ALOGD("m_releasing set false");
if (selfThread->m_index == 1 && m_resizeBuf.size.s != 0) {
freeCameraMemory(&m_resizeBuf, 1);
}
void *virtAddr[3];
int i, j;
int index;
+ nsecs_t timestamp;
+
ALOGV("DEBUG(%s): stream(%d) processing SIGNAL_STREAM_DATA_COMING",
__FUNCTION__,selfThread->m_index);
+
+ if (selfStreamParms->streamType == STREAM_TYPE_INDIRECT)
+ {
+ ALOGD("stream(%s) processing SIGNAL_STREAM_DATA_COMING",
+ __FUNCTION__,selfThread->m_index);
+ }
+
if (!(selfThread->m_isBufferInit)) {
for ( i=0 ; i < selfStreamParms->numSvcBuffers; i++) {
res = selfStreamParms->streamOps->dequeue_buffer(selfStreamParms->streamOps, &buf);
}
selfThread->m_isBufferInit = true;
}
-
+
if (m_recordingEnabled && m_needsRecordBufferInit) {
ALOGV("DEBUG(%s): Recording Buffer Initialization numsvcbuf(%d)",
__FUNCTION__, selfRecordParms->numSvcBuffers);
- int checkingIndex = 0;
- bool found = false;
+ int checkingIndex = 0;
+ bool found = false;
for ( i=0 ; i < selfRecordParms->numSvcBuffers; i++) {
res = selfRecordParms->streamOps->dequeue_buffer(selfRecordParms->streamOps, &buf);
if (res != NO_ERROR || buf == NULL) {
selfRecordParms->outputWidth, selfRecordParms->outputHeight, virtAddr) != 0) {
ALOGE("ERR(%s): could not obtain gralloc buffer", __FUNCTION__);
}
- else {
+ else {
ALOGV("DEBUG(%s): [record] locked img buf plane0(%x) plane1(%x) plane2(%x)",
__FUNCTION__, (unsigned int)virtAddr[0], (unsigned int)virtAddr[1], (unsigned int)virtAddr[2]);
-
}
found = false;
for (checkingIndex = 0; checkingIndex < selfRecordParms->numSvcBuffers ; checkingIndex++) {
}
ALOGV("DEBUG(%s): [record] found(%d) - index[%d]", __FUNCTION__, found, checkingIndex);
if (!found) break;
- index = checkingIndex;
+ index = checkingIndex;
if (index == -1) {
- ALOGD("ERR(%s): could not find buffer index", __FUNCTION__);
+ ALOGV("ERR(%s): could not find buffer index", __FUNCTION__);
}
else {
ALOGV("DEBUG(%s): found buffer index[%d] - status(%d)",
if (selfStreamParms->svcBufStatus[index] != ON_DRIVER)
- ALOGD("DBG(%s): DQed buffer status abnormal (%d) ",
+ ALOGV("DBG(%s): DQed buffer status abnormal (%d) ",
__FUNCTION__, selfStreamParms->svcBufStatus[index]);
selfStreamParms->svcBufStatus[index] = ON_HAL;
}
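+ /* Return the filled recording buffer to the service, timestamped with the current system time. */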
res = selfRecordParms->streamOps->enqueue_buffer(selfRecordParms->streamOps,
- m_requestManager->GetTimestamp(m_ispProcessingFrameCnt),
+ systemTime(),
&(selfRecordParms->svcBufHandle[selfRecordParms->svcBufIndex]));
ALOGV("DEBUG(%s): stream(%d) record enqueue_buffer to svc done res(%d)", __FUNCTION__,
selfThread->m_index, res);
selfRecordParms->svcBufStatus[selfRecordParms->svcBufIndex] = ON_SERVICE;
selfRecordParms->numSvcBufsInHal--;
}
-
- m_requestManager->NotifyStreamOutput(m_ispProcessingFrameCnt, 2);
-
}
- if (m_previewOutput) {
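+ /* Display the preview buffer only after the initial sensor skip frames have been consumed. */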
+ if (m_previewOutput && m_requestManager->GetSkipCnt() <= 0) {
+
+ ALOGV("** Display Preview(frameCnt:%d)", m_requestManager->GetFrameIndex());
res = selfStreamParms->streamOps->enqueue_buffer(selfStreamParms->streamOps,
- m_requestManager->GetTimestamp(m_ispProcessingFrameCnt), &(selfStreamParms->svcBufHandle[index]));
+ m_requestManager->GetTimestamp(m_requestManager->GetFrameIndex()),
+ &(selfStreamParms->svcBufHandle[index]));
+
ALOGV("DEBUG(%s): stream(%d) enqueue_buffer to svc done res(%d)", __FUNCTION__, selfThread->m_index, res);
}
else {
else {
selfStreamParms->svcBufStatus[index] = ON_HAL;
}
- m_requestManager->NotifyStreamOutput(m_ispProcessingFrameCnt, selfThread->m_index);
}
else if (selfStreamParms->streamType == STREAM_TYPE_INDIRECT) {
ExynosRect jpegRect;
ExynosBuffer resizeBufInfo;
ExynosRect m_orgPictureRect;
- ALOGV("DEBUG(%s): stream(%d) type(%d) DQBUF START ",__FUNCTION__,
+ ALOGD("DEBUG(%s): stream(%d) type(%d) DQBUF START ",__FUNCTION__,
selfThread->m_index, selfStreamParms->streamType);
index = cam_int_dqbuf(&(selfStreamParms->node));
- ALOGV("DEBUG(%s): stream(%d) type(%d) DQBUF done index(%d)",__FUNCTION__,
+ ALOGD("DEBUG(%s): stream(%d) type(%d) DQBUF done index(%d)",__FUNCTION__,
selfThread->m_index, selfStreamParms->streamType, index);
m_jpegEncodingFrameCnt = m_ispProcessingFrameCnt;
m_orgPictureRect.w, m_orgPictureRect.h,
&cropX, &cropY,
&cropW, &cropH,
- 0);
+ 0);
ALOGV("DEBUG(%s):cropX = %d, cropY = %d, cropW = %d, cropH = %d",
__FUNCTION__, cropX, cropY, cropW, cropH);
jpegRect.h = m_orgPictureRect.h;
jpegRect.colorFormat = V4L2_PIX_FMT_NV16;
- m_requestManager->NotifyStreamOutput(m_jpegEncodingFrameCnt, selfThread->m_index);
if (yuv2Jpeg(&m_resizeBuf, &selfStreamParms->svcBuffers[selfStreamParms->svcBufIndex], &jpegRect) == false)
ALOGE("ERR(%s):yuv2Jpeg() fail", __FUNCTION__);
cam_int_qbuf(&(selfStreamParms->node), index);
selfRecordParms->numSvcBufsInHal ++;
ALOGV("DEBUG(%s): record got buf(%x) numBufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf),
selfRecordParms->numSvcBufsInHal, ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
+
const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf);
-
bool found = false;
int checkingIndex = 0;
for (checkingIndex = 0; checkingIndex < selfRecordParms->numSvcBuffers ; checkingIndex++) {
}
}
ALOGV("DEBUG(%s): recording dequeueed_buffer found index(%d)", __FUNCTION__, found);
- if (!found) break;
+
+ if (!found) {
+ break;
+ }
+
index = checkingIndex;
if (selfRecordParms->svcBufStatus[index] == ON_SERVICE) {
selfRecordParms->svcBufStatus[index] = ON_HAL;
}
else {
ALOGV("DEBUG(%s): record bufstatus abnormal [%d] status = %d", __FUNCTION__,
- index, selfRecordParms->svcBufStatus[index]);
+ index, selfRecordParms->svcBufStatus[index]);
}
} while (0);
}
v4l2_buf.length = currentNode->planes;
v4l2_buf.m.planes[0].m.fd = priv_handle->fd;
- v4l2_buf.m.planes[2].m.fd = priv_handle->fd1;
- v4l2_buf.m.planes[1].m.fd = priv_handle->fd2;
+ v4l2_buf.m.planes[2].m.fd = priv_handle->fd1;
+ v4l2_buf.m.planes[1].m.fd = priv_handle->fd2;
for (plane_index=0 ; plane_index < v4l2_buf.length ; plane_index++) {
v4l2_buf.m.planes[plane_index].length = currentBuf->size.extS[plane_index];
ALOGV("DEBUG(%s): plane(%d): fd(%d) length(%d)",
ALOGE("ERR(%s):jpegEnc.setJpegFormat() fail", __FUNCTION__);
goto jpeg_encode_done;
}
-#if 0
- if (m_curCameraInfo->thumbnailW != 0 && m_curCameraInfo->thumbnailH != 0) {
- int thumbW = 0, thumbH = 0;
- mExifInfo.enableThumb = true;
- if (rect->w < 320 || rect->h < 240) {
- thumbW = 160;
- thumbH = 120;
- } else {
- thumbW = m_curCameraInfo->thumbnailW;
- thumbH = m_curCameraInfo->thumbnailH;
- }
- if (jpegEnc.setThumbnailSize(thumbW, thumbH)) {
- LOGE("ERR(%s):jpegEnc.setThumbnailSize(%d, %d) fail", __FUNCTION__, thumbW, thumbH);
- goto jpeg_encode_done;
- }
- if (0 < m_jpegThumbnailQuality && m_jpegThumbnailQuality <= 100) {
- if (jpegEnc.setThumbnailQuality(m_jpegThumbnailQuality)) {
- LOGE("ERR(%s):jpegEnc.setThumbnailQuality(%d) fail", __FUNCTION__, m_jpegThumbnailQuality);
- goto jpeg_encode_done;
- }
- }
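+ /* Thumbnail generation for EXIF is disabled. */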
+ mExifInfo.enableThumb = false;
- m_setExifChangedAttribute(&mExifInfo, rect);
- } else
-#endif
- {
- mExifInfo.enableThumb = false;
- }
ALOGV("DEBUG(%s):calling jpegEnc.setInBuf() yuvSize(%d)", __FUNCTION__, *yuvSize);
if (jpegEnc.setInBuf((int *)&(yuvBuf->fd.fd), (int *)yuvSize)) {
ALOGE("ERR(%s):jpegEnc.setInBuf() fail", __FUNCTION__);
ExynosCameraHWInterface2::MainThread::~MainThread()
{
- ALOGD("(%s):", __FUNCTION__);
+ ALOGV("(%s):", __FUNCTION__);
}
void ExynosCameraHWInterface2::MainThread::release()
{
- ALOGD("(%s):", __func__);
+ ALOGV("(%s):", __func__);
SetSignal(SIGNAL_THREAD_RELEASE);
}
ExynosCameraHWInterface2::SensorThread::~SensorThread()
{
- ALOGD("(%s):", __FUNCTION__);
+ ALOGV("(%s):", __FUNCTION__);
}
void ExynosCameraHWInterface2::SensorThread::release()
{
- ALOGD("(%s):", __func__);
+ ALOGV("(%s):", __func__);
SetSignal(SIGNAL_THREAD_RELEASE);
}
ExynosCameraHWInterface2::IspThread::~IspThread()
{
- ALOGD("(%s):", __FUNCTION__);
+ ALOGV("(%s):", __FUNCTION__);
}
void ExynosCameraHWInterface2::IspThread::release()
{
- ALOGD("(%s):", __func__);
+ ALOGV("(%s):", __func__);
SetSignal(SIGNAL_THREAD_RELEASE);
}
ExynosCameraHWInterface2::StreamThread::~StreamThread()
{
- ALOGD("(%s):", __FUNCTION__);
+ ALOGV("(%s):", __FUNCTION__);
}
void ExynosCameraHWInterface2::StreamThread::setParameter(stream_parameters_t * new_parameters)
static int HAL2_camera_device_close(struct hw_device_t* device)
{
- ALOGD("%s: ENTER", __FUNCTION__);
+ ALOGV("%s: ENTER", __FUNCTION__);
if (device) {
camera2_device_t *cam_device = (camera2_device_t *)device;
- ALOGD("cam_device(0x%08x):", (unsigned int)cam_device);
- ALOGD("g_cam2_device(0x%08x):", (unsigned int)g_cam2_device);
+ ALOGV("cam_device(0x%08x):", (unsigned int)cam_device);
+ ALOGV("g_cam2_device(0x%08x):", (unsigned int)g_cam2_device);
delete static_cast<ExynosCameraHWInterface2 *>(cam_device->priv);
- g_cam2_device = NULL;
+ g_cam2_device = NULL;
free(cam_device);
g_camera_vaild = false;
}
- ALOGD("%s: EXIT", __FUNCTION__);
+ ALOGV("%s: EXIT", __FUNCTION__);
return 0;
}
const struct camera2_device *dev,
uint32_t stream_id)
{
- ALOGD("DEBUG(%s)(id: %d):", __FUNCTION__, stream_id);
+ ALOGV("DEBUG(%s)(id: %d):", __FUNCTION__, stream_id);
if (!g_camera_vaild)
return 0;
return obj(dev)->releaseStream(stream_id);
static int HAL2_getCameraInfo(int cameraId, struct camera_info *info)
{
- ALOGD("DEBUG(%s): cameraID: %d", __FUNCTION__, cameraId);
+ ALOGV("DEBUG(%s): cameraID: %d", __FUNCTION__, cameraId);
static camera_metadata_t * mCameraInfo[2] = {NULL, NULL};
-
+
status_t res;
if (cameraId == 0) {
int cameraId = atoi(id);
g_camera_vaild = false;
- ALOGD("\n\n>>> I'm Samsung's CameraHAL_2(ID:%d) <<<\n\n", cameraId);
+ ALOGV("\n\n>>> I'm Samsung's CameraHAL_2(ID:%d) <<<\n\n", cameraId);
if (cameraId < 0 || cameraId >= HAL2_getNumberOfCameras()) {
ALOGE("ERR(%s):Invalid camera ID %s", __FUNCTION__, id);
return -EINVAL;
}
- ALOGD("g_cam2_device : 0x%08x", (unsigned int)g_cam2_device);
+ ALOGV("g_cam2_device : 0x%08x", (unsigned int)g_cam2_device);
if (g_cam2_device) {
if (obj(g_cam2_device)->getCameraId() == cameraId) {
ALOGV("DEBUG(%s):returning existing camera ID %s", __FUNCTION__, id);
while (g_cam2_device)
usleep(10000);
- /*ALOGE("ERR(%s):Cannot open camera %d. camera %d is already running!",
- __FUNCTION__, cameraId, obj(g_cam2_device)->getCameraId());
- return -ENOSYS;*/
}
}
g_cam2_device = (camera2_device_t *)malloc(sizeof(camera2_device_t));
- ALOGD("g_cam2_device : 0x%08x", (unsigned int)g_cam2_device);
+ ALOGV("g_cam2_device : 0x%08x", (unsigned int)g_cam2_device);
if (!g_cam2_device)
return -ENOMEM;