newEntry->original_request = new_request;
// TODO : allocate internal_request dynamically
m_metadataConverter->ToInternalShot(new_request, &(newEntry->internal_shot));
- newEntry->output_stream_count = newEntry->internal_shot.shot.ctl.request.id; // temp
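+ // the stream count is stashed in outputStreams[15] by ToInternalShot (see MetadataConverter below)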
+ newEntry->output_stream_count = newEntry->internal_shot.shot.ctl.request.outputStreams[15];
m_numOfEntries++;
m_entryInsertionIndex = newInsertionIndex;
return false;
}
m_entryFrameOutputIndex = tempFrameOutputIndex;
- m_tempFrameMetadata = place_camera_metadata(m_tempFrameMetadataBuf, 2000, 10, 500); //estimated
+ m_tempFrameMetadata = place_camera_metadata(m_tempFrameMetadataBuf, 2000, 15, 500); //estimated
res = m_metadataConverter->ToDynamicMetadata(&(currentEntry->internal_shot),
m_tempFrameMetadata);
if (res!=NO_ERROR) {
ALOGV("DEBUG(%s):", __FUNCTION__);
Mutex::Autolock lock(m_requestMutex);
struct camera2_shot_ext * shot_ext;
+ struct camera2_shot_ext * request_shot;
int targetStreamIndex = 0;
if (m_numOfEntries == 0) {
int newProcessingIndex = GetNextIndex(m_entryProcessingIndex);
newEntry = &(entries[newProcessingIndex]);
-
+ request_shot = &newEntry->internal_shot;
if (newEntry->status != REGISTERED) {
ALOGV("DEBUG(%s): Circular buffer abnormal ", __FUNCTION__);
return -1;
}
newEntry->status = REQUESTED;
- // TODO : replace the codes below with a single memcpy of pre-converted 'shot'
shot_ext = (struct camera2_shot_ext *)(buf->virt.extP[1]);
- memset(shot_ext, 0x00, sizeof(struct camera2_shot_ext));
+ ALOGV("DEBUG(%s):Writing the info of Framecnt(%d)", __FUNCTION__, request_shot->shot.ctl.request.frameCount);
+ memcpy(shot_ext, &newEntry->internal_shot, sizeof(struct camera2_shot_ext));
shot_ext->request_sensor = 1;
shot_ext->dis_bypass = 1;
shot_ext->dnr_bypass = 1;
for (int i = 0; i < newEntry->output_stream_count; i++) {
- // TODO : match with actual stream index;
targetStreamIndex = newEntry->internal_shot.shot.ctl.request.outputStreams[i];
if (targetStreamIndex==0) {
ALOGV("DEBUG(%s): outputstreams(%d) has abnormal value(%d)", __FUNCTION__, i, targetStreamIndex);
}
}
- shot_ext->shot.ctl.request.metadataMode = METADATA_MODE_FULL;
- shot_ext->shot.magicNumber = 0x23456789;
- shot_ext->shot.ctl.sensor.exposureTime = 0;
- shot_ext->shot.ctl.sensor.frameDuration = 33*1000*1000;
- shot_ext->shot.ctl.sensor.sensitivity = 0;
-
- shot_ext->shot.ctl.scaler.cropRegion[0] = 0;
- shot_ext->shot.ctl.scaler.cropRegion[1] = 0;
- shot_ext->shot.ctl.scaler.cropRegion[2] = m_cropX;
-
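+ // with AE enabled, clear the manual exposure/sensitivity values and pin frameDuration to 33ms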
+ if (shot_ext->shot.ctl.aa.aeMode == AA_AEMODE_ON) {
+ ALOGV("(%s): AE_ON => ignoring some params", __FUNCTION__);
+ shot_ext->shot.ctl.sensor.exposureTime = 0;
+ shot_ext->shot.ctl.sensor.sensitivity = 0;
+ shot_ext->shot.ctl.sensor.frameDuration = 33*1000*1000;
+ // TODO : check frameDuration
+ }
m_entryProcessingIndex = newProcessingIndex;
// Dump();
void RequestManager::ApplyDynamicMetadata(struct camera2_shot_ext *shot_ext, int frameCnt)
{
int index;
+ struct camera2_shot_ext * request_shot;
+ nsecs_t timeStamp;
ALOGV("DEBUG(%s): frameCnt(%d)", __FUNCTION__, frameCnt);
}
request_manager_entry * newEntry = &(entries[index]);
+ request_shot = &(newEntry->internal_shot);
newEntry->dynamic_meta_vaild = true;
- // TODO : move some code of PrepareFrame here
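+ // keep the timestamp already stored for this entry; only the rest of the dm block is taken from the incoming shot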
+ timeStamp = request_shot->shot.dm.sensor.timeStamp;
+ memcpy(&request_shot->shot.dm, &shot_ext->shot.dm, sizeof(struct camera2_dm));
+ request_shot->shot.dm.sensor.timeStamp = timeStamp;
CheckCompleted(index);
}
currMetadata->shot.ctl.request.frameCount,
currMetadata->shot.ctl.sensor.exposureTime,
currMetadata->shot.ctl.sensor.sensitivity);
- if (currMetadata->shot.ctl.request.id==0)
+ if (currMetadata->shot.ctl.request.outputStreams[15] == 0)
ALOGV("#### No output stream selected");
- else if (currMetadata->shot.ctl.request.id==1)
+ else if (currMetadata->shot.ctl.request.outputStreams[15] == 1)
ALOGV("#### OutputStreamId : %d", currMetadata->shot.ctl.request.outputStreams[0]);
- else if (currMetadata->shot.ctl.request.id==2)
+ else if (currMetadata->shot.ctl.request.outputStreams[15] == 2)
ALOGV("#### OutputStreamId : %d, %d", currMetadata->shot.ctl.request.outputStreams[0],
currMetadata->shot.ctl.request.outputStreams[1]);
else
- ALOGV("#### OutputStream num (%d) abnormal ", currMetadata->shot.ctl.request.id);
+ ALOGV("#### OutputStream num (%d) abnormal ", currMetadata->shot.ctl.request.outputStreams[15]);
}
-void RequestManager::UpdateOutputStreamInfo(struct camera2_shot_ext *shot_ext, int frameCnt)
+void RequestManager::UpdateIspParameters(struct camera2_shot_ext *shot_ext, int frameCnt)
{
int index, targetStreamIndex;
+ struct camera2_shot_ext * request_shot;
ALOGV("DEBUG(%s): updating info with frameCnt(%d)", __FUNCTION__, frameCnt);
if (frameCnt < 0)
}
request_manager_entry * newEntry = &(entries[index]);
+ request_shot = &newEntry->internal_shot;
shot_ext->request_sensor = 1;
shot_ext->request_scc = 0;
shot_ext->request_scp = 0;
shot_ext->shot.ctl.request.outputStreams[1] = 0;
shot_ext->shot.ctl.request.outputStreams[2] = 0;
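+ // overwrite the controls with the per-request values saved at registration time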
+ memcpy(&shot_ext->shot.ctl, &request_shot->shot.ctl, sizeof(struct camera2_ctl));
for (int i = 0; i < newEntry->output_stream_count; i++) {
// TODO : match with actual stream index;
- targetStreamIndex = newEntry->internal_shot.shot.ctl.request.outputStreams[i];
+ targetStreamIndex = request_shot->shot.ctl.request.outputStreams[i];
if (targetStreamIndex==0) {
ALOGV("DEBUG(%s): outputstreams item[%d] is for scalerP", __FUNCTION__, i);
ALOGV("#### magic(%x) ",
shot_ext->shot.magicNumber);
ALOGV("#### ctl Section");
- ALOGV("#### metamode(%d) exposureTime(%lld) duration(%lld) ISO(%d) ",
+ ALOGV("#### meta(%d) aper(%f) exp(%lld) duration(%lld) ISO(%d) AWB(%d)",
shot_ext->shot.ctl.request.metadataMode,
+ shot_ext->shot.ctl.lens.aperture,
shot_ext->shot.ctl.sensor.exposureTime,
shot_ext->shot.ctl.sensor.frameDuration,
- shot_ext->shot.ctl.sensor.sensitivity);
+ shot_ext->shot.ctl.sensor.sensitivity,
+ shot_ext->shot.ctl.aa.awbMode);
ALOGV("#### OutputStream Sensor(%d) SCP(%d) SCC(%d) pv(%d) rec(%d)",
shot_ext->request_sensor, shot_ext->request_scp, shot_ext->request_scc,
shot_ext->shot.ctl.request.outputStreams[1], shot_ext->shot.ctl.request.outputStreams[2]);
ALOGV("#### DM Section");
- ALOGV("#### metamode(%d) exposureTime(%lld) duration(%lld) ISO(%d) timestamp(%lld)",
+ ALOGV("#### meta(%d) aper(%f) exp(%lld) duration(%lld) ISO(%d) timestamp(%lld) AWB(%d) cnt(%d)",
shot_ext->shot.dm.request.metadataMode,
+ shot_ext->shot.dm.lens.aperture,
shot_ext->shot.dm.sensor.exposureTime,
shot_ext->shot.dm.sensor.frameDuration,
shot_ext->shot.dm.sensor.sensitivity,
- shot_ext->shot.dm.sensor.timeStamp);
+ shot_ext->shot.dm.sensor.timeStamp,
+ shot_ext->shot.dm.aa.awbMode,
+ shot_ext->shot.dm.request.frameCount);
}
void ExynosCameraHWInterface2::m_sensorThreadFunc(SignalDrivenThread * self)
if (processingFrameCnt != -1) {
ALOGV("### writing output stream info");
- m_requestManager->UpdateOutputStreamInfo(shot_ext, processingFrameCnt);
+ m_requestManager->UpdateIspParameters(shot_ext, processingFrameCnt);
}
else {
memcpy(shot_ext, &(m_camera_info.dummy_shot), sizeof(struct camera2_shot_ext));
if (request == NULL || dst_ext == NULL)
return BAD_VALUE;
- dst = &(dst_ext->shot);
+ memset((void*)dst_ext, 0, sizeof(struct camera2_shot_ext));
+ dst = &dst_ext->shot;
+ dst->magicNumber = 0x23456789;
num_entry = (uint32_t)get_camera_metadata_entry_count(request);
for (index = 0 ; index < num_entry ; index++) {
break;
case ANDROID_JPEG_GPS_COORDINATES:
- if (NO_ERROR != CheckEntryTypeMismatch(&curr_entry, TYPE_DOUBLE, 2)) // needs check
+ if (NO_ERROR != CheckEntryTypeMismatch(&curr_entry, TYPE_DOUBLE, 3))
break;
for (i=0 ; i<curr_entry.count ; i++)
dst->ctl.jpeg.gpsCoordinates[i] = curr_entry.data.d[i];
case ANDROID_JPEG_GPS_PROCESSING_METHOD:
if (NO_ERROR != CheckEntryTypeMismatch(&curr_entry, TYPE_BYTE, 32))
break;
- dst->ctl.jpeg.gpsProcessingMethod = curr_entry.data.u8[0];
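+ // copy the whole processing-method string into the extended shot instead of only its first byte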
+ for (i=0 ; i<curr_entry.count ; i++)
+ dst_ext->gpsProcessingMethod[i] = curr_entry.data.u8[i];
break;
case ANDROID_JPEG_GPS_TIMESTAMP:
case ANDROID_CONTROL_EFFECT_MODE:
if (NO_ERROR != CheckEntryTypeMismatch(&curr_entry, TYPE_BYTE, 1))
break;
- dst->ctl.aa.effectMode = (enum aa_effect_mode)curr_entry.data.u8[0];
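+ // internal aa enums are offset by one from the framework values (matched by the -1 conversions in ToDynamicMetadata)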
+ dst->ctl.aa.effectMode = (enum aa_effect_mode)(curr_entry.data.u8[0] + 1);
break;
case ANDROID_CONTROL_SCENE_MODE:
if (NO_ERROR != CheckEntryTypeMismatch(&curr_entry, TYPE_BYTE, 1))
break;
- dst->ctl.aa.sceneMode = (enum aa_scene_mode)curr_entry.data.u8[0];
+ dst->ctl.aa.sceneMode = (enum aa_scene_mode)(curr_entry.data.u8[0] + 1);
break;
case ANDROID_CONTROL_VIDEO_STABILIZATION_MODE:
case ANDROID_CONTROL_AE_MODE:
if (NO_ERROR != CheckEntryTypeMismatch(&curr_entry, TYPE_BYTE, 1))
break;
- dst->ctl.aa.aeMode= (enum aa_aemode)curr_entry.data.u8[0];
+ dst->ctl.aa.aeMode = (enum aa_aemode)(curr_entry.data.u8[0] + 1);
break;
case ANDROID_CONTROL_AE_REGIONS:
dst->ctl.request.outputStreams[i] = curr_entry.data.u8[i];
ALOGV("DEBUG(%s): OUTPUT_STREAM[%d] = %d ", __FUNCTION__, i, (int)(dst->ctl.request.outputStreams[i]));
}
- dst->ctl.request.id = curr_entry.count; // temporary
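+ // stash the output stream count in outputStreams[15] instead of overloading request.id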
+ dst->ctl.request.outputStreams[15] = curr_entry.count;
break;
case ANDROID_REQUEST_FRAME_COUNT:
status_t MetadataConverter::ToDynamicMetadata(struct camera2_shot_ext * metadata_ext, camera_metadata_t * dst)
{
status_t res;
- struct camera2_shot * metadata = &(metadata_ext->shot);
+ struct camera2_shot * metadata = &metadata_ext->shot;
uint8_t byteData;
uint32_t intData;
- ALOGV("DEBUG(%s): TEMP version using original request METADATA", __FUNCTION__);
if (0 != add_camera_metadata_entry(dst, ANDROID_REQUEST_ID,
&(metadata->ctl.request.id), 1))
return NO_MEMORY;
&(metadata->ctl.request.metadataMode), 1))
return NO_MEMORY;
- // needs check!
if (0 != add_camera_metadata_entry(dst, ANDROID_REQUEST_FRAME_COUNT,
&(metadata->ctl.request.frameCount), 1))
return NO_MEMORY;
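+ // report the sensor/lens dynamic values before the METADATA_MODE_NONE early return below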
+ if (0 != add_camera_metadata_entry(dst, ANDROID_SENSOR_TIMESTAMP,
+ &metadata->dm.sensor.timeStamp, 1))
+ return NO_MEMORY;
+
+ if (0 != add_camera_metadata_entry(dst, ANDROID_SENSOR_EXPOSURE_TIME,
+ &metadata->dm.sensor.exposureTime, 1))
+ return NO_MEMORY;
+
+ if (0 != add_camera_metadata_entry(dst, ANDROID_LENS_APERTURE,
+ &metadata->dm.lens.aperture, 1))
+ return NO_MEMORY;
+
+ ALOGV("(%s): ID(%d) METAMODE(%d) FrameCnt(%d) Timestamp(%lld) exposure(%lld) aper(%f)", __FUNCTION__,
+ metadata->ctl.request.id, metadata->ctl.request.metadataMode, metadata->ctl.request.frameCount,
+ metadata->dm.sensor.timeStamp, metadata->dm.sensor.exposureTime, metadata->dm.lens.aperture);
+
+
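+ // convert the internal aa enums (offset by one) back to framework metadata values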
+ byteData = metadata->dm.aa.awbMode - 1;
+ if (0 != add_camera_metadata_entry(dst, ANDROID_CONTROL_AWB_MODE,
+ &byteData, 1))
+ return NO_MEMORY;
+
+ byteData = metadata->dm.aa.aeMode - 1;
+ if (0 != add_camera_metadata_entry(dst, ANDROID_CONTROL_AE_MODE,
+ &byteData, 1))
+ return NO_MEMORY;
+
+ byteData = metadata->ctl.aa.sceneMode - 1;
+ if (0 != add_camera_metadata_entry(dst, ANDROID_CONTROL_SCENE_MODE,
+ &byteData, 1))
+ return NO_MEMORY;
+
+ byteData = metadata->ctl.aa.effectMode - 1;
+ if (0 != add_camera_metadata_entry(dst, ANDROID_CONTROL_EFFECT_MODE,
+ &byteData, 1))
+ return NO_MEMORY;
+
+ intData = metadata->ctl.aa.aeExpCompensation;
+ if (0 != add_camera_metadata_entry(dst, ANDROID_CONTROL_AE_EXP_COMPENSATION,
+ &intData, 1))
+ return NO_MEMORY;
+
+
+ ALOGV("(%s): AWB(%d) AE(%d) SCENE(%d) EFFECT(%d) AEComp(%d)", __FUNCTION__,
+ metadata->dm.aa.awbMode - 1, metadata->dm.aa.aeMode - 1, metadata->ctl.aa.sceneMode - 1,
+ metadata->ctl.aa.effectMode - 1, metadata->ctl.aa.aeExpCompensation );
+
if (metadata->ctl.request.metadataMode == METADATA_MODE_NONE) {
ALOGV("DEBUG(%s): METADATA_MODE_NONE", __FUNCTION__);
return NO_ERROR;
}
-
- ALOGV("DEBUG(%s): METADATA_MODE_FULL", __FUNCTION__);
-
- if (0 != add_camera_metadata_entry(dst, ANDROID_SENSOR_TIMESTAMP,
- &(metadata->dm.sensor.timeStamp), 1))
- return NO_MEMORY;
- ALOGV("DEBUG(%s): Timestamp: %lld", __FUNCTION__, metadata->dm.sensor.timeStamp);
return NO_ERROR;
- if (0 != add_camera_metadata_entry(dst, ANDROID_SENSOR_EXPOSURE_TIME,
- &(metadata->dm.sensor.exposureTime), 1))
- return NO_MEMORY;
+
if (0 != add_camera_metadata_entry(dst, ANDROID_SENSOR_FRAME_DURATION,
&(metadata->dm.sensor.frameDuration), 1))
&(metadata->dm.aa.effect_mode), 1))
return NO_MEMORY;
- if (0 != add_camera_metadata_entry(dst, ANDROID_CONTROL_AE_MODE,
- &(metadata->dm.aa.aeMode), 1))
- return NO_MEMORY;
+
if (0 != add_camera_metadata_entry(dst, ANDROID_CONTROL_AE_REGIONS,
&(metadata->dm.aa.aeRegions), 5))
return NO_MEMORY;
- if (0 != add_camera_metadata_entry(dst, ANDROID_CONTROL_AE_EXP_COMPENSATION,
- &(metadata->dm.aa.aeExpCompensation), 1))
- return NO_MEMORY;
+
if (0 != add_camera_metadata_entry(dst, ANDROID_CONTROL_AE_STATE,
&(metadata->dm.aa.aeState), 1))
return NO_MEMORY;
- if (0 != add_camera_metadata_entry(dst, ANDROID_CONTROL_AWB_MODE,
- &(metadata->dm.aa.awbMode), 1))
- return NO_MEMORY;
if (0 != add_camera_metadata_entry(dst, ANDROID_CONTROL_AWB_REGIONS,
&(metadata->dm.aa.awbRegions), 5))