3 ** Copyright 2008, The Android Open Source Project
4 ** Copyright 2012, Samsung Electronics Co. LTD
6 ** Licensed under the Apache License, Version 2.0 (the "License");
7 ** you may not use this file except in compliance with the License.
8 ** You may obtain a copy of the License at
10 ** http://www.apache.org/licenses/LICENSE-2.0
12 ** Unless required by applicable law or agreed to in writing, software
13 ** distributed under the License is distributed on an "AS IS" BASIS,
14 ** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 ** See the License for the specific language governing permissions and
16 ** limitations under the License.
20 * \file ExynosCameraHWInterface2.cpp
21 * \brief source file for Android Camera API 2.0 HAL
22 * \author Sungjoong Kang(sj3.kang@samsung.com)
25 * <b>Revision History: </b>
26 * - 2012/05/31 : Sungjoong Kang(sj3.kang@samsung.com) \n
29 * - 2012/07/10 : Sungjoong Kang(sj3.kang@samsung.com) \n
34 //#define LOG_NDEBUG 0
35 #define LOG_TAG "ExynosCameraHAL2"
36 #include <utils/Log.h>
39 #include "ExynosCameraHWInterface2.h"
40 #include "exynos_format.h"
44 void m_savePostView(const char *fname
, uint8_t *buf
, uint32_t size
)
50 ALOGV("opening file [%s], address[%x], size(%d)", fname
, (unsigned int)buf
, size
);
51 int fd
= open(fname
, O_RDWR
| O_CREAT
, 0644);
53 ALOGE("failed to create file [%s]: %s", fname
, strerror(errno
));
57 ALOGV("writing %d bytes to file [%s]", size
, fname
);
58 while (written
< size
) {
59 nw
= ::write(fd
, buf
+ written
, size
- written
);
61 ALOGE("failed to write to file %d [%s]: %s",written
,fname
, strerror(errno
));
67 ALOGV("done writing %d bytes to file [%s] in %d passes",size
, fname
, cnt
);
71 int get_pixel_depth(uint32_t fmt
)
76 case V4L2_PIX_FMT_JPEG
:
80 case V4L2_PIX_FMT_NV12
:
81 case V4L2_PIX_FMT_NV21
:
82 case V4L2_PIX_FMT_YUV420
:
83 case V4L2_PIX_FMT_YVU420M
:
84 case V4L2_PIX_FMT_NV12M
:
85 case V4L2_PIX_FMT_NV12MT
:
89 case V4L2_PIX_FMT_RGB565
:
90 case V4L2_PIX_FMT_YUYV
:
91 case V4L2_PIX_FMT_YVYU
:
92 case V4L2_PIX_FMT_UYVY
:
93 case V4L2_PIX_FMT_VYUY
:
94 case V4L2_PIX_FMT_NV16
:
95 case V4L2_PIX_FMT_NV61
:
96 case V4L2_PIX_FMT_YUV422P
:
97 case V4L2_PIX_FMT_SBGGR10
:
98 case V4L2_PIX_FMT_SBGGR12
:
99 case V4L2_PIX_FMT_SBGGR16
:
103 case V4L2_PIX_FMT_RGB32
:
107 ALOGE("Get depth failed(format : %d)", fmt
);
114 int cam_int_s_fmt(node_info_t
*node
)
116 struct v4l2_format v4l2_fmt
;
117 unsigned int framesize
;
120 memset(&v4l2_fmt
, 0, sizeof(struct v4l2_format
));
122 v4l2_fmt
.type
= node
->type
;
123 framesize
= (node
->width
* node
->height
* get_pixel_depth(node
->format
)) / 8;
125 if (node
->planes
>= 1) {
126 v4l2_fmt
.fmt
.pix_mp
.width
= node
->width
;
127 v4l2_fmt
.fmt
.pix_mp
.height
= node
->height
;
128 v4l2_fmt
.fmt
.pix_mp
.pixelformat
= node
->format
;
129 v4l2_fmt
.fmt
.pix_mp
.field
= V4L2_FIELD_ANY
;
131 ALOGE("%s:S_FMT, Out of bound : Number of element plane",__FUNCTION__
);
134 /* Set up for capture */
135 ret
= exynos_v4l2_s_fmt(node
->fd
, &v4l2_fmt
);
138 ALOGE("%s: exynos_v4l2_s_fmt fail (%d)",__FUNCTION__
, ret
);
144 int cam_int_reqbufs(node_info_t
*node
)
146 struct v4l2_requestbuffers req
;
149 req
.count
= node
->buffers
;
150 req
.type
= node
->type
;
151 req
.memory
= node
->memory
;
153 ret
= exynos_v4l2_reqbufs(node
->fd
, &req
);
156 ALOGE("%s: VIDIOC_REQBUFS (fd:%d) failed (%d)",__FUNCTION__
,node
->fd
, ret
);
161 int cam_int_qbuf(node_info_t
*node
, int index
)
163 struct v4l2_buffer v4l2_buf
;
164 struct v4l2_plane planes
[VIDEO_MAX_PLANES
];
168 v4l2_buf
.m
.planes
= planes
;
169 v4l2_buf
.type
= node
->type
;
170 v4l2_buf
.memory
= node
->memory
;
171 v4l2_buf
.index
= index
;
172 v4l2_buf
.length
= node
->planes
;
174 for(i
= 0; i
< node
->planes
; i
++){
175 v4l2_buf
.m
.planes
[i
].m
.fd
= (int)(node
->buffer
[index
].fd
.extFd
[i
]);
176 v4l2_buf
.m
.planes
[i
].length
= (unsigned long)(node
->buffer
[index
].size
.extS
[i
]);
179 ret
= exynos_v4l2_qbuf(node
->fd
, &v4l2_buf
);
182 ALOGE("%s: cam_int_qbuf failed (index:%d)(ret:%d)",__FUNCTION__
, index
, ret
);
187 int cam_int_streamon(node_info_t
*node
)
189 enum v4l2_buf_type type
= node
->type
;
193 ret
= exynos_v4l2_streamon(node
->fd
, type
);
196 ALOGE("%s: VIDIOC_STREAMON failed [%d] (%d)",__FUNCTION__
, node
->fd
,ret
);
198 ALOGV("On streaming I/O... ... fd(%d)", node
->fd
);
203 int cam_int_streamoff(node_info_t
*node
)
205 enum v4l2_buf_type type
= V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE
;
209 ALOGV("Off streaming I/O... fd(%d)", node
->fd
);
210 ret
= exynos_v4l2_streamoff(node
->fd
, type
);
213 ALOGE("%s: VIDIOC_STREAMOFF failed (%d)",__FUNCTION__
, ret
);
218 int isp_int_streamoff(node_info_t
*node
)
220 enum v4l2_buf_type type
= V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE
;
223 ALOGV("Off streaming I/O... fd(%d)", node
->fd
);
224 ret
= exynos_v4l2_streamoff(node
->fd
, type
);
227 ALOGE("%s: VIDIOC_STREAMOFF failed (%d)",__FUNCTION__
, ret
);
232 int cam_int_dqbuf(node_info_t
*node
)
234 struct v4l2_buffer v4l2_buf
;
235 struct v4l2_plane planes
[VIDEO_MAX_PLANES
];
238 v4l2_buf
.type
= node
->type
;
239 v4l2_buf
.memory
= node
->memory
;
240 v4l2_buf
.m
.planes
= planes
;
241 v4l2_buf
.length
= node
->planes
;
243 ret
= exynos_v4l2_dqbuf(node
->fd
, &v4l2_buf
);
245 ALOGE("%s: VIDIOC_DQBUF failed (%d)",__FUNCTION__
, ret
);
247 return v4l2_buf
.index
;
250 int cam_int_dqbuf(node_info_t
*node
, int num_plane
)
252 struct v4l2_buffer v4l2_buf
;
253 struct v4l2_plane planes
[VIDEO_MAX_PLANES
];
256 v4l2_buf
.type
= node
->type
;
257 v4l2_buf
.memory
= node
->memory
;
258 v4l2_buf
.m
.planes
= planes
;
259 v4l2_buf
.length
= num_plane
;
261 ret
= exynos_v4l2_dqbuf(node
->fd
, &v4l2_buf
);
263 ALOGE("%s: VIDIOC_DQBUF failed (%d)",__FUNCTION__
, ret
);
265 return v4l2_buf
.index
;
268 int cam_int_s_input(node_info_t
*node
, int index
)
272 ret
= exynos_v4l2_s_input(node
->fd
, index
);
274 ALOGE("%s: VIDIOC_S_INPUT failed (%d)",__FUNCTION__
, ret
);
280 gralloc_module_t
const* ExynosCameraHWInterface2::m_grallocHal
;
282 RequestManager::RequestManager(SignalDrivenThread
* main_thread
):
284 m_lastCompletedFrameCnt(-1),
289 m_vdisBubbleEn(false)
291 m_metadataConverter
= new MetadataConverter
;
292 m_mainThread
= main_thread
;
294 m_sensorPipelineSkipCnt
= 0;
298 RequestManager::~RequestManager()
300 ALOGV("%s", __FUNCTION__
);
301 if (m_metadataConverter
!= NULL
) {
302 delete m_metadataConverter
;
303 m_metadataConverter
= NULL
;
310 void RequestManager::ResetEntry()
312 Mutex::Autolock
lock(m_requestMutex
);
313 Mutex::Autolock
lock2(m_numOfEntriesLock
);
314 for (int i
=0 ; i
<NUM_MAX_REQUEST_MGR_ENTRY
; i
++) {
315 memset(&(entries
[i
]), 0x00, sizeof(request_manager_entry_t
));
316 entries
[i
].internal_shot
.shot
.ctl
.request
.frameCount
= -1;
319 m_entryInsertionIndex
= -1;
320 m_entryProcessingIndex
= -1;
321 m_entryFrameOutputIndex
= -1;
324 int RequestManager::GetNumEntries()
326 Mutex::Autolock
lock(m_numOfEntriesLock
);
327 return m_numOfEntries
;
330 void RequestManager::SetDefaultParameters(int cropX
)
335 bool RequestManager::IsRequestQueueFull()
337 Mutex::Autolock
lock(m_requestMutex
);
338 Mutex::Autolock
lock2(m_numOfEntriesLock
);
339 if (m_numOfEntries
>=NUM_MAX_REQUEST_MGR_ENTRY
)
345 void RequestManager::RegisterRequest(camera_metadata_t
* new_request
, int * afMode
, uint32_t * afRegion
)
347 ALOGV("DEBUG(%s):", __FUNCTION__
);
349 Mutex::Autolock
lock(m_requestMutex
);
350 Mutex::Autolock
lock2(m_numOfEntriesLock
);
352 request_manager_entry
* newEntry
= NULL
;
353 int newInsertionIndex
= GetNextIndex(m_entryInsertionIndex
);
354 ALOGV("DEBUG(%s): got lock, new insertIndex(%d), cnt before reg(%d)", __FUNCTION__
,newInsertionIndex
, m_numOfEntries
);
357 newEntry
= &(entries
[newInsertionIndex
]);
359 if (newEntry
->status
!=EMPTY
) {
360 ALOGV("DEBUG(%s): Circular buffer abnormal ", __FUNCTION__
);
363 newEntry
->status
= REGISTERED
;
364 newEntry
->original_request
= new_request
;
365 memset(&(newEntry
->internal_shot
), 0, sizeof(struct camera2_shot_ext
));
366 m_metadataConverter
->ToInternalShot(new_request
, &(newEntry
->internal_shot
));
367 newEntry
->output_stream_count
= 0;
368 if (newEntry
->internal_shot
.shot
.ctl
.request
.outputStreams
[0] & MASK_OUTPUT_SCP
)
369 newEntry
->output_stream_count
++;
371 if (newEntry
->internal_shot
.shot
.ctl
.request
.outputStreams
[0] & MASK_OUTPUT_SCC
)
372 newEntry
->output_stream_count
++;
375 m_entryInsertionIndex
= newInsertionIndex
;
378 *afMode
= (int)(newEntry
->internal_shot
.shot
.ctl
.aa
.afMode
);
379 afRegion
[0] = newEntry
->internal_shot
.shot
.ctl
.aa
.afRegions
[0];
380 afRegion
[1] = newEntry
->internal_shot
.shot
.ctl
.aa
.afRegions
[1];
381 afRegion
[2] = newEntry
->internal_shot
.shot
.ctl
.aa
.afRegions
[2];
382 afRegion
[3] = newEntry
->internal_shot
.shot
.ctl
.aa
.afRegions
[3];
383 ALOGV("## RegisterReq DONE num(%d), insert(%d), processing(%d), frame(%d), (frameCnt(%d))",
384 m_numOfEntries
,m_entryInsertionIndex
,m_entryProcessingIndex
, m_entryFrameOutputIndex
, newEntry
->internal_shot
.shot
.ctl
.request
.frameCount
);
387 void RequestManager::DeregisterRequest(camera_metadata_t
** deregistered_request
)
389 ALOGV("DEBUG(%s):", __FUNCTION__
);
391 request_manager_entry
* currentEntry
;
393 Mutex::Autolock
lock(m_requestMutex
);
394 Mutex::Autolock
lock2(m_numOfEntriesLock
);
396 frame_index
= GetCompletedIndex();
397 currentEntry
= &(entries
[frame_index
]);
398 if (currentEntry
->status
!= COMPLETED
) {
399 CAM_LOGD("DBG(%s): Circular buffer abnormal. processing(%d), frame(%d), status(%d) ", __FUNCTION__
,
400 m_entryProcessingIndex
, frame_index
,(int)(currentEntry
->status
));
403 if (deregistered_request
) *deregistered_request
= currentEntry
->original_request
;
405 m_lastCompletedFrameCnt
= currentEntry
->internal_shot
.shot
.ctl
.request
.frameCount
;
407 currentEntry
->status
= EMPTY
;
408 currentEntry
->original_request
= NULL
;
409 memset(&(currentEntry
->internal_shot
), 0, sizeof(struct camera2_shot_ext
));
410 currentEntry
->internal_shot
.shot
.ctl
.request
.frameCount
= -1;
411 currentEntry
->output_stream_count
= 0;
413 ALOGV("## DeRegistReq DONE num(%d), insert(%d), processing(%d), frame(%d)",
414 m_numOfEntries
,m_entryInsertionIndex
,m_entryProcessingIndex
, m_entryFrameOutputIndex
);
416 CheckCompleted(GetNextIndex(frame_index
));
420 bool RequestManager::PrepareFrame(size_t* num_entries
, size_t* frame_size
,
421 camera_metadata_t
** prepared_frame
, int afState
)
423 ALOGV("DEBUG(%s):", __FUNCTION__
);
424 Mutex::Autolock
lock(m_requestMutex
);
425 status_t res
= NO_ERROR
;
426 int tempFrameOutputIndex
= GetCompletedIndex();
427 request_manager_entry
* currentEntry
= &(entries
[tempFrameOutputIndex
]);
428 ALOGV("DEBUG(%s): processing(%d), frameOut(%d), insert(%d) recentlycompleted(%d)", __FUNCTION__
,
429 m_entryProcessingIndex
, m_entryFrameOutputIndex
, m_entryInsertionIndex
, m_completedIndex
);
431 if (currentEntry
->status
!= COMPLETED
) {
432 ALOGV("DBG(%s): Circular buffer abnormal status(%d)", __FUNCTION__
, (int)(currentEntry
->status
));
436 m_entryFrameOutputIndex
= tempFrameOutputIndex
;
437 m_tempFrameMetadata
= place_camera_metadata(m_tempFrameMetadataBuf
, 2000, 35, 500); //estimated
438 add_camera_metadata_entry(m_tempFrameMetadata
, ANDROID_CONTROL_AF_STATE
, &afState
, 1);
439 res
= m_metadataConverter
->ToDynamicMetadata(&(currentEntry
->internal_shot
),
440 m_tempFrameMetadata
);
442 ALOGE("ERROR(%s): ToDynamicMetadata (%d) ", __FUNCTION__
, res
);
445 *num_entries
= get_camera_metadata_entry_count(m_tempFrameMetadata
);
446 *frame_size
= get_camera_metadata_size(m_tempFrameMetadata
);
447 *prepared_frame
= m_tempFrameMetadata
;
448 ALOGV("## PrepareFrame DONE: frameOut(%d) frameCnt-req(%d) timestamp(%lld)", m_entryFrameOutputIndex
,
449 currentEntry
->internal_shot
.shot
.ctl
.request
.frameCount
, currentEntry
->internal_shot
.shot
.dm
.sensor
.timeStamp
);
454 int RequestManager::MarkProcessingRequest(ExynosBuffer
* buf
)
456 struct camera2_shot_ext
* shot_ext
;
457 struct camera2_shot_ext
* request_shot
;
458 int targetStreamIndex
= 0;
459 request_manager_entry
* newEntry
= NULL
;
460 static int count
= 0;
462 Mutex::Autolock
lock(m_requestMutex
);
463 Mutex::Autolock
lock2(m_numOfEntriesLock
);
464 if (m_numOfEntries
== 0) {
465 CAM_LOGD("DEBUG(%s): Request Manager Empty ", __FUNCTION__
);
469 if ((m_entryProcessingIndex
== m_entryInsertionIndex
)
470 && (entries
[m_entryProcessingIndex
].status
== REQUESTED
|| entries
[m_entryProcessingIndex
].status
== CAPTURED
)) {
471 ALOGV("## MarkProcReq skipping(request underrun) - num(%d), insert(%d), processing(%d), frame(%d)",
472 m_numOfEntries
,m_entryInsertionIndex
,m_entryProcessingIndex
, m_entryFrameOutputIndex
);
476 int newProcessingIndex
= GetNextIndex(m_entryProcessingIndex
);
477 ALOGV("DEBUG(%s): index(%d)", __FUNCTION__
, newProcessingIndex
);
479 newEntry
= &(entries
[newProcessingIndex
]);
480 request_shot
= &(newEntry
->internal_shot
);
481 if (newEntry
->status
!= REGISTERED
) {
482 CAM_LOGD("DEBUG(%s)(%d): Circular buffer abnormal, numOfEntries(%d), status(%d)", __FUNCTION__
, newProcessingIndex
, m_numOfEntries
, newEntry
->status
);
483 for (int i
= 0; i
< NUM_MAX_REQUEST_MGR_ENTRY
; i
++) {
484 CAM_LOGD("DBG: entrie[%d].stream output cnt = %d, framecnt(%d)", i
, entries
[i
].output_stream_count
, entries
[i
].internal_shot
.shot
.ctl
.request
.frameCount
);
489 newEntry
->status
= REQUESTED
;
491 shot_ext
= (struct camera2_shot_ext
*)buf
->virt
.extP
[1];
493 memset(shot_ext
, 0x00, sizeof(struct camera2_shot_ext
));
494 shot_ext
->shot
.ctl
.request
.frameCount
= request_shot
->shot
.ctl
.request
.frameCount
;
495 shot_ext
->request_sensor
= 1;
496 shot_ext
->dis_bypass
= 1;
497 shot_ext
->dnr_bypass
= 1;
498 shot_ext
->fd_bypass
= 1;
499 shot_ext
->setfile
= 0;
501 targetStreamIndex
= newEntry
->internal_shot
.shot
.ctl
.request
.outputStreams
[0];
502 shot_ext
->shot
.ctl
.request
.outputStreams
[0] = targetStreamIndex
;
503 if (targetStreamIndex
& MASK_OUTPUT_SCP
)
504 shot_ext
->request_scp
= 1;
506 if (targetStreamIndex
& MASK_OUTPUT_SCC
)
507 shot_ext
->request_scc
= 1;
509 if (shot_ext
->shot
.ctl
.stats
.faceDetectMode
!= FACEDETECT_MODE_OFF
)
510 shot_ext
->fd_bypass
= 0;
513 shot_ext
->shot
.ctl
.aa
.mode
= AA_CONTROL_AUTO
;
515 shot_ext
->shot
.ctl
.aa
.mode
= AA_CONTROL_NONE
;
518 shot_ext
->shot
.ctl
.request
.metadataMode
= METADATA_MODE_FULL
;
519 shot_ext
->shot
.ctl
.stats
.faceDetectMode
= FACEDETECT_MODE_FULL
;
520 shot_ext
->shot
.magicNumber
= 0x23456789;
521 shot_ext
->shot
.ctl
.sensor
.exposureTime
= 0;
522 shot_ext
->shot
.ctl
.sensor
.frameDuration
= 33*1000*1000;
523 shot_ext
->shot
.ctl
.sensor
.sensitivity
= 0;
526 shot_ext
->shot
.ctl
.scaler
.cropRegion
[0] = newEntry
->internal_shot
.shot
.ctl
.scaler
.cropRegion
[0];
527 shot_ext
->shot
.ctl
.scaler
.cropRegion
[1] = newEntry
->internal_shot
.shot
.ctl
.scaler
.cropRegion
[1];
528 shot_ext
->shot
.ctl
.scaler
.cropRegion
[2] = newEntry
->internal_shot
.shot
.ctl
.scaler
.cropRegion
[2];
530 m_entryProcessingIndex
= newProcessingIndex
;
531 return newProcessingIndex
;
534 void RequestManager::NotifyStreamOutput(int frameCnt
)
538 Mutex::Autolock
lock(m_requestMutex
);
539 ALOGV("DEBUG(%s): frameCnt(%d)", __FUNCTION__
, frameCnt
);
541 index
= FindEntryIndexByFrameCnt(frameCnt
);
543 ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__
, frameCnt
);
546 ALOGV("DEBUG(%s): frameCnt(%d), last cnt (%d)", __FUNCTION__
, frameCnt
, entries
[index
].output_stream_count
);
548 entries
[index
].output_stream_count
--; //TODO : match stream id also
549 CheckCompleted(index
);
552 void RequestManager::CheckCompleted(int index
)
554 if ((entries
[index
].status
== METADONE
|| entries
[index
].status
== COMPLETED
)
555 && (entries
[index
].output_stream_count
<= 0)){
556 ALOGV("(%s): Completed(index:%d)(frameCnt:%d)", __FUNCTION__
,
557 index
, entries
[index
].internal_shot
.shot
.ctl
.request
.frameCount
);
558 entries
[index
].status
= COMPLETED
;
559 if (m_lastCompletedFrameCnt
+ 1 == (int)entries
[index
].internal_shot
.shot
.ctl
.request
.frameCount
)
560 m_mainThread
->SetSignal(SIGNAL_MAIN_STREAM_OUTPUT_DONE
);
564 int RequestManager::GetCompletedIndex()
566 return FindEntryIndexByFrameCnt(m_lastCompletedFrameCnt
+ 1);
569 void RequestManager::pushSensorQ(int index
)
571 Mutex::Autolock
lock(m_requestMutex
);
572 m_sensorQ
.push_back(index
);
575 int RequestManager::popSensorQ()
577 List
<int>::iterator sensor_token
;
580 Mutex::Autolock
lock(m_requestMutex
);
582 if(m_sensorQ
.size() == 0)
585 sensor_token
= m_sensorQ
.begin()++;
586 index
= *sensor_token
;
587 m_sensorQ
.erase(sensor_token
);
592 void RequestManager::releaseSensorQ()
594 List
<int>::iterator r
;
596 Mutex::Autolock
lock(m_requestMutex
);
597 ALOGV("(%s)m_sensorQ.size : %d", __FUNCTION__
, m_sensorQ
.size());
599 while(m_sensorQ
.size() > 0){
600 r
= m_sensorQ
.begin()++;
606 void RequestManager::ApplyDynamicMetadata(struct camera2_shot_ext
*shot_ext
)
609 struct camera2_shot_ext
* request_shot
;
613 Mutex::Autolock
lock(m_requestMutex
);
614 ALOGV("DEBUG(%s): frameCnt(%d)", __FUNCTION__
, shot_ext
->shot
.ctl
.request
.frameCount
);
616 for (i
= 0 ; i
< NUM_MAX_REQUEST_MGR_ENTRY
; i
++) {
617 if((entries
[i
].internal_shot
.shot
.ctl
.request
.frameCount
== shot_ext
->shot
.ctl
.request
.frameCount
)
618 && (entries
[i
].status
== CAPTURED
)){
619 entries
[i
].status
= METADONE
;
624 if (i
== NUM_MAX_REQUEST_MGR_ENTRY
){
625 ALOGE("[%s] no entry found(framecount:%d)", __FUNCTION__
, shot_ext
->shot
.ctl
.request
.frameCount
);
629 request_manager_entry
* newEntry
= &(entries
[i
]);
630 request_shot
= &(newEntry
->internal_shot
);
632 timeStamp
= request_shot
->shot
.dm
.sensor
.timeStamp
;
633 memcpy(&(request_shot
->shot
.dm
), &(shot_ext
->shot
.dm
), sizeof(struct camera2_dm
));
634 request_shot
->shot
.dm
.sensor
.timeStamp
= timeStamp
;
635 m_lastTimeStamp
= timeStamp
;
639 void RequestManager::UpdateIspParameters(struct camera2_shot_ext
*shot_ext
, int frameCnt
, ctl_request_info_t
*ctl_info
)
641 int index
, targetStreamIndex
;
642 struct camera2_shot_ext
* request_shot
;
644 ALOGV("DEBUG(%s): updating info with frameCnt(%d)", __FUNCTION__
, frameCnt
);
648 index
= FindEntryIndexByFrameCnt(frameCnt
);
650 ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__
, frameCnt
);
654 request_manager_entry
* newEntry
= &(entries
[index
]);
655 request_shot
= &(newEntry
->internal_shot
);
656 memcpy(&(shot_ext
->shot
.ctl
), &(request_shot
->shot
.ctl
), sizeof(struct camera2_ctl
));
657 shot_ext
->shot
.ctl
.request
.frameCount
= frameCnt
;
658 shot_ext
->request_sensor
= 1;
659 shot_ext
->dis_bypass
= 1;
660 shot_ext
->dnr_bypass
= 1;
661 shot_ext
->fd_bypass
= 1;
662 shot_ext
->drc_bypass
= 1;
663 shot_ext
->setfile
= 0;
665 shot_ext
->request_scc
= 0;
666 shot_ext
->request_scp
= 0;
668 shot_ext
->isReprocessing
= request_shot
->isReprocessing
;
669 shot_ext
->reprocessInput
= request_shot
->reprocessInput
;
670 shot_ext
->shot
.ctl
.request
.outputStreams
[0] = 0;
672 shot_ext
->awb_mode_dm
= request_shot
->awb_mode_dm
;
674 shot_ext
->shot
.ctl
.scaler
.cropRegion
[0] = request_shot
->shot
.ctl
.scaler
.cropRegion
[0];
675 shot_ext
->shot
.ctl
.scaler
.cropRegion
[1] = request_shot
->shot
.ctl
.scaler
.cropRegion
[1];
676 shot_ext
->shot
.ctl
.scaler
.cropRegion
[2] = request_shot
->shot
.ctl
.scaler
.cropRegion
[2];
678 // mapping flash UI mode from aeMode
679 if (request_shot
->shot
.ctl
.aa
.aeMode
>= AA_AEMODE_ON
) {
680 if (request_shot
->shot
.ctl
.aa
.captureIntent
== AA_CAPTURE_INTENT_PREVIEW
)
681 ctl_info
->flash
.i_flashMode
= request_shot
->shot
.ctl
.aa
.aeMode
;
682 else if (request_shot
->shot
.ctl
.aa
.captureIntent
== AA_CAPTURE_INTENT_VIDEO_RECORD
)
683 ctl_info
->flash
.i_flashMode
= request_shot
->shot
.ctl
.aa
.aeMode
;
684 request_shot
->shot
.ctl
.aa
.aeMode
= AA_AEMODE_ON
;
687 // Apply ae/awb lock or unlock
688 if (request_shot
->ae_lock
== AEMODE_LOCK_ON
)
689 request_shot
->shot
.ctl
.aa
.aeMode
= AA_AEMODE_LOCKED
;
690 if (request_shot
->awb_lock
== AWBMODE_LOCK_ON
)
691 request_shot
->shot
.ctl
.aa
.awbMode
= AA_AWBMODE_LOCKED
;
693 if (m_lastAaMode
== request_shot
->shot
.ctl
.aa
.mode
) {
694 shot_ext
->shot
.ctl
.aa
.mode
= (enum aa_mode
)(0);
697 shot_ext
->shot
.ctl
.aa
.mode
= request_shot
->shot
.ctl
.aa
.mode
;
698 m_lastAaMode
= (int)(shot_ext
->shot
.ctl
.aa
.mode
);
700 if (m_lastAeMode
== request_shot
->shot
.ctl
.aa
.aeMode
) {
701 shot_ext
->shot
.ctl
.aa
.aeMode
= (enum aa_aemode
)(0);
704 shot_ext
->shot
.ctl
.aa
.aeMode
= request_shot
->shot
.ctl
.aa
.aeMode
;
705 m_lastAeMode
= (int)(shot_ext
->shot
.ctl
.aa
.aeMode
);
707 if (m_lastAwbMode
== request_shot
->shot
.ctl
.aa
.awbMode
) {
708 shot_ext
->shot
.ctl
.aa
.awbMode
= (enum aa_awbmode
)(0);
711 shot_ext
->shot
.ctl
.aa
.awbMode
= request_shot
->shot
.ctl
.aa
.awbMode
;
712 m_lastAwbMode
= (int)(shot_ext
->shot
.ctl
.aa
.awbMode
);
714 if (m_lastAeComp
== request_shot
->shot
.ctl
.aa
.aeExpCompensation
) {
715 shot_ext
->shot
.ctl
.aa
.aeExpCompensation
= 0;
718 shot_ext
->shot
.ctl
.aa
.aeExpCompensation
= request_shot
->shot
.ctl
.aa
.aeExpCompensation
;
719 m_lastAeComp
= (int)(shot_ext
->shot
.ctl
.aa
.aeExpCompensation
);
722 if (request_shot
->shot
.ctl
.aa
.videoStabilizationMode
&& m_vdisEnable
) {
723 m_vdisBubbleEn
= true;
724 shot_ext
->dis_bypass
= 0;
725 shot_ext
->dnr_bypass
= 0;
727 m_vdisBubbleEn
= false;
728 shot_ext
->dis_bypass
= 1;
729 shot_ext
->dnr_bypass
= 1;
732 shot_ext
->shot
.ctl
.aa
.afTrigger
= 0;
734 targetStreamIndex
= newEntry
->internal_shot
.shot
.ctl
.request
.outputStreams
[0];
735 shot_ext
->shot
.ctl
.request
.outputStreams
[0] = targetStreamIndex
;
736 if (targetStreamIndex
& MASK_OUTPUT_SCP
)
737 shot_ext
->request_scp
= 1;
739 if (targetStreamIndex
& MASK_OUTPUT_SCC
)
740 shot_ext
->request_scc
= 1;
742 if (shot_ext
->shot
.ctl
.stats
.faceDetectMode
!= FACEDETECT_MODE_OFF
)
743 shot_ext
->fd_bypass
= 0;
745 shot_ext
->shot
.ctl
.aa
.aeTargetFpsRange
[0] = request_shot
->shot
.ctl
.aa
.aeTargetFpsRange
[0];
746 shot_ext
->shot
.ctl
.aa
.aeTargetFpsRange
[1] = request_shot
->shot
.ctl
.aa
.aeTargetFpsRange
[1];
748 ALOGV("(%s): applied aa(%d) aemode(%d) expComp(%d), awb(%d) afmode(%d), ", __FUNCTION__
,
749 (int)(shot_ext
->shot
.ctl
.aa
.mode
), (int)(shot_ext
->shot
.ctl
.aa
.aeMode
),
750 (int)(shot_ext
->shot
.ctl
.aa
.aeExpCompensation
), (int)(shot_ext
->shot
.ctl
.aa
.awbMode
),
751 (int)(shot_ext
->shot
.ctl
.aa
.afMode
));
754 bool RequestManager::IsVdisEnable(void)
756 return m_vdisBubbleEn
;
759 int RequestManager::FindEntryIndexByFrameCnt(int frameCnt
)
761 for (int i
= 0 ; i
< NUM_MAX_REQUEST_MGR_ENTRY
; i
++) {
762 if ((int)entries
[i
].internal_shot
.shot
.ctl
.request
.frameCount
== frameCnt
)
768 void RequestManager::RegisterTimestamp(int frameCnt
, nsecs_t
* frameTime
)
770 int index
= FindEntryIndexByFrameCnt(frameCnt
);
772 ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__
, frameCnt
);
776 request_manager_entry
* currentEntry
= &(entries
[index
]);
777 if (currentEntry
->internal_shot
.isReprocessing
== 1) {
778 ALOGV("DEBUG(%s): REPROCESSING : preserving timestamp for reqIndex(%d) frameCnt(%d) (%lld)", __FUNCTION__
,
779 index
, frameCnt
, currentEntry
->internal_shot
.shot
.dm
.sensor
.timeStamp
);
781 currentEntry
->internal_shot
.shot
.dm
.sensor
.timeStamp
= *((uint64_t*)frameTime
);
782 ALOGV("DEBUG(%s): applied timestamp for reqIndex(%d) frameCnt(%d) (%lld)", __FUNCTION__
,
783 index
, frameCnt
, currentEntry
->internal_shot
.shot
.dm
.sensor
.timeStamp
);
788 nsecs_t
RequestManager::GetTimestampByFrameCnt(int frameCnt
)
790 int index
= FindEntryIndexByFrameCnt(frameCnt
);
792 ALOGE("ERR(%s): Cannot find entry for frameCnt(%d) returning saved time(%lld)", __FUNCTION__
, frameCnt
, m_lastTimeStamp
);
793 return m_lastTimeStamp
;
796 return GetTimestamp(index
);
799 nsecs_t
RequestManager::GetTimestamp(int index
)
801 Mutex::Autolock
lock(m_requestMutex
);
802 if (index
< 0 || index
>= NUM_MAX_REQUEST_MGR_ENTRY
) {
803 ALOGE("ERR(%s): Request entry outside of bounds (%d)", __FUNCTION__
, index
);
807 request_manager_entry
* currentEntry
= &(entries
[index
]);
808 nsecs_t frameTime
= currentEntry
->internal_shot
.shot
.dm
.sensor
.timeStamp
;
809 if (frameTime
== 0) {
810 ALOGV("DEBUG(%s): timestamp null, returning saved value", __FUNCTION__
);
811 frameTime
= m_lastTimeStamp
;
813 ALOGV("DEBUG(%s): Returning timestamp for reqIndex(%d) (%lld)", __FUNCTION__
, index
, frameTime
);
817 uint8_t RequestManager::GetOutputStreamByFrameCnt(int frameCnt
)
819 int index
= FindEntryIndexByFrameCnt(frameCnt
);
821 ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__
, frameCnt
);
825 return GetOutputStream(index
);
828 uint8_t RequestManager::GetOutputStream(int index
)
830 Mutex::Autolock
lock(m_requestMutex
);
831 if (index
< 0 || index
>= NUM_MAX_REQUEST_MGR_ENTRY
) {
832 ALOGE("ERR(%s): Request entry outside of bounds (%d)", __FUNCTION__
, index
);
836 request_manager_entry
* currentEntry
= &(entries
[index
]);
837 return currentEntry
->internal_shot
.shot
.ctl
.request
.outputStreams
[0];
840 camera2_shot_ext
* RequestManager::GetInternalShotExtByFrameCnt(int frameCnt
)
842 int index
= FindEntryIndexByFrameCnt(frameCnt
);
844 ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__
, frameCnt
);
848 return GetInternalShotExt(index
);
851 camera2_shot_ext
* RequestManager::GetInternalShotExt(int index
)
853 Mutex::Autolock
lock(m_requestMutex
);
854 if (index
< 0 || index
>= NUM_MAX_REQUEST_MGR_ENTRY
) {
855 ALOGE("ERR(%s): Request entry outside of bounds (%d)", __FUNCTION__
, index
);
859 request_manager_entry
* currentEntry
= &(entries
[index
]);
860 return ¤tEntry
->internal_shot
;
863 int RequestManager::FindFrameCnt(struct camera2_shot_ext
* shot_ext
)
865 Mutex::Autolock
lock(m_requestMutex
);
868 if (m_numOfEntries
== 0) {
869 CAM_LOGD("DBG(%s): No Entry found", __FUNCTION__
);
873 for (i
= 0 ; i
< NUM_MAX_REQUEST_MGR_ENTRY
; i
++) {
874 if(entries
[i
].internal_shot
.shot
.ctl
.request
.frameCount
!= shot_ext
->shot
.ctl
.request
.frameCount
)
877 if (entries
[i
].status
== REQUESTED
) {
878 entries
[i
].status
= CAPTURED
;
879 return entries
[i
].internal_shot
.shot
.ctl
.request
.frameCount
;
881 CAM_LOGE("ERR(%s): frameCount(%d), index(%d), status(%d)", __FUNCTION__
, shot_ext
->shot
.ctl
.request
.frameCount
, i
, entries
[i
].status
);
884 CAM_LOGD("(%s): No Entry found frame count(%d)", __FUNCTION__
, shot_ext
->shot
.ctl
.request
.frameCount
);
889 void RequestManager::SetInitialSkip(int count
)
891 ALOGV("(%s): Pipeline Restarting. setting cnt(%d) - current(%d)", __FUNCTION__
, count
, m_sensorPipelineSkipCnt
);
892 if (count
> m_sensorPipelineSkipCnt
)
893 m_sensorPipelineSkipCnt
= count
;
896 int RequestManager::GetSkipCnt()
898 ALOGV("(%s): skip cnt(%d)", __FUNCTION__
, m_sensorPipelineSkipCnt
);
899 if (m_sensorPipelineSkipCnt
== 0)
900 return m_sensorPipelineSkipCnt
;
902 return --m_sensorPipelineSkipCnt
;
905 void RequestManager::Dump(void)
908 request_manager_entry
* currentEntry
;
909 Mutex::Autolock
lock(m_numOfEntriesLock
);
910 ALOGD("## Dump totalentry(%d), insert(%d), processing(%d), frame(%d)",
911 m_numOfEntries
,m_entryInsertionIndex
,m_entryProcessingIndex
, m_entryFrameOutputIndex
);
913 for (i
= 0 ; i
< NUM_MAX_REQUEST_MGR_ENTRY
; i
++) {
914 currentEntry
= &(entries
[i
]);
915 ALOGD("[%2d] status[%d] frameCnt[%3d] numOutput[%d] outstream[0]-%x ", i
,
916 currentEntry
->status
, currentEntry
->internal_shot
.shot
.ctl
.request
.frameCount
,
917 currentEntry
->output_stream_count
,
918 currentEntry
->internal_shot
.shot
.ctl
.request
.outputStreams
[0]);
922 int RequestManager::GetNextIndex(int index
)
925 if (index
>= NUM_MAX_REQUEST_MGR_ENTRY
)
931 int RequestManager::GetPrevIndex(int index
)
935 index
= NUM_MAX_REQUEST_MGR_ENTRY
-1;
940 ExynosCameraHWInterface2::ExynosCameraHWInterface2(int cameraId
, camera2_device_t
*dev
, ExynosCamera2
* camera
, int *openInvalid
):
941 m_requestQueueOps(NULL
),
942 m_frameQueueOps(NULL
),
943 m_callbackCookie(NULL
),
944 m_numOfRemainingReqInSvc(0),
945 m_isRequestQueuePending(false),
946 m_isRequestQueueNull(true),
948 m_ionCameraClient(0),
949 m_isIspStarted(false),
950 m_sccLocalBufferValid(false),
951 m_cameraId(cameraId
),
952 m_scp_closing(false),
958 m_jpegEncodingCount(0),
959 m_scpForceSuspended(false),
960 m_afState(HAL_AFSTATE_INACTIVE
),
963 m_afMode2(NO_CHANGE
),
964 m_IsAfModeUpdateRequired(false),
965 m_IsAfTriggerRequired(false),
966 m_IsAfLockRequired(false),
967 m_serviceAfState(ANDROID_CONTROL_AF_STATE_INACTIVE
),
968 m_afPendingTriggerId(0),
969 m_afModeWaitingCnt(0),
970 m_scpOutputSignalCnt(0),
971 m_scpOutputImageCnt(0),
972 m_nightCaptureCnt(0),
973 m_nightCaptureFrameCnt(0),
978 ALOGD("(%s): ENTER", __FUNCTION__
);
982 m_exynosPictureCSC
= NULL
;
983 m_exynosVideoCSC
= NULL
;
986 ret
= hw_get_module(GRALLOC_HARDWARE_MODULE_ID
, (const hw_module_t
**)&m_grallocHal
);
988 ALOGE("ERR(%s):Fail on loading gralloc HAL", __FUNCTION__
);
992 m_ionCameraClient
= createIonClient(m_ionCameraClient
);
993 if(m_ionCameraClient
== 0)
994 ALOGE("ERR(%s):Fail on ion_client_create", __FUNCTION__
);
997 m_BayerManager
= new BayerBufManager();
998 m_mainThread
= new MainThread(this);
999 m_requestManager
= new RequestManager((SignalDrivenThread
*)(m_mainThread
.get()));
1000 *openInvalid
= InitializeISPChain();
1001 if (*openInvalid
< 0) {
1002 ALOGD("(%s): ISP chain init failed. exiting", __FUNCTION__
);
1004 // 1. close video nodes
1006 res
= exynos_v4l2_close(m_camera_info
.scp
.fd
);
1007 if (res
!= NO_ERROR
) {
1008 ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__
, res
);
1011 res
= exynos_v4l2_close(m_camera_info
.capture
.fd
);
1012 if (res
!= NO_ERROR
) {
1013 ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__
, res
);
1016 res
= exynos_v4l2_close(m_camera_info
.sensor
.fd
);
1017 if (res
!= NO_ERROR
) {
1018 ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__
, res
);
1021 res
= exynos_v4l2_close(m_camera_info
.isp
.fd
);
1022 if (res
!= NO_ERROR
) {
1023 ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__
, res
);
1026 m_sensorThread
= new SensorThread(this);
1027 m_mainThread
->Start("MainThread", PRIORITY_DEFAULT
, 0);
1028 m_sensorThread
->Start("SensorThread", PRIORITY_DEFAULT
, 0);
1029 ALOGV("DEBUG(%s): created sensorthread ", __FUNCTION__
);
1031 for (int i
= 0 ; i
< STREAM_ID_LAST
+1 ; i
++)
1032 m_subStreams
[i
].type
= SUBSTREAM_TYPE_NONE
;
1033 CSC_METHOD cscMethod
= CSC_METHOD_HW
;
1034 m_exynosPictureCSC
= csc_init(cscMethod
);
1035 if (m_exynosPictureCSC
== NULL
)
1036 ALOGE("ERR(%s): csc_init() fail", __FUNCTION__
);
1037 csc_set_hw_property(m_exynosPictureCSC
, CSC_HW_PROPERTY_FIXED_NODE
, PICTURE_GSC_NODE_NUM
);
1038 csc_set_hw_property(m_exynosPictureCSC
, CSC_HW_PROPERTY_HW_TYPE
, CSC_HW_TYPE_GSCALER
);
1040 m_exynosVideoCSC
= csc_init(cscMethod
);
1041 if (m_exynosVideoCSC
== NULL
)
1042 ALOGE("ERR(%s): csc_init() fail", __FUNCTION__
);
1043 csc_set_hw_property(m_exynosVideoCSC
, CSC_HW_PROPERTY_FIXED_NODE
, VIDEO_GSC_NODE_NUM
);
1044 csc_set_hw_property(m_exynosVideoCSC
, CSC_HW_PROPERTY_HW_TYPE
, CSC_HW_TYPE_GSCALER
);
1046 m_setExifFixedAttribute();
1048 // contol information clear
1050 m_ctlInfo
.flash
.i_flashMode
= AA_AEMODE_ON
;
1051 m_ctlInfo
.flash
.m_afFlashDoneFlg
= false;
1052 m_ctlInfo
.flash
.m_flashEnableFlg
= false;
1053 m_ctlInfo
.flash
.m_flashFrameCount
= 0;
1054 m_ctlInfo
.flash
.m_flashCnt
= 0;
1055 m_ctlInfo
.flash
.m_flashTimeOut
= 0;
1056 m_ctlInfo
.flash
.m_flashDecisionResult
= false;
1057 m_ctlInfo
.flash
.m_flashTorchMode
= false;
1058 m_ctlInfo
.flash
.m_precaptureState
= 0;
1059 m_ctlInfo
.flash
.m_precaptureTriggerId
= 0;
1061 m_ctlInfo
.ae
.aeStateNoti
= AE_STATE_INACTIVE
;
1063 m_ctlInfo
.af
.m_afTriggerTimeOut
= 0;
1065 m_ctlInfo
.scene
.prevSceneMode
= AA_SCENE_MODE_MAX
;
1067 ALOGD("(%s): EXIT", __FUNCTION__
);
/*
 * Destructor: delegates all teardown (threads, buffers, V4L2 nodes) to release().
 * NOTE(review): recovered from a mangled dump -- the interior release() call line
 * was lost in extraction and is reconstructed from the upstream AOSP file; confirm.
 */
ExynosCameraHWInterface2::~ExynosCameraHWInterface2()
{
    ALOGD("(%s): ENTER", __FUNCTION__);
    this->release();
    ALOGD("(%s): EXIT", __FUNCTION__);
}
/*
 * Shut down the HAL instance: signal every worker thread to terminate, free the
 * color-space-conversion (CSC) contexts, poll-wait for thread termination, delete
 * the request/bayer managers, free all ION DMA buffers, and close the four V4L2
 * video nodes (sensor/isp/capture/scp) plus the ION client.
 *
 * NOTE(review): body recovered from a mangled dump -- braces, the local
 * declarations and the #else/#endif lines were reconstructed from the visible
 * statement sequence; verify against the pristine file.
 */
void ExynosCameraHWInterface2::release()
{
    int i, res;   // NOTE(review): declaration line lost in extraction; both are used below
    ALOGD("(HAL2::release): ENTER");

    /* Phase 1: ask every thread to stop. SetSignal/release are asynchronous;
     * actual termination is awaited in phase 2 below. */
    if (m_streamThreads[1] != NULL) {
        m_streamThreads[1]->release();
        m_streamThreads[1]->SetSignal(SIGNAL_THREAD_TERMINATE);
    }

    if (m_streamThreads[0] != NULL) {
        m_streamThreads[0]->release();
        m_streamThreads[0]->SetSignal(SIGNAL_THREAD_TERMINATE);
    }

    if (m_sensorThread != NULL) {
        m_sensorThread->release();
    }

    if (m_mainThread != NULL) {
        m_mainThread->release();
    }

    /* Free both CSC (GScaler) contexts.
     * NOTE(review): this happens before the stream threads are confirmed
     * terminated below -- assumes the threads no longer touch the CSC handles
     * once release() has been called on them; confirm. */
    if (m_exynosPictureCSC)
        csc_deinit(m_exynosPictureCSC);
    m_exynosPictureCSC = NULL;

    if (m_exynosVideoCSC)
        csc_deinit(m_exynosVideoCSC);
    m_exynosVideoCSC = NULL;

    /* Phase 2: poll until each thread reports termination, then drop the
     * strong sp<> reference (assignment to NULL releases it). */
    if (m_streamThreads[1] != NULL) {
        ALOGD("(HAL2::release): START Waiting for (indirect) stream thread 1 termination");
        while (!m_streamThreads[1]->IsTerminated())
            usleep(SIG_WAITING_TICK);
        ALOGD("(HAL2::release): END Waiting for (indirect) stream thread 1 termination");
        m_streamThreads[1] = NULL;
    }

    if (m_streamThreads[0] != NULL) {
        ALOGD("(HAL2::release): START Waiting for (indirect) stream thread 0 termination");
        while (!m_streamThreads[0]->IsTerminated())
            usleep(SIG_WAITING_TICK);
        ALOGD("(HAL2::release): END Waiting for (indirect) stream thread 0 termination");
        m_streamThreads[0] = NULL;
    }

    if (m_sensorThread != NULL) {
        ALOGD("(HAL2::release): START Waiting for (indirect) sensor thread termination");
        while (!m_sensorThread->IsTerminated())
            usleep(SIG_WAITING_TICK);
        ALOGD("(HAL2::release): END Waiting for (indirect) sensor thread termination");
        m_sensorThread = NULL;
    }

    if (m_mainThread != NULL) {
        ALOGD("(HAL2::release): START Waiting for (indirect) main thread termination");
        while (!m_mainThread->IsTerminated())
            usleep(SIG_WAITING_TICK);
        ALOGD("(HAL2::release): END Waiting for (indirect) main thread termination");
        m_mainThread = NULL;
    }

    if (m_requestManager != NULL) {
        delete m_requestManager;
        m_requestManager = NULL;
    }

    if (m_BayerManager != NULL) {
        delete m_BayerManager;
        m_BayerManager = NULL;
    }

    /* Free the raw-bayer (sensor) DMA buffers. */
    for (i = 0; i < NUM_BAYER_BUFFERS; i++)
        freeCameraMemory(&m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes);

    /* Free the SCC buffers: the detached local copies when they exist,
     * otherwise the live capture-node buffers. Plane count under
     * ENABLE_FRAME_SYNC includes the extra metadata plane. */
    if (m_sccLocalBufferValid) {
        for (i = 0; i < NUM_SCC_BUFFERS; i++)
#ifdef ENABLE_FRAME_SYNC
            freeCameraMemory(&m_sccLocalBuffer[i], 2);
#else
            freeCameraMemory(&m_sccLocalBuffer[i], 1);
#endif
    }
    else {
        for (i = 0; i < NUM_SCC_BUFFERS; i++)
            freeCameraMemory(&m_camera_info.capture.buffer[i], m_camera_info.capture.planes);
    }

    /* Close every V4L2 node; failures are logged but teardown continues. */
    ALOGV("DEBUG(%s): calling exynos_v4l2_close - sensor", __FUNCTION__);
    res = exynos_v4l2_close(m_camera_info.sensor.fd);
    if (res != NO_ERROR) {
        ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__, res);
    }

    ALOGV("DEBUG(%s): calling exynos_v4l2_close - isp", __FUNCTION__);
    res = exynos_v4l2_close(m_camera_info.isp.fd);
    if (res != NO_ERROR) {
        ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__, res);
    }

    ALOGV("DEBUG(%s): calling exynos_v4l2_close - capture", __FUNCTION__);
    res = exynos_v4l2_close(m_camera_info.capture.fd);
    if (res != NO_ERROR) {
        ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__, res);
    }

    ALOGV("DEBUG(%s): calling exynos_v4l2_close - scp", __FUNCTION__);
    res = exynos_v4l2_close(m_camera_info.scp.fd);
    if (res != NO_ERROR) {
        ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__, res);
    }

    ALOGV("DEBUG(%s): calling deleteIonClient", __FUNCTION__);
    deleteIonClient(m_ionCameraClient);

    ALOGD("(HAL2::release): EXIT");
}
/*
 * Open the FIMC-IS pipeline nodes (sensor=video40, isp=video41, scc/capture=video42,
 * scp/preview=video44), build the default "dummy" shot metadata, allocate the
 * bayer DMA buffers (shared between the sensor capture node and the ISP output
 * node), queue the initial sensor buffers and start sensor streaming, then
 * pre-configure (but do not start) the SCC capture node.
 *
 * Returns true on success, false on buffer-allocation or s_input failure.
 *
 * NOTE(review): body recovered from a mangled dump -- error-path braces, the
 * if/else around the open results, the sensor-id selection condition and local
 * declarations were reconstructed from the visible statement sequence; verify.
 */
int ExynosCameraHWInterface2::InitializeISPChain()
{
    char node_name[30];
    int fd = 0;
    int i;
    int ret = 0;

    /* Open sensor node. NOTE(review): sprintf is bounded here only by the fixed
     * "%s%d" pattern against NODE_PREFIX; snprintf(node_name, sizeof(node_name), ...)
     * would be safer if NODE_PREFIX ever grows. */
    memset(&node_name, 0x00, sizeof(char[30]));
    sprintf(node_name, "%s%d", NODE_PREFIX, 40);
    fd = exynos_v4l2_open(node_name, O_RDWR, 0);
    if (fd < 0) {
        ALOGE("ERR(%s): failed to open sensor video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
    }
    else {
        ALOGV("DEBUG(%s): sensor video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
    }
    m_camera_info.sensor.fd = fd;   // stored even on failure; later ioctls will fail loudly

    /* Open ISP node. */
    memset(&node_name, 0x00, sizeof(char[30]));
    sprintf(node_name, "%s%d", NODE_PREFIX, 41);
    fd = exynos_v4l2_open(node_name, O_RDWR, 0);
    if (fd < 0) {
        ALOGE("ERR(%s): failed to open isp video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
    }
    else {
        ALOGV("DEBUG(%s): isp video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
    }
    m_camera_info.isp.fd = fd;

    /* Open capture (SCC) node. */
    memset(&node_name, 0x00, sizeof(char[30]));
    sprintf(node_name, "%s%d", NODE_PREFIX, 42);
    fd = exynos_v4l2_open(node_name, O_RDWR, 0);
    if (fd < 0) {
        ALOGE("ERR(%s): failed to open capture video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
    }
    else {
        ALOGV("DEBUG(%s): capture video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
    }
    m_camera_info.capture.fd = fd;

    /* Open preview (SCP) node. */
    memset(&node_name, 0x00, sizeof(char[30]));
    sprintf(node_name, "%s%d", NODE_PREFIX, 44);
    fd = exynos_v4l2_open(node_name, O_RDWR, 0);
    if (fd < 0) {
        ALOGE("DEBUG(%s): failed to open preview video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
    }
    else {
        ALOGV("DEBUG(%s): preview video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
    }
    m_camera_info.scp.fd = fd;

    /* Select the sensor driver id.
     * NOTE(review): the selecting condition line was lost in extraction;
     * reconstructed as the camera-id test (0 = back S5K4E5, else front S5K6A3) -- confirm. */
    if (m_cameraId == 0)
        m_camera_info.sensor_id = SENSOR_NAME_S5K4E5;
    else
        m_camera_info.sensor_id = SENSOR_NAME_S5K6A3;

    /* Build the default per-frame shot metadata that is copied into every
     * sensor buffer's metadata plane before it is queued. */
    memset(&m_camera_info.dummy_shot, 0x00, sizeof(struct camera2_shot_ext));
    m_camera_info.dummy_shot.shot.ctl.request.metadataMode = METADATA_MODE_FULL;
    m_camera_info.dummy_shot.shot.magicNumber = 0x23456789;   // driver-side sanity marker

    m_camera_info.dummy_shot.dis_bypass = 1;
    m_camera_info.dummy_shot.dnr_bypass = 1;
    m_camera_info.dummy_shot.fd_bypass = 1;

    /* 0 = let the ISP choose exposure/duration/sensitivity (auto). */
    m_camera_info.dummy_shot.shot.ctl.sensor.exposureTime = 0;
    m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 0;
    m_camera_info.dummy_shot.shot.ctl.sensor.sensitivity = 0;

    m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[0] = 0;
    m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[1] = 0;

    /* Default output routing: sensor on, SCC/SCP off. */
    m_camera_info.dummy_shot.request_sensor = 1;
    m_camera_info.dummy_shot.request_scc = 0;
    m_camera_info.dummy_shot.request_scp = 0;
    m_camera_info.dummy_shot.shot.ctl.request.outputStreams[0] = 0;

    /* Sensor node configuration: raw bayer, 16bpp (hence the *2 below),
     * plane 1 carries the per-frame shot metadata. */
    m_camera_info.sensor.width = m_camera2->getSensorRawW();
    m_camera_info.sensor.height = m_camera2->getSensorRawH();

    m_camera_info.sensor.format = V4L2_PIX_FMT_SBGGR16;
    m_camera_info.sensor.planes = 2;
    m_camera_info.sensor.buffers = NUM_BAYER_BUFFERS;
    m_camera_info.sensor.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    m_camera_info.sensor.memory = V4L2_MEMORY_DMABUF;

    for(i = 0; i < m_camera_info.sensor.buffers; i++){
        int res;
        initCameraMemory(&m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes);
        m_camera_info.sensor.buffer[i].size.extS[0] = m_camera_info.sensor.width*m_camera_info.sensor.height*2;
        m_camera_info.sensor.buffer[i].size.extS[1] = 8*1024; // HACK, driver use 8*1024, should be use predefined value
        res = allocCameraMemory(m_ionCameraClient, &m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes, 1<<1);
        if (res) {
            ALOGE("ERROR(%s): failed to allocateCameraMemory for sensor buffer %d", __FUNCTION__, i);
            // Free allocated sensor buffers
            for (int j = 0; j < i; j++) {
                freeCameraMemory(&m_camera_info.sensor.buffer[j], m_camera_info.sensor.planes);
            }
            return false;
        }
    }

    /* ISP output node mirrors the sensor node exactly and shares its DMA-BUF
     * fds and mapped pointers -- no second allocation is made. */
    m_camera_info.isp.width = m_camera_info.sensor.width;
    m_camera_info.isp.height = m_camera_info.sensor.height;
    m_camera_info.isp.format = m_camera_info.sensor.format;
    m_camera_info.isp.planes = m_camera_info.sensor.planes;
    m_camera_info.isp.buffers = m_camera_info.sensor.buffers;
    m_camera_info.isp.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    m_camera_info.isp.memory = V4L2_MEMORY_DMABUF;

    for(i = 0; i < m_camera_info.isp.buffers; i++){
        initCameraMemory(&m_camera_info.isp.buffer[i], m_camera_info.isp.planes);
        m_camera_info.isp.buffer[i].size.extS[0]    = m_camera_info.sensor.buffer[i].size.extS[0];
        m_camera_info.isp.buffer[i].size.extS[1]    = m_camera_info.sensor.buffer[i].size.extS[1];
        m_camera_info.isp.buffer[i].fd.extFd[0]     = m_camera_info.sensor.buffer[i].fd.extFd[0];
        m_camera_info.isp.buffer[i].fd.extFd[1]     = m_camera_info.sensor.buffer[i].fd.extFd[1];
        m_camera_info.isp.buffer[i].virt.extP[0]    = m_camera_info.sensor.buffer[i].virt.extP[0];
        m_camera_info.isp.buffer[i].virt.extP[1]    = m_camera_info.sensor.buffer[i].virt.extP[1];
    }

    /* Standard V4L2 bring-up order for the ISP node: s_input -> s_fmt -> reqbufs. */
    ret = cam_int_s_input(&(m_camera_info.isp), m_camera_info.sensor_id);
    if (ret < 0) {
        ALOGE("ERR(%s): cam_int_s_input(%d) failed!!!! ",  __FUNCTION__, m_camera_info.sensor_id);
        return false;
    }
    cam_int_s_fmt(&(m_camera_info.isp));
    ALOGV("DEBUG(%s): isp calling reqbuf", __FUNCTION__);
    cam_int_reqbufs(&(m_camera_info.isp));
    ALOGV("DEBUG(%s): isp calling querybuf", __FUNCTION__);
    ALOGV("DEBUG(%s): isp mem alloc done",  __FUNCTION__);

    /* Sensor node bring-up. */
    cam_int_s_input(&(m_camera_info.sensor), m_camera_info.sensor_id);
    ALOGV("DEBUG(%s): sensor s_input done",  __FUNCTION__);
    if (cam_int_s_fmt(&(m_camera_info.sensor))< 0) {
        ALOGE("ERR(%s): sensor s_fmt fail",  __FUNCTION__);
    }
    ALOGV("DEBUG(%s): sensor s_fmt done",  __FUNCTION__);
    cam_int_reqbufs(&(m_camera_info.sensor));
    ALOGV("DEBUG(%s): sensor reqbuf done",  __FUNCTION__);

    /* Stamp every sensor buffer's metadata plane with the dummy shot. */
    for (i = 0; i < m_camera_info.sensor.buffers; i++) {
        ALOGV("DEBUG(%s): sensor initial QBUF [%d]",  __FUNCTION__, i);
        m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 33*1000*1000; // apply from frame #1
        m_camera_info.dummy_shot.shot.ctl.request.frameCount = -1;
        memcpy( m_camera_info.sensor.buffer[i].virt.extP[1], &(m_camera_info.dummy_shot),
                sizeof(struct camera2_shot_ext));
    }

    /* Queue only the minimum number of buffers to the driver; keep the rest
     * in the request manager's sensor queue. */
    for (i = 0; i < NUM_MIN_SENSOR_QBUF; i++)
        cam_int_qbuf(&(m_camera_info.sensor), i);

    for (i = NUM_MIN_SENSOR_QBUF; i < m_camera_info.sensor.buffers; i++)
        m_requestManager->pushSensorQ(i);

    ALOGV("== stream_on :: sensor");
    cam_int_streamon(&(m_camera_info.sensor));
    m_camera_info.sensor.status = true;

    /* SCC (still-capture) node: configured here, buffers allocated and the
     * stream started later by StartSCCThread(). Under ENABLE_FRAME_SYNC a
     * second plane carries frame-sync metadata. */
    m_camera_info.capture.width = m_camera2->getSensorW();
    m_camera_info.capture.height = m_camera2->getSensorH();
    m_camera_info.capture.format = V4L2_PIX_FMT_YUYV;
#ifdef ENABLE_FRAME_SYNC
    m_camera_info.capture.planes = 2;
#else
    m_camera_info.capture.planes = 1;
#endif
    m_camera_info.capture.buffers = NUM_SCC_BUFFERS;
    m_camera_info.capture.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    m_camera_info.capture.memory = V4L2_MEMORY_DMABUF;

    m_camera_info.capture.status = false;

    return true;
}
/*
 * Create (or re-activate) stream thread 1 as the INDIRECT still-capture (SCC)
 * stream: allocate or restore its local buffers, configure the capture node,
 * queue all buffers and start streaming.
 *
 * @param threadExists  false = first start (create thread + allocate buffers),
 *                      true  = thread object already exists, restore the saved
 *                              local buffers into the capture node.
 *
 * NOTE(review): recovered from a mangled dump -- braces, the #else/#endif
 * lines and the else-branches were reconstructed from the statement sequence.
 */
void ExynosCameraHWInterface2::StartSCCThread(bool threadExists)
{
    ALOGV("(%s)", __FUNCTION__);
    StreamThread    *AllocatedStream;
    stream_parameters_t newParameters;
    uint32_t format_actual;

    if (!threadExists) {
        m_streamThreads[1]  = new StreamThread(this, 1);
    }
    AllocatedStream = (StreamThread*)(m_streamThreads[1].get());
    if (!threadExists) {
        AllocatedStream->Start("StreamThread", PRIORITY_DEFAULT, 0);
        m_streamThreadInitialize((SignalDrivenThread*)AllocatedStream);
        AllocatedStream->m_numRegisteredStream = 1;
    }
    AllocatedStream->m_index = 1;

    format_actual = HAL_PIXEL_FORMAT_YCbCr_422_I; // YUYV

    /* Full-sensor-size YUYV stream straight off the SCC node; no service
     * stream ops -- frames are consumed internally (e.g. for JPEG). */
    newParameters.width             = m_camera2->getSensorW();
    newParameters.height            = m_camera2->getSensorH();
    newParameters.format            = format_actual;
    newParameters.streamOps         = NULL;
    newParameters.numHwBuffers      = NUM_SCC_BUFFERS;
#ifdef ENABLE_FRAME_SYNC
    newParameters.planes            = 2;   // extra plane carries frame-sync metadata
#else
    newParameters.planes            = 1;
#endif
    newParameters.numSvcBufsInHal   = 0;
    newParameters.node              = &m_camera_info.capture;

    AllocatedStream->streamType     = STREAM_TYPE_INDIRECT;
    ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, AllocatedStream->m_numRegisteredStream);

    if (!threadExists) {
        /* First start: allocate the SCC buffers once and keep a local copy so
         * they survive stream-thread teardown/restart. */
        if (!m_sccLocalBufferValid) {
            for (int i = 0; i < m_camera_info.capture.buffers; i++){
                initCameraMemory(&m_camera_info.capture.buffer[i], newParameters.node->planes);
                m_camera_info.capture.buffer[i].size.extS[0] = m_camera_info.capture.width*m_camera_info.capture.height*2;
#ifdef ENABLE_FRAME_SYNC
                m_camera_info.capture.buffer[i].size.extS[1] = 4*1024; // HACK, driver use 4*1024, should be use predefined value
                allocCameraMemory(m_ionCameraClient, &m_camera_info.capture.buffer[i], m_camera_info.capture.planes, 1<<1);
#else
                allocCameraMemory(m_ionCameraClient, &m_camera_info.capture.buffer[i], m_camera_info.capture.planes);
#endif
                m_sccLocalBuffer[i] = m_camera_info.capture.buffer[i];
            }
            m_sccLocalBufferValid = true;
        }
    }
    else {
        /* Restart: restore the previously allocated buffers into the node. */
        if (m_sccLocalBufferValid) {
            for (int i = 0; i < m_camera_info.capture.buffers; i++)
                m_camera_info.capture.buffer[i] = m_sccLocalBuffer[i];
        }
        else {
            ALOGE("(%s): SCC Thread starting with no buffer", __FUNCTION__);
        }
    }

    /* V4L2 bring-up: s_input -> s_fmt -> reqbufs -> qbuf all -> streamon. */
    cam_int_s_input(newParameters.node, m_camera_info.sensor_id);
    m_camera_info.capture.buffers = NUM_SCC_BUFFERS;
    cam_int_s_fmt(newParameters.node);
    ALOGV("DEBUG(%s): capture calling reqbuf", __FUNCTION__);
    cam_int_reqbufs(newParameters.node);
    ALOGV("DEBUG(%s): capture calling querybuf", __FUNCTION__);

    for (int i = 0; i < newParameters.node->buffers; i++) {
        ALOGV("DEBUG(%s): capture initial QBUF [%d]",  __FUNCTION__, i);
        cam_int_qbuf(newParameters.node, i);
        newParameters.svcBufStatus[i] = ON_DRIVER;
    }

    ALOGV("== stream_on :: capture");
    if (cam_int_streamon(newParameters.node) < 0) {
        ALOGE("ERR(%s): capture stream on fail", __FUNCTION__);
    }
    else {
        m_camera_info.capture.status = true;
    }

    AllocatedStream->setParameter(&newParameters);
    AllocatedStream->m_activated    = true;
    AllocatedStream->m_isBufferInit = true;
}
/*
 * Start ISP processing: STREAMON the ISP output node, then issue the
 * V4L2_CID_IS_S_STREAM control on the sensor node to enable the FIMC-IS stream.
 */
void ExynosCameraHWInterface2::StartISP()
{
    ALOGV("== stream_on :: isp");
    cam_int_streamon(&(m_camera_info.isp));
    exynos_v4l2_s_ctrl(m_camera_info.sensor.fd, V4L2_CID_IS_S_STREAM, IS_ENABLE_STREAM);
}
1473 int ExynosCameraHWInterface2::getCameraId() const
/*
 * Camera2 HAL entry point: store the framework's request-queue source ops.
 * All four function pointers must be non-NULL; otherwise the ops are rejected.
 * Returns 0 on success, 1 on NULL arguments.
 *
 * NOTE(review): the return statements and closing braces were lost in the
 * extracted dump and are reconstructed from the surrounding logic; confirm.
 */
int ExynosCameraHWInterface2::setRequestQueueSrcOps(const camera2_request_queue_src_ops_t *request_src_ops)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    if ((NULL != request_src_ops) && (NULL != request_src_ops->dequeue_request)
            && (NULL != request_src_ops->free_request) && (NULL != request_src_ops->request_count)) {
        m_requestQueueOps = (camera2_request_queue_src_ops_t*)request_src_ops;
        return 0;
    }
    else {
        ALOGE("DEBUG(%s):setRequestQueueSrcOps : NULL arguments", __FUNCTION__);
        return 1;
    }
}
/*
 * Camera2 HAL entry point: the framework has queued at least one capture request.
 * Lazily (re)starts the pipeline on first use -- re-arms the sensor/ISP nodes,
 * (re)starts the SCC stream thread, re-queues direct-stream buffers -- then
 * wakes the main thread to start draining the request queue.
 * Returns 0 (also when the call is ignored because the queue ops are not set).
 *
 * NOTE(review): recovered from a mangled dump -- braces, else-branches, the
 * local declaration of 'i', 'v4l2_buf.index = i;' and the trailing return were
 * reconstructed from the visible statement sequence; verify against the
 * pristine file.
 */
int ExynosCameraHWInterface2::notifyRequestQueueNotEmpty()
{
    int i = 0;

    ALOGV("DEBUG(%s):setting [SIGNAL_MAIN_REQ_Q_NOT_EMPTY] current(%d)", __FUNCTION__, m_requestManager->GetNumEntries());
    if ((NULL==m_frameQueueOps)|| (NULL==m_requestQueueOps)) {
        ALOGE("DEBUG(%s):queue ops NULL. ignoring request", __FUNCTION__);
        return 0;
    }
    m_isRequestQueueNull = false;
    if (m_requestManager->GetNumEntries() == 0)
        m_requestManager->SetInitialSkip(0);

    if (m_isIspStarted == false) {
        /* ISP not yet streaming: re-negotiate sensor/ISP buffers. */
        m_camera_info.sensor.buffers = NUM_BAYER_BUFFERS;
        m_camera_info.isp.buffers = m_camera_info.sensor.buffers;
        cam_int_s_fmt(&(m_camera_info.isp));
        cam_int_reqbufs(&(m_camera_info.isp));

        /* Bring the sensor node up if it is not already streaming:
         * stamp every buffer with the dummy shot, queue the minimum set,
         * park the rest in the request manager, then STREAMON. */
        if (m_camera_info.sensor.status == false) {
            cam_int_s_fmt(&(m_camera_info.sensor));
            cam_int_reqbufs(&(m_camera_info.sensor));

            for (i = 0; i < m_camera_info.sensor.buffers; i++) {
                ALOGV("DEBUG(%s): sensor initial QBUF [%d]",  __FUNCTION__, i);
                m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 33*1000*1000; // apply from frame #1
                m_camera_info.dummy_shot.shot.ctl.request.frameCount = -1;
                memcpy( m_camera_info.sensor.buffer[i].virt.extP[1], &(m_camera_info.dummy_shot),
                        sizeof(struct camera2_shot_ext));
            }
            for (i = 0; i < NUM_MIN_SENSOR_QBUF; i++)
                cam_int_qbuf(&(m_camera_info.sensor), i);

            for (i = NUM_MIN_SENSOR_QBUF; i < m_camera_info.sensor.buffers; i++)
                m_requestManager->pushSensorQ(i);

            ALOGV("DEBUG(%s): calling sensor streamon", __FUNCTION__);
            cam_int_streamon(&(m_camera_info.sensor));
            m_camera_info.sensor.status = true;
        }
    }

    /* Make sure stream thread 1 (SCC) exists and is active. */
    if (!(m_streamThreads[1].get())) {
        ALOGV("DEBUG(%s): stream thread 1 not exist. starting without stream", __FUNCTION__);
        StartSCCThread(false);
    }
    else {
        if (m_streamThreads[1]->m_activated == false) {
            ALOGV("DEBUG(%s): stream thread 1 suspended. restarting", __FUNCTION__);
            StartSCCThread(true);
        }
        else {
            if (m_camera_info.capture.status == false) {
                /* Capture node stopped: reconfigure and re-queue its buffers. */
                m_camera_info.capture.buffers = NUM_SCC_BUFFERS;
                cam_int_s_fmt(&(m_camera_info.capture));
                ALOGV("DEBUG(%s): capture calling reqbuf", __FUNCTION__);
                cam_int_reqbufs(&(m_camera_info.capture));
                ALOGV("DEBUG(%s): capture calling querybuf", __FUNCTION__);

                if (m_streamThreads[1]->streamType == STREAM_TYPE_DIRECT) {
                    /* Direct (ZSL) stream: hand the service-owned gralloc
                     * buffers back to the driver by raw QBUF. */
                    StreamThread *targetStream = m_streamThreads[1].get();
                    stream_parameters_t *targetStreamParms = &(targetStream->m_parameters);
                    node_info_t *currentNode = targetStreamParms->node;

                    struct v4l2_buffer v4l2_buf;
                    struct v4l2_plane  planes[VIDEO_MAX_PLANES];

                    for (i = 0 ; i < targetStreamParms->numSvcBuffers ; i++) {
                        v4l2_buf.m.planes   = planes;
                        v4l2_buf.type       = currentNode->type;
                        v4l2_buf.memory     = currentNode->memory;
                        v4l2_buf.index      = i;   // NOTE(review): line lost in extraction; reconstructed -- confirm
                        v4l2_buf.length     = currentNode->planes;

                        ExynosBuffer metaBuf = targetStreamParms->metaBuffers[i];

                        if (i < currentNode->buffers) {
#ifdef ENABLE_FRAME_SYNC
                            /* Plane order swap (0,2,1) matches the driver's expected fd layout. */
                            v4l2_buf.m.planes[0].m.fd = targetStreamParms->svcBuffers[i].fd.extFd[0];
                            v4l2_buf.m.planes[2].m.fd = targetStreamParms->svcBuffers[i].fd.extFd[1];
                            v4l2_buf.m.planes[1].m.fd = targetStreamParms->svcBuffers[i].fd.extFd[2];
                            /* Append the metadata plane last. */
                            v4l2_buf.length += targetStreamParms->metaPlanes;
                            v4l2_buf.m.planes[v4l2_buf.length-1].m.fd = metaBuf.fd.extFd[0];
                            v4l2_buf.m.planes[v4l2_buf.length-1].length = metaBuf.size.extS[0];

                            ALOGV("Qbuf metaBuf: fd(%d), length(%d) plane(%d)", metaBuf.fd.extFd[0], metaBuf.size.extS[0], v4l2_buf.length);
#endif
                            if (exynos_v4l2_qbuf(currentNode->fd, &v4l2_buf) < 0) {
                                ALOGE("ERR(%s): exynos_v4l2_qbuf() fail fd(%d)", __FUNCTION__, currentNode->fd);
                            }
                            ALOGV("DEBUG(%s): exynos_v4l2_qbuf() success fd(%d)", __FUNCTION__, currentNode->fd);
                            targetStreamParms->svcBufStatus[i] = REQUIRES_DQ_FROM_SVC;
                        }
                        else {
                            targetStreamParms->svcBufStatus[i] = ON_SERVICE;
                        }
                    }
                }
                else {
                    /* Indirect stream: HAL owns the buffers, queue them all. */
                    for (int i = 0; i < m_camera_info.capture.buffers; i++) {
                        ALOGV("DEBUG(%s): capture initial QBUF [%d]",  __FUNCTION__, i);
                        cam_int_qbuf(&(m_camera_info.capture), i);
                    }
                }

                ALOGV("== stream_on :: capture");
                if (cam_int_streamon(&(m_camera_info.capture)) < 0) {
                    ALOGE("ERR(%s): capture stream on fail", __FUNCTION__);
                }
                else {
                    m_camera_info.capture.status = true;
                }
            }
            if (m_scpForceSuspended) {
                m_scpForceSuspended = false;
            }
        }
    }

    if (m_isIspStarted == false) {
        StartISP();   // NOTE(review): call line lost in extraction; reconstructed -- confirm
        ALOGV("DEBUG(%s):starting sensor thread", __FUNCTION__);
        m_requestManager->SetInitialSkip(6);   // skip first frames while 3A settles
        m_sensorThread->Start("SensorThread", PRIORITY_DEFAULT, 0);
        m_isIspStarted = true;
    }
    m_mainThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY);
    return 0;
}
/*
 * Camera2 HAL entry point: store the framework's frame-queue destination ops.
 * All three function pointers must be non-NULL; otherwise the ops are rejected.
 * Returns 0 on success, 1 on NULL arguments.
 *
 * NOTE(review): return statements and closing braces reconstructed (lost in
 * the extracted dump); mirrors setRequestQueueSrcOps above.
 */
int ExynosCameraHWInterface2::setFrameQueueDstOps(const camera2_frame_queue_dst_ops_t *frame_dst_ops)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    if ((NULL != frame_dst_ops) && (NULL != frame_dst_ops->dequeue_frame)
            && (NULL != frame_dst_ops->cancel_frame) && (NULL !=frame_dst_ops->enqueue_frame)) {
        m_frameQueueOps = (camera2_frame_queue_dst_ops_t *)frame_dst_ops;
        return 0;
    }
    else {
        ALOGE("DEBUG(%s):setFrameQueueDstOps : NULL arguments", __FUNCTION__);
        return 1;
    }
}
/*
 * Camera2 HAL entry point: number of requests still in flight inside the HAL --
 * entries held by the request manager plus JPEG encodes in progress.
 * The snapshot of both counters is taken under m_jpegEncoderLock.
 */
int ExynosCameraHWInterface2::getInProgressCount()
{
    int inProgressJpeg;   // NOTE(review): declaration line lost in dump; reconstructed
    int inProgressCount;

    {
        Mutex::Autolock lock(m_jpegEncoderLock);
        inProgressJpeg = m_jpegEncodingCount;
        inProgressCount = m_requestManager->GetNumEntries();
    }

    ALOGV("DEBUG(%s): # of dequeued req (%d) jpeg(%d) = (%d)", __FUNCTION__,
        inProgressCount, inProgressJpeg, (inProgressCount + inProgressJpeg));
    return (inProgressCount + inProgressJpeg);
}
1647 int ExynosCameraHWInterface2::flushCapturesInProgress()
/*
 * Camera2 HAL entry point: build a default request metadata buffer for the
 * given template via the static metadata helper. Two-pass protocol:
 * pass 1 (sizeRequest=true) computes size and allocates, pass 2 fills it in.
 * Returns BAD_VALUE for NULL output or out-of-range template, else the helper's
 * status.
 *
 * NOTE(review): the return statements, the 'res' declaration and the trailing
 * call arguments were lost in the extracted dump and are reconstructed from
 * the two-pass convention; confirm against the pristine file.
 */
int ExynosCameraHWInterface2::constructDefaultRequest(int request_template, camera_metadata_t **request)
{
    ALOGV("DEBUG(%s): making template (%d) ", __FUNCTION__, request_template);

    if (request == NULL) return BAD_VALUE;
    if (request_template < 0 || request_template >= CAMERA2_TEMPLATE_COUNT) {
        return BAD_VALUE;
    }
    status_t res;
    // Pass 1, calculate size and allocate
    res = m_camera2->constructDefaultRequest(request_template,
            request,
            true);
    if (res != OK) {
        return res;
    }
    // Pass 2, build request
    res = m_camera2->constructDefaultRequest(request_template,
            request,
            false);
    if (res != OK) {
        ALOGE("Unable to populate new request for template %d",
                request_template);
    }

    return res;
}
/*
 * Camera2 HAL entry point: allocate an output stream for the given
 * width/height/format. Routes to one of five cases:
 *   1. IMPLEMENTATION_DEFINED / OPAQUE at a supported resolution
 *        -> STREAM_ID_PREVIEW as a DIRECT stream on the SCP node (allocCase 0/2),
 *           or STREAM_ID_RECORD as a substream of the preview (allocCase 1).
 *   2. ZSL format at full sensor resolution
 *        -> STREAM_ID_ZSL as a DIRECT stream on the capture (SCC) node,
 *           recreating stream thread 1 if it is currently the indirect SCC stream.
 *   3. BLOB at a supported JPEG resolution -> STREAM_ID_JPEG substream of stream 1.
 *   4. YCrCb_420_SP / YV12 -> STREAM_ID_PRVCB substream of the preview stream.
 *   5. Anything else -> error.
 * Outputs *stream_id, *format_actual, *usage, *max_buffers. Returns 0 on
 * success, 1 on failure.
 *
 * NOTE(review): recovered from a mangled dump. Braces, else-branches, local
 * declarations, allocCase assignments, return statements, the m_wideAspect
 * usage guards and the *max_buffers values were lost in extraction and are
 * reconstructed (hedged inline below); verify every reconstructed line
 * against the pristine file before shipping.
 */
int ExynosCameraHWInterface2::allocateStream(uint32_t width, uint32_t height, int format, const camera2_stream_ops_t *stream_ops,
                                    uint32_t *stream_id, uint32_t *format_actual, uint32_t *usage, uint32_t *max_buffers)
{
    ALOGD("(%s): stream width(%d) height(%d) format(%x)", __FUNCTION__,  width, height, format);
    bool useDirectOutput = false;
    StreamThread *AllocatedStream;
    stream_parameters_t newParameters;
    substream_parameters_t *subParameters;
    StreamThread *parentStream;
    status_t res;
    int allocCase = 0;   // 0: no preview thread, 1: active, 2: exists but deactivated

    if ((format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED || format == CAMERA2_HAL_PIXEL_FORMAT_OPAQUE) &&
            m_camera2->isSupportedResolution(width, height)) {
        if (!(m_streamThreads[0].get())) {
            ALOGV("DEBUG(%s): stream 0 not exist", __FUNCTION__);
            allocCase = 0;
        }
        else {
            if ((m_streamThreads[0].get())->m_activated == true) {
                ALOGV("DEBUG(%s): stream 0 exists and activated.", __FUNCTION__);
                allocCase = 1;
            }
            else {
                ALOGV("DEBUG(%s): stream 0 exists and deactivated.", __FUNCTION__);
                allocCase = 2;
            }
        }

        // TODO : instead of that, use calculate aspect ratio and selection with calculated ratio.
        if ((width == 1920 && height == 1080) || (width == 1280 && height == 720)
                || (width == 720 && height == 480) || (width == 1440 && height == 960)
                || (width == 1344 && height == 896)) {
            m_wideAspect = true;
        }
        else {
            m_wideAspect = false;
        }
        ALOGV("DEBUG(%s): m_wideAspect (%d)", __FUNCTION__, m_wideAspect);

        if (allocCase == 0 || allocCase == 2) {
            /* Create (or recreate) the DIRECT preview stream on the SCP node. */
            *stream_id = STREAM_ID_PREVIEW;

            m_streamThreads[0] = new StreamThread(this, *stream_id);

            AllocatedStream = (StreamThread*)(m_streamThreads[0].get());
            AllocatedStream->Start("StreamThread", PRIORITY_DEFAULT, 0);
            m_streamThreadInitialize((SignalDrivenThread*)AllocatedStream);

            *format_actual = HAL_PIXEL_FORMAT_EXYNOS_YV12;
            *usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
            if (m_wideAspect)   // NOTE(review): guard line lost in extraction; reconstructed
                *usage |= GRALLOC_USAGE_PRIVATE_CHROMA;
            *max_buffers = 7;   // NOTE(review): value line lost in extraction; AOSP used 7 -- confirm

            newParameters.width                 = width;
            newParameters.height                = height;
            newParameters.format                = *format_actual;
            newParameters.streamOps             = stream_ops;
            newParameters.usage                 = *usage;
            newParameters.numHwBuffers          = NUM_SCP_BUFFERS;
            newParameters.numOwnSvcBuffers      = *max_buffers;
            newParameters.planes                = NUM_PLANES(*format_actual);
            newParameters.metaPlanes            = 1;
            newParameters.numSvcBufsInHal       = 0;
            newParameters.minUndequedBuffer     = 3;
            newParameters.needsIonMap           = true;

            newParameters.node                  = &m_camera_info.scp;
            newParameters.node->type            = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
            newParameters.node->memory          = V4L2_MEMORY_DMABUF;

            AllocatedStream->streamType         = STREAM_TYPE_DIRECT;
            AllocatedStream->m_index            = 0;
            AllocatedStream->setParameter(&newParameters);
            AllocatedStream->m_activated        = true;
            AllocatedStream->m_numRegisteredStream = 1;
            ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, AllocatedStream->m_numRegisteredStream);
            m_requestManager->SetDefaultParameters(m_camera2->getSensorW());
            m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[2] = m_camera2->getSensorW();
            /* Re-attach substreams that outlived a previous preview thread. */
            if (m_subStreams[STREAM_ID_RECORD].type != SUBSTREAM_TYPE_NONE)
                AllocatedStream->attachSubStream(STREAM_ID_RECORD, 10);
            if (m_subStreams[STREAM_ID_PRVCB].type != SUBSTREAM_TYPE_NONE)
                AllocatedStream->attachSubStream(STREAM_ID_PRVCB, 70);

            // set video stabilization killswitch
            m_requestManager->m_vdisEnable = width > 352 && height > 288;

            return 0;
        }
        else if (allocCase == 1) {
            /* Preview active: this second implementation-defined stream is the
             * recording stream, attached as a substream of the preview. */
            *stream_id = STREAM_ID_RECORD;

            subParameters = &m_subStreams[STREAM_ID_RECORD];
            memset(subParameters, 0, sizeof(substream_parameters_t));

            parentStream = (StreamThread*)(m_streamThreads[0].get());
            if (!parentStream) {
                return 1;   // NOTE(review): error-branch body lost in extraction; reconstructed
            }

            *format_actual = HAL_PIXEL_FORMAT_YCbCr_420_SP; // NV12M
            *usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
            if (m_wideAspect)   // NOTE(review): guard line lost in extraction; reconstructed
                *usage |= GRALLOC_USAGE_PRIVATE_CHROMA;
            *max_buffers = 6;   // NOTE(review): value line lost in extraction -- confirm

            subParameters->type         = SUBSTREAM_TYPE_RECORD;
            subParameters->width        = width;
            subParameters->height       = height;
            subParameters->format       = *format_actual;
            subParameters->svcPlanes    = NUM_PLANES(*format_actual);
            subParameters->streamOps    = stream_ops;
            subParameters->usage        = *usage;
            subParameters->numOwnSvcBuffers = *max_buffers;
            subParameters->numSvcBufsInHal  = 0;
            subParameters->needBufferInit   = false;
            subParameters->minUndequedBuffer = 2;

            res = parentStream->attachSubStream(STREAM_ID_RECORD, 20);
            if (res != NO_ERROR) {
                ALOGE("(%s): substream attach failed. res(%d)", __FUNCTION__, res);
                return 1;   // NOTE(review): reconstructed
            }
            ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, parentStream->m_numRegisteredStream);
            ALOGV("(%s): Enabling Record", __FUNCTION__);
            return 0;
        }
    }
    else if ((format == CAMERA2_HAL_PIXEL_FORMAT_ZSL)
            && ((int32_t)width == m_camera2->getSensorW()) && ((int32_t)height == m_camera2->getSensorH())) {

        if (!(m_streamThreads[1].get())) {
            ALOGV("DEBUG(%s): stream thread 1 not exist", __FUNCTION__);
            useDirectOutput = true;
        }
        else {
            ALOGV("DEBUG(%s): stream thread 1 exists and deactivated.", __FUNCTION__);
            useDirectOutput = false;
        }
        if (useDirectOutput) {
            /* Fresh ZSL: create stream thread 1 as a DIRECT stream on the SCC node. */
            *stream_id = STREAM_ID_ZSL;

            m_streamThreads[1] = new StreamThread(this, *stream_id);
            AllocatedStream = (StreamThread*)(m_streamThreads[1].get());
            AllocatedStream->Start("StreamThread", PRIORITY_DEFAULT, 0);
            m_streamThreadInitialize((SignalDrivenThread*)AllocatedStream);

            /* NOTE(review): the condition selecting between the two formats was
             * lost in extraction; reconstructed as the OPAQUE test -- confirm. */
            if (format == CAMERA2_HAL_PIXEL_FORMAT_OPAQUE)
                *format_actual = HAL_PIXEL_FORMAT_EXYNOS_YV12;
            else
                *format_actual = HAL_PIXEL_FORMAT_YCbCr_422_I; // YUYV
            *usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
            if (m_wideAspect)   // NOTE(review): guard line lost in extraction; reconstructed
                *usage |= GRALLOC_USAGE_PRIVATE_CHROMA;
            *max_buffers = 6;   // NOTE(review): value line lost in extraction -- confirm

            newParameters.width                 = width;
            newParameters.height                = height;
            newParameters.format                = *format_actual;
            newParameters.streamOps             = stream_ops;
            newParameters.usage                 = *usage;
            newParameters.numHwBuffers          = NUM_SCC_BUFFERS;
            newParameters.numOwnSvcBuffers      = *max_buffers;
            newParameters.planes                = NUM_PLANES(*format_actual);
            newParameters.metaPlanes            = 1;

            newParameters.numSvcBufsInHal       = 0;
            newParameters.minUndequedBuffer     = 2;
            newParameters.needsIonMap           = false;

            newParameters.node                  = &m_camera_info.capture;
            newParameters.node->type            = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
            newParameters.node->memory          = V4L2_MEMORY_DMABUF;

            AllocatedStream->streamType         = STREAM_TYPE_DIRECT;
            AllocatedStream->m_index            = 1;
            AllocatedStream->setParameter(&newParameters);
            AllocatedStream->m_activated        = true;
            AllocatedStream->m_numRegisteredStream = 1;
            ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, AllocatedStream->m_numRegisteredStream);
            return 0;
        }
        else {
            /* Stream thread 1 already exists (indirect SCC): tear it down and
             * re-initialize it as the direct ZSL stream, preserving a JPEG
             * substream if one was attached. */
            bool bJpegExists = false;
            AllocatedStream = (StreamThread*)(m_streamThreads[1].get());
            subParameters = &m_subStreams[STREAM_ID_JPEG];
            if (subParameters->type == SUBSTREAM_TYPE_JPEG) {
                ALOGD("(%s): jpeg stream exists", __FUNCTION__);
                bJpegExists = true;   // NOTE(review): line lost in extraction; reconstructed
                AllocatedStream->detachSubStream(STREAM_ID_JPEG);
            }
            AllocatedStream->m_releasing = true;
            ALOGD("START stream thread 1 release %d", __LINE__);
            do {
                AllocatedStream->release();
                usleep(SIG_WAITING_TICK);
            } while (AllocatedStream->m_releasing);
            ALOGD("END stream thread 1 release %d", __LINE__);

            *stream_id = STREAM_ID_ZSL;

            m_streamThreadInitialize((SignalDrivenThread*)AllocatedStream);

            /* NOTE(review): selector condition lost in extraction; reconstructed
             * as the OPAQUE test to mirror the branch above -- confirm. */
            if (format == CAMERA2_HAL_PIXEL_FORMAT_OPAQUE)
                *format_actual = HAL_PIXEL_FORMAT_EXYNOS_YV12;
            else
                *format_actual = HAL_PIXEL_FORMAT_YCbCr_422_I; // YUYV
            *usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
            if (m_wideAspect)   // NOTE(review): guard line lost in extraction; reconstructed
                *usage |= GRALLOC_USAGE_PRIVATE_CHROMA;
            *max_buffers = 6;   // NOTE(review): value line lost in extraction -- confirm

            newParameters.width                 = width;
            newParameters.height                = height;
            newParameters.format                = *format_actual;
            newParameters.streamOps             = stream_ops;
            newParameters.usage                 = *usage;
            newParameters.numHwBuffers          = NUM_SCC_BUFFERS;
            newParameters.numOwnSvcBuffers      = *max_buffers;
            newParameters.planes                = NUM_PLANES(*format_actual);
            newParameters.metaPlanes            = 1;

            newParameters.numSvcBufsInHal       = 0;
            newParameters.minUndequedBuffer     = 2;
            newParameters.needsIonMap           = false;

            newParameters.node                  = &m_camera_info.capture;
            newParameters.node->type            = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
            newParameters.node->memory          = V4L2_MEMORY_DMABUF;

            AllocatedStream->streamType         = STREAM_TYPE_DIRECT;
            AllocatedStream->m_index            = 1;
            AllocatedStream->setParameter(&newParameters);
            AllocatedStream->m_activated        = true;
            AllocatedStream->m_numRegisteredStream = 1;

            if (bJpegExists) {   // NOTE(review): guard lost in extraction; reconstructed
                AllocatedStream->attachSubStream(STREAM_ID_JPEG, 10);
            }
            ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, AllocatedStream->m_numRegisteredStream);
            return 0;
        }
    }
    else if (format == HAL_PIXEL_FORMAT_BLOB
            && m_camera2->isSupportedJpegResolution(width, height)) {
        /* JPEG output: a substream of stream thread 1 (SCC), which is started
         * as an indirect stream if it does not yet exist. */
        *stream_id = STREAM_ID_JPEG;

        subParameters = &m_subStreams[*stream_id];
        memset(subParameters, 0, sizeof(substream_parameters_t));

        if (!(m_streamThreads[1].get())) {
            ALOGV("DEBUG(%s): stream thread 1 not exist", __FUNCTION__);
            StartSCCThread(false);
        }
        else if (m_streamThreads[1]->m_activated == false) {
            ALOGV("DEBUG(%s): stream thread 1 suspended. restarting", __FUNCTION__);
            StartSCCThread(true);
        }
        parentStream = (StreamThread*)(m_streamThreads[1].get());

        *format_actual = HAL_PIXEL_FORMAT_BLOB;
        *usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
        if (m_wideAspect)   // NOTE(review): guard line lost in extraction; reconstructed
            *usage |= GRALLOC_USAGE_PRIVATE_CHROMA;
        *max_buffers = 4;   // NOTE(review): value line lost in extraction -- confirm

        subParameters->type          = SUBSTREAM_TYPE_JPEG;
        subParameters->width         = width;
        subParameters->height        = height;
        subParameters->format        = *format_actual;
        subParameters->svcPlanes     = 1;
        parentStream->m_parameters.streamOps =
            subParameters->streamOps = stream_ops;
        subParameters->usage         = *usage;
        subParameters->numOwnSvcBuffers = *max_buffers;
        subParameters->numSvcBufsInHal  = 0;
        subParameters->needBufferInit   = false;
        subParameters->minUndequedBuffer = 2;

        res = parentStream->attachSubStream(STREAM_ID_JPEG, 10);
        if (res != NO_ERROR) {
            ALOGE("(%s): substream attach failed. res(%d)", __FUNCTION__, res);
            return 1;   // NOTE(review): reconstructed
        }
        ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, parentStream->m_numRegisteredStream);
        ALOGV("(%s): Enabling Jpeg", __FUNCTION__);
        return 0;
    }
    else if (format == HAL_PIXEL_FORMAT_YCrCb_420_SP || format == HAL_PIXEL_FORMAT_YV12) {
        /* Preview-callback stream: substream of the preview stream, converted
         * to the requested format from the internal Exynos layout. */
        *stream_id = STREAM_ID_PRVCB;

        subParameters = &m_subStreams[STREAM_ID_PRVCB];
        memset(subParameters, 0, sizeof(substream_parameters_t));

        parentStream = (StreamThread*)(m_streamThreads[0].get());
        if (!parentStream) {
            return 1;   // NOTE(review): error-branch body lost in extraction; reconstructed
        }

        *format_actual = format;
        *usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
        if (m_wideAspect)   // NOTE(review): guard line lost in extraction; reconstructed
            *usage |= GRALLOC_USAGE_PRIVATE_CHROMA;
        *max_buffers = 6;   // NOTE(review): value line lost in extraction -- confirm

        subParameters->type         = SUBSTREAM_TYPE_PRVCB;
        subParameters->width        = width;
        subParameters->height       = height;
        subParameters->format       = *format_actual;
        subParameters->svcPlanes    = NUM_PLANES(*format_actual);
        parentStream->m_parameters.streamOps =
            subParameters->streamOps = stream_ops;
        subParameters->usage        = *usage;
        subParameters->numOwnSvcBuffers = *max_buffers;
        subParameters->numSvcBufsInHal  = 0;
        subParameters->needBufferInit   = false;
        subParameters->minUndequedBuffer = 2;

        if (format == HAL_PIXEL_FORMAT_YCrCb_420_SP) {
            subParameters->internalFormat = HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP;
            subParameters->internalPlanes = NUM_PLANES(HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP);
        }
        else {
            subParameters->internalFormat = HAL_PIXEL_FORMAT_EXYNOS_YV12;
            subParameters->internalPlanes = NUM_PLANES(HAL_PIXEL_FORMAT_EXYNOS_YV12);
        }

        res = parentStream->attachSubStream(STREAM_ID_PRVCB, 20);
        if (res != NO_ERROR) {
            ALOGE("(%s): substream attach failed. res(%d)", __FUNCTION__, res);
            return 1;   // NOTE(review): reconstructed
        }
        ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, parentStream->m_numRegisteredStream);
        ALOGV("(%s): Enabling previewcb", __FUNCTION__);
        return 0;
    }
    ALOGE("(%s): Unsupported Pixel Format", __FUNCTION__);
    return 1;
}
2016 int ExynosCameraHWInterface2::registerStreamBuffers(uint32_t stream_id
,
2017 int num_buffers
, buffer_handle_t
*registeringBuffers
)
2021 int plane_index
= 0;
2022 StreamThread
* targetStream
;
2023 stream_parameters_t
*targetStreamParms
;
2024 node_info_t
*currentNode
;
2026 struct v4l2_buffer v4l2_buf
;
2027 struct v4l2_plane planes
[VIDEO_MAX_PLANES
];
2029 ALOGD("(%s): stream_id(%d), num_buff(%d), handle(%x) ", __FUNCTION__
,
2030 stream_id
, num_buffers
, (uint32_t)registeringBuffers
);
2032 if (stream_id
== STREAM_ID_PREVIEW
&& m_streamThreads
[0].get()) {
2033 targetStream
= m_streamThreads
[0].get();
2034 targetStreamParms
= &(m_streamThreads
[0]->m_parameters
);
2037 else if (stream_id
== STREAM_ID_JPEG
|| stream_id
== STREAM_ID_RECORD
|| stream_id
== STREAM_ID_PRVCB
) {
2038 substream_parameters_t
*targetParms
;
2039 targetParms
= &m_subStreams
[stream_id
];
2041 targetParms
->numSvcBuffers
= num_buffers
;
2043 for (i
= 0 ; i
< targetParms
->numSvcBuffers
; i
++) {
2044 ALOGV("(%s): registering substream(%d) Buffers[%d] (%x) ", __FUNCTION__
,
2045 i
, stream_id
, (uint32_t)(registeringBuffers
[i
]));
2047 if (m_grallocHal
->lock(m_grallocHal
, registeringBuffers
[i
],
2048 targetParms
->usage
, 0, 0,
2049 targetParms
->width
, targetParms
->height
, virtAddr
) != 0) {
2050 ALOGE("ERR(%s): could not obtain gralloc buffer", __FUNCTION__
);
2053 ExynosBuffer currentBuf
;
2054 const private_handle_t
*priv_handle
= reinterpret_cast<const private_handle_t
*>(registeringBuffers
[i
]);
2055 if (targetParms
->svcPlanes
== 1) {
2056 currentBuf
.fd
.extFd
[0] = priv_handle
->fd
;
2057 currentBuf
.size
.extS
[0] = priv_handle
->size
;
2058 currentBuf
.size
.extS
[1] = 0;
2059 currentBuf
.size
.extS
[2] = 0;
2060 } else if (targetParms
->svcPlanes
== 2) {
2061 currentBuf
.fd
.extFd
[0] = priv_handle
->fd
;
2062 currentBuf
.fd
.extFd
[1] = priv_handle
->fd1
;
2064 } else if (targetParms
->svcPlanes
== 3) {
2065 currentBuf
.fd
.extFd
[0] = priv_handle
->fd
;
2066 currentBuf
.fd
.extFd
[1] = priv_handle
->fd1
;
2067 currentBuf
.fd
.extFd
[2] = priv_handle
->fd2
;
2069 for (plane_index
= 0 ; plane_index
< targetParms
->svcPlanes
; plane_index
++) {
2070 currentBuf
.virt
.extP
[plane_index
] = (char *)virtAddr
[plane_index
];
2071 CAM_LOGV("DEBUG(%s): plane(%d): fd(%d) addr(%x) size(%d)",
2072 __FUNCTION__
, plane_index
, currentBuf
.fd
.extFd
[plane_index
],
2073 (unsigned int)currentBuf
.virt
.extP
[plane_index
], currentBuf
.size
.extS
[plane_index
]);
2075 targetParms
->svcBufStatus
[i
] = ON_SERVICE
;
2076 targetParms
->svcBuffers
[i
] = currentBuf
;
2077 targetParms
->svcBufHandle
[i
] = registeringBuffers
[i
];
2081 targetParms
->needBufferInit
= true;
2084 else if (stream_id
== STREAM_ID_ZSL
&& m_streamThreads
[1].get()) {
2085 targetStream
= m_streamThreads
[1].get();
2086 targetStreamParms
= &(m_streamThreads
[1]->m_parameters
);
2089 ALOGE("(%s): unregistered stream id (%d)", __FUNCTION__
, stream_id
);
2093 if (targetStream
->streamType
== STREAM_TYPE_DIRECT
) {
2094 if (num_buffers
< targetStreamParms
->numHwBuffers
) {
2095 ALOGE("ERR(%s) registering insufficient num of buffers (%d) < (%d)",
2096 __FUNCTION__
, num_buffers
, targetStreamParms
->numHwBuffers
);
2100 CAM_LOGV("DEBUG(%s): format(%x) width(%d), height(%d) svcPlanes(%d)",
2101 __FUNCTION__
, targetStreamParms
->format
, targetStreamParms
->width
,
2102 targetStreamParms
->height
, targetStreamParms
->planes
);
2103 targetStreamParms
->numSvcBuffers
= num_buffers
;
2104 currentNode
= targetStreamParms
->node
;
2105 currentNode
->width
= targetStreamParms
->width
;
2106 currentNode
->height
= targetStreamParms
->height
;
2107 currentNode
->format
= HAL_PIXEL_FORMAT_2_V4L2_PIX(targetStreamParms
->format
);
2108 currentNode
->planes
= targetStreamParms
->planes
;
2109 currentNode
->buffers
= targetStreamParms
->numHwBuffers
;
2110 cam_int_s_input(currentNode
, m_camera_info
.sensor_id
);
2111 cam_int_s_fmt(currentNode
);
2112 cam_int_reqbufs(currentNode
);
2113 for (i
= 0 ; i
< targetStreamParms
->numSvcBuffers
; i
++) {
2114 ALOGV("DEBUG(%s): registering Stream Buffers[%d] (%x) ", __FUNCTION__
,
2115 i
, (uint32_t)(registeringBuffers
[i
]));
2116 v4l2_buf
.m
.planes
= planes
;
2117 v4l2_buf
.type
= currentNode
->type
;
2118 v4l2_buf
.memory
= currentNode
->memory
;
2120 v4l2_buf
.length
= currentNode
->planes
;
2122 ExynosBuffer currentBuf
;
2123 ExynosBuffer metaBuf
;
2124 const private_handle_t
*priv_handle
= reinterpret_cast<const private_handle_t
*>(registeringBuffers
[i
]);
2126 m_getAlignedYUVSize(currentNode
->format
,
2127 currentNode
->width
, currentNode
->height
, ¤tBuf
);
2129 ALOGV("DEBUG(%s): ion_size(%d), stride(%d), ", __FUNCTION__
, priv_handle
->size
, priv_handle
->stride
);
2130 if (currentNode
->planes
== 1) {
2131 v4l2_buf
.m
.planes
[0].m
.fd
= priv_handle
->fd
;
2132 currentBuf
.fd
.extFd
[0] = priv_handle
->fd
;
2133 currentBuf
.size
.extS
[0] = priv_handle
->size
;
2134 currentBuf
.size
.extS
[1] = 0;
2135 currentBuf
.size
.extS
[2] = 0;
2136 } else if (currentNode
->planes
== 2) {
2137 v4l2_buf
.m
.planes
[0].m
.fd
= priv_handle
->fd
;
2138 v4l2_buf
.m
.planes
[1].m
.fd
= priv_handle
->fd1
;
2139 currentBuf
.fd
.extFd
[0] = priv_handle
->fd
;
2140 currentBuf
.fd
.extFd
[1] = priv_handle
->fd1
;
2142 } else if (currentNode
->planes
== 3) {
2143 v4l2_buf
.m
.planes
[0].m
.fd
= priv_handle
->fd
;
2144 v4l2_buf
.m
.planes
[2].m
.fd
= priv_handle
->fd1
;
2145 v4l2_buf
.m
.planes
[1].m
.fd
= priv_handle
->fd2
;
2146 currentBuf
.fd
.extFd
[0] = priv_handle
->fd
;
2147 currentBuf
.fd
.extFd
[2] = priv_handle
->fd1
;
2148 currentBuf
.fd
.extFd
[1] = priv_handle
->fd2
;
2151 for (plane_index
= 0 ; plane_index
< (int)v4l2_buf
.length
; plane_index
++) {
2152 if (targetStreamParms
->needsIonMap
)
2153 currentBuf
.virt
.extP
[plane_index
] = (char *)ion_map(currentBuf
.fd
.extFd
[plane_index
], currentBuf
.size
.extS
[plane_index
], 0);
2154 v4l2_buf
.m
.planes
[plane_index
].length
= currentBuf
.size
.extS
[plane_index
];
2155 ALOGV("(%s): MAPPING plane(%d): fd(%d) addr(%x), length(%d)",
2156 __FUNCTION__
, plane_index
, v4l2_buf
.m
.planes
[plane_index
].m
.fd
,
2157 (unsigned int)currentBuf
.virt
.extP
[plane_index
],
2158 v4l2_buf
.m
.planes
[plane_index
].length
);
2161 if (i
< currentNode
->buffers
) {
2164 #ifdef ENABLE_FRAME_SYNC
2165 /* add plane for metadata*/
2166 metaBuf
.size
.extS
[0] = 4*1024;
2167 allocCameraMemory(m_ionCameraClient
, &metaBuf
, 1, 1<<0);
2169 v4l2_buf
.length
+= targetStreamParms
->metaPlanes
;
2170 v4l2_buf
.m
.planes
[v4l2_buf
.length
-1].m
.fd
= metaBuf
.fd
.extFd
[0];
2171 v4l2_buf
.m
.planes
[v4l2_buf
.length
-1].length
= metaBuf
.size
.extS
[0];
2173 ALOGV("Qbuf metaBuf: fd(%d), length(%d) plane(%d)", metaBuf
.fd
.extFd
[0], metaBuf
.size
.extS
[0], v4l2_buf
.length
);
2175 if (exynos_v4l2_qbuf(currentNode
->fd
, &v4l2_buf
) < 0) {
2176 ALOGE("ERR(%s): stream id(%d) exynos_v4l2_qbuf() fail fd(%d)",
2177 __FUNCTION__
, stream_id
, currentNode
->fd
);
2179 ALOGV("DEBUG(%s): stream id(%d) exynos_v4l2_qbuf() success fd(%d)",
2180 __FUNCTION__
, stream_id
, currentNode
->fd
);
2181 targetStreamParms
->svcBufStatus
[i
] = REQUIRES_DQ_FROM_SVC
;
2184 targetStreamParms
->svcBufStatus
[i
] = ON_SERVICE
;
2187 targetStreamParms
->svcBuffers
[i
] = currentBuf
;
2188 targetStreamParms
->metaBuffers
[i
] = metaBuf
;
2189 targetStreamParms
->svcBufHandle
[i
] = registeringBuffers
[i
];
2192 ALOGV("DEBUG(%s): calling streamon stream id = %d", __FUNCTION__
, stream_id
);
2193 cam_int_streamon(targetStreamParms
->node
);
2194 ALOGV("DEBUG(%s): calling streamon END", __FUNCTION__
);
2195 currentNode
->status
= true;
2196 ALOGV("DEBUG(%s): END registerStreamBuffers", __FUNCTION__
);
2201 int ExynosCameraHWInterface2::releaseStream(uint32_t stream_id
)
2203 StreamThread
*targetStream
;
2204 status_t res
= NO_ERROR
;
2205 ALOGD("(%s): stream_id(%d)", __FUNCTION__
, stream_id
);
2206 bool releasingScpMain
= false;
2208 if (stream_id
== STREAM_ID_PREVIEW
) {
2209 targetStream
= (StreamThread
*)(m_streamThreads
[0].get());
2210 if (!targetStream
) {
2211 ALOGW("(%s): Stream Not Exists", __FUNCTION__
);
2214 targetStream
->m_numRegisteredStream
--;
2215 ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__
, targetStream
->m_numRegisteredStream
);
2216 releasingScpMain
= true;
2217 if (targetStream
->m_parameters
.needsIonMap
) {
2218 for (int i
= 0; i
< targetStream
->m_parameters
.numSvcBuffers
; i
++) {
2219 for (int j
= 0; j
< targetStream
->m_parameters
.planes
; j
++) {
2220 ion_unmap(targetStream
->m_parameters
.svcBuffers
[i
].virt
.extP
[j
],
2221 targetStream
->m_parameters
.svcBuffers
[i
].size
.extS
[j
]);
2222 ALOGV("(%s) ummap stream buffer[%d], plane(%d), fd %d vaddr %x", __FUNCTION__
, i
, j
,
2223 targetStream
->m_parameters
.svcBuffers
[i
].fd
.extFd
[j
], (unsigned int)(targetStream
->m_parameters
.svcBuffers
[i
].virt
.extP
[j
]));
2227 } else if (stream_id
== STREAM_ID_JPEG
) {
2228 if (m_resizeBuf
.size
.s
!= 0) {
2229 freeCameraMemory(&m_resizeBuf
, 1);
2231 memset(&m_subStreams
[stream_id
], 0, sizeof(substream_parameters_t
));
2233 targetStream
= (StreamThread
*)(m_streamThreads
[1].get());
2234 if (!targetStream
) {
2235 ALOGW("(%s): Stream Not Exists", __FUNCTION__
);
2239 if (targetStream
->detachSubStream(stream_id
) != NO_ERROR
) {
2240 ALOGE("(%s): substream detach failed. res(%d)", __FUNCTION__
, res
);
2243 ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__
, targetStream
->m_numRegisteredStream
);
2245 } else if (stream_id
== STREAM_ID_RECORD
) {
2246 memset(&m_subStreams
[stream_id
], 0, sizeof(substream_parameters_t
));
2248 targetStream
= (StreamThread
*)(m_streamThreads
[0].get());
2249 if (!targetStream
) {
2250 ALOGW("(%s): Stream Not Exists", __FUNCTION__
);
2254 if (targetStream
->detachSubStream(stream_id
) != NO_ERROR
) {
2255 ALOGE("(%s): substream detach failed. res(%d)", __FUNCTION__
, res
);
2259 if (targetStream
->m_numRegisteredStream
!= 0)
2261 } else if (stream_id
== STREAM_ID_PRVCB
) {
2262 if (m_previewCbBuf
.size
.s
!= 0) {
2263 freeCameraMemory(&m_previewCbBuf
, m_subStreams
[stream_id
].internalPlanes
);
2265 memset(&m_subStreams
[stream_id
], 0, sizeof(substream_parameters_t
));
2267 targetStream
= (StreamThread
*)(m_streamThreads
[0].get());
2268 if (!targetStream
) {
2269 ALOGW("(%s): Stream Not Exists", __FUNCTION__
);
2273 if (targetStream
->detachSubStream(stream_id
) != NO_ERROR
) {
2274 ALOGE("(%s): substream detach failed. res(%d)", __FUNCTION__
, res
);
2278 if (targetStream
->m_numRegisteredStream
!= 0)
2280 } else if (stream_id
== STREAM_ID_ZSL
) {
2281 targetStream
= (StreamThread
*)(m_streamThreads
[1].get());
2282 if (!targetStream
) {
2283 ALOGW("(%s): Stream Not Exists", __FUNCTION__
);
2287 targetStream
->m_numRegisteredStream
--;
2288 ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__
, targetStream
->m_numRegisteredStream
);
2289 if (targetStream
->m_parameters
.needsIonMap
) {
2290 for (int i
= 0; i
< targetStream
->m_parameters
.numSvcBuffers
; i
++) {
2291 for (int j
= 0; j
< targetStream
->m_parameters
.planes
; j
++) {
2292 ion_unmap(targetStream
->m_parameters
.svcBuffers
[i
].virt
.extP
[j
],
2293 targetStream
->m_parameters
.svcBuffers
[i
].size
.extS
[j
]);
2294 ALOGV("(%s) ummap stream buffer[%d], plane(%d), fd %d vaddr %x", __FUNCTION__
, i
, j
,
2295 targetStream
->m_parameters
.svcBuffers
[i
].fd
.extFd
[j
], (unsigned int)(targetStream
->m_parameters
.svcBuffers
[i
].virt
.extP
[j
]));
2300 ALOGE("ERR:(%s): wrong stream id (%d)", __FUNCTION__
, stream_id
);
2304 if (m_sensorThread
!= NULL
&& releasingScpMain
) {
2305 m_sensorThread
->release();
2306 ALOGD("(%s): START Waiting for (indirect) sensor thread termination", __FUNCTION__
);
2307 while (!m_sensorThread
->IsTerminated())
2308 usleep(SIG_WAITING_TICK
);
2309 ALOGD("(%s): END Waiting for (indirect) sensor thread termination", __FUNCTION__
);
2312 if (m_streamThreads
[1]->m_numRegisteredStream
== 0 && m_streamThreads
[1]->m_activated
) {
2313 ALOGV("(%s): deactivating stream thread 1 ", __FUNCTION__
);
2314 targetStream
= (StreamThread
*)(m_streamThreads
[1].get());
2315 targetStream
->m_releasing
= true;
2316 ALOGD("START stream thread release %d", __LINE__
);
2318 targetStream
->release();
2319 usleep(SIG_WAITING_TICK
);
2320 } while (targetStream
->m_releasing
);
2321 m_camera_info
.capture
.status
= false;
2322 ALOGD("END stream thread release %d", __LINE__
);
2325 if (releasingScpMain
|| (m_streamThreads
[0].get() != NULL
&& m_streamThreads
[0]->m_numRegisteredStream
== 0 && m_streamThreads
[0]->m_activated
)) {
2326 ALOGV("(%s): deactivating stream thread 0", __FUNCTION__
);
2327 targetStream
= (StreamThread
*)(m_streamThreads
[0].get());
2328 targetStream
->m_releasing
= true;
2329 ALOGD("(%s): START Waiting for (indirect) stream thread release - line(%d)", __FUNCTION__
, __LINE__
);
2331 targetStream
->release();
2332 usleep(SIG_WAITING_TICK
);
2333 } while (targetStream
->m_releasing
);
2334 ALOGD("(%s): END Waiting for (indirect) stream thread release - line(%d)", __FUNCTION__
, __LINE__
);
2335 targetStream
->SetSignal(SIGNAL_THREAD_TERMINATE
);
2337 if (targetStream
!= NULL
) {
2338 ALOGD("(%s): START Waiting for (indirect) stream thread termination", __FUNCTION__
);
2339 while (!targetStream
->IsTerminated())
2340 usleep(SIG_WAITING_TICK
);
2341 ALOGD("(%s): END Waiting for (indirect) stream thread termination", __FUNCTION__
);
2342 m_streamThreads
[0] = NULL
;
2344 if (m_camera_info
.capture
.status
== true) {
2345 m_scpForceSuspended
= true;
2347 m_isIspStarted
= false;
2349 ALOGV("(%s): END", __FUNCTION__
);
2353 int ExynosCameraHWInterface2::allocateReprocessStream(
2354 uint32_t /*width*/, uint32_t /*height*/, uint32_t /*format*/,
2355 const camera2_stream_in_ops_t
* /*reprocess_stream_ops*/,
2356 uint32_t* /*stream_id*/, uint32_t* /*consumer_usage*/, uint32_t* /*max_buffers*/)
2358 ALOGV("DEBUG(%s):", __FUNCTION__
);
2362 int ExynosCameraHWInterface2::allocateReprocessStreamFromStream(
2363 uint32_t output_stream_id
,
2364 const camera2_stream_in_ops_t
*reprocess_stream_ops
,
2366 uint32_t *stream_id
)
2368 ALOGD("(%s): output_stream_id(%d)", __FUNCTION__
, output_stream_id
);
2369 *stream_id
= STREAM_ID_JPEG_REPROCESS
;
2371 m_reprocessStreamId
= *stream_id
;
2372 m_reprocessOps
= reprocess_stream_ops
;
2373 m_reprocessOutputStreamId
= output_stream_id
;
2377 int ExynosCameraHWInterface2::releaseReprocessStream(uint32_t stream_id
)
2379 ALOGD("(%s): stream_id(%d)", __FUNCTION__
, stream_id
);
2380 if (stream_id
== STREAM_ID_JPEG_REPROCESS
) {
2381 m_reprocessStreamId
= 0;
2382 m_reprocessOps
= NULL
;
2383 m_reprocessOutputStreamId
= 0;
2389 int ExynosCameraHWInterface2::triggerAction(uint32_t trigger_id
, int ext1
, int ext2
)
2391 Mutex::Autolock
lock(m_afModeTriggerLock
);
2392 ALOGV("DEBUG(%s): id(%x), %d, %d", __FUNCTION__
, trigger_id
, ext1
, ext2
);
2394 switch (trigger_id
) {
2395 case CAMERA2_TRIGGER_AUTOFOCUS
:
2396 ALOGV("DEBUG(%s):TRIGGER_AUTOFOCUS id(%d)", __FUNCTION__
, ext1
);
2400 case CAMERA2_TRIGGER_CANCEL_AUTOFOCUS
:
2401 ALOGV("DEBUG(%s):CANCEL_AUTOFOCUS id(%d)", __FUNCTION__
, ext1
);
2404 case CAMERA2_TRIGGER_PRECAPTURE_METERING
:
2405 ALOGV("DEBUG(%s):CAMERA2_TRIGGER_PRECAPTURE_METERING id(%d)", __FUNCTION__
, ext1
);
2406 OnPrecaptureMeteringTriggerStart(ext1
);
2414 int ExynosCameraHWInterface2::setNotifyCallback(camera2_notify_callback notify_cb
, void *user
)
2416 ALOGV("DEBUG(%s): cb_addr(%x)", __FUNCTION__
, (unsigned int)notify_cb
);
2417 m_notifyCb
= notify_cb
;
2418 m_callbackCookie
= user
;
2422 int ExynosCameraHWInterface2::getMetadataVendorTagOps(vendor_tag_query_ops_t
**ops
)
2424 ALOGV("DEBUG(%s):", __FUNCTION__
);
2429 int ExynosCameraHWInterface2::dump(int /*fd*/)
2431 ALOGV("DEBUG(%s):", __FUNCTION__
);
2435 void ExynosCameraHWInterface2::m_getAlignedYUVSize(int colorFormat
, int w
, int h
, ExynosBuffer
*buf
)
2437 switch (colorFormat
) {
2439 case V4L2_PIX_FMT_RGB565
:
2440 case V4L2_PIX_FMT_YUYV
:
2441 case V4L2_PIX_FMT_UYVY
:
2442 case V4L2_PIX_FMT_VYUY
:
2443 case V4L2_PIX_FMT_YVYU
:
2444 buf
->size
.extS
[0] = FRAME_SIZE(V4L2_PIX_2_HAL_PIXEL_FORMAT(colorFormat
), w
, h
);
2445 buf
->size
.extS
[1] = 0;
2446 buf
->size
.extS
[2] = 0;
2449 case V4L2_PIX_FMT_NV12
:
2450 case V4L2_PIX_FMT_NV12T
:
2451 case V4L2_PIX_FMT_NV21
:
2452 buf
->size
.extS
[0] = ALIGN(w
, 16) * ALIGN(h
, 16);
2453 buf
->size
.extS
[1] = ALIGN(w
/2, 16) * ALIGN(h
/2, 16);
2454 buf
->size
.extS
[2] = 0;
2456 case V4L2_PIX_FMT_NV12M
:
2457 case V4L2_PIX_FMT_NV12MT_16X16
:
2458 case V4L2_PIX_FMT_NV21M
:
2459 buf
->size
.extS
[0] = ALIGN(w
, 16) * ALIGN(h
, 16);
2460 buf
->size
.extS
[1] = ALIGN(buf
->size
.extS
[0] / 2, 256);
2461 buf
->size
.extS
[2] = 0;
2463 case V4L2_PIX_FMT_NV16
:
2464 case V4L2_PIX_FMT_NV61
:
2465 buf
->size
.extS
[0] = ALIGN(w
, 16) * ALIGN(h
, 16);
2466 buf
->size
.extS
[1] = ALIGN(w
, 16) * ALIGN(h
, 16);
2467 buf
->size
.extS
[2] = 0;
2470 case V4L2_PIX_FMT_YUV420
:
2471 case V4L2_PIX_FMT_YVU420
:
2472 buf
->size
.extS
[0] = (w
* h
);
2473 buf
->size
.extS
[1] = (w
* h
) >> 2;
2474 buf
->size
.extS
[2] = (w
* h
) >> 2;
2476 case V4L2_PIX_FMT_YUV420M
:
2477 case V4L2_PIX_FMT_YVU420M
:
2478 buf
->size
.extS
[0] = ALIGN(w
, 32) * ALIGN(h
, 16);
2479 buf
->size
.extS
[1] = ALIGN(w
/2, 16) * ALIGN(h
/2, 8);
2480 buf
->size
.extS
[2] = ALIGN(w
/2, 16) * ALIGN(h
/2, 8);
2482 case V4L2_PIX_FMT_YUV422P
:
2483 buf
->size
.extS
[0] = ALIGN(w
, 16) * ALIGN(h
, 16);
2484 buf
->size
.extS
[1] = ALIGN(w
/2, 16) * ALIGN(h
/2, 8);
2485 buf
->size
.extS
[2] = ALIGN(w
/2, 16) * ALIGN(h
/2, 8);
2488 ALOGE("ERR(%s):unmatched colorFormat(%d)", __FUNCTION__
, colorFormat
);
2494 bool ExynosCameraHWInterface2::m_getRatioSize(int src_w
, int src_h
,
2495 int dst_w
, int dst_h
,
2496 int *crop_x
, int *crop_y
,
2497 int *crop_w
, int *crop_h
,
2504 || src_h
!= dst_h
) {
2505 float src_ratio
= 1.0f
;
2506 float dst_ratio
= 1.0f
;
2509 src_ratio
= (float)src_w
/ (float)src_h
;
2512 dst_ratio
= (float)dst_w
/ (float)dst_h
;
2514 if (dst_w
* dst_h
< src_w
* src_h
) {
2515 if (dst_ratio
<= src_ratio
) {
2517 *crop_w
= src_h
* dst_ratio
;
2522 *crop_h
= src_w
/ dst_ratio
;
2525 if (dst_ratio
<= src_ratio
) {
2527 *crop_w
= src_h
* dst_ratio
;
2532 *crop_h
= src_w
/ dst_ratio
;
2538 float zoomLevel
= ((float)zoom
+ 10.0) / 10.0;
2539 *crop_w
= (int)((float)*crop_w
/ zoomLevel
);
2540 *crop_h
= (int)((float)*crop_h
/ zoomLevel
);
2543 #define CAMERA_CROP_WIDTH_RESTRAIN_NUM (0x2)
2544 unsigned int w_align
= (*crop_w
& (CAMERA_CROP_WIDTH_RESTRAIN_NUM
- 1));
2546 if ( (CAMERA_CROP_WIDTH_RESTRAIN_NUM
>> 1) <= w_align
2547 && (int)(*crop_w
+ (CAMERA_CROP_WIDTH_RESTRAIN_NUM
- w_align
)) <= dst_w
) {
2548 *crop_w
+= (CAMERA_CROP_WIDTH_RESTRAIN_NUM
- w_align
);
2554 #define CAMERA_CROP_HEIGHT_RESTRAIN_NUM (0x2)
2555 unsigned int h_align
= (*crop_h
& (CAMERA_CROP_HEIGHT_RESTRAIN_NUM
- 1));
2557 if ( (CAMERA_CROP_HEIGHT_RESTRAIN_NUM
>> 1) <= h_align
2558 && (int)(*crop_h
+ (CAMERA_CROP_HEIGHT_RESTRAIN_NUM
- h_align
)) <= dst_h
) {
2559 *crop_h
+= (CAMERA_CROP_HEIGHT_RESTRAIN_NUM
- h_align
);
2565 *crop_x
= (src_w
- *crop_w
) >> 1;
2566 *crop_y
= (src_h
- *crop_h
) >> 1;
2568 if (*crop_x
& (CAMERA_CROP_WIDTH_RESTRAIN_NUM
>> 1))
2571 if (*crop_y
& (CAMERA_CROP_HEIGHT_RESTRAIN_NUM
>> 1))
2577 BayerBufManager::BayerBufManager()
2579 ALOGV("DEBUG(%s): ", __FUNCTION__
);
2580 for (int i
= 0; i
< NUM_BAYER_BUFFERS
; i
++) {
2581 entries
[i
].status
= BAYER_ON_HAL_EMPTY
;
2582 entries
[i
].reqFrameCnt
= 0;
2584 sensorEnqueueHead
= 0;
2585 sensorDequeueHead
= 0;
2591 numOnHalEmpty
= NUM_BAYER_BUFFERS
;
2594 BayerBufManager::~BayerBufManager()
2596 ALOGV("%s", __FUNCTION__
);
2599 int BayerBufManager::GetIndexForSensorEnqueue()
2602 if (numOnHalEmpty
== 0)
2605 ret
= sensorEnqueueHead
;
2606 ALOGV("DEBUG(%s): returning (%d)", __FUNCTION__
, ret
);
2610 int BayerBufManager::MarkSensorEnqueue(int index
)
2612 ALOGV("DEBUG(%s) : BayerIndex[%d] ", __FUNCTION__
, index
);
2615 if (index
!= sensorEnqueueHead
) {
2616 ALOGV("DEBUG(%s) : Abnormal BayerIndex[%d] - expected[%d]", __FUNCTION__
, index
, sensorEnqueueHead
);
2619 if (entries
[index
].status
!= BAYER_ON_HAL_EMPTY
) {
2620 ALOGV("DEBUG(%s) : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__
,
2621 index
, entries
[index
].status
, BAYER_ON_HAL_EMPTY
);
2625 entries
[index
].status
= BAYER_ON_SENSOR
;
2626 entries
[index
].reqFrameCnt
= 0;
2629 sensorEnqueueHead
= GetNextIndex(index
);
2630 ALOGV("DEBUG(%s) END: HAL-e(%d) HAL-f(%d) Sensor(%d) ISP(%d) ",
2631 __FUNCTION__
, numOnHalEmpty
, numOnHalFilled
, numOnSensor
, numOnIsp
);
2635 int BayerBufManager::MarkSensorDequeue(int index
, int reqFrameCnt
, nsecs_t
* /*timeStamp*/)
2637 ALOGV("DEBUG(%s) : BayerIndex[%d] reqFrameCnt(%d)", __FUNCTION__
, index
, reqFrameCnt
);
2639 if (entries
[index
].status
!= BAYER_ON_SENSOR
) {
2640 ALOGE("DEBUG(%s) : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__
,
2641 index
, entries
[index
].status
, BAYER_ON_SENSOR
);
2645 entries
[index
].status
= BAYER_ON_HAL_FILLED
;
2652 int BayerBufManager::GetIndexForIspEnqueue(int *reqFrameCnt
)
2655 if (numOnHalFilled
== 0)
2658 *reqFrameCnt
= entries
[ispEnqueueHead
].reqFrameCnt
;
2659 ret
= ispEnqueueHead
;
2661 ALOGV("DEBUG(%s): returning BayerIndex[%d]", __FUNCTION__
, ret
);
2665 int BayerBufManager::GetIndexForIspDequeue(int *reqFrameCnt
)
2671 *reqFrameCnt
= entries
[ispDequeueHead
].reqFrameCnt
;
2672 ret
= ispDequeueHead
;
2674 ALOGV("DEBUG(%s): returning BayerIndex[%d]", __FUNCTION__
, ret
);
2678 int BayerBufManager::MarkIspEnqueue(int index
)
2680 ALOGV("DEBUG(%s) : BayerIndex[%d] ", __FUNCTION__
, index
);
2683 if (index
!= ispEnqueueHead
) {
2684 ALOGV("DEBUG(%s) : Abnormal BayerIndex[%d] - expected[%d]", __FUNCTION__
, index
, ispEnqueueHead
);
2687 if (entries
[index
].status
!= BAYER_ON_HAL_FILLED
) {
2688 ALOGV("DEBUG(%s) : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__
,
2689 index
, entries
[index
].status
, BAYER_ON_HAL_FILLED
);
2693 entries
[index
].status
= BAYER_ON_ISP
;
2696 ispEnqueueHead
= GetNextIndex(index
);
2697 ALOGV("DEBUG(%s) END: HAL-e(%d) HAL-f(%d) Sensor(%d) ISP(%d) ",
2698 __FUNCTION__
, numOnHalEmpty
, numOnHalFilled
, numOnSensor
, numOnIsp
);
2702 int BayerBufManager::MarkIspDequeue(int index
)
2704 ALOGV("DEBUG(%s) : BayerIndex[%d]", __FUNCTION__
, index
);
2707 if (index
!= ispDequeueHead
) {
2708 ALOGV("DEBUG(%s) : Abnormal BayerIndex[%d] - expected[%d]", __FUNCTION__
, index
, ispDequeueHead
);
2711 if (entries
[index
].status
!= BAYER_ON_ISP
) {
2712 ALOGV("DEBUG(%s) : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__
,
2713 index
, entries
[index
].status
, BAYER_ON_ISP
);
2717 entries
[index
].status
= BAYER_ON_HAL_EMPTY
;
2718 entries
[index
].reqFrameCnt
= 0;
2721 ispDequeueHead
= GetNextIndex(index
);
2722 ALOGV("DEBUG(%s) END: HAL-e(%d) HAL-f(%d) Sensor(%d) ISP(%d) ",
2723 __FUNCTION__
, numOnHalEmpty
, numOnHalFilled
, numOnSensor
, numOnIsp
);
2727 int BayerBufManager::GetNumOnSensor()
2732 int BayerBufManager::GetNumOnHalFilled()
2734 return numOnHalFilled
;
2737 int BayerBufManager::GetNumOnIsp()
2742 int BayerBufManager::GetNextIndex(int index
)
2745 if (index
>= NUM_BAYER_BUFFERS
)
2751 void ExynosCameraHWInterface2::m_mainThreadFunc(SignalDrivenThread
* self
)
2753 camera_metadata_t
*currentRequest
= NULL
;
2754 camera_metadata_t
*currentFrame
= NULL
;
2755 size_t numEntries
= 0;
2756 size_t frameSize
= 0;
2757 camera_metadata_t
* preparedFrame
= NULL
;
2758 camera_metadata_t
*deregisteredRequest
= NULL
;
2759 uint32_t currentSignal
= self
->GetProcessingSignal();
2760 MainThread
* selfThread
= ((MainThread
*)self
);
2765 uint32_t afRegion
[4];
2767 ALOGV("DEBUG(%s): m_mainThreadFunc (%x)", __FUNCTION__
, currentSignal
);
2769 if (currentSignal
& SIGNAL_THREAD_RELEASE
) {
2770 ALOGV("DEBUG(%s): processing SIGNAL_THREAD_RELEASE", __FUNCTION__
);
2772 ALOGV("DEBUG(%s): processing SIGNAL_THREAD_RELEASE DONE", __FUNCTION__
);
2773 selfThread
->SetSignal(SIGNAL_THREAD_TERMINATE
);
2777 if (currentSignal
& SIGNAL_MAIN_REQ_Q_NOT_EMPTY
) {
2778 ALOGV("DEBUG(%s): MainThread processing SIGNAL_MAIN_REQ_Q_NOT_EMPTY", __FUNCTION__
);
2779 if (m_requestManager
->IsRequestQueueFull()==false) {
2780 Mutex::Autolock
lock(m_afModeTriggerLock
);
2781 m_requestQueueOps
->dequeue_request(m_requestQueueOps
, ¤tRequest
);
2782 if (NULL
== currentRequest
) {
2783 ALOGD("DEBUG(%s)(0x%x): No more service requests left in the queue ", __FUNCTION__
, currentSignal
);
2784 m_isRequestQueueNull
= true;
2785 if (m_requestManager
->IsVdisEnable())
2786 m_vdisBubbleCnt
= 1;
2789 m_requestManager
->RegisterRequest(currentRequest
, &afMode
, afRegion
);
2791 SetAfMode((enum aa_afmode
)afMode
);
2792 SetAfRegion(afRegion
);
2794 m_numOfRemainingReqInSvc
= m_requestQueueOps
->request_count(m_requestQueueOps
);
2795 ALOGV("DEBUG(%s): remaining req cnt (%d)", __FUNCTION__
, m_numOfRemainingReqInSvc
);
2796 if (m_requestManager
->IsRequestQueueFull()==false)
2797 selfThread
->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY
); // dequeue repeatedly
2799 m_sensorThread
->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING
);
2803 m_isRequestQueuePending
= true;
2807 if (currentSignal
& SIGNAL_MAIN_STREAM_OUTPUT_DONE
) {
2808 ALOGV("DEBUG(%s): MainThread processing SIGNAL_MAIN_STREAM_OUTPUT_DONE", __FUNCTION__
);
2810 ret
= m_requestManager
->PrepareFrame(&numEntries
, &frameSize
, &preparedFrame
, GetAfStateForService());
2812 CAM_LOGE("ERR(%s): PrepareFrame ret = %d", __FUNCTION__
, ret
);
2814 m_requestManager
->DeregisterRequest(&deregisteredRequest
);
2816 ret
= m_requestQueueOps
->free_request(m_requestQueueOps
, deregisteredRequest
);
2818 CAM_LOGE("ERR(%s): free_request ret = %d", __FUNCTION__
, ret
);
2820 ret
= m_frameQueueOps
->dequeue_frame(m_frameQueueOps
, numEntries
, frameSize
, ¤tFrame
);
2822 CAM_LOGE("ERR(%s): dequeue_frame ret = %d", __FUNCTION__
, ret
);
2824 if (currentFrame
==NULL
) {
2825 ALOGV("DBG(%s): frame dequeue returned NULL",__FUNCTION__
);
2828 ALOGV("DEBUG(%s): frame dequeue done. numEntries(%d) frameSize(%d)",__FUNCTION__
, numEntries
, frameSize
);
2830 res
= append_camera_metadata(currentFrame
, preparedFrame
);
2832 ALOGV("DEBUG(%s): frame metadata append success",__FUNCTION__
);
2833 m_frameQueueOps
->enqueue_frame(m_frameQueueOps
, currentFrame
);
2836 ALOGE("ERR(%s): frame metadata append fail (%d)",__FUNCTION__
, res
);
2839 if (!m_isRequestQueueNull
) {
2840 selfThread
->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY
);
2843 if (getInProgressCount()>0) {
2844 ALOGV("DEBUG(%s): STREAM_OUTPUT_DONE and signalling REQ_PROCESSING",__FUNCTION__
);
2845 m_sensorThread
->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING
);
2848 ALOGV("DEBUG(%s): MainThread Exit", __FUNCTION__
);
2852 void ExynosCameraHWInterface2::DumpInfoWithShot(struct camera2_shot_ext
* shot_ext
)
2854 ALOGD("#### common Section");
2855 ALOGD("#### magic(%x) ",
2856 shot_ext
->shot
.magicNumber
);
2857 ALOGD("#### ctl Section");
2858 ALOGD("#### meta(%d) aper(%f) exp(%lld) duration(%lld) ISO(%d) AWB(%d)",
2859 shot_ext
->shot
.ctl
.request
.metadataMode
,
2860 shot_ext
->shot
.ctl
.lens
.aperture
,
2861 shot_ext
->shot
.ctl
.sensor
.exposureTime
,
2862 shot_ext
->shot
.ctl
.sensor
.frameDuration
,
2863 shot_ext
->shot
.ctl
.sensor
.sensitivity
,
2864 shot_ext
->shot
.ctl
.aa
.awbMode
);
2866 ALOGD("#### OutputStream Sensor(%d) SCP(%d) SCC(%d) streams(%x)",
2867 shot_ext
->request_sensor
, shot_ext
->request_scp
, shot_ext
->request_scc
,
2868 shot_ext
->shot
.ctl
.request
.outputStreams
[0]);
2870 ALOGD("#### DM Section");
2871 ALOGD("#### meta(%d) aper(%f) exp(%lld) duration(%lld) ISO(%d) timestamp(%lld) AWB(%d) cnt(%d)",
2872 shot_ext
->shot
.dm
.request
.metadataMode
,
2873 shot_ext
->shot
.dm
.lens
.aperture
,
2874 shot_ext
->shot
.dm
.sensor
.exposureTime
,
2875 shot_ext
->shot
.dm
.sensor
.frameDuration
,
2876 shot_ext
->shot
.dm
.sensor
.sensitivity
,
2877 shot_ext
->shot
.dm
.sensor
.timeStamp
,
2878 shot_ext
->shot
.dm
.aa
.awbMode
,
2879 shot_ext
->shot
.dm
.request
.frameCount
);
2882 void ExynosCameraHWInterface2::m_preCaptureSetter(struct camera2_shot_ext
* shot_ext
)
2885 switch (m_ctlInfo
.flash
.m_flashCnt
) {
2886 case IS_FLASH_STATE_ON
:
2887 ALOGV("(%s): [Flash] Flash ON for Capture (%d)", __FUNCTION__
, shot_ext
->shot
.ctl
.request
.frameCount
);
2889 if (m_ctlInfo
.flash
.m_precaptureTriggerId
> 0) {
2890 if (m_ctlInfo
.flash
.m_flashTimeOut
== 0) {
2891 if (m_ctlInfo
.flash
.i_flashMode
== AA_AEMODE_ON_ALWAYS_FLASH
) {
2892 shot_ext
->shot
.ctl
.aa
.aeflashMode
= AA_FLASHMODE_ON_ALWAYS
;
2893 m_ctlInfo
.flash
.m_flashTimeOut
= 5;
2895 shot_ext
->shot
.ctl
.aa
.aeflashMode
= AA_FLASHMODE_ON
;
2896 m_ctlInfo
.flash
.m_flashCnt
= IS_FLASH_STATE_ON_WAIT
;
2898 m_ctlInfo
.flash
.m_flashTimeOut
--;
2901 if (m_ctlInfo
.flash
.i_flashMode
== AA_AEMODE_ON_ALWAYS_FLASH
) {
2902 shot_ext
->shot
.ctl
.aa
.aeflashMode
= AA_FLASHMODE_ON_ALWAYS
;
2903 m_ctlInfo
.flash
.m_flashTimeOut
= 5;
2905 shot_ext
->shot
.ctl
.aa
.aeflashMode
= AA_FLASHMODE_ON
;
2906 m_ctlInfo
.flash
.m_flashCnt
= IS_FLASH_STATE_ON_WAIT
;
2909 case IS_FLASH_STATE_ON_WAIT
:
2911 case IS_FLASH_STATE_ON_DONE
:
2912 if (!m_ctlInfo
.flash
.m_afFlashDoneFlg
)
2913 // auto transition at pre-capture trigger
2914 m_ctlInfo
.flash
.m_flashCnt
= IS_FLASH_STATE_AUTO_AE_AWB_LOCK
;
2916 case IS_FLASH_STATE_AUTO_AE_AWB_LOCK
:
2917 ALOGV("(%s): [Flash] IS_FLASH_AF_AUTO_AE_AWB_LOCK (%d)", __FUNCTION__
, shot_ext
->shot
.ctl
.request
.frameCount
);
2918 shot_ext
->shot
.ctl
.aa
.aeflashMode
= AA_FLASHMODE_AUTO
;
2919 //shot_ext->shot.ctl.aa.aeMode = AA_AEMODE_LOCKED;
2920 shot_ext
->shot
.ctl
.aa
.awbMode
= AA_AWBMODE_LOCKED
;
2921 m_ctlInfo
.flash
.m_flashCnt
= IS_FLASH_STATE_AE_AWB_LOCK_WAIT
;
2923 case IS_FLASH_STATE_AE_AWB_LOCK_WAIT
:
2924 case IS_FLASH_STATE_AUTO_WAIT
:
2925 shot_ext
->shot
.ctl
.aa
.aeMode
=(enum aa_aemode
)0;
2926 shot_ext
->shot
.ctl
.aa
.awbMode
= (enum aa_awbmode
)0;
2928 case IS_FLASH_STATE_AUTO_DONE
:
2929 ALOGV("(%s): [Flash] IS_FLASH_AF_AUTO DONE (%d)", __FUNCTION__
, shot_ext
->shot
.ctl
.request
.frameCount
);
2930 shot_ext
->shot
.ctl
.aa
.aeflashMode
= AA_FLASHMODE_OFF
;
2932 case IS_FLASH_STATE_AUTO_OFF
:
2933 ALOGV("(%s): [Flash] IS_FLASH_AF_AUTO Clear (%d)", __FUNCTION__
, shot_ext
->shot
.ctl
.request
.frameCount
);
2934 shot_ext
->shot
.ctl
.aa
.aeflashMode
= AA_FLASHMODE_OFF
;
2935 m_ctlInfo
.flash
.m_flashEnableFlg
= false;
2937 case IS_FLASH_STATE_CAPTURE
:
2938 ALOGV("(%s): [Flash] IS_FLASH_CAPTURE (%d)", __FUNCTION__
, shot_ext
->shot
.ctl
.request
.frameCount
);
2939 m_ctlInfo
.flash
.m_flashTimeOut
= FLASH_STABLE_WAIT_TIMEOUT
;
2940 shot_ext
->shot
.ctl
.aa
.aeflashMode
= AA_FLASHMODE_CAPTURE
;
2941 shot_ext
->request_scc
= 0;
2942 shot_ext
->request_scp
= 0;
2943 m_ctlInfo
.flash
.m_flashCnt
= IS_FLASH_STATE_CAPTURE_WAIT
; // auto transition
2945 case IS_FLASH_STATE_CAPTURE_WAIT
:
2946 shot_ext
->request_scc
= 0;
2947 shot_ext
->request_scp
= 0;
2949 case IS_FLASH_STATE_CAPTURE_JPEG
:
2950 ALOGV("(%s): [Flash] Flash Capture (%d)!!!!!", __FUNCTION__
, (FLASH_STABLE_WAIT_TIMEOUT
-m_ctlInfo
.flash
.m_flashTimeOut
));
2951 shot_ext
->request_scc
= 1;
2952 shot_ext
->request_scp
= 1;
2953 m_ctlInfo
.flash
.m_flashCnt
= IS_FLASH_STATE_CAPTURE_END
; // auto transition
2955 case IS_FLASH_STATE_CAPTURE_END
:
2956 ALOGV("(%s): [Flash] Flash Capture END (%d)", __FUNCTION__
, shot_ext
->shot
.ctl
.request
.frameCount
);
2957 shot_ext
->shot
.ctl
.aa
.aeflashMode
= AA_FLASHMODE_OFF
;
2958 shot_ext
->request_scc
= 0;
2959 shot_ext
->request_scp
= 0;
2960 m_ctlInfo
.flash
.m_flashEnableFlg
= false;
2961 m_ctlInfo
.flash
.m_flashCnt
= 0;
2962 m_ctlInfo
.flash
.m_afFlashDoneFlg
= false;
2964 case IS_FLASH_STATE_NONE
:
2967 ALOGE("(%s): [Flash] flash state error!! (%d)", __FUNCTION__
, m_ctlInfo
.flash
.m_flashCnt
);
2971 void ExynosCameraHWInterface2::m_preCaptureListenerSensor(struct camera2_shot_ext
* shot_ext
)
2974 switch (m_ctlInfo
.flash
.m_flashCnt
) {
2975 case IS_FLASH_STATE_AUTO_WAIT
:
2976 if (m_ctlInfo
.flash
.m_flashDecisionResult
) {
2977 if (shot_ext
->shot
.dm
.flash
.flashMode
== CAM2_FLASH_MODE_OFF
) {
2978 m_ctlInfo
.flash
.m_flashCnt
= IS_FLASH_STATE_AUTO_DONE
;
2979 ALOGV("(%s): [Flash] Lis : AUTO -> OFF (%d)", __FUNCTION__
, shot_ext
->shot
.dm
.flash
.flashMode
);
2981 ALOGV("(%s): [Flash] Waiting : AUTO -> OFF", __FUNCTION__
);
2984 //If flash isn't activated at flash auto mode, skip flash auto control
2985 m_ctlInfo
.flash
.m_flashCnt
= IS_FLASH_STATE_AUTO_DONE
;
2986 ALOGV("(%s): [Flash] Skip : AUTO -> OFF", __FUNCTION__
);
2992 void ExynosCameraHWInterface2::m_preCaptureListenerISP(struct camera2_shot_ext
* shot_ext
)
2995 switch (m_ctlInfo
.flash
.m_flashCnt
) {
2996 case IS_FLASH_STATE_ON_WAIT
:
2997 if (shot_ext
->shot
.dm
.flash
.decision
> 0) {
2998 // store decision result to skip capture sequenece
2999 ALOGV("(%s): [Flash] IS_FLASH_ON, decision - %d", __FUNCTION__
, shot_ext
->shot
.dm
.flash
.decision
);
3000 if (shot_ext
->shot
.dm
.flash
.decision
== 2)
3001 m_ctlInfo
.flash
.m_flashDecisionResult
= false;
3003 m_ctlInfo
.flash
.m_flashDecisionResult
= true;
3004 m_ctlInfo
.flash
.m_flashCnt
= IS_FLASH_STATE_ON_DONE
;
3006 if (m_ctlInfo
.flash
.m_flashTimeOut
== 0) {
3007 ALOGV("(%s): [Flash] Timeout IS_FLASH_ON, decision is false setting", __FUNCTION__
);
3008 m_ctlInfo
.flash
.m_flashCnt
= IS_FLASH_STATE_ON_DONE
;
3009 m_ctlInfo
.flash
.m_flashDecisionResult
= false;
3011 m_ctlInfo
.flash
.m_flashTimeOut
--;
3015 case IS_FLASH_STATE_AE_AWB_LOCK_WAIT
:
3016 if (shot_ext
->shot
.dm
.aa
.awbMode
== AA_AWBMODE_LOCKED
) {
3017 ALOGV("(%s): [Flash] FLASH_AUTO_AE_AWB_LOCK_WAIT - %d", __FUNCTION__
, shot_ext
->shot
.dm
.aa
.awbMode
);
3018 m_ctlInfo
.flash
.m_flashCnt
= IS_FLASH_STATE_AUTO_WAIT
;
3020 ALOGV("(%s): [Flash] Waiting : AA_AWBMODE_LOCKED", __FUNCTION__
);
3023 case IS_FLASH_STATE_CAPTURE_WAIT
:
3024 if (m_ctlInfo
.flash
.m_flashDecisionResult
) {
3025 if (shot_ext
->shot
.dm
.flash
.firingStable
) {
3026 m_ctlInfo
.flash
.m_flashCnt
= IS_FLASH_STATE_CAPTURE_JPEG
;
3028 if (m_ctlInfo
.flash
.m_flashTimeOut
== 0) {
3029 ALOGE("(%s): [Flash] Wait firingStable time-out!!", __FUNCTION__
);
3030 m_ctlInfo
.flash
.m_flashCnt
= IS_FLASH_STATE_CAPTURE_JPEG
;
3032 ALOGV("(%s): [Flash] Wait firingStable - %d", __FUNCTION__
, m_ctlInfo
.flash
.m_flashTimeOut
);
3033 m_ctlInfo
.flash
.m_flashTimeOut
--;
3037 m_ctlInfo
.flash
.m_flashCnt
= IS_FLASH_STATE_CAPTURE_JPEG
;
3043 void ExynosCameraHWInterface2::m_preCaptureAeState(struct camera2_shot_ext
* shot_ext
)
3045 switch (m_ctlInfo
.flash
.i_flashMode
) {
3047 // At flash off mode, capture can be done as zsl capture
3048 shot_ext
->shot
.dm
.aa
.aeState
= AE_STATE_CONVERGED
;
3050 case AA_AEMODE_ON_AUTO_FLASH
:
3051 // At flash auto mode, main flash have to be done if pre-flash was done.
3052 if (m_ctlInfo
.flash
.m_flashDecisionResult
&& m_ctlInfo
.flash
.m_afFlashDoneFlg
)
3053 shot_ext
->shot
.dm
.aa
.aeState
= AE_STATE_FLASH_REQUIRED
;
3060 void ExynosCameraHWInterface2::m_updateAfRegion(struct camera2_shot_ext
* shot_ext
)
3062 shot_ext
->shot
.ctl
.aa
.afRegions
[0] = currentAfRegion
[0];
3063 shot_ext
->shot
.ctl
.aa
.afRegions
[1] = currentAfRegion
[1];
3064 shot_ext
->shot
.ctl
.aa
.afRegions
[2] = currentAfRegion
[2];
3065 shot_ext
->shot
.ctl
.aa
.afRegions
[3] = currentAfRegion
[3];
3068 void ExynosCameraHWInterface2::SetAfRegion(uint32_t * afRegion
)
3070 currentAfRegion
[0] = afRegion
[0];
3071 currentAfRegion
[1] = afRegion
[1];
3072 currentAfRegion
[2] = afRegion
[2];
3073 currentAfRegion
[3] = afRegion
[3];
3076 void ExynosCameraHWInterface2::m_afTrigger(struct camera2_shot_ext
* shot_ext
, int mode
)
3078 if (m_afState
== HAL_AFSTATE_SCANNING
) {
3079 ALOGD("(%s): restarting trigger ", __FUNCTION__
);
3081 if (m_afState
!= HAL_AFSTATE_NEEDS_COMMAND
)
3082 ALOGD("(%s): wrong trigger state %d", __FUNCTION__
, m_afState
);
3084 m_afState
= HAL_AFSTATE_STARTED
;
3086 ALOGD("### AF Triggering with mode (%d) (%d)", m_afMode
, m_afState
);
3087 shot_ext
->shot
.ctl
.aa
.afTrigger
= 1;
3088 shot_ext
->shot
.ctl
.aa
.afMode
= m_afMode
;
3089 m_IsAfTriggerRequired
= false;
3092 void ExynosCameraHWInterface2::m_sensorThreadFunc(SignalDrivenThread
* self
)
3094 uint32_t currentSignal
= self
->GetProcessingSignal();
3095 SensorThread
* selfThread
= ((SensorThread
*)self
);
3100 int bayersOnSensor
= 0, bayersOnIsp
= 0;
3102 bool isCapture
= false;
3103 ALOGV("DEBUG(%s): m_sensorThreadFunc (%x)", __FUNCTION__
, currentSignal
);
3105 if (currentSignal
& SIGNAL_THREAD_RELEASE
) {
3106 CAM_LOGD("(%s): ENTER processing SIGNAL_THREAD_RELEASE", __FUNCTION__
);
3108 ALOGV("(%s): calling sensor streamoff", __FUNCTION__
);
3109 cam_int_streamoff(&(m_camera_info
.sensor
));
3110 ALOGV("(%s): calling sensor streamoff done", __FUNCTION__
);
3112 m_camera_info
.sensor
.buffers
= 0;
3113 ALOGV("DEBUG(%s): sensor calling reqbuf 0 ", __FUNCTION__
);
3114 cam_int_reqbufs(&(m_camera_info
.sensor
));
3115 ALOGV("DEBUG(%s): sensor calling reqbuf 0 done", __FUNCTION__
);
3116 m_camera_info
.sensor
.status
= false;
3118 ALOGV("(%s): calling ISP streamoff", __FUNCTION__
);
3119 isp_int_streamoff(&(m_camera_info
.isp
));
3120 ALOGV("(%s): calling ISP streamoff done", __FUNCTION__
);
3122 m_camera_info
.isp
.buffers
= 0;
3123 ALOGV("DEBUG(%s): isp calling reqbuf 0 ", __FUNCTION__
);
3124 cam_int_reqbufs(&(m_camera_info
.isp
));
3125 ALOGV("DEBUG(%s): isp calling reqbuf 0 done", __FUNCTION__
);
3127 exynos_v4l2_s_ctrl(m_camera_info
.sensor
.fd
, V4L2_CID_IS_S_STREAM
, IS_DISABLE_STREAM
);
3129 m_requestManager
->releaseSensorQ();
3130 m_requestManager
->ResetEntry();
3131 ALOGV("(%s): EXIT processing SIGNAL_THREAD_RELEASE", __FUNCTION__
);
3132 selfThread
->SetSignal(SIGNAL_THREAD_TERMINATE
);
3136 if (currentSignal
& SIGNAL_SENSOR_START_REQ_PROCESSING
)
3138 ALOGV("DEBUG(%s): SensorThread processing SIGNAL_SENSOR_START_REQ_PROCESSING", __FUNCTION__
);
3139 int targetStreamIndex
= 0, i
=0;
3140 int matchedFrameCnt
= -1, processingReqIndex
;
3141 struct camera2_shot_ext
*shot_ext
;
3142 struct camera2_shot_ext
*shot_ext_capture
;
3143 bool triggered
= false;
3145 /* dqbuf from sensor */
3146 ALOGV("Sensor DQbuf start");
3147 index
= cam_int_dqbuf(&(m_camera_info
.sensor
));
3148 m_requestManager
->pushSensorQ(index
);
3149 ALOGV("Sensor DQbuf done(%d)", index
);
3150 shot_ext
= (struct camera2_shot_ext
*)(m_camera_info
.sensor
.buffer
[index
].virt
.extP
[1]);
3152 if (m_nightCaptureCnt
!= 0) {
3153 matchedFrameCnt
= m_nightCaptureFrameCnt
;
3154 } else if (m_ctlInfo
.flash
.m_flashCnt
>= IS_FLASH_STATE_CAPTURE
) {
3155 matchedFrameCnt
= m_ctlInfo
.flash
.m_flashFrameCount
;
3156 ALOGV("Skip frame, request is fixed at %d", matchedFrameCnt
);
3158 matchedFrameCnt
= m_requestManager
->FindFrameCnt(shot_ext
);
3161 if (matchedFrameCnt
== -1 && m_vdisBubbleCnt
> 0) {
3162 matchedFrameCnt
= m_vdisDupFrame
;
3165 if (matchedFrameCnt
!= -1) {
3166 if (m_vdisBubbleCnt
== 0 || m_vdisDupFrame
!= matchedFrameCnt
) {
3167 frameTime
= systemTime();
3168 m_requestManager
->RegisterTimestamp(matchedFrameCnt
, &frameTime
);
3169 m_requestManager
->UpdateIspParameters(shot_ext
, matchedFrameCnt
, &m_ctlInfo
);
3171 ALOGV("bubble for vids: m_vdisBubbleCnt %d, matchedFrameCnt %d", m_vdisDupFrame
, matchedFrameCnt
);
3174 // face af mode setting in case of face priority scene mode
3175 if (m_ctlInfo
.scene
.prevSceneMode
!= shot_ext
->shot
.ctl
.aa
.sceneMode
) {
3176 ALOGV("(%s): Scene mode changed (%d)", __FUNCTION__
, shot_ext
->shot
.ctl
.aa
.sceneMode
);
3177 m_ctlInfo
.scene
.prevSceneMode
= shot_ext
->shot
.ctl
.aa
.sceneMode
;
3180 m_zoomRatio
= (float)m_camera2
->getSensorW() / (float)shot_ext
->shot
.ctl
.scaler
.cropRegion
[2];
3181 float zoomLeft
, zoomTop
, zoomWidth
, zoomHeight
;
3182 int crop_x
= 0, crop_y
= 0, crop_w
= 0, crop_h
= 0;
3184 m_getRatioSize(m_camera2
->getSensorW(), m_camera2
->getSensorH(),
3185 m_streamThreads
[0]->m_parameters
.width
, m_streamThreads
[0]->m_parameters
.height
,
3190 if (m_streamThreads
[0]->m_parameters
.width
>= m_streamThreads
[0]->m_parameters
.height
) {
3191 zoomWidth
= m_camera2
->getSensorW() / m_zoomRatio
;
3192 zoomHeight
= zoomWidth
*
3193 m_streamThreads
[0]->m_parameters
.height
/ m_streamThreads
[0]->m_parameters
.width
;
3195 zoomHeight
= m_camera2
->getSensorH() / m_zoomRatio
;
3196 zoomWidth
= zoomHeight
*
3197 m_streamThreads
[0]->m_parameters
.width
/ m_streamThreads
[0]->m_parameters
.height
;
3199 zoomLeft
= (crop_w
- zoomWidth
) / 2;
3200 zoomTop
= (crop_h
- zoomHeight
) / 2;
3202 int32_t new_cropRegion
[3] = { (int32_t)zoomLeft
, (int32_t)zoomTop
, (int32_t)zoomWidth
};
3204 int cropCompensation
= (new_cropRegion
[0] * 2 + new_cropRegion
[2]) - ALIGN(crop_w
, 4);
3205 if (cropCompensation
)
3206 new_cropRegion
[2] -= cropCompensation
;
3208 shot_ext
->shot
.ctl
.scaler
.cropRegion
[0] = new_cropRegion
[0];
3209 shot_ext
->shot
.ctl
.scaler
.cropRegion
[1] = new_cropRegion
[1];
3210 shot_ext
->shot
.ctl
.scaler
.cropRegion
[2] = new_cropRegion
[2];
3211 if (m_IsAfModeUpdateRequired
&& (m_ctlInfo
.flash
.m_precaptureTriggerId
== 0)) {
3212 ALOGD("### Applying AF Mode change(Mode %d) ", m_afMode
);
3213 shot_ext
->shot
.ctl
.aa
.afMode
= m_afMode
;
3214 if (m_afMode
== AA_AFMODE_CONTINUOUS_VIDEO
|| m_afMode
== AA_AFMODE_CONTINUOUS_PICTURE
) {
3215 ALOGD("### With Automatic triger for continuous modes");
3216 m_afState
= HAL_AFSTATE_STARTED
;
3217 shot_ext
->shot
.ctl
.aa
.afTrigger
= 1;
3219 if ((m_ctlInfo
.scene
.prevSceneMode
== AA_SCENE_MODE_UNSUPPORTED
) ||
3220 (m_ctlInfo
.scene
.prevSceneMode
== AA_SCENE_MODE_FACE_PRIORITY
)) {
3222 case AA_AFMODE_CONTINUOUS_PICTURE
:
3223 shot_ext
->shot
.ctl
.aa
.afMode
= AA_AFMODE_CONTINUOUS_PICTURE
;
3224 ALOGD("### Face AF Mode change (Mode %d) ", shot_ext
->shot
.ctl
.aa
.afMode
);
3230 // reset flash result
3231 if (m_ctlInfo
.flash
.m_afFlashDoneFlg
) {
3232 m_ctlInfo
.flash
.m_flashEnableFlg
= false;
3233 m_ctlInfo
.flash
.m_afFlashDoneFlg
= false;
3234 m_ctlInfo
.flash
.m_flashDecisionResult
= false;
3235 m_ctlInfo
.flash
.m_flashCnt
= 0;
3237 m_ctlInfo
.af
.m_afTriggerTimeOut
= 1;
3240 m_IsAfModeUpdateRequired
= false;
3241 // support inifinity focus mode
3242 if ((m_afMode
== AA_AFMODE_MANUAL
) && ( shot_ext
->shot
.ctl
.lens
.focusDistance
== 0)) {
3243 shot_ext
->shot
.ctl
.aa
.afMode
= AA_AFMODE_INFINITY
;
3244 shot_ext
->shot
.ctl
.aa
.afTrigger
= 1;
3247 if (m_afMode2
!= NO_CHANGE
) {
3248 enum aa_afmode tempAfMode
= m_afMode2
;
3249 m_afMode2
= NO_CHANGE
;
3250 SetAfMode(tempAfMode
);
3254 shot_ext
->shot
.ctl
.aa
.afMode
= NO_CHANGE
;
3256 if (m_IsAfTriggerRequired
) {
3257 if (m_ctlInfo
.flash
.m_flashEnableFlg
&& m_ctlInfo
.flash
.m_afFlashDoneFlg
) {
3259 if (m_ctlInfo
.flash
.m_flashCnt
== IS_FLASH_STATE_ON_DONE
) {
3260 if ((m_afMode
!= AA_AFMODE_AUTO
) && (m_afMode
!= AA_AFMODE_MACRO
)) {
3261 // Flash is enabled and start AF
3262 m_afTrigger(shot_ext
, 1);
3264 m_afTrigger(shot_ext
, 0);
3269 m_afTrigger(shot_ext
, 0);
3272 shot_ext
->shot
.ctl
.aa
.afTrigger
= 0;
3276 shot_ext
->setfile
= ISS_SUB_SCENARIO_VIDEO
;
3278 shot_ext
->setfile
= ISS_SUB_SCENARIO_STILL
;
3281 shot_ext
->shot
.ctl
.aa
.afTrigger
= 1;
3283 // TODO : check collision with AFMode Update
3284 if (m_IsAfLockRequired
) {
3285 shot_ext
->shot
.ctl
.aa
.afMode
= AA_AFMODE_OFF
;
3286 m_IsAfLockRequired
= false;
3288 ALOGV("### Isp Qbuf start(%d) count (%d), SCP(%d) SCC(%d) DIS(%d) shot_size(%d)",
3290 shot_ext
->shot
.ctl
.request
.frameCount
,
3291 shot_ext
->request_scp
,
3292 shot_ext
->request_scc
,
3293 shot_ext
->dis_bypass
, sizeof(camera2_shot
));
3296 m_updateAfRegion(shot_ext
);
3298 m_lastSceneMode
= shot_ext
->shot
.ctl
.aa
.sceneMode
;
3299 if (shot_ext
->shot
.ctl
.aa
.sceneMode
== AA_SCENE_MODE_NIGHT
3300 && shot_ext
->shot
.ctl
.aa
.aeMode
== AA_AEMODE_LOCKED
)
3301 shot_ext
->shot
.ctl
.aa
.aeMode
= AA_AEMODE_ON
;
3302 if (m_nightCaptureCnt
== 0) {
3303 if (shot_ext
->shot
.ctl
.aa
.captureIntent
== AA_CAPTURE_INTENT_STILL_CAPTURE
3304 && shot_ext
->shot
.ctl
.aa
.sceneMode
== AA_SCENE_MODE_NIGHT
) {
3305 shot_ext
->shot
.ctl
.aa
.sceneMode
= AA_SCENE_MODE_NIGHT_CAPTURE
;
3306 shot_ext
->shot
.ctl
.aa
.aeTargetFpsRange
[0] = 2;
3307 shot_ext
->shot
.ctl
.aa
.aeTargetFpsRange
[1] = 30;
3308 m_nightCaptureCnt
= 4;
3309 m_nightCaptureFrameCnt
= matchedFrameCnt
;
3310 shot_ext
->request_scc
= 0;
3313 else if (m_nightCaptureCnt
== 1) {
3314 shot_ext
->shot
.ctl
.aa
.sceneMode
= AA_SCENE_MODE_NIGHT_CAPTURE
;
3315 shot_ext
->shot
.ctl
.aa
.aeTargetFpsRange
[0] = 30;
3316 shot_ext
->shot
.ctl
.aa
.aeTargetFpsRange
[1] = 30;
3317 m_nightCaptureCnt
--;
3318 m_nightCaptureFrameCnt
= 0;
3319 shot_ext
->request_scc
= 1;
3321 else if (m_nightCaptureCnt
== 2) {
3322 shot_ext
->shot
.ctl
.aa
.sceneMode
= AA_SCENE_MODE_NIGHT_CAPTURE
;
3323 shot_ext
->shot
.ctl
.aa
.aeTargetFpsRange
[0] = 2;
3324 shot_ext
->shot
.ctl
.aa
.aeTargetFpsRange
[1] = 30;
3325 m_nightCaptureCnt
--;
3326 shot_ext
->request_scc
= 0;
3328 else if (m_nightCaptureCnt
== 3) {
3329 shot_ext
->shot
.ctl
.aa
.sceneMode
= AA_SCENE_MODE_NIGHT_CAPTURE
;
3330 shot_ext
->shot
.ctl
.aa
.aeTargetFpsRange
[0] = 2;
3331 shot_ext
->shot
.ctl
.aa
.aeTargetFpsRange
[1] = 30;
3332 m_nightCaptureCnt
--;
3333 shot_ext
->request_scc
= 0;
3335 else if (m_nightCaptureCnt
== 4) {
3336 shot_ext
->shot
.ctl
.aa
.sceneMode
= AA_SCENE_MODE_NIGHT_CAPTURE
;
3337 shot_ext
->shot
.ctl
.aa
.aeTargetFpsRange
[0] = 2;
3338 shot_ext
->shot
.ctl
.aa
.aeTargetFpsRange
[1] = 30;
3339 m_nightCaptureCnt
--;
3340 shot_ext
->request_scc
= 0;
3343 switch (shot_ext
->shot
.ctl
.aa
.aeTargetFpsRange
[1]) {
3345 shot_ext
->shot
.ctl
.sensor
.frameDuration
= (66666 * 1000);
3349 shot_ext
->shot
.ctl
.sensor
.frameDuration
= (41666 * 1000);
3353 shot_ext
->shot
.ctl
.sensor
.frameDuration
= (40000 * 1000);
3358 shot_ext
->shot
.ctl
.sensor
.frameDuration
= (33333 * 1000);
3361 shot_ext
->shot
.ctl
.aa
.aeTargetFpsRange
[1] = 30;
3364 // Keep and Skip request_scc = 1 at flash enable mode to operate flash sequence
3365 if ((m_ctlInfo
.flash
.i_flashMode
>= AA_AEMODE_ON_AUTO_FLASH
)
3366 && (shot_ext
->shot
.ctl
.aa
.captureIntent
== AA_CAPTURE_INTENT_STILL_CAPTURE
)
3367 && (m_cameraId
== 0)) {
3368 if (!m_ctlInfo
.flash
.m_flashDecisionResult
) {
3369 m_ctlInfo
.flash
.m_flashEnableFlg
= false;
3370 m_ctlInfo
.flash
.m_afFlashDoneFlg
= false;
3371 m_ctlInfo
.flash
.m_flashCnt
= 0;
3372 } else if ((m_ctlInfo
.flash
.m_flashCnt
== IS_FLASH_STATE_AUTO_DONE
) ||
3373 (m_ctlInfo
.flash
.m_flashCnt
== IS_FLASH_STATE_AUTO_OFF
)) {
3374 ALOGD("(%s): [Flash] Flash capture start : skip request scc 1#####", __FUNCTION__
);
3375 shot_ext
->request_scc
= 0;
3376 m_ctlInfo
.flash
.m_flashFrameCount
= matchedFrameCnt
;
3377 m_ctlInfo
.flash
.m_flashEnableFlg
= true;
3378 m_ctlInfo
.flash
.m_afFlashDoneFlg
= false;
3379 m_ctlInfo
.flash
.m_flashCnt
= IS_FLASH_STATE_CAPTURE
;
3380 } else if (m_ctlInfo
.flash
.m_flashCnt
< IS_FLASH_STATE_AUTO_DONE
) {
3381 ALOGE("(%s): [Flash] Flash capture Error- wrong state !!!!!! (%d)", __FUNCTION__
, m_ctlInfo
.flash
.m_flashCnt
);
3382 shot_ext
->shot
.ctl
.aa
.aeflashMode
= AA_FLASHMODE_OFF
;
3383 m_ctlInfo
.flash
.m_flashEnableFlg
= false;
3384 m_ctlInfo
.flash
.m_afFlashDoneFlg
= false;
3385 m_ctlInfo
.flash
.m_flashCnt
= 0;
3387 } else if (shot_ext
->shot
.ctl
.aa
.captureIntent
== AA_CAPTURE_INTENT_STILL_CAPTURE
) {
3388 m_ctlInfo
.flash
.m_flashDecisionResult
= false;
3391 if (shot_ext
->shot
.ctl
.flash
.flashMode
== CAM2_FLASH_MODE_TORCH
) {
3392 if (m_ctlInfo
.flash
.m_flashTorchMode
== false) {
3393 m_ctlInfo
.flash
.m_flashTorchMode
= true;
3396 if (m_ctlInfo
.flash
.m_flashTorchMode
== true) {
3397 shot_ext
->shot
.ctl
.flash
.flashMode
= CAM2_FLASH_MODE_OFF
;
3398 shot_ext
->shot
.ctl
.flash
.firingPower
= 0;
3399 m_ctlInfo
.flash
.m_flashTorchMode
= false;
3401 shot_ext
->shot
.ctl
.flash
.flashMode
= CAM2_FLASH_MODE_NOP
;
3405 if (shot_ext
->isReprocessing
) {
3406 ALOGV("(%s): Sending signal for Reprocess request", __FUNCTION__
);
3407 m_currentReprocessOutStreams
= shot_ext
->shot
.ctl
.request
.outputStreams
[0];
3408 shot_ext
->request_scp
= 0;
3409 shot_ext
->request_scc
= 0;
3410 m_reprocessingFrameCnt
= shot_ext
->shot
.ctl
.request
.frameCount
;
3411 m_ctlInfo
.flash
.m_flashDecisionResult
= false;
3412 memcpy(&m_jpegMetadata
, (void*)(m_requestManager
->GetInternalShotExtByFrameCnt(m_reprocessingFrameCnt
)),
3413 sizeof(struct camera2_shot_ext
));
3414 m_streamThreads
[1]->SetSignal(SIGNAL_STREAM_REPROCESSING_START
);
3415 m_ctlInfo
.flash
.m_flashEnableFlg
= false;
3418 if (m_ctlInfo
.flash
.m_flashEnableFlg
) {
3419 m_preCaptureListenerSensor(shot_ext
);
3420 m_preCaptureSetter(shot_ext
);
3423 ALOGV("(%s): queued aa(%d) aemode(%d) awb(%d) afmode(%d) trigger(%d)", __FUNCTION__
,
3424 (int)(shot_ext
->shot
.ctl
.aa
.mode
), (int)(shot_ext
->shot
.ctl
.aa
.aeMode
),
3425 (int)(shot_ext
->shot
.ctl
.aa
.awbMode
), (int)(shot_ext
->shot
.ctl
.aa
.afMode
),
3426 (int)(shot_ext
->shot
.ctl
.aa
.afTrigger
));
3428 if (m_vdisBubbleCnt
> 0 && m_vdisDupFrame
== matchedFrameCnt
) {
3429 shot_ext
->dis_bypass
= 1;
3430 shot_ext
->dnr_bypass
= 1;
3431 shot_ext
->request_scp
= 0;
3432 shot_ext
->request_scc
= 0;
3434 matchedFrameCnt
= -1;
3436 m_vdisDupFrame
= matchedFrameCnt
;
3438 if (m_scpForceSuspended
)
3439 shot_ext
->request_scc
= 0;
3441 uint32_t current_scp
= shot_ext
->request_scp
;
3442 uint32_t current_scc
= shot_ext
->request_scc
;
3444 if (shot_ext
->shot
.dm
.request
.frameCount
== 0) {
3445 CAM_LOGE("ERR(%s): dm.request.frameCount = %d", __FUNCTION__
, shot_ext
->shot
.dm
.request
.frameCount
);
3448 cam_int_qbuf(&(m_camera_info
.isp
), index
);
3450 ALOGV("### isp DQBUF start");
3451 index_isp
= cam_int_dqbuf(&(m_camera_info
.isp
));
3453 shot_ext
= (struct camera2_shot_ext
*)(m_camera_info
.isp
.buffer
[index_isp
].virt
.extP
[1]);
3455 if (m_ctlInfo
.flash
.m_flashEnableFlg
)
3456 m_preCaptureListenerISP(shot_ext
);
3458 ALOGV("### Isp DQbuf done(%d) count (%d), SCP(%d) SCC(%d) dis_bypass(%d) dnr_bypass(%d) shot_size(%d)",
3460 shot_ext
->shot
.ctl
.request
.frameCount
,
3461 shot_ext
->request_scp
,
3462 shot_ext
->request_scc
,
3463 shot_ext
->dis_bypass
,
3464 shot_ext
->dnr_bypass
, sizeof(camera2_shot
));
3466 ALOGV("(%s): DM aa(%d) aemode(%d) awb(%d) afmode(%d)", __FUNCTION__
,
3467 (int)(shot_ext
->shot
.dm
.aa
.mode
), (int)(shot_ext
->shot
.dm
.aa
.aeMode
),
3468 (int)(shot_ext
->shot
.dm
.aa
.awbMode
),
3469 (int)(shot_ext
->shot
.dm
.aa
.afMode
));
3471 #ifndef ENABLE_FRAME_SYNC
3472 m_currentOutputStreams
= shot_ext
->shot
.ctl
.request
.outputStreams
[0];
3475 if (!shot_ext
->fd_bypass
) {
3476 /* FD orientation axis transformation */
3477 for (int i
=0; i
< CAMERA2_MAX_FACES
; i
++) {
3478 if (shot_ext
->shot
.dm
.stats
.faceRectangles
[i
][0] > 0)
3479 shot_ext
->shot
.dm
.stats
.faceRectangles
[i
][0] = (m_camera2
->m_curCameraInfo
->sensorW
3480 * shot_ext
->shot
.dm
.stats
.faceRectangles
[i
][0])
3481 / m_streamThreads
[0].get()->m_parameters
.width
;
3482 if (shot_ext
->shot
.dm
.stats
.faceRectangles
[i
][1] > 0)
3483 shot_ext
->shot
.dm
.stats
.faceRectangles
[i
][1] = (m_camera2
->m_curCameraInfo
->sensorH
3484 * shot_ext
->shot
.dm
.stats
.faceRectangles
[i
][1])
3485 / m_streamThreads
[0].get()->m_parameters
.height
;
3486 if (shot_ext
->shot
.dm
.stats
.faceRectangles
[i
][2] > 0)
3487 shot_ext
->shot
.dm
.stats
.faceRectangles
[i
][2] = (m_camera2
->m_curCameraInfo
->sensorW
3488 * shot_ext
->shot
.dm
.stats
.faceRectangles
[i
][2])
3489 / m_streamThreads
[0].get()->m_parameters
.width
;
3490 if (shot_ext
->shot
.dm
.stats
.faceRectangles
[i
][3] > 0)
3491 shot_ext
->shot
.dm
.stats
.faceRectangles
[i
][3] = (m_camera2
->m_curCameraInfo
->sensorH
3492 * shot_ext
->shot
.dm
.stats
.faceRectangles
[i
][3])
3493 / m_streamThreads
[0].get()->m_parameters
.height
;
3497 if (shot_ext
->shot
.ctl
.aa
.sceneMode
!= AA_SCENE_MODE_NIGHT
)
3498 m_preCaptureAeState(shot_ext
);
3500 // At scene mode face priority
3501 if (shot_ext
->shot
.dm
.aa
.afMode
== AA_AFMODE_CONTINUOUS_PICTURE_FACE
)
3502 shot_ext
->shot
.dm
.aa
.afMode
= AA_AFMODE_CONTINUOUS_PICTURE
;
3504 if (matchedFrameCnt
!= -1 && m_nightCaptureCnt
== 0 && (m_ctlInfo
.flash
.m_flashCnt
< IS_FLASH_STATE_CAPTURE
)) {
3505 m_requestManager
->ApplyDynamicMetadata(shot_ext
);
3508 if (current_scc
!= shot_ext
->request_scc
) {
3509 ALOGD("(%s): scc frame drop1 request_scc(%d to %d)",
3510 __FUNCTION__
, current_scc
, shot_ext
->request_scc
);
3511 m_requestManager
->NotifyStreamOutput(shot_ext
->shot
.ctl
.request
.frameCount
);
3513 if (shot_ext
->request_scc
) {
3514 ALOGV("send SIGNAL_STREAM_DATA_COMING (SCC)");
3515 if (shot_ext
->shot
.ctl
.request
.outputStreams
[0] & STREAM_MASK_JPEG
) {
3516 if (m_ctlInfo
.flash
.m_flashCnt
< IS_FLASH_STATE_CAPTURE
)
3517 memcpy(&m_jpegMetadata
, (void*)(m_requestManager
->GetInternalShotExtByFrameCnt(shot_ext
->shot
.ctl
.request
.frameCount
)),
3518 sizeof(struct camera2_shot_ext
));
3520 memcpy(&m_jpegMetadata
, (void*)shot_ext
, sizeof(struct camera2_shot_ext
));
3522 m_streamThreads
[1]->SetSignal(SIGNAL_STREAM_DATA_COMING
);
3524 if (current_scp
!= shot_ext
->request_scp
) {
3525 ALOGD("(%s): scp frame drop1 request_scp(%d to %d)",
3526 __FUNCTION__
, current_scp
, shot_ext
->request_scp
);
3527 m_requestManager
->NotifyStreamOutput(shot_ext
->shot
.ctl
.request
.frameCount
);
3529 if (shot_ext
->request_scp
) {
3530 ALOGV("send SIGNAL_STREAM_DATA_COMING (SCP)");
3531 m_streamThreads
[0]->SetSignal(SIGNAL_STREAM_DATA_COMING
);
3534 ALOGV("(%s): SCP_CLOSING check sensor(%d) scc(%d) scp(%d) ", __FUNCTION__
,
3535 shot_ext
->request_sensor
, shot_ext
->request_scc
, shot_ext
->request_scp
);
3536 if (shot_ext
->request_scc
+ shot_ext
->request_scp
+ shot_ext
->request_sensor
== 0) {
3537 ALOGV("(%s): SCP_CLOSING check OK ", __FUNCTION__
);
3538 m_scp_closed
= true;
3541 m_scp_closed
= false;
3543 OnAfNotification(shot_ext
->shot
.dm
.aa
.afState
);
3544 OnPrecaptureMeteringNotificationISP();
3546 memcpy(&shot_ext
->shot
.ctl
, &m_camera_info
.dummy_shot
.shot
.ctl
, sizeof(struct camera2_ctl
));
3547 shot_ext
->shot
.ctl
.request
.frameCount
= 0xfffffffe;
3548 shot_ext
->request_sensor
= 1;
3549 shot_ext
->dis_bypass
= 1;
3550 shot_ext
->dnr_bypass
= 1;
3551 shot_ext
->fd_bypass
= 1;
3552 shot_ext
->drc_bypass
= 1;
3553 shot_ext
->request_scc
= 0;
3554 shot_ext
->request_scp
= 0;
3556 shot_ext
->setfile
= ISS_SUB_SCENARIO_VIDEO
;
3558 shot_ext
->setfile
= ISS_SUB_SCENARIO_STILL
;
3560 shot_ext
->shot
.ctl
.aa
.sceneMode
= (enum aa_scene_mode
)m_lastSceneMode
;
3561 if (shot_ext
->shot
.ctl
.aa
.sceneMode
== AA_SCENE_MODE_NIGHT_CAPTURE
|| shot_ext
->shot
.ctl
.aa
.sceneMode
== AA_SCENE_MODE_NIGHT
) {
3562 shot_ext
->shot
.ctl
.aa
.aeTargetFpsRange
[0] = 8;
3563 shot_ext
->shot
.ctl
.aa
.aeTargetFpsRange
[1] = 30;
3565 shot_ext
->shot
.ctl
.aa
.aeflashMode
= AA_FLASHMODE_OFF
;
3566 shot_ext
->shot
.ctl
.flash
.flashMode
= CAM2_FLASH_MODE_OFF
;
3567 ALOGV("### isp QBUF start (bubble)");
3568 ALOGV("bubble: queued aa(%d) aemode(%d) awb(%d) afmode(%d) trigger(%d)",
3569 (int)(shot_ext
->shot
.ctl
.aa
.mode
), (int)(shot_ext
->shot
.ctl
.aa
.aeMode
),
3570 (int)(shot_ext
->shot
.ctl
.aa
.awbMode
), (int)(shot_ext
->shot
.ctl
.aa
.afMode
),
3571 (int)(shot_ext
->shot
.ctl
.aa
.afTrigger
));
3573 cam_int_qbuf(&(m_camera_info
.isp
), index
);
3574 ALOGV("### isp DQBUF start (bubble)");
3575 index_isp
= cam_int_dqbuf(&(m_camera_info
.isp
));
3576 shot_ext
= (struct camera2_shot_ext
*)(m_camera_info
.isp
.buffer
[index_isp
].virt
.extP
[1]);
3577 ALOGV("bubble: DM aa(%d) aemode(%d) awb(%d) afmode(%d)",
3578 (int)(shot_ext
->shot
.dm
.aa
.mode
), (int)(shot_ext
->shot
.dm
.aa
.aeMode
),
3579 (int)(shot_ext
->shot
.dm
.aa
.awbMode
),
3580 (int)(shot_ext
->shot
.dm
.aa
.afMode
));
3582 OnAfNotification(shot_ext
->shot
.dm
.aa
.afState
);
3585 index
= m_requestManager
->popSensorQ();
3587 ALOGE("sensorQ is empty");
3591 processingReqIndex
= m_requestManager
->MarkProcessingRequest(&(m_camera_info
.sensor
.buffer
[index
]));
3592 shot_ext
= (struct camera2_shot_ext
*)(m_camera_info
.sensor
.buffer
[index
].virt
.extP
[1]);
3593 if (m_scp_closing
|| m_scp_closed
) {
3594 ALOGD("(%s): SCP_CLOSING(%d) SCP_CLOSED(%d)", __FUNCTION__
, m_scp_closing
, m_scp_closed
);
3595 shot_ext
->request_scc
= 0;
3596 shot_ext
->request_scp
= 0;
3597 shot_ext
->request_sensor
= 0;
3599 cam_int_qbuf(&(m_camera_info
.sensor
), index
);
3600 ALOGV("Sensor Qbuf done(%d)", index
);
3603 && ((matchedFrameCnt
== -1) || (processingReqIndex
== -1))){
3604 ALOGV("make bubble shot: matchedFramcnt(%d) processingReqIndex(%d)",
3605 matchedFrameCnt
, processingReqIndex
);
3606 selfThread
->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING
);
3612 void ExynosCameraHWInterface2::m_streamBufferInit(SignalDrivenThread
*self
)
3614 uint32_t currentSignal
= self
->GetProcessingSignal();
3615 StreamThread
* selfThread
= ((StreamThread
*)self
);
3616 stream_parameters_t
*selfStreamParms
= &(selfThread
->m_parameters
);
3617 node_info_t
*currentNode
= selfStreamParms
->node
;
3618 substream_parameters_t
*subParms
;
3619 buffer_handle_t
* buf
= NULL
;
3626 if (!(selfThread
->m_isBufferInit
))
3628 for ( i
=0 ; i
< selfStreamParms
->numSvcBuffers
; i
++) {
3629 res
= selfStreamParms
->streamOps
->dequeue_buffer(selfStreamParms
->streamOps
, &buf
);
3630 if (res
!= NO_ERROR
|| buf
== NULL
) {
3631 ALOGE("ERR(%s): Init: unable to dequeue buffer : %d",__FUNCTION__
, res
);
3634 ALOGV("DEBUG(%s): got buf(%x) version(%d), numFds(%d), numInts(%d)", __FUNCTION__
, (uint32_t)(*buf
),
3635 ((native_handle_t
*)(*buf
))->version
, ((native_handle_t
*)(*buf
))->numFds
, ((native_handle_t
*)(*buf
))->numInts
);
3637 index
= selfThread
->findBufferIndex(buf
);
3639 ALOGE("ERR(%s): could not find buffer index", __FUNCTION__
);
3642 ALOGV("DEBUG(%s): found buffer index[%d] - status(%d)",
3643 __FUNCTION__
, index
, selfStreamParms
->svcBufStatus
[index
]);
3644 if (selfStreamParms
->svcBufStatus
[index
]== REQUIRES_DQ_FROM_SVC
)
3645 selfStreamParms
->svcBufStatus
[index
] = ON_DRIVER
;
3646 else if (selfStreamParms
->svcBufStatus
[index
]== ON_SERVICE
)
3647 selfStreamParms
->svcBufStatus
[index
] = ON_HAL
;
3649 ALOGV("DBG(%s): buffer status abnormal (%d) "
3650 , __FUNCTION__
, selfStreamParms
->svcBufStatus
[index
]);
3652 selfStreamParms
->numSvcBufsInHal
++;
3654 selfStreamParms
->bufIndex
= 0;
3656 selfThread
->m_isBufferInit
= true;
3658 for (int i
= 0 ; i
< NUM_MAX_SUBSTREAM
; i
++) {
3659 if (selfThread
->m_attachedSubStreams
[i
].streamId
== -1)
3662 subParms
= &m_subStreams
[selfThread
->m_attachedSubStreams
[i
].streamId
];
3663 if (subParms
->type
&& subParms
->needBufferInit
) {
3664 ALOGV("(%s): [subStream] (id:%d) Buffer Initialization numsvcbuf(%d)",
3665 __FUNCTION__
, selfThread
->m_attachedSubStreams
[i
].streamId
, subParms
->numSvcBuffers
);
3666 int checkingIndex
= 0;
3668 for ( i
= 0 ; i
< subParms
->numSvcBuffers
; i
++) {
3669 res
= subParms
->streamOps
->dequeue_buffer(subParms
->streamOps
, &buf
);
3670 if (res
!= NO_ERROR
|| buf
== NULL
) {
3671 ALOGE("ERR(%s): Init: unable to dequeue buffer : %d",__FUNCTION__
, res
);
3674 subParms
->numSvcBufsInHal
++;
3675 ALOGV("DEBUG(%s): [subStream] got buf(%x) bufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__
, (uint32_t)(*buf
),
3676 subParms
->numSvcBufsInHal
, ((native_handle_t
*)(*buf
))->version
, ((native_handle_t
*)(*buf
))->numFds
, ((native_handle_t
*)(*buf
))->numInts
);
3678 if (m_grallocHal
->lock(m_grallocHal
, *buf
,
3679 subParms
->usage
, 0, 0,
3680 subParms
->width
, subParms
->height
, virtAddr
) != 0) {
3681 ALOGE("ERR(%s): could not obtain gralloc buffer", __FUNCTION__
);
3684 ALOGV("DEBUG(%s): [subStream] locked img buf plane0(%x) plane1(%x) plane2(%x)",
3685 __FUNCTION__
, (unsigned int)virtAddr
[0], (unsigned int)virtAddr
[1], (unsigned int)virtAddr
[2]);
3688 for (checkingIndex
= 0; checkingIndex
< subParms
->numSvcBuffers
; checkingIndex
++) {
3689 if (subParms
->svcBufHandle
[checkingIndex
] == *buf
) {
3694 ALOGV("DEBUG(%s): [subStream] found(%d) - index[%d]", __FUNCTION__
, found
, checkingIndex
);
3697 index
= checkingIndex
;
3700 ALOGV("ERR(%s): could not find buffer index", __FUNCTION__
);
3703 ALOGV("DEBUG(%s): found buffer index[%d] - status(%d)",
3704 __FUNCTION__
, index
, subParms
->svcBufStatus
[index
]);
3705 if (subParms
->svcBufStatus
[index
]== ON_SERVICE
)
3706 subParms
->svcBufStatus
[index
] = ON_HAL
;
3708 ALOGV("DBG(%s): buffer status abnormal (%d) "
3709 , __FUNCTION__
, subParms
->svcBufStatus
[index
]);
3711 if (*buf
!= subParms
->svcBufHandle
[index
])
3712 ALOGV("DBG(%s): different buf_handle index ", __FUNCTION__
);
3714 ALOGV("DEBUG(%s): same buf_handle index", __FUNCTION__
);
3716 subParms
->svcBufIndex
= 0;
3718 if (subParms
->type
== SUBSTREAM_TYPE_JPEG
) {
3719 m_resizeBuf
.size
.extS
[0] = ALIGN(subParms
->width
, 16) * ALIGN(subParms
->height
, 16) * 2;
3720 m_resizeBuf
.size
.extS
[1] = 0;
3721 m_resizeBuf
.size
.extS
[2] = 0;
3723 if (allocCameraMemory(m_ionCameraClient
, &m_resizeBuf
, 1) == -1) {
3724 ALOGE("ERR(%s): Failed to allocate resize buf", __FUNCTION__
);
3727 if (subParms
->type
== SUBSTREAM_TYPE_PRVCB
) {
3728 m_getAlignedYUVSize(HAL_PIXEL_FORMAT_2_V4L2_PIX(subParms
->internalFormat
), subParms
->width
,
3729 subParms
->height
, &m_previewCbBuf
);
3731 if (allocCameraMemory(m_ionCameraClient
, &m_previewCbBuf
, subParms
->internalPlanes
) == -1) {
3732 ALOGE("ERR(%s): Failed to allocate prvcb buf", __FUNCTION__
);
3735 subParms
->needBufferInit
= false;
3740 void ExynosCameraHWInterface2::m_streamThreadInitialize(SignalDrivenThread
* self
)
3742 StreamThread
* selfThread
= ((StreamThread
*)self
);
3743 ALOGV("DEBUG(%s): ", __FUNCTION__
);
3744 memset(&(selfThread
->m_parameters
), 0, sizeof(stream_parameters_t
));
3745 selfThread
->m_isBufferInit
= false;
3746 for (int i
= 0 ; i
< NUM_MAX_SUBSTREAM
; i
++) {
3747 selfThread
->m_attachedSubStreams
[i
].streamId
= -1;
3748 selfThread
->m_attachedSubStreams
[i
].priority
= 0;
3753 int ExynosCameraHWInterface2::m_runSubStreamFunc(StreamThread
*selfThread
, ExynosBuffer
*srcImageBuf
,
3754 int stream_id
, nsecs_t frameTimeStamp
)
3756 substream_parameters_t
*subParms
= &m_subStreams
[stream_id
];
3758 switch (stream_id
) {
3760 case STREAM_ID_JPEG
:
3761 return m_jpegCreator(selfThread
, srcImageBuf
, frameTimeStamp
);
3763 case STREAM_ID_RECORD
:
3764 return m_recordCreator(selfThread
, srcImageBuf
, frameTimeStamp
);
3766 case STREAM_ID_PRVCB
:
3767 return m_prvcbCreator(selfThread
, srcImageBuf
, frameTimeStamp
);
3773 void ExynosCameraHWInterface2::m_streamFunc_direct(SignalDrivenThread
*self
)
3775 uint32_t currentSignal
= self
->GetProcessingSignal();
3776 StreamThread
* selfThread
= ((StreamThread
*)self
);
3777 stream_parameters_t
*selfStreamParms
= &(selfThread
->m_parameters
);
3778 node_info_t
*currentNode
= selfStreamParms
->node
;
3780 nsecs_t frameTimeStamp
;
3782 if (currentSignal
& SIGNAL_THREAD_RELEASE
) {
3783 CAM_LOGD("(%s): [%d] START SIGNAL_THREAD_RELEASE", __FUNCTION__
, selfThread
->m_index
);
3785 if (selfThread
->m_isBufferInit
) {
3786 if (!(currentNode
->fd
== m_camera_info
.capture
.fd
&& m_camera_info
.capture
.status
== false)) {
3787 ALOGV("(%s): [%d] calling streamoff (fd:%d)", __FUNCTION__
,
3788 selfThread
->m_index
, currentNode
->fd
);
3789 if (cam_int_streamoff(currentNode
) < 0 ) {
3790 ALOGE("ERR(%s): stream off fail", __FUNCTION__
);
3792 ALOGV("(%s): [%d] streamoff done and calling reqbuf 0 (fd:%d)", __FUNCTION__
,
3793 selfThread
->m_index
, currentNode
->fd
);
3794 currentNode
->buffers
= 0;
3795 cam_int_reqbufs(currentNode
);
3796 ALOGV("(%s): [%d] reqbuf 0 DONE (fd:%d)", __FUNCTION__
,
3797 selfThread
->m_index
, currentNode
->fd
);
3800 #ifdef ENABLE_FRAME_SYNC
3802 for (i
= 0; i
< NUM_MAX_CAMERA_BUFFERS
; i
++)
3803 if (selfStreamParms
->metaBuffers
[i
].fd
.extFd
[0] != 0) {
3804 freeCameraMemory(&(selfStreamParms
->metaBuffers
[i
]), 1);
3805 selfStreamParms
->metaBuffers
[i
].fd
.extFd
[0] = 0;
3806 selfStreamParms
->metaBuffers
[i
].size
.extS
[0] = 0;
3809 selfThread
->m_isBufferInit
= false;
3810 selfThread
->m_releasing
= false;
3811 selfThread
->m_activated
= false;
3812 ALOGV("(%s): [%d] END SIGNAL_THREAD_RELEASE", __FUNCTION__
, selfThread
->m_index
);
3815 if (currentSignal
& SIGNAL_STREAM_REPROCESSING_START
) {
3817 buffer_handle_t
* buf
= NULL
;
3819 ALOGV("(%s): streamthread[%d] START SIGNAL_STREAM_REPROCESSING_START",
3820 __FUNCTION__
, selfThread
->m_index
);
3821 res
= m_reprocessOps
->acquire_buffer(m_reprocessOps
, &buf
);
3822 if (res
!= NO_ERROR
|| buf
== NULL
) {
3823 ALOGE("ERR(%s): [reprocess] unable to acquire_buffer : %d",__FUNCTION__
, res
);
3826 const private_handle_t
*priv_handle
= reinterpret_cast<const private_handle_t
*>(*buf
);
3827 int checkingIndex
= 0;
3828 for (checkingIndex
= 0; checkingIndex
< selfStreamParms
->numSvcBuffers
; checkingIndex
++) {
3829 if (priv_handle
->fd
== selfStreamParms
->svcBuffers
[checkingIndex
].fd
.extFd
[0] ) {
3834 ALOGV("DEBUG(%s): dequeued buf %x => found(%d) index(%d) ",
3835 __FUNCTION__
, (unsigned int)buf
, found
, checkingIndex
);
3839 for (int i
= 0 ; i
< NUM_MAX_SUBSTREAM
; i
++) {
3840 if (selfThread
->m_attachedSubStreams
[i
].streamId
== -1)
3843 #ifdef ENABLE_FRAME_SYNC
3844 frameTimeStamp
= m_requestManager
->GetTimestampByFrameCnt(m_reprocessingFrameCnt
);
3845 m_requestManager
->NotifyStreamOutput(m_reprocessingFrameCnt
);
3847 frameTimeStamp
= m_requestManager
->GetTimestamp(m_requestManager
->GetFrameIndex());
3849 if (m_currentReprocessOutStreams
& (1<<selfThread
->m_attachedSubStreams
[i
].streamId
))
3850 m_runSubStreamFunc(selfThread
, &(selfStreamParms
->svcBuffers
[checkingIndex
]),
3851 selfThread
->m_attachedSubStreams
[i
].streamId
, frameTimeStamp
);
3854 res
= m_reprocessOps
->release_buffer(m_reprocessOps
, buf
);
3855 if (res
!= NO_ERROR
) {
3856 ALOGE("ERR(%s): [reprocess] unable to release_buffer : %d",__FUNCTION__
, res
);
3859 ALOGV("(%s): streamthread[%d] END SIGNAL_STREAM_REPROCESSING_START",
3860 __FUNCTION__
,selfThread
->m_index
);
3864 if (currentSignal
& SIGNAL_STREAM_DATA_COMING
) {
3865 buffer_handle_t
* buf
= NULL
;
3870 #ifdef ENABLE_FRAME_SYNC
3871 camera2_stream
*frame
;
3872 uint8_t currentOutputStreams
;
3873 bool directOutputEnabled
= false;
3875 int numOfUndqbuf
= 0;
3877 ALOGV("(%s): streamthread[%d] START SIGNAL_STREAM_DATA_COMING", __FUNCTION__
,selfThread
->m_index
);
3879 m_streamBufferInit(self
);
3882 ALOGV("DEBUG(%s): streamthread[%d] type(%d) DQBUF START ",__FUNCTION__
,
3883 selfThread
->m_index
, selfThread
->streamType
);
3885 #ifdef ENABLE_FRAME_SYNC
3886 selfStreamParms
->bufIndex
= cam_int_dqbuf(currentNode
, selfStreamParms
->planes
+ selfStreamParms
->metaPlanes
);
3887 frame
= (struct camera2_stream
*)(selfStreamParms
->metaBuffers
[selfStreamParms
->bufIndex
].virt
.extP
[0]);
3888 frameTimeStamp
= m_requestManager
->GetTimestampByFrameCnt(frame
->rcount
);
3889 currentOutputStreams
= m_requestManager
->GetOutputStreamByFrameCnt(frame
->rcount
);
3890 ALOGV("frame count streamthread[%d] : %d, outputStream(%x)", selfThread
->m_index
, frame
->rcount
, currentOutputStreams
);
3891 if (((currentOutputStreams
& STREAM_MASK_PREVIEW
) && selfThread
->m_index
== 0)||
3892 ((currentOutputStreams
& STREAM_MASK_ZSL
) && selfThread
->m_index
== 1)) {
3893 directOutputEnabled
= true;
3895 if (!directOutputEnabled
) {
3896 if (!m_nightCaptureFrameCnt
)
3897 m_requestManager
->NotifyStreamOutput(frame
->rcount
);
3900 selfStreamParms
->bufIndex
= cam_int_dqbuf(currentNode
);
3901 frameTimeStamp
= m_requestManager
->GetTimestamp(m_requestManager
->GetFrameIndex())
3903 ALOGV("DEBUG(%s): streamthread[%d] DQBUF done index(%d) sigcnt(%d)",__FUNCTION__
,
3904 selfThread
->m_index
, selfStreamParms
->bufIndex
, m_scpOutputSignalCnt
);
3906 if (selfStreamParms
->svcBufStatus
[selfStreamParms
->bufIndex
] != ON_DRIVER
)
3907 ALOGV("DBG(%s): DQed buffer status abnormal (%d) ",
3908 __FUNCTION__
, selfStreamParms
->svcBufStatus
[selfStreamParms
->bufIndex
]);
3909 selfStreamParms
->svcBufStatus
[selfStreamParms
->bufIndex
] = ON_HAL
;
3911 for (int i
= 0 ; i
< NUM_MAX_SUBSTREAM
; i
++) {
3912 if (selfThread
->m_attachedSubStreams
[i
].streamId
== -1)
3914 #ifdef ENABLE_FRAME_SYNC
3915 if (currentOutputStreams
& (1<<selfThread
->m_attachedSubStreams
[i
].streamId
)) {
3916 m_runSubStreamFunc(selfThread
, &(selfStreamParms
->svcBuffers
[selfStreamParms
->bufIndex
]),
3917 selfThread
->m_attachedSubStreams
[i
].streamId
, frameTimeStamp
);
3920 if (m_currentOutputStreams
& (1<<selfThread
->m_attachedSubStreams
[i
].streamId
)) {
3921 m_runSubStreamFunc(selfThread
, &(selfStreamParms
->svcBuffers
[selfStreamParms
->bufIndex
]),
3922 selfThread
->m_attachedSubStreams
[i
].streamId
, frameTimeStamp
);
3927 if (m_requestManager
->GetSkipCnt() <= 0) {
3928 #ifdef ENABLE_FRAME_SYNC
3929 if ((currentOutputStreams
& STREAM_MASK_PREVIEW
) && selfThread
->m_index
== 0) {
3930 ALOGV("** Display Preview(frameCnt:%d)", frame
->rcount
);
3931 res
= selfStreamParms
->streamOps
->enqueue_buffer(selfStreamParms
->streamOps
,
3933 &(selfStreamParms
->svcBufHandle
[selfStreamParms
->bufIndex
]));
3935 else if ((currentOutputStreams
& STREAM_MASK_ZSL
) && selfThread
->m_index
== 1) {
3936 ALOGV("** SCC output (frameCnt:%d)", frame
->rcount
);
3937 res
= selfStreamParms
->streamOps
->enqueue_buffer(selfStreamParms
->streamOps
,
3939 &(selfStreamParms
->svcBufHandle
[selfStreamParms
->bufIndex
]));
3942 res
= selfStreamParms
->streamOps
->cancel_buffer(selfStreamParms
->streamOps
,
3943 &(selfStreamParms
->svcBufHandle
[selfStreamParms
->bufIndex
]));
3944 ALOGV("DEBUG(%s): streamthread[%d] cancel_buffer to svc done res(%d)", __FUNCTION__
, selfThread
->m_index
, res
);
3947 if ((m_currentOutputStreams
& STREAM_MASK_PREVIEW
) && selfThread
->m_index
== 0) {
3948 ALOGV("** Display Preview(frameCnt:%d)", m_requestManager
->GetFrameIndex());
3949 res
= selfStreamParms
->streamOps
->enqueue_buffer(selfStreamParms
->streamOps
,
3951 &(selfStreamParms
->svcBufHandle
[selfStreamParms
->bufIndex
]));
3953 else if ((m_currentOutputStreams
& STREAM_MASK_ZSL
) && selfThread
->m_index
== 1) {
3954 ALOGV("** SCC output (frameCnt:%d), last(%d)", m_requestManager
->GetFrameIndex());
3955 res
= selfStreamParms
->streamOps
->enqueue_buffer(selfStreamParms
->streamOps
,
3957 &(selfStreamParms
->svcBufHandle
[selfStreamParms
->bufIndex
]));
3960 ALOGV("DEBUG(%s): streamthread[%d] enqueue_buffer to svc done res(%d)", __FUNCTION__
, selfThread
->m_index
, res
);
3963 res
= selfStreamParms
->streamOps
->cancel_buffer(selfStreamParms
->streamOps
,
3964 &(selfStreamParms
->svcBufHandle
[selfStreamParms
->bufIndex
]));
3965 ALOGV("DEBUG(%s): streamthread[%d] cancel_buffer to svc done res(%d)", __FUNCTION__
, selfThread
->m_index
, res
);
3967 #ifdef ENABLE_FRAME_SYNC
3968 if (directOutputEnabled
) {
3969 if (!m_nightCaptureFrameCnt
)
3970 m_requestManager
->NotifyStreamOutput(frame
->rcount
);
3974 selfStreamParms
->svcBufStatus
[selfStreamParms
->bufIndex
] = ON_SERVICE
;
3975 selfStreamParms
->numSvcBufsInHal
--;
3978 selfStreamParms
->svcBufStatus
[selfStreamParms
->bufIndex
] = ON_HAL
;
3984 while ((selfStreamParms
->numSvcBufsInHal
- (selfStreamParms
->numSvcBuffers
- NUM_SCP_BUFFERS
))
3985 < selfStreamParms
->minUndequedBuffer
) {
3986 res
= selfStreamParms
->streamOps
->dequeue_buffer(selfStreamParms
->streamOps
, &buf
);
3987 if (res
!= NO_ERROR
|| buf
== NULL
) {
3988 ALOGV("DEBUG(%s): streamthread[%d] dequeue_buffer fail res(%d) numInHal(%d)",__FUNCTION__
, selfThread
->m_index
, res
, selfStreamParms
->numSvcBufsInHal
);
3991 selfStreamParms
->numSvcBufsInHal
++;
3992 ALOGV("DEBUG(%s): streamthread[%d] got buf(%x) numInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__
,
3993 selfThread
->m_index
, (uint32_t)(*buf
), selfStreamParms
->numSvcBufsInHal
,
3994 ((native_handle_t
*)(*buf
))->version
, ((native_handle_t
*)(*buf
))->numFds
, ((native_handle_t
*)(*buf
))->numInts
);
3995 const private_handle_t
*priv_handle
= reinterpret_cast<const private_handle_t
*>(*buf
);
3998 int checkingIndex
= 0;
3999 for (checkingIndex
= 0; checkingIndex
< selfStreamParms
->numSvcBuffers
; checkingIndex
++) {
4000 if (priv_handle
->fd
== selfStreamParms
->svcBuffers
[checkingIndex
].fd
.extFd
[0] ) {
4006 selfStreamParms
->bufIndex
= checkingIndex
;
4007 if (selfStreamParms
->bufIndex
< selfStreamParms
->numHwBuffers
) {
4008 uint32_t plane_index
= 0;
4009 ExynosBuffer
* currentBuf
= &(selfStreamParms
->svcBuffers
[selfStreamParms
->bufIndex
]);
4010 struct v4l2_buffer v4l2_buf
;
4011 struct v4l2_plane planes
[VIDEO_MAX_PLANES
];
4013 v4l2_buf
.m
.planes
= planes
;
4014 v4l2_buf
.type
= currentNode
->type
;
4015 v4l2_buf
.memory
= currentNode
->memory
;
4016 v4l2_buf
.index
= selfStreamParms
->bufIndex
;
4017 v4l2_buf
.length
= currentNode
->planes
;
4019 v4l2_buf
.m
.planes
[0].m
.fd
= priv_handle
->fd
;
4020 v4l2_buf
.m
.planes
[2].m
.fd
= priv_handle
->fd1
;
4021 v4l2_buf
.m
.planes
[1].m
.fd
= priv_handle
->fd2
;
4022 for (plane_index
=0 ; plane_index
< v4l2_buf
.length
; plane_index
++) {
4023 v4l2_buf
.m
.planes
[plane_index
].length
= currentBuf
->size
.extS
[plane_index
];
4025 #ifdef ENABLE_FRAME_SYNC
4026 /* add plane for metadata*/
4027 v4l2_buf
.length
+= selfStreamParms
->metaPlanes
;
4028 v4l2_buf
.m
.planes
[v4l2_buf
.length
-1].m
.fd
= selfStreamParms
->metaBuffers
[selfStreamParms
->bufIndex
].fd
.extFd
[0];
4029 v4l2_buf
.m
.planes
[v4l2_buf
.length
-1].length
= selfStreamParms
->metaBuffers
[selfStreamParms
->bufIndex
].size
.extS
[0];
4031 if (exynos_v4l2_qbuf(currentNode
->fd
, &v4l2_buf
) < 0) {
4032 ALOGE("ERR(%s): streamthread[%d] exynos_v4l2_qbuf() fail",
4033 __FUNCTION__
, selfThread
->m_index
);
4036 selfStreamParms
->svcBufStatus
[selfStreamParms
->bufIndex
] = ON_DRIVER
;
4037 ALOGV("DEBUG(%s): streamthread[%d] QBUF done index(%d)",
4038 __FUNCTION__
, selfThread
->m_index
, selfStreamParms
->bufIndex
);
4042 ALOGV("(%s): streamthread[%d] END SIGNAL_STREAM_DATA_COMING", __FUNCTION__
,selfThread
->m_index
);
4047 void ExynosCameraHWInterface2::m_streamFunc_indirect(SignalDrivenThread
*self
)
4049 uint32_t currentSignal
= self
->GetProcessingSignal();
4050 StreamThread
* selfThread
= ((StreamThread
*)self
);
4051 stream_parameters_t
*selfStreamParms
= &(selfThread
->m_parameters
);
4052 node_info_t
*currentNode
= selfStreamParms
->node
;
4055 if (currentSignal
& SIGNAL_THREAD_RELEASE
) {
4056 CAM_LOGV("(%s): [%d] START SIGNAL_THREAD_RELEASE", __FUNCTION__
, selfThread
->m_index
);
4058 if (selfThread
->m_isBufferInit
) {
4059 if (currentNode
->fd
== m_camera_info
.capture
.fd
) {
4060 if (m_camera_info
.capture
.status
== true) {
4061 ALOGV("DEBUG(%s): calling streamthread[%d] streamoff (fd:%d)", __FUNCTION__
,
4062 selfThread
->m_index
, currentNode
->fd
);
4063 if (cam_int_streamoff(currentNode
) < 0 ){
4064 ALOGE("ERR(%s): stream off fail", __FUNCTION__
);
4066 m_camera_info
.capture
.status
= false;
4070 ALOGV("DEBUG(%s): calling streamthread[%d] streamoff (fd:%d)", __FUNCTION__
,
4071 selfThread
->m_index
, currentNode
->fd
);
4072 if (cam_int_streamoff(currentNode
) < 0 ){
4073 ALOGE("ERR(%s): stream off fail", __FUNCTION__
);
4076 ALOGV("DEBUG(%s): calling streamthread[%d] streamoff done", __FUNCTION__
, selfThread
->m_index
);
4077 ALOGV("DEBUG(%s): calling streamthread[%d] reqbuf 0 (fd:%d)", __FUNCTION__
,
4078 selfThread
->m_index
, currentNode
->fd
);
4079 currentNode
->buffers
= 0;
4080 cam_int_reqbufs(currentNode
);
4081 ALOGV("DEBUG(%s): calling streamthread[%d] reqbuf 0 DONE(fd:%d)", __FUNCTION__
,
4082 selfThread
->m_index
, currentNode
->fd
);
4085 selfThread
->m_isBufferInit
= false;
4086 selfThread
->m_releasing
= false;
4087 selfThread
->m_activated
= false;
4088 ALOGV("(%s): [%d] END SIGNAL_THREAD_RELEASE", __FUNCTION__
, selfThread
->m_index
);
4092 if (currentSignal
& SIGNAL_STREAM_DATA_COMING
) {
4093 #ifdef ENABLE_FRAME_SYNC
4094 camera2_stream
*frame
;
4095 uint8_t currentOutputStreams
;
4097 nsecs_t frameTimeStamp
;
4099 ALOGV("DEBUG(%s): streamthread[%d] processing SIGNAL_STREAM_DATA_COMING",
4100 __FUNCTION__
,selfThread
->m_index
);
4102 m_streamBufferInit(self
);
4104 ALOGV("DEBUG(%s): streamthread[%d] DQBUF START", __FUNCTION__
, selfThread
->m_index
);
4105 selfStreamParms
->bufIndex
= cam_int_dqbuf(currentNode
);
4106 ALOGV("DEBUG(%s): streamthread[%d] DQBUF done index(%d)",__FUNCTION__
,
4107 selfThread
->m_index
, selfStreamParms
->bufIndex
);
4109 #ifdef ENABLE_FRAME_SYNC
4110 frame
= (struct camera2_stream
*)(currentNode
->buffer
[selfStreamParms
->bufIndex
].virt
.extP
[selfStreamParms
->planes
-1]);
4111 frameTimeStamp
= m_requestManager
->GetTimestampByFrameCnt(frame
->rcount
);
4112 currentOutputStreams
= m_requestManager
->GetOutputStreamByFrameCnt(frame
->rcount
);
4113 ALOGV("frame count(SCC) : %d outputStream(%x)", frame
->rcount
, currentOutputStreams
);
4115 frameTimeStamp
= m_requestManager
->GetTimestamp(m_requestManager
->GetFrameIndex());
4118 for (int i
= 0 ; i
< NUM_MAX_SUBSTREAM
; i
++) {
4119 if (selfThread
->m_attachedSubStreams
[i
].streamId
== -1)
4121 #ifdef ENABLE_FRAME_SYNC
4122 if (currentOutputStreams
& (1<<selfThread
->m_attachedSubStreams
[i
].streamId
)) {
4123 m_requestManager
->NotifyStreamOutput(frame
->rcount
);
4124 m_runSubStreamFunc(selfThread
, &(currentNode
->buffer
[selfStreamParms
->bufIndex
]),
4125 selfThread
->m_attachedSubStreams
[i
].streamId
, frameTimeStamp
);
4128 if (m_currentOutputStreams
& (1<<selfThread
->m_attachedSubStreams
[i
].streamId
)) {
4129 m_runSubStreamFunc(selfThread
, &(currentNode
->buffer
[selfStreamParms
->bufIndex
]),
4130 selfThread
->m_attachedSubStreams
[i
].streamId
, frameTimeStamp
);
4134 cam_int_qbuf(currentNode
, selfStreamParms
->bufIndex
);
4135 ALOGV("DEBUG(%s): streamthread[%d] QBUF DONE", __FUNCTION__
, selfThread
->m_index
);
4139 ALOGV("DEBUG(%s): streamthread[%d] processing SIGNAL_STREAM_DATA_COMING DONE",
4140 __FUNCTION__
, selfThread
->m_index
);
4147 void ExynosCameraHWInterface2::m_streamThreadFunc(SignalDrivenThread
* self
)
4149 uint32_t currentSignal
= self
->GetProcessingSignal();
4150 StreamThread
* selfThread
= ((StreamThread
*)self
);
4151 stream_parameters_t
*selfStreamParms
= &(selfThread
->m_parameters
);
4152 node_info_t
*currentNode
= selfStreamParms
->node
;
4154 ALOGV("DEBUG(%s): m_streamThreadFunc[%d] (%x)", __FUNCTION__
, selfThread
->m_index
, currentSignal
);
4156 // Do something in Child thread handler
4157 // Should change function to class that inherited StreamThread class to support dynamic stream allocation
4158 if (selfThread
->streamType
== STREAM_TYPE_DIRECT
) {
4159 m_streamFunc_direct(self
);
4160 } else if (selfThread
->streamType
== STREAM_TYPE_INDIRECT
) {
4161 m_streamFunc_indirect(self
);
4166 int ExynosCameraHWInterface2::m_jpegCreator(StreamThread
*selfThread
, ExynosBuffer
*srcImageBuf
, nsecs_t frameTimeStamp
)
4168 stream_parameters_t
*selfStreamParms
= &(selfThread
->m_parameters
);
4169 substream_parameters_t
*subParms
= &m_subStreams
[STREAM_ID_JPEG
];
4171 ExynosRect jpegRect
;
4173 int srcW
, srcH
, srcCropX
, srcCropY
;
4174 int pictureW
, pictureH
, pictureFramesize
= 0;
4176 int cropX
, cropY
, cropW
, cropH
= 0;
4177 ExynosBuffer resizeBufInfo
;
4178 ExynosRect m_jpegPictureRect
;
4179 buffer_handle_t
* buf
= NULL
;
4180 camera2_jpeg_blob
* jpegBlob
= NULL
;
4181 int jpegBufSize
= 0;
4183 ALOGV("DEBUG(%s): index(%d)",__FUNCTION__
, subParms
->svcBufIndex
);
4184 for (int i
= 0 ; subParms
->numSvcBuffers
; i
++) {
4185 if (subParms
->svcBufStatus
[subParms
->svcBufIndex
] == ON_HAL
) {
4189 subParms
->svcBufIndex
++;
4190 if (subParms
->svcBufIndex
>= subParms
->numSvcBuffers
)
4191 subParms
->svcBufIndex
= 0;
4194 ALOGE("(%s): cannot find free svc buffer", __FUNCTION__
);
4195 subParms
->svcBufIndex
++;
4200 Mutex::Autolock
lock(m_jpegEncoderLock
);
4201 m_jpegEncodingCount
++;
4204 m_getRatioSize(selfStreamParms
->width
, selfStreamParms
->height
,
4205 m_streamThreads
[0]->m_parameters
.width
, m_streamThreads
[0]->m_parameters
.height
,
4206 &srcCropX
, &srcCropY
,
4210 m_jpegPictureRect
.w
= subParms
->width
;
4211 m_jpegPictureRect
.h
= subParms
->height
;
4213 ALOGV("DEBUG(%s):w = %d, h = %d, w = %d, h = %d",
4214 __FUNCTION__
, selfStreamParms
->width
, selfStreamParms
->height
,
4215 m_jpegPictureRect
.w
, m_jpegPictureRect
.h
);
4217 m_getRatioSize(srcW
, srcH
,
4218 m_jpegPictureRect
.w
, m_jpegPictureRect
.h
,
4220 &pictureW
, &pictureH
,
4222 pictureFormat
= V4L2_PIX_FMT_YUYV
;
4223 pictureFramesize
= FRAME_SIZE(V4L2_PIX_2_HAL_PIXEL_FORMAT(pictureFormat
), pictureW
, pictureH
);
4225 if (m_exynosPictureCSC
) {
4226 float zoom_w
= 0, zoom_h
= 0;
4227 if (m_zoomRatio
== 0)
4230 if (m_jpegPictureRect
.w
>= m_jpegPictureRect
.h
) {
4231 zoom_w
= pictureW
/ m_zoomRatio
;
4232 zoom_h
= zoom_w
* m_jpegPictureRect
.h
/ m_jpegPictureRect
.w
;
4234 zoom_h
= pictureH
/ m_zoomRatio
;
4235 zoom_w
= zoom_h
* m_jpegPictureRect
.w
/ m_jpegPictureRect
.h
;
4237 cropX
= (srcW
- zoom_w
) / 2;
4238 cropY
= (srcH
- zoom_h
) / 2;
4242 ALOGV("DEBUG(%s):cropX = %d, cropY = %d, cropW = %d, cropH = %d",
4243 __FUNCTION__
, cropX
, cropY
, cropW
, cropH
);
4245 csc_set_src_format(m_exynosPictureCSC
,
4246 ALIGN(srcW
, 16), ALIGN(srcH
, 16),
4247 cropX
, cropY
, cropW
, cropH
,
4248 V4L2_PIX_2_HAL_PIXEL_FORMAT(pictureFormat
),
4251 csc_set_dst_format(m_exynosPictureCSC
,
4252 m_jpegPictureRect
.w
, m_jpegPictureRect
.h
,
4253 0, 0, m_jpegPictureRect
.w
, m_jpegPictureRect
.h
,
4254 V4L2_PIX_2_HAL_PIXEL_FORMAT(V4L2_PIX_FMT_NV16
),
4256 for (int i
= 0 ; i
< 3 ; i
++)
4257 ALOGV("DEBUG(%s): m_pictureBuf.fd.extFd[%d]=%d ",
4258 __FUNCTION__
, i
, srcImageBuf
->fd
.extFd
[i
]);
4259 csc_set_src_buffer(m_exynosPictureCSC
,
4260 (void **)&srcImageBuf
->fd
.fd
);
4262 csc_set_dst_buffer(m_exynosPictureCSC
,
4263 (void **)&m_resizeBuf
.fd
.fd
);
4264 for (int i
= 0 ; i
< 3 ; i
++)
4265 ALOGV("DEBUG(%s): m_resizeBuf.virt.extP[%d]=%d m_resizeBuf.size.extS[%d]=%d",
4266 __FUNCTION__
, i
, m_resizeBuf
.fd
.extFd
[i
], i
, m_resizeBuf
.size
.extS
[i
]);
4268 if (csc_convert(m_exynosPictureCSC
) != 0)
4269 ALOGE("ERR(%s): csc_convert() fail", __FUNCTION__
);
4273 ALOGE("ERR(%s): m_exynosPictureCSC == NULL", __FUNCTION__
);
4276 resizeBufInfo
= m_resizeBuf
;
4278 m_getAlignedYUVSize(V4L2_PIX_FMT_NV16
, m_jpegPictureRect
.w
, m_jpegPictureRect
.h
, &m_resizeBuf
);
4280 for (int i
= 1; i
< 3; i
++) {
4281 if (m_resizeBuf
.size
.extS
[i
] != 0)
4282 m_resizeBuf
.fd
.extFd
[i
] = m_resizeBuf
.fd
.extFd
[i
-1] + m_resizeBuf
.size
.extS
[i
-1];
4284 ALOGV("(%s): m_resizeBuf.size.extS[%d] = %d", __FUNCTION__
, i
, m_resizeBuf
.size
.extS
[i
]);
4287 jpegRect
.w
= m_jpegPictureRect
.w
;
4288 jpegRect
.h
= m_jpegPictureRect
.h
;
4289 jpegRect
.colorFormat
= V4L2_PIX_FMT_NV16
;
4291 for (int j
= 0 ; j
< 3 ; j
++)
4292 ALOGV("DEBUG(%s): dest buf node fd.extFd[%d]=%d size=%d virt=%x ",
4293 __FUNCTION__
, j
, subParms
->svcBuffers
[subParms
->svcBufIndex
].fd
.extFd
[j
],
4294 (unsigned int)subParms
->svcBuffers
[subParms
->svcBufIndex
].size
.extS
[j
],
4295 (unsigned int)subParms
->svcBuffers
[subParms
->svcBufIndex
].virt
.extP
[j
]);
4297 jpegBufSize
= subParms
->svcBuffers
[subParms
->svcBufIndex
].size
.extS
[0];
4298 if (yuv2Jpeg(&m_resizeBuf
, &subParms
->svcBuffers
[subParms
->svcBufIndex
], &jpegRect
) == false) {
4299 ALOGE("ERR(%s):yuv2Jpeg() fail", __FUNCTION__
);
4301 m_resizeBuf
= resizeBufInfo
;
4303 int jpegSize
= subParms
->svcBuffers
[subParms
->svcBufIndex
].size
.s
;
4304 ALOGD("(%s): (%d x %d) jpegbuf size(%d) encoded size(%d)", __FUNCTION__
,
4305 m_jpegPictureRect
.w
, m_jpegPictureRect
.h
, jpegBufSize
, jpegSize
);
4306 char * jpegBuffer
= (char*)(subParms
->svcBuffers
[subParms
->svcBufIndex
].virt
.extP
[0]);
4307 jpegBlob
= (camera2_jpeg_blob
*)(&jpegBuffer
[jpegBufSize
- sizeof(camera2_jpeg_blob
)]);
4309 if (jpegBuffer
[jpegSize
-1] == 0)
4311 jpegBlob
->jpeg_size
= jpegSize
;
4312 jpegBlob
->jpeg_blob_id
= CAMERA2_JPEG_BLOB_ID
;
4314 subParms
->svcBuffers
[subParms
->svcBufIndex
].size
.extS
[0] = jpegBufSize
;
4315 res
= subParms
->streamOps
->enqueue_buffer(subParms
->streamOps
, frameTimeStamp
, &(subParms
->svcBufHandle
[subParms
->svcBufIndex
]));
4317 ALOGV("DEBUG(%s): streamthread[%d] enqueue_buffer index(%d) to svc done res(%d)",
4318 __FUNCTION__
, selfThread
->m_index
, subParms
->svcBufIndex
, res
);
4320 subParms
->svcBufStatus
[subParms
->svcBufIndex
] = ON_SERVICE
;
4321 subParms
->numSvcBufsInHal
--;
4324 subParms
->svcBufStatus
[subParms
->svcBufIndex
] = ON_HAL
;
4327 while (subParms
->numSvcBufsInHal
<= subParms
->minUndequedBuffer
)
4330 int checkingIndex
= 0;
4332 ALOGV("DEBUG(%s): jpeg currentBuf#(%d)", __FUNCTION__
, subParms
->numSvcBufsInHal
);
4334 res
= subParms
->streamOps
->dequeue_buffer(subParms
->streamOps
, &buf
);
4335 if (res
!= NO_ERROR
|| buf
== NULL
) {
4336 ALOGV("DEBUG(%s): jpeg stream(%d) dequeue_buffer fail res(%d)",__FUNCTION__
, selfThread
->m_index
, res
);
4339 const private_handle_t
*priv_handle
= reinterpret_cast<const private_handle_t
*>(*buf
);
4340 subParms
->numSvcBufsInHal
++;
4341 ALOGV("DEBUG(%s): jpeg got buf(%x) numBufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__
, (uint32_t)(*buf
),
4342 subParms
->numSvcBufsInHal
, ((native_handle_t
*)(*buf
))->version
, ((native_handle_t
*)(*buf
))->numFds
, ((native_handle_t
*)(*buf
))->numInts
);
4345 for (checkingIndex
= 0; checkingIndex
< subParms
->numSvcBuffers
; checkingIndex
++) {
4346 if (priv_handle
->fd
== subParms
->svcBuffers
[checkingIndex
].fd
.extFd
[0] ) {
4351 ALOGV("DEBUG(%s): jpeg dequeueed_buffer found index(%d)", __FUNCTION__
, found
);
4357 subParms
->svcBufIndex
= checkingIndex
;
4358 if (subParms
->svcBufStatus
[subParms
->svcBufIndex
] == ON_SERVICE
) {
4359 subParms
->svcBufStatus
[subParms
->svcBufIndex
] = ON_HAL
;
4362 ALOGV("DEBUG(%s): jpeg bufstatus abnormal [%d] status = %d", __FUNCTION__
,
4363 subParms
->svcBufIndex
, subParms
->svcBufStatus
[subParms
->svcBufIndex
]);
4367 Mutex::Autolock
lock(m_jpegEncoderLock
);
4368 m_jpegEncodingCount
--;
4373 int ExynosCameraHWInterface2::m_recordCreator(StreamThread
*selfThread
, ExynosBuffer
*srcImageBuf
, nsecs_t frameTimeStamp
)
4375 stream_parameters_t
*selfStreamParms
= &(selfThread
->m_parameters
);
4376 substream_parameters_t
*subParms
= &m_subStreams
[STREAM_ID_RECORD
];
4378 ExynosRect jpegRect
;
4380 int cropX
, cropY
, cropW
, cropH
= 0;
4381 buffer_handle_t
* buf
= NULL
;
4383 ALOGV("DEBUG(%s): index(%d)",__FUNCTION__
, subParms
->svcBufIndex
);
4384 for (int i
= 0 ; subParms
->numSvcBuffers
; i
++) {
4385 if (subParms
->svcBufStatus
[subParms
->svcBufIndex
] == ON_HAL
) {
4389 subParms
->svcBufIndex
++;
4390 if (subParms
->svcBufIndex
>= subParms
->numSvcBuffers
)
4391 subParms
->svcBufIndex
= 0;
4394 ALOGE("(%s): cannot find free svc buffer", __FUNCTION__
);
4395 subParms
->svcBufIndex
++;
4399 if (m_exynosVideoCSC
) {
4400 int videoW
= subParms
->width
, videoH
= subParms
->height
;
4401 int cropX
, cropY
, cropW
, cropH
= 0;
4402 int previewW
= selfStreamParms
->width
, previewH
= selfStreamParms
->height
;
4403 m_getRatioSize(previewW
, previewH
,
4409 ALOGV("DEBUG(%s):cropX = %d, cropY = %d, cropW = %d, cropH = %d",
4410 __FUNCTION__
, cropX
, cropY
, cropW
, cropH
);
4412 csc_set_src_format(m_exynosVideoCSC
,
4413 ALIGN(previewW
, 32), previewH
,
4414 cropX
, cropY
, cropW
, cropH
,
4415 selfStreamParms
->format
,
4418 csc_set_dst_format(m_exynosVideoCSC
,
4420 0, 0, videoW
, videoH
,
4424 csc_set_src_buffer(m_exynosVideoCSC
,
4425 (void **)&srcImageBuf
->fd
.fd
);
4427 csc_set_dst_buffer(m_exynosVideoCSC
,
4428 (void **)(&(subParms
->svcBuffers
[subParms
->svcBufIndex
].fd
.fd
)));
4430 if (csc_convert(m_exynosVideoCSC
) != 0) {
4431 ALOGE("ERR(%s):csc_convert() fail", __FUNCTION__
);
4434 ALOGV("(%s):csc_convert() SUCCESS", __FUNCTION__
);
4438 ALOGE("ERR(%s):m_exynosVideoCSC == NULL", __FUNCTION__
);
4441 res
= subParms
->streamOps
->enqueue_buffer(subParms
->streamOps
, frameTimeStamp
, &(subParms
->svcBufHandle
[subParms
->svcBufIndex
]));
4443 ALOGV("DEBUG(%s): streamthread[%d] enqueue_buffer index(%d) to svc done res(%d)",
4444 __FUNCTION__
, selfThread
->m_index
, subParms
->svcBufIndex
, res
);
4446 subParms
->svcBufStatus
[subParms
->svcBufIndex
] = ON_SERVICE
;
4447 subParms
->numSvcBufsInHal
--;
4450 subParms
->svcBufStatus
[subParms
->svcBufIndex
] = ON_HAL
;
4453 while (subParms
->numSvcBufsInHal
<= subParms
->minUndequedBuffer
)
4456 int checkingIndex
= 0;
4458 ALOGV("DEBUG(%s): record currentBuf#(%d)", __FUNCTION__
, subParms
->numSvcBufsInHal
);
4460 res
= subParms
->streamOps
->dequeue_buffer(subParms
->streamOps
, &buf
);
4461 if (res
!= NO_ERROR
|| buf
== NULL
) {
4462 ALOGV("DEBUG(%s): record stream(%d) dequeue_buffer fail res(%d)",__FUNCTION__
, selfThread
->m_index
, res
);
4465 const private_handle_t
*priv_handle
= reinterpret_cast<const private_handle_t
*>(*buf
);
4466 subParms
->numSvcBufsInHal
++;
4467 ALOGV("DEBUG(%s): record got buf(%x) numBufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__
, (uint32_t)(*buf
),
4468 subParms
->numSvcBufsInHal
, ((native_handle_t
*)(*buf
))->version
, ((native_handle_t
*)(*buf
))->numFds
, ((native_handle_t
*)(*buf
))->numInts
);
4470 for (checkingIndex
= 0; checkingIndex
< subParms
->numSvcBuffers
; checkingIndex
++) {
4471 if (priv_handle
->fd
== subParms
->svcBuffers
[checkingIndex
].fd
.extFd
[0] ) {
4476 ALOGV("DEBUG(%s): record dequeueed_buffer found(%d) index = %d", __FUNCTION__
, found
, checkingIndex
);
4482 subParms
->svcBufIndex
= checkingIndex
;
4483 if (subParms
->svcBufStatus
[subParms
->svcBufIndex
] == ON_SERVICE
) {
4484 subParms
->svcBufStatus
[subParms
->svcBufIndex
] = ON_HAL
;
4487 ALOGV("DEBUG(%s): record bufstatus abnormal [%d] status = %d", __FUNCTION__
,
4488 subParms
->svcBufIndex
, subParms
->svcBufStatus
[subParms
->svcBufIndex
]);
4494 int ExynosCameraHWInterface2::m_prvcbCreator(StreamThread
*selfThread
, ExynosBuffer
*srcImageBuf
, nsecs_t frameTimeStamp
)
4496 stream_parameters_t
*selfStreamParms
= &(selfThread
->m_parameters
);
4497 substream_parameters_t
*subParms
= &m_subStreams
[STREAM_ID_PRVCB
];
4500 int cropX
, cropY
, cropW
, cropH
= 0;
4501 buffer_handle_t
* buf
= NULL
;
4503 ALOGV("DEBUG(%s): index(%d)",__FUNCTION__
, subParms
->svcBufIndex
);
4504 for (int i
= 0 ; subParms
->numSvcBuffers
; i
++) {
4505 if (subParms
->svcBufStatus
[subParms
->svcBufIndex
] == ON_HAL
) {
4509 subParms
->svcBufIndex
++;
4510 if (subParms
->svcBufIndex
>= subParms
->numSvcBuffers
)
4511 subParms
->svcBufIndex
= 0;
4514 ALOGE("(%s): cannot find free svc buffer", __FUNCTION__
);
4515 subParms
->svcBufIndex
++;
4519 if (subParms
->format
== HAL_PIXEL_FORMAT_YCrCb_420_SP
) {
4520 if (m_exynosVideoCSC
) {
4521 int previewCbW
= subParms
->width
, previewCbH
= subParms
->height
;
4522 int cropX
, cropY
, cropW
, cropH
= 0;
4523 int previewW
= selfStreamParms
->width
, previewH
= selfStreamParms
->height
;
4524 m_getRatioSize(previewW
, previewH
,
4525 previewCbW
, previewCbH
,
4530 ALOGV("DEBUG(%s):cropX = %d, cropY = %d, cropW = %d, cropH = %d",
4531 __FUNCTION__
, cropX
, cropY
, cropW
, cropH
);
4532 csc_set_src_format(m_exynosVideoCSC
,
4533 ALIGN(previewW
, 32), previewH
,
4534 cropX
, cropY
, cropW
, cropH
,
4535 selfStreamParms
->format
,
4538 csc_set_dst_format(m_exynosVideoCSC
,
4539 previewCbW
, previewCbH
,
4540 0, 0, previewCbW
, previewCbH
,
4541 subParms
->internalFormat
,
4544 csc_set_src_buffer(m_exynosVideoCSC
,
4545 (void **)&srcImageBuf
->fd
.fd
);
4547 csc_set_dst_buffer(m_exynosVideoCSC
,
4548 (void **)(&(m_previewCbBuf
.fd
.fd
)));
4550 if (csc_convert(m_exynosVideoCSC
) != 0) {
4551 ALOGE("ERR(%s):previewcb csc_convert() fail", __FUNCTION__
);
4554 ALOGV("(%s):previewcb csc_convert() SUCCESS", __FUNCTION__
);
4556 if (previewCbW
== ALIGN(previewCbW
, 16)) {
4557 memcpy(subParms
->svcBuffers
[subParms
->svcBufIndex
].virt
.extP
[0],
4558 m_previewCbBuf
.virt
.extP
[0], previewCbW
* previewCbH
);
4559 memcpy(subParms
->svcBuffers
[subParms
->svcBufIndex
].virt
.extP
[0] + previewCbW
* previewCbH
,
4560 m_previewCbBuf
.virt
.extP
[1], previewCbW
* previewCbH
/ 2 );
4563 // TODO : copy line by line ?
4567 ALOGE("ERR(%s):m_exynosVideoCSC == NULL", __FUNCTION__
);
4570 else if (subParms
->format
== HAL_PIXEL_FORMAT_YV12
) {
4571 int previewCbW
= subParms
->width
, previewCbH
= subParms
->height
;
4572 int stride
= ALIGN(previewCbW
, 16);
4573 int uv_stride
= ALIGN(previewCbW
/2, 16);
4574 int c_stride
= ALIGN(stride
/ 2, 16);
4576 if (previewCbW
== ALIGN(previewCbW
, 32)) {
4577 memcpy(subParms
->svcBuffers
[subParms
->svcBufIndex
].virt
.extP
[0],
4578 srcImageBuf
->virt
.extP
[0], stride
* previewCbH
);
4579 memcpy(subParms
->svcBuffers
[subParms
->svcBufIndex
].virt
.extP
[0] + stride
* previewCbH
,
4580 srcImageBuf
->virt
.extP
[1], c_stride
* previewCbH
/ 2 );
4581 memcpy(subParms
->svcBuffers
[subParms
->svcBufIndex
].virt
.extP
[0] + (stride
* previewCbH
) + (c_stride
* previewCbH
/ 2),
4582 srcImageBuf
->virt
.extP
[2], c_stride
* previewCbH
/ 2 );
4584 char * dstAddr
= (char *)(subParms
->svcBuffers
[subParms
->svcBufIndex
].virt
.extP
[0]);
4585 char * srcAddr
= (char *)(srcImageBuf
->virt
.extP
[0]);
4586 for (int i
= 0 ; i
< previewCbH
; i
++) {
4587 memcpy(dstAddr
, srcAddr
, previewCbW
);
4589 srcAddr
+= ALIGN(stride
, 32);
4591 dstAddr
= (char *)(subParms
->svcBuffers
[subParms
->svcBufIndex
].virt
.extP
[0] + stride
* previewCbH
);
4592 srcAddr
= (char *)(srcImageBuf
->virt
.extP
[1]);
4593 for (int i
= 0 ; i
< previewCbH
/2 ; i
++) {
4594 memcpy(dstAddr
, srcAddr
, previewCbW
/2);
4595 dstAddr
+= c_stride
;
4596 srcAddr
+= uv_stride
;
4598 srcAddr
= (char *)(srcImageBuf
->virt
.extP
[2]);
4599 for (int i
= 0 ; i
< previewCbH
/2 ; i
++) {
4600 memcpy(dstAddr
, srcAddr
, previewCbW
/2);
4601 dstAddr
+= c_stride
;
4602 srcAddr
+= uv_stride
;
4606 res
= subParms
->streamOps
->enqueue_buffer(subParms
->streamOps
, frameTimeStamp
, &(subParms
->svcBufHandle
[subParms
->svcBufIndex
]));
4608 ALOGV("DEBUG(%s): streamthread[%d] enqueue_buffer index(%d) to svc done res(%d)",
4609 __FUNCTION__
, selfThread
->m_index
, subParms
->svcBufIndex
, res
);
4611 subParms
->svcBufStatus
[subParms
->svcBufIndex
] = ON_SERVICE
;
4612 subParms
->numSvcBufsInHal
--;
4615 subParms
->svcBufStatus
[subParms
->svcBufIndex
] = ON_HAL
;
4618 while (subParms
->numSvcBufsInHal
<= subParms
->minUndequedBuffer
)
4621 int checkingIndex
= 0;
4623 ALOGV("DEBUG(%s): prvcb currentBuf#(%d)", __FUNCTION__
, subParms
->numSvcBufsInHal
);
4625 res
= subParms
->streamOps
->dequeue_buffer(subParms
->streamOps
, &buf
);
4626 if (res
!= NO_ERROR
|| buf
== NULL
) {
4627 ALOGV("DEBUG(%s): prvcb stream(%d) dequeue_buffer fail res(%d)",__FUNCTION__
, selfThread
->m_index
, res
);
4630 const private_handle_t
*priv_handle
= reinterpret_cast<const private_handle_t
*>(*buf
);
4631 subParms
->numSvcBufsInHal
++;
4632 ALOGV("DEBUG(%s): prvcb got buf(%x) numBufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__
, (uint32_t)(*buf
),
4633 subParms
->numSvcBufsInHal
, ((native_handle_t
*)(*buf
))->version
, ((native_handle_t
*)(*buf
))->numFds
, ((native_handle_t
*)(*buf
))->numInts
);
4636 for (checkingIndex
= 0; checkingIndex
< subParms
->numSvcBuffers
; checkingIndex
++) {
4637 if (priv_handle
->fd
== subParms
->svcBuffers
[checkingIndex
].fd
.extFd
[0] ) {
4642 ALOGV("DEBUG(%s): prvcb dequeueed_buffer found(%d) index = %d", __FUNCTION__
, found
, checkingIndex
);
4648 subParms
->svcBufIndex
= checkingIndex
;
4649 if (subParms
->svcBufStatus
[subParms
->svcBufIndex
] == ON_SERVICE
) {
4650 subParms
->svcBufStatus
[subParms
->svcBufIndex
] = ON_HAL
;
4653 ALOGV("DEBUG(%s): prvcb bufstatus abnormal [%d] status = %d", __FUNCTION__
,
4654 subParms
->svcBufIndex
, subParms
->svcBufStatus
[subParms
->svcBufIndex
]);
4660 bool ExynosCameraHWInterface2::m_checkThumbnailSize(int w
, int h
)
4662 int sizeOfSupportList
;
4665 if(this->getCameraId() == 0) {
4666 sizeOfSupportList
= sizeof(SUPPORT_THUMBNAIL_REAR_SIZE
) / (sizeof(int)*2);
4668 for(int i
= 0; i
< sizeOfSupportList
; i
++) {
4669 if((SUPPORT_THUMBNAIL_REAR_SIZE
[i
][0] == w
) &&(SUPPORT_THUMBNAIL_REAR_SIZE
[i
][1] == h
))
4675 sizeOfSupportList
= sizeof(SUPPORT_THUMBNAIL_FRONT_SIZE
) / (sizeof(int)*2);
4677 for(int i
= 0; i
< sizeOfSupportList
; i
++) {
4678 if((SUPPORT_THUMBNAIL_FRONT_SIZE
[i
][0] == w
) &&(SUPPORT_THUMBNAIL_FRONT_SIZE
[i
][1] == h
))
4685 bool ExynosCameraHWInterface2::yuv2Jpeg(ExynosBuffer
*yuvBuf
,
4686 ExynosBuffer
*jpegBuf
,
4689 unsigned char *addr
;
4691 ExynosJpegEncoderForCamera jpegEnc
;
4695 unsigned int *yuvSize
= yuvBuf
->size
.extS
;
4697 if (jpegEnc
.create()) {
4698 ALOGE("ERR(%s):jpegEnc.create() fail", __FUNCTION__
);
4699 goto jpeg_encode_done
;
4702 if (jpegEnc
.setQuality(m_jpegMetadata
.shot
.ctl
.jpeg
.quality
)) {
4703 ALOGE("ERR(%s):jpegEnc.setQuality() fail", __FUNCTION__
);
4704 goto jpeg_encode_done
;
4707 if (jpegEnc
.setSize(rect
->w
, rect
->h
)) {
4708 ALOGE("ERR(%s):jpegEnc.setSize() fail", __FUNCTION__
);
4709 goto jpeg_encode_done
;
4711 ALOGV("%s : width = %d , height = %d\n", __FUNCTION__
, rect
->w
, rect
->h
);
4713 if (jpegEnc
.setColorFormat(rect
->colorFormat
)) {
4714 ALOGE("ERR(%s):jpegEnc.setColorFormat() fail", __FUNCTION__
);
4715 goto jpeg_encode_done
;
4718 if (jpegEnc
.setJpegFormat(V4L2_PIX_FMT_JPEG_422
)) {
4719 ALOGE("ERR(%s):jpegEnc.setJpegFormat() fail", __FUNCTION__
);
4720 goto jpeg_encode_done
;
4723 if((m_jpegMetadata
.shot
.ctl
.jpeg
.thumbnailSize
[0] != 0) && (m_jpegMetadata
.shot
.ctl
.jpeg
.thumbnailSize
[1] != 0)) {
4724 mExifInfo
.enableThumb
= true;
4725 if(!m_checkThumbnailSize(m_jpegMetadata
.shot
.ctl
.jpeg
.thumbnailSize
[0], m_jpegMetadata
.shot
.ctl
.jpeg
.thumbnailSize
[1])) {
4726 // in the case of unsupported parameter, disable thumbnail
4727 mExifInfo
.enableThumb
= false;
4729 m_thumbNailW
= m_jpegMetadata
.shot
.ctl
.jpeg
.thumbnailSize
[0];
4730 m_thumbNailH
= m_jpegMetadata
.shot
.ctl
.jpeg
.thumbnailSize
[1];
4733 ALOGV("(%s) m_thumbNailW = %d, m_thumbNailH = %d", __FUNCTION__
, m_thumbNailW
, m_thumbNailH
);
4736 mExifInfo
.enableThumb
= false;
4739 if (jpegEnc
.setThumbnailSize(m_thumbNailW
, m_thumbNailH
)) {
4740 ALOGE("ERR(%s):jpegEnc.setThumbnailSize(%d, %d) fail", __FUNCTION__
, m_thumbNailH
, m_thumbNailH
);
4741 goto jpeg_encode_done
;
4744 ALOGV("(%s):jpegEnc.setThumbnailSize(%d, %d) ", __FUNCTION__
, m_thumbNailW
, m_thumbNailW
);
4745 if (jpegEnc
.setThumbnailQuality(m_jpegMetadata
.shot
.ctl
.jpeg
.thumbnailQuality
)) {
4746 ALOGE("ERR(%s):jpegEnc.setThumbnailQuality fail", __FUNCTION__
);
4747 goto jpeg_encode_done
;
4750 m_setExifChangedAttribute(&mExifInfo
, rect
, &m_jpegMetadata
);
4751 ALOGV("DEBUG(%s):calling jpegEnc.setInBuf() yuvSize(%d)", __FUNCTION__
, *yuvSize
);
4752 if (jpegEnc
.setInBuf((int *)&(yuvBuf
->fd
.fd
), &(yuvBuf
->virt
.p
), (int *)yuvSize
)) {
4753 ALOGE("ERR(%s):jpegEnc.setInBuf() fail", __FUNCTION__
);
4754 goto jpeg_encode_done
;
4756 if (jpegEnc
.setOutBuf(jpegBuf
->fd
.fd
, jpegBuf
->virt
.p
, jpegBuf
->size
.extS
[0] + jpegBuf
->size
.extS
[1] + jpegBuf
->size
.extS
[2])) {
4757 ALOGE("ERR(%s):jpegEnc.setOutBuf() fail", __FUNCTION__
);
4758 goto jpeg_encode_done
;
4761 if (jpegEnc
.updateConfig()) {
4762 ALOGE("ERR(%s):jpegEnc.updateConfig() fail", __FUNCTION__
);
4763 goto jpeg_encode_done
;
4766 if ((res
= jpegEnc
.encode((int *)&jpegBuf
->size
.s
, &mExifInfo
))) {
4767 ALOGE("ERR(%s):jpegEnc.encode() fail ret(%d)", __FUNCTION__
, res
);
4768 goto jpeg_encode_done
;
4775 if (jpegEnc
.flagCreate() == true)
4781 void ExynosCameraHWInterface2::OnPrecaptureMeteringTriggerStart(int id
)
4783 m_ctlInfo
.flash
.m_precaptureTriggerId
= id
;
4784 m_ctlInfo
.ae
.aeStateNoti
= AE_STATE_INACTIVE
;
4785 if ((m_ctlInfo
.flash
.i_flashMode
>= AA_AEMODE_ON_AUTO_FLASH
) && (m_cameraId
== 0)) {
4786 // flash is required
4787 switch (m_ctlInfo
.flash
.m_flashCnt
) {
4788 case IS_FLASH_STATE_AUTO_DONE
:
4789 case IS_FLASH_STATE_AUTO_OFF
:
4790 // Flash capture sequence, AF flash was executed before
4793 // Full flash sequence
4794 m_ctlInfo
.flash
.m_flashCnt
= IS_FLASH_STATE_ON
;
4795 m_ctlInfo
.flash
.m_flashEnableFlg
= true;
4796 m_ctlInfo
.flash
.m_flashTimeOut
= 0;
4799 // Skip pre-capture in case of non-flash.
4800 ALOGV("[PreCap] Flash OFF mode ");
4801 m_ctlInfo
.flash
.m_flashEnableFlg
= false;
4802 m_ctlInfo
.flash
.m_flashCnt
= IS_FLASH_STATE_NONE
;
4804 ALOGV("[PreCap] OnPrecaptureMeteringTriggerStart (ID %d) (flag : %d) (cnt : %d)", id
, m_ctlInfo
.flash
.m_flashEnableFlg
, m_ctlInfo
.flash
.m_flashCnt
);
4805 OnPrecaptureMeteringNotificationSensor();
4808 void ExynosCameraHWInterface2::OnAfTrigger(int id
)
4813 case AA_AFMODE_AUTO
:
4814 case AA_AFMODE_MACRO
:
4815 case AA_AFMODE_MANUAL
:
4816 ALOGV("[AF] OnAfTrigger - AUTO,MACRO,OFF (Mode %d) ", m_afMode
);
4817 // If flash is enable, Flash operation is executed before triggering AF
4818 if ((m_ctlInfo
.flash
.i_flashMode
>= AA_AEMODE_ON_AUTO_FLASH
)
4819 && (m_ctlInfo
.flash
.m_flashEnableFlg
== false)
4820 && (m_cameraId
== 0)) {
4821 ALOGV("[Flash] AF Flash start with Mode (%d)", m_afMode
);
4822 m_ctlInfo
.flash
.m_flashEnableFlg
= true;
4823 m_ctlInfo
.flash
.m_flashCnt
= IS_FLASH_STATE_ON
;
4824 m_ctlInfo
.flash
.m_flashDecisionResult
= false;
4825 m_ctlInfo
.flash
.m_afFlashDoneFlg
= true;
4827 OnAfTriggerAutoMacro(id
);
4829 case AA_AFMODE_CONTINUOUS_VIDEO
:
4830 ALOGV("[AF] OnAfTrigger - AA_AFMODE_CONTINUOUS_VIDEO (Mode %d) ", m_afMode
);
4831 OnAfTriggerCAFVideo(id
);
4833 case AA_AFMODE_CONTINUOUS_PICTURE
:
4834 ALOGV("[AF] OnAfTrigger - AA_AFMODE_CONTINUOUS_PICTURE (Mode %d) ", m_afMode
);
4835 OnAfTriggerCAFPicture(id
);
4844 void ExynosCameraHWInterface2::OnAfTriggerAutoMacro(int /*id*/)
4846 int nextState
= NO_TRANSITION
;
4848 switch (m_afState
) {
4849 case HAL_AFSTATE_INACTIVE
:
4850 case HAL_AFSTATE_PASSIVE_FOCUSED
:
4851 case HAL_AFSTATE_SCANNING
:
4852 nextState
= HAL_AFSTATE_NEEDS_COMMAND
;
4853 m_IsAfTriggerRequired
= true;
4855 case HAL_AFSTATE_NEEDS_COMMAND
:
4856 nextState
= NO_TRANSITION
;
4858 case HAL_AFSTATE_STARTED
:
4859 nextState
= NO_TRANSITION
;
4861 case HAL_AFSTATE_LOCKED
:
4862 nextState
= HAL_AFSTATE_NEEDS_COMMAND
;
4863 m_IsAfTriggerRequired
= true;
4865 case HAL_AFSTATE_FAILED
:
4866 nextState
= HAL_AFSTATE_NEEDS_COMMAND
;
4867 m_IsAfTriggerRequired
= true;
4872 ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__
, m_afState
, nextState
);
4873 if (nextState
!= NO_TRANSITION
)
4874 m_afState
= nextState
;
4877 void ExynosCameraHWInterface2::OnAfTriggerCAFPicture(int id
)
4879 int nextState
= NO_TRANSITION
;
4881 switch (m_afState
) {
4882 case HAL_AFSTATE_INACTIVE
:
4883 nextState
= HAL_AFSTATE_FAILED
;
4884 SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED
);
4886 case HAL_AFSTATE_NEEDS_COMMAND
:
4889 case HAL_AFSTATE_STARTED
:
4890 nextState
= HAL_AFSTATE_NEEDS_DETERMINATION
;
4891 m_AfHwStateFailed
= false;
4893 case HAL_AFSTATE_SCANNING
:
4894 nextState
= HAL_AFSTATE_NEEDS_DETERMINATION
;
4895 m_AfHwStateFailed
= false;
4896 // If flash is enable, Flash operation is executed before triggering AF
4897 if ((m_ctlInfo
.flash
.i_flashMode
>= AA_AEMODE_ON_AUTO_FLASH
)
4898 && (m_ctlInfo
.flash
.m_flashEnableFlg
== false)
4899 && (m_cameraId
== 0)) {
4900 ALOGV("[AF Flash] AF Flash start with Mode (%d) state (%d) id (%d)", m_afMode
, m_afState
, id
);
4901 m_ctlInfo
.flash
.m_flashEnableFlg
= true;
4902 m_ctlInfo
.flash
.m_flashCnt
= IS_FLASH_STATE_ON
;
4903 m_ctlInfo
.flash
.m_flashDecisionResult
= false;
4904 m_ctlInfo
.flash
.m_afFlashDoneFlg
= true;
4907 case HAL_AFSTATE_NEEDS_DETERMINATION
:
4908 nextState
= NO_TRANSITION
;
4910 case HAL_AFSTATE_PASSIVE_FOCUSED
:
4911 m_IsAfLockRequired
= true;
4912 if (m_AfHwStateFailed
) {
4913 ALOGE("(%s): [CAF] LAST : fail", __FUNCTION__
);
4914 SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED
);
4915 nextState
= HAL_AFSTATE_FAILED
;
4918 ALOGV("(%s): [CAF] LAST : success", __FUNCTION__
);
4919 SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED
);
4920 nextState
= HAL_AFSTATE_LOCKED
;
4922 m_AfHwStateFailed
= false;
4924 case HAL_AFSTATE_LOCKED
:
4925 nextState
= NO_TRANSITION
;
4927 case HAL_AFSTATE_FAILED
:
4928 nextState
= NO_TRANSITION
;
4933 ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__
, m_afState
, nextState
);
4934 if (nextState
!= NO_TRANSITION
)
4935 m_afState
= nextState
;
4939 void ExynosCameraHWInterface2::OnAfTriggerCAFVideo(int /*id*/)
4941 int nextState
= NO_TRANSITION
;
4943 switch (m_afState
) {
4944 case HAL_AFSTATE_INACTIVE
:
4945 nextState
= HAL_AFSTATE_FAILED
;
4946 SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED
);
4948 case HAL_AFSTATE_NEEDS_COMMAND
:
4951 case HAL_AFSTATE_STARTED
:
4952 m_IsAfLockRequired
= true;
4953 nextState
= HAL_AFSTATE_FAILED
;
4954 SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED
);
4956 case HAL_AFSTATE_SCANNING
:
4957 m_IsAfLockRequired
= true;
4958 nextState
= HAL_AFSTATE_FAILED
;
4959 SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED
);
4961 case HAL_AFSTATE_NEEDS_DETERMINATION
:
4964 case HAL_AFSTATE_PASSIVE_FOCUSED
:
4965 m_IsAfLockRequired
= true;
4966 SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED
);
4967 nextState
= HAL_AFSTATE_LOCKED
;
4969 case HAL_AFSTATE_LOCKED
:
4970 nextState
= NO_TRANSITION
;
4972 case HAL_AFSTATE_FAILED
:
4973 nextState
= NO_TRANSITION
;
4978 ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__
, m_afState
, nextState
);
4979 if (nextState
!= NO_TRANSITION
)
4980 m_afState
= nextState
;
4983 void ExynosCameraHWInterface2::OnPrecaptureMeteringNotificationSensor()
4985 if (m_ctlInfo
.flash
.m_precaptureTriggerId
> 0) {
4986 // Just noti of pre-capture start
4987 if (m_ctlInfo
.ae
.aeStateNoti
!= AE_STATE_PRECAPTURE
) {
4988 m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE
,
4989 ANDROID_CONTROL_AE_STATE_PRECAPTURE
,
4990 m_ctlInfo
.flash
.m_precaptureTriggerId
, 0, m_callbackCookie
);
4991 ALOGV("(%s) ANDROID_CONTROL_AE_STATE_PRECAPTURE (%d)", __FUNCTION__
, m_ctlInfo
.flash
.m_flashCnt
);
4992 m_notifyCb(CAMERA2_MSG_AUTOWB
,
4993 ANDROID_CONTROL_AWB_STATE_CONVERGED
,
4994 m_ctlInfo
.flash
.m_precaptureTriggerId
, 0, m_callbackCookie
);
4995 m_ctlInfo
.ae
.aeStateNoti
= AE_STATE_PRECAPTURE
;
5000 void ExynosCameraHWInterface2::OnPrecaptureMeteringNotificationISP()
5002 if (m_ctlInfo
.flash
.m_precaptureTriggerId
> 0) {
5003 if (m_ctlInfo
.flash
.m_flashEnableFlg
) {
5005 switch (m_ctlInfo
.flash
.m_flashCnt
) {
5006 case IS_FLASH_STATE_AUTO_DONE
:
5007 case IS_FLASH_STATE_AUTO_OFF
:
5008 if (m_ctlInfo
.ae
.aeStateNoti
== AE_STATE_PRECAPTURE
) {
5010 m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE
,
5011 ANDROID_CONTROL_AE_STATE_CONVERGED
,
5012 m_ctlInfo
.flash
.m_precaptureTriggerId
, 0, m_callbackCookie
);
5013 ALOGV("(%s) ANDROID_CONTROL_AE_STATE_CONVERGED (%d)", __FUNCTION__
, m_ctlInfo
.flash
.m_flashCnt
);
5014 m_notifyCb(CAMERA2_MSG_AUTOWB
,
5015 ANDROID_CONTROL_AWB_STATE_CONVERGED
,
5016 m_ctlInfo
.flash
.m_precaptureTriggerId
, 0, m_callbackCookie
);
5017 m_ctlInfo
.flash
.m_precaptureTriggerId
= 0;
5019 m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE
,
5020 ANDROID_CONTROL_AE_STATE_PRECAPTURE
,
5021 m_ctlInfo
.flash
.m_precaptureTriggerId
, 0, m_callbackCookie
);
5022 ALOGV("(%s) ANDROID_CONTROL_AE_STATE_PRECAPTURE (%d)", __FUNCTION__
, m_ctlInfo
.flash
.m_flashCnt
);
5023 m_notifyCb(CAMERA2_MSG_AUTOWB
,
5024 ANDROID_CONTROL_AWB_STATE_CONVERGED
,
5025 m_ctlInfo
.flash
.m_precaptureTriggerId
, 0, m_callbackCookie
);
5026 m_ctlInfo
.ae
.aeStateNoti
= AE_STATE_PRECAPTURE
;
5029 case IS_FLASH_STATE_CAPTURE
:
5030 case IS_FLASH_STATE_CAPTURE_WAIT
:
5031 case IS_FLASH_STATE_CAPTURE_JPEG
:
5032 case IS_FLASH_STATE_CAPTURE_END
:
5033 ALOGV("(%s) INVALID flash state count. (%d)", __FUNCTION__
, (int)m_ctlInfo
.flash
.m_flashCnt
);
5034 m_ctlInfo
.flash
.m_flashCnt
= IS_FLASH_STATE_AUTO_DONE
;
5035 m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE
,
5036 ANDROID_CONTROL_AE_STATE_CONVERGED
,
5037 m_ctlInfo
.flash
.m_precaptureTriggerId
, 0, m_callbackCookie
);
5038 m_notifyCb(CAMERA2_MSG_AUTOWB
,
5039 ANDROID_CONTROL_AWB_STATE_CONVERGED
,
5040 m_ctlInfo
.flash
.m_precaptureTriggerId
, 0, m_callbackCookie
);
5041 m_ctlInfo
.flash
.m_precaptureTriggerId
= 0;
5046 if (m_ctlInfo
.ae
.aeStateNoti
== AE_STATE_PRECAPTURE
) {
5047 m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE
,
5048 ANDROID_CONTROL_AE_STATE_CONVERGED
,
5049 m_ctlInfo
.flash
.m_precaptureTriggerId
, 0, m_callbackCookie
);
5050 ALOGV("(%s) ANDROID_CONTROL_AE_STATE_CONVERGED (%d)", __FUNCTION__
, m_ctlInfo
.flash
.m_flashCnt
);
5051 m_notifyCb(CAMERA2_MSG_AUTOWB
,
5052 ANDROID_CONTROL_AWB_STATE_CONVERGED
,
5053 m_ctlInfo
.flash
.m_precaptureTriggerId
, 0, m_callbackCookie
);
5054 m_ctlInfo
.flash
.m_precaptureTriggerId
= 0;
5060 void ExynosCameraHWInterface2::OnAfNotification(enum aa_afstate noti
)
5063 case AA_AFMODE_AUTO
:
5064 case AA_AFMODE_MACRO
:
5065 OnAfNotificationAutoMacro(noti
);
5067 case AA_AFMODE_CONTINUOUS_VIDEO
:
5068 OnAfNotificationCAFVideo(noti
);
5070 case AA_AFMODE_CONTINUOUS_PICTURE
:
5071 OnAfNotificationCAFPicture(noti
);
5079 void ExynosCameraHWInterface2::OnAfNotificationAutoMacro(enum aa_afstate noti
)
5081 int nextState
= NO_TRANSITION
;
5082 bool bWrongTransition
= false;
5084 if (m_afState
== HAL_AFSTATE_INACTIVE
|| m_afState
== HAL_AFSTATE_NEEDS_COMMAND
) {
5086 case AA_AFSTATE_INACTIVE
:
5087 case AA_AFSTATE_ACTIVE_SCAN
:
5088 case AA_AFSTATE_AF_ACQUIRED_FOCUS
:
5089 case AA_AFSTATE_AF_FAILED_FOCUS
:
5091 nextState
= NO_TRANSITION
;
5095 else if (m_afState
== HAL_AFSTATE_STARTED
) {
5097 case AA_AFSTATE_INACTIVE
:
5098 nextState
= NO_TRANSITION
;
5100 case AA_AFSTATE_ACTIVE_SCAN
:
5101 nextState
= HAL_AFSTATE_SCANNING
;
5102 SetAfStateForService(ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN
);
5104 case AA_AFSTATE_AF_ACQUIRED_FOCUS
:
5105 nextState
= NO_TRANSITION
;
5107 case AA_AFSTATE_AF_FAILED_FOCUS
:
5108 nextState
= NO_TRANSITION
;
5111 bWrongTransition
= true;
5115 else if (m_afState
== HAL_AFSTATE_SCANNING
) {
5117 case AA_AFSTATE_INACTIVE
:
5118 bWrongTransition
= true;
5120 case AA_AFSTATE_ACTIVE_SCAN
:
5121 nextState
= NO_TRANSITION
;
5123 case AA_AFSTATE_AF_ACQUIRED_FOCUS
:
5124 // If Flash mode is enable, after AF execute pre-capture metering
5125 if (m_ctlInfo
.flash
.m_flashEnableFlg
&& m_ctlInfo
.flash
.m_afFlashDoneFlg
) {
5126 switch (m_ctlInfo
.flash
.m_flashCnt
) {
5127 case IS_FLASH_STATE_ON_DONE
:
5128 m_ctlInfo
.flash
.m_flashCnt
= IS_FLASH_STATE_AUTO_AE_AWB_LOCK
;
5129 nextState
= NO_TRANSITION
;
5131 case IS_FLASH_STATE_AUTO_DONE
:
5132 m_ctlInfo
.flash
.m_flashCnt
= IS_FLASH_STATE_AUTO_OFF
;
5133 nextState
= HAL_AFSTATE_LOCKED
;
5134 SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED
);
5137 nextState
= NO_TRANSITION
;
5140 nextState
= HAL_AFSTATE_LOCKED
;
5141 SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED
);
5144 case AA_AFSTATE_AF_FAILED_FOCUS
:
5145 // If Flash mode is enable, after AF execute pre-capture metering
5146 if (m_ctlInfo
.flash
.m_flashEnableFlg
&& m_ctlInfo
.flash
.m_afFlashDoneFlg
) {
5147 switch (m_ctlInfo
.flash
.m_flashCnt
) {
5148 case IS_FLASH_STATE_ON_DONE
:
5149 m_ctlInfo
.flash
.m_flashCnt
= IS_FLASH_STATE_AUTO_AE_AWB_LOCK
;
5150 nextState
= NO_TRANSITION
;
5152 case IS_FLASH_STATE_AUTO_DONE
:
5153 m_ctlInfo
.flash
.m_flashCnt
= IS_FLASH_STATE_AUTO_OFF
;
5154 nextState
= HAL_AFSTATE_FAILED
;
5155 SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED
);
5158 nextState
= NO_TRANSITION
;
5161 nextState
= HAL_AFSTATE_FAILED
;
5162 SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED
);
5166 bWrongTransition
= true;
5170 else if (m_afState
== HAL_AFSTATE_LOCKED
) {
5172 case AA_AFSTATE_INACTIVE
:
5173 case AA_AFSTATE_ACTIVE_SCAN
:
5174 bWrongTransition
= true;
5176 case AA_AFSTATE_AF_ACQUIRED_FOCUS
:
5177 nextState
= NO_TRANSITION
;
5179 case AA_AFSTATE_AF_FAILED_FOCUS
:
5181 bWrongTransition
= true;
5185 else if (m_afState
== HAL_AFSTATE_FAILED
) {
5187 case AA_AFSTATE_INACTIVE
:
5188 case AA_AFSTATE_ACTIVE_SCAN
:
5189 case AA_AFSTATE_AF_ACQUIRED_FOCUS
:
5190 bWrongTransition
= true;
5192 case AA_AFSTATE_AF_FAILED_FOCUS
:
5193 nextState
= NO_TRANSITION
;
5196 bWrongTransition
= true;
5200 if (bWrongTransition
) {
5201 ALOGV("(%s): Wrong Transition state(%d) noti(%d)", __FUNCTION__
, m_afState
, noti
);
5204 ALOGV("(%s): State (%d) -> (%d) by (%d)", __FUNCTION__
, m_afState
, nextState
, noti
);
5205 if (nextState
!= NO_TRANSITION
)
5206 m_afState
= nextState
;
5209 void ExynosCameraHWInterface2::OnAfNotificationCAFPicture(enum aa_afstate noti
)
5211 int nextState
= NO_TRANSITION
;
5212 bool bWrongTransition
= false;
5214 if (m_afState
== HAL_AFSTATE_INACTIVE
) {
5216 case AA_AFSTATE_INACTIVE
:
5217 case AA_AFSTATE_ACTIVE_SCAN
:
5218 case AA_AFSTATE_AF_ACQUIRED_FOCUS
:
5219 case AA_AFSTATE_AF_FAILED_FOCUS
:
5221 nextState
= NO_TRANSITION
;
5224 // Check AF notification after triggering
5225 if (m_ctlInfo
.af
.m_afTriggerTimeOut
> 0) {
5226 if (m_ctlInfo
.af
.m_afTriggerTimeOut
> 5) {
5227 ALOGE("(%s) AF notification error - try to re-trigger mode (%)", __FUNCTION__
, m_afMode
);
5228 SetAfMode(AA_AFMODE_OFF
);
5229 SetAfMode(m_afMode
);
5230 m_ctlInfo
.af
.m_afTriggerTimeOut
= 0;
5232 m_ctlInfo
.af
.m_afTriggerTimeOut
++;
5236 else if (m_afState
== HAL_AFSTATE_STARTED
) {
5238 case AA_AFSTATE_INACTIVE
:
5239 nextState
= NO_TRANSITION
;
5241 case AA_AFSTATE_ACTIVE_SCAN
:
5242 nextState
= HAL_AFSTATE_SCANNING
;
5243 SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN
);
5244 m_ctlInfo
.af
.m_afTriggerTimeOut
= 0;
5246 case AA_AFSTATE_AF_ACQUIRED_FOCUS
:
5247 nextState
= HAL_AFSTATE_PASSIVE_FOCUSED
;
5248 SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED
);
5249 m_ctlInfo
.af
.m_afTriggerTimeOut
= 0;
5251 case AA_AFSTATE_AF_FAILED_FOCUS
:
5252 //nextState = HAL_AFSTATE_FAILED;
5253 //SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
5254 nextState
= NO_TRANSITION
;
5257 bWrongTransition
= true;
5261 else if (m_afState
== HAL_AFSTATE_SCANNING
) {
5263 case AA_AFSTATE_INACTIVE
:
5264 nextState
= NO_TRANSITION
;
5266 case AA_AFSTATE_ACTIVE_SCAN
:
5267 nextState
= NO_TRANSITION
;
5268 m_AfHwStateFailed
= false;
5270 case AA_AFSTATE_AF_ACQUIRED_FOCUS
:
5271 nextState
= HAL_AFSTATE_PASSIVE_FOCUSED
;
5272 m_AfHwStateFailed
= false;
5273 SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED
);
5275 case AA_AFSTATE_AF_FAILED_FOCUS
:
5276 nextState
= HAL_AFSTATE_PASSIVE_FOCUSED
;
5277 m_AfHwStateFailed
= true;
5278 SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED
);
5281 bWrongTransition
= true;
5285 else if (m_afState
== HAL_AFSTATE_PASSIVE_FOCUSED
) {
5287 case AA_AFSTATE_INACTIVE
:
5288 nextState
= NO_TRANSITION
;
5290 case AA_AFSTATE_ACTIVE_SCAN
:
5291 nextState
= HAL_AFSTATE_SCANNING
;
5292 m_AfHwStateFailed
= false;
5293 SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN
);
5295 case AA_AFSTATE_AF_ACQUIRED_FOCUS
:
5296 nextState
= NO_TRANSITION
;
5297 m_AfHwStateFailed
= false;
5299 case AA_AFSTATE_AF_FAILED_FOCUS
:
5300 nextState
= NO_TRANSITION
;
5301 m_AfHwStateFailed
= true;
5304 bWrongTransition
= true;
5308 else if (m_afState
== HAL_AFSTATE_NEEDS_DETERMINATION
) {
5309 //Skip notification in case of flash, wait the end of flash on
5310 if (m_ctlInfo
.flash
.m_flashEnableFlg
&& m_ctlInfo
.flash
.m_afFlashDoneFlg
) {
5311 if (m_ctlInfo
.flash
.m_flashCnt
< IS_FLASH_STATE_ON_DONE
)
5315 case AA_AFSTATE_INACTIVE
:
5316 nextState
= NO_TRANSITION
;
5318 case AA_AFSTATE_ACTIVE_SCAN
:
5319 nextState
= NO_TRANSITION
;
5321 case AA_AFSTATE_AF_ACQUIRED_FOCUS
:
5322 // If Flash mode is enable, after AF execute pre-capture metering
5323 if (m_ctlInfo
.flash
.m_flashEnableFlg
&& m_ctlInfo
.flash
.m_afFlashDoneFlg
) {
5324 switch (m_ctlInfo
.flash
.m_flashCnt
) {
5325 case IS_FLASH_STATE_ON_DONE
:
5326 ALOGV("[AF Flash] AUTO start with Mode (%d) state (%d) noti (%d)", m_afMode
, m_afState
, (int)noti
);
5327 m_ctlInfo
.flash
.m_flashCnt
= IS_FLASH_STATE_AUTO_AE_AWB_LOCK
;
5328 nextState
= NO_TRANSITION
;
5330 case IS_FLASH_STATE_AUTO_DONE
:
5331 ALOGV("[AF Flash] AUTO end with Mode (%d) state (%d) noti (%d)", m_afMode
, m_afState
, (int)noti
);
5332 m_ctlInfo
.flash
.m_flashCnt
= IS_FLASH_STATE_AUTO_OFF
;
5333 m_IsAfLockRequired
= true;
5334 nextState
= HAL_AFSTATE_LOCKED
;
5335 SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED
);
5338 nextState
= NO_TRANSITION
;
5341 m_IsAfLockRequired
= true;
5342 nextState
= HAL_AFSTATE_LOCKED
;
5343 SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED
);
5346 case AA_AFSTATE_AF_FAILED_FOCUS
:
5347 // If Flash mode is enable, after AF execute pre-capture metering
5348 if (m_ctlInfo
.flash
.m_flashEnableFlg
&& m_ctlInfo
.flash
.m_afFlashDoneFlg
) {
5349 switch (m_ctlInfo
.flash
.m_flashCnt
) {
5350 case IS_FLASH_STATE_ON_DONE
:
5351 ALOGV("[AF Flash] AUTO start with Mode (%d) state (%d) noti (%d)", m_afMode
, m_afState
, (int)noti
);
5352 m_ctlInfo
.flash
.m_flashCnt
= IS_FLASH_STATE_AUTO_AE_AWB_LOCK
;
5353 nextState
= NO_TRANSITION
;
5355 case IS_FLASH_STATE_AUTO_DONE
:
5356 ALOGV("[AF Flash] AUTO end with Mode (%d) state (%d) noti (%d)", m_afMode
, m_afState
, (int)noti
);
5357 m_ctlInfo
.flash
.m_flashCnt
= IS_FLASH_STATE_AUTO_OFF
;
5358 m_IsAfLockRequired
= true;
5359 nextState
= HAL_AFSTATE_FAILED
;
5360 SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED
);
5363 nextState
= NO_TRANSITION
;
5366 m_IsAfLockRequired
= true;
5367 nextState
= HAL_AFSTATE_FAILED
;
5368 SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED
);
5372 bWrongTransition
= true;
5376 else if (m_afState
== HAL_AFSTATE_LOCKED
) {
5378 case AA_AFSTATE_INACTIVE
:
5379 nextState
= NO_TRANSITION
;
5381 case AA_AFSTATE_ACTIVE_SCAN
:
5382 bWrongTransition
= true;
5384 case AA_AFSTATE_AF_ACQUIRED_FOCUS
:
5385 nextState
= NO_TRANSITION
;
5387 case AA_AFSTATE_AF_FAILED_FOCUS
:
5389 bWrongTransition
= true;
5393 else if (m_afState
== HAL_AFSTATE_FAILED
) {
5395 case AA_AFSTATE_INACTIVE
:
5396 bWrongTransition
= true;
5398 case AA_AFSTATE_ACTIVE_SCAN
:
5399 nextState
= HAL_AFSTATE_SCANNING
;
5401 case AA_AFSTATE_AF_ACQUIRED_FOCUS
:
5402 bWrongTransition
= true;
5404 case AA_AFSTATE_AF_FAILED_FOCUS
:
5405 nextState
= NO_TRANSITION
;
5408 bWrongTransition
= true;
5412 if (bWrongTransition
) {
5413 ALOGV("(%s): Wrong Transition state(%d) noti(%d)", __FUNCTION__
, m_afState
, noti
);
5416 ALOGV("(%s): State (%d) -> (%d) by (%d)", __FUNCTION__
, m_afState
, nextState
, noti
);
5417 if (nextState
!= NO_TRANSITION
)
5418 m_afState
= nextState
;
5421 void ExynosCameraHWInterface2::OnAfNotificationCAFVideo(enum aa_afstate noti
)
5423 int nextState
= NO_TRANSITION
;
5424 bool bWrongTransition
= false;
5426 if (m_afState
== HAL_AFSTATE_INACTIVE
) {
5428 case AA_AFSTATE_INACTIVE
:
5429 case AA_AFSTATE_ACTIVE_SCAN
:
5430 case AA_AFSTATE_AF_ACQUIRED_FOCUS
:
5431 case AA_AFSTATE_AF_FAILED_FOCUS
:
5433 nextState
= NO_TRANSITION
;
5437 else if (m_afState
== HAL_AFSTATE_STARTED
) {
5439 case AA_AFSTATE_INACTIVE
:
5440 nextState
= NO_TRANSITION
;
5442 case AA_AFSTATE_ACTIVE_SCAN
:
5443 nextState
= HAL_AFSTATE_SCANNING
;
5444 SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN
);
5446 case AA_AFSTATE_AF_ACQUIRED_FOCUS
:
5447 nextState
= HAL_AFSTATE_PASSIVE_FOCUSED
;
5448 SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED
);
5450 case AA_AFSTATE_AF_FAILED_FOCUS
:
5451 nextState
= HAL_AFSTATE_FAILED
;
5452 SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED
);
5455 bWrongTransition
= true;
5459 else if (m_afState
== HAL_AFSTATE_SCANNING
) {
5461 case AA_AFSTATE_INACTIVE
:
5462 bWrongTransition
= true;
5464 case AA_AFSTATE_ACTIVE_SCAN
:
5465 nextState
= NO_TRANSITION
;
5467 case AA_AFSTATE_AF_ACQUIRED_FOCUS
:
5468 nextState
= HAL_AFSTATE_PASSIVE_FOCUSED
;
5469 SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED
);
5471 case AA_AFSTATE_AF_FAILED_FOCUS
:
5472 nextState
= NO_TRANSITION
;
5475 bWrongTransition
= true;
5479 else if (m_afState
== HAL_AFSTATE_PASSIVE_FOCUSED
) {
5481 case AA_AFSTATE_INACTIVE
:
5482 bWrongTransition
= true;
5484 case AA_AFSTATE_ACTIVE_SCAN
:
5485 nextState
= HAL_AFSTATE_SCANNING
;
5486 SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN
);
5488 case AA_AFSTATE_AF_ACQUIRED_FOCUS
:
5489 nextState
= NO_TRANSITION
;
5491 case AA_AFSTATE_AF_FAILED_FOCUS
:
5492 nextState
= HAL_AFSTATE_FAILED
;
5493 SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED
);
5494 // TODO : needs NO_TRANSITION ?
5497 bWrongTransition
= true;
5501 else if (m_afState
== HAL_AFSTATE_NEEDS_DETERMINATION
) {
5503 case AA_AFSTATE_INACTIVE
:
5504 bWrongTransition
= true;
5506 case AA_AFSTATE_ACTIVE_SCAN
:
5507 nextState
= NO_TRANSITION
;
5509 case AA_AFSTATE_AF_ACQUIRED_FOCUS
:
5510 m_IsAfLockRequired
= true;
5511 nextState
= HAL_AFSTATE_LOCKED
;
5512 SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED
);
5514 case AA_AFSTATE_AF_FAILED_FOCUS
:
5515 nextState
= HAL_AFSTATE_FAILED
;
5516 SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED
);
5519 bWrongTransition
= true;
5523 else if (m_afState
== HAL_AFSTATE_LOCKED
) {
5525 case AA_AFSTATE_INACTIVE
:
5526 nextState
= NO_TRANSITION
;
5528 case AA_AFSTATE_ACTIVE_SCAN
:
5529 bWrongTransition
= true;
5531 case AA_AFSTATE_AF_ACQUIRED_FOCUS
:
5532 nextState
= NO_TRANSITION
;
5534 case AA_AFSTATE_AF_FAILED_FOCUS
:
5536 bWrongTransition
= true;
5540 else if (m_afState
== HAL_AFSTATE_FAILED
) {
5542 case AA_AFSTATE_INACTIVE
:
5543 case AA_AFSTATE_ACTIVE_SCAN
:
5544 case AA_AFSTATE_AF_ACQUIRED_FOCUS
:
5545 bWrongTransition
= true;
5547 case AA_AFSTATE_AF_FAILED_FOCUS
:
5548 nextState
= NO_TRANSITION
;
5551 bWrongTransition
= true;
5555 if (bWrongTransition
) {
5556 ALOGV("(%s): Wrong Transition state(%d) noti(%d)", __FUNCTION__
, m_afState
, noti
);
5559 ALOGV("(%s): State (%d) -> (%d) by (%d)", __FUNCTION__
, m_afState
, nextState
, noti
);
5560 if (nextState
!= NO_TRANSITION
)
5561 m_afState
= nextState
;
5564 void ExynosCameraHWInterface2::OnAfCancel(int id
)
5569 case AA_AFMODE_AUTO
:
5570 case AA_AFMODE_MACRO
:
5572 case AA_AFMODE_MANUAL
:
5573 OnAfCancelAutoMacro(id
);
5575 case AA_AFMODE_CONTINUOUS_VIDEO
:
5576 OnAfCancelCAFVideo(id
);
5578 case AA_AFMODE_CONTINUOUS_PICTURE
:
5579 OnAfCancelCAFPicture(id
);
5586 void ExynosCameraHWInterface2::OnAfCancelAutoMacro(int /*id*/)
5588 int nextState
= NO_TRANSITION
;
5590 if (m_ctlInfo
.flash
.m_flashEnableFlg
&& m_ctlInfo
.flash
.m_afFlashDoneFlg
) {
5591 m_ctlInfo
.flash
.m_flashCnt
= IS_FLASH_STATE_AUTO_OFF
;
5593 switch (m_afState
) {
5594 case HAL_AFSTATE_INACTIVE
:
5595 nextState
= NO_TRANSITION
;
5596 SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE
);
5598 case HAL_AFSTATE_NEEDS_COMMAND
:
5599 case HAL_AFSTATE_STARTED
:
5600 case HAL_AFSTATE_SCANNING
:
5601 case HAL_AFSTATE_LOCKED
:
5602 case HAL_AFSTATE_FAILED
:
5603 SetAfMode(AA_AFMODE_OFF
);
5604 SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE
);
5605 nextState
= HAL_AFSTATE_INACTIVE
;
5610 ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__
, m_afState
, nextState
);
5611 if (nextState
!= NO_TRANSITION
)
5612 m_afState
= nextState
;
5615 void ExynosCameraHWInterface2::OnAfCancelCAFPicture(int /*id*/)
5617 int nextState
= NO_TRANSITION
;
5619 switch (m_afState
) {
5620 case HAL_AFSTATE_INACTIVE
:
5621 nextState
= NO_TRANSITION
;
5623 case HAL_AFSTATE_NEEDS_COMMAND
:
5624 case HAL_AFSTATE_STARTED
:
5625 case HAL_AFSTATE_SCANNING
:
5626 case HAL_AFSTATE_LOCKED
:
5627 case HAL_AFSTATE_FAILED
:
5628 case HAL_AFSTATE_NEEDS_DETERMINATION
:
5629 case HAL_AFSTATE_PASSIVE_FOCUSED
:
5630 SetAfMode(AA_AFMODE_OFF
);
5631 SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE
);
5632 SetAfMode(AA_AFMODE_CONTINUOUS_PICTURE
);
5633 nextState
= HAL_AFSTATE_INACTIVE
;
5638 ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__
, m_afState
, nextState
);
5639 if (nextState
!= NO_TRANSITION
)
5640 m_afState
= nextState
;
5643 void ExynosCameraHWInterface2::OnAfCancelCAFVideo(int /*id*/)
5645 int nextState
= NO_TRANSITION
;
5647 switch (m_afState
) {
5648 case HAL_AFSTATE_INACTIVE
:
5649 nextState
= NO_TRANSITION
;
5651 case HAL_AFSTATE_NEEDS_COMMAND
:
5652 case HAL_AFSTATE_STARTED
:
5653 case HAL_AFSTATE_SCANNING
:
5654 case HAL_AFSTATE_LOCKED
:
5655 case HAL_AFSTATE_FAILED
:
5656 case HAL_AFSTATE_NEEDS_DETERMINATION
:
5657 case HAL_AFSTATE_PASSIVE_FOCUSED
:
5658 SetAfMode(AA_AFMODE_OFF
);
5659 SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE
);
5660 SetAfMode(AA_AFMODE_CONTINUOUS_VIDEO
);
5661 nextState
= HAL_AFSTATE_INACTIVE
;
5666 ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__
, m_afState
, nextState
);
5667 if (nextState
!= NO_TRANSITION
)
5668 m_afState
= nextState
;
5671 void ExynosCameraHWInterface2::SetAfStateForService(int newState
)
5673 if (m_serviceAfState
!= newState
|| newState
== 0)
5674 m_notifyCb(CAMERA2_MSG_AUTOFOCUS
, newState
, m_afTriggerId
, 0, m_callbackCookie
);
5675 m_serviceAfState
= newState
;
5678 int ExynosCameraHWInterface2::GetAfStateForService()
5680 return m_serviceAfState
;
5683 void ExynosCameraHWInterface2::SetAfMode(enum aa_afmode afMode
)
5685 if (m_afMode
!= afMode
) {
5686 if (m_IsAfModeUpdateRequired
&& m_afMode
!= AA_AFMODE_OFF
) {
5688 ALOGV("(%s): pending(%d) and new(%d)", __FUNCTION__
, m_afMode
, afMode
);
5691 ALOGV("(%s): current(%d) new(%d)", __FUNCTION__
, m_afMode
, afMode
);
5692 m_IsAfModeUpdateRequired
= true;
5694 SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE
);
5695 m_afState
= HAL_AFSTATE_INACTIVE
;
5700 void ExynosCameraHWInterface2::m_setExifFixedAttribute(void)
5702 char property
[PROPERTY_VALUE_MAX
];
5704 //2 0th IFD TIFF Tags
5706 property_get("ro.product.brand", property
, EXIF_DEF_MAKER
);
5707 strncpy((char *)mExifInfo
.maker
, property
,
5708 sizeof(mExifInfo
.maker
) - 1);
5709 mExifInfo
.maker
[sizeof(mExifInfo
.maker
) - 1] = '\0';
5711 property_get("ro.product.model", property
, EXIF_DEF_MODEL
);
5712 strncpy((char *)mExifInfo
.model
, property
,
5713 sizeof(mExifInfo
.model
) - 1);
5714 mExifInfo
.model
[sizeof(mExifInfo
.model
) - 1] = '\0';
5716 property_get("ro.build.id", property
, EXIF_DEF_SOFTWARE
);
5717 strncpy((char *)mExifInfo
.software
, property
,
5718 sizeof(mExifInfo
.software
) - 1);
5719 mExifInfo
.software
[sizeof(mExifInfo
.software
) - 1] = '\0';
5721 //3 YCbCr Positioning
5722 mExifInfo
.ycbcr_positioning
= EXIF_DEF_YCBCR_POSITIONING
;
5724 //2 0th IFD Exif Private Tags
5726 mExifInfo
.fnumber
.num
= (uint32_t)(m_camera2
->m_curCameraInfo
->fnumber
* EXIF_DEF_FNUMBER_DEN
);
5727 mExifInfo
.fnumber
.den
= EXIF_DEF_FNUMBER_DEN
;
5728 //3 Exposure Program
5729 mExifInfo
.exposure_program
= EXIF_DEF_EXPOSURE_PROGRAM
;
5731 memcpy(mExifInfo
.exif_version
, EXIF_DEF_EXIF_VERSION
, sizeof(mExifInfo
.exif_version
));
5733 double av
= APEX_FNUM_TO_APERTURE((double)mExifInfo
.fnumber
.num
/mExifInfo
.fnumber
.den
);
5734 mExifInfo
.aperture
.num
= (uint32_t)(av
*EXIF_DEF_APEX_DEN
);
5735 mExifInfo
.aperture
.den
= EXIF_DEF_APEX_DEN
;
5736 //3 Maximum lens aperture
5737 mExifInfo
.max_aperture
.num
= mExifInfo
.aperture
.num
;
5738 mExifInfo
.max_aperture
.den
= mExifInfo
.aperture
.den
;
5739 //3 Lens Focal Length
5740 mExifInfo
.focal_length
.num
= (uint32_t)(m_camera2
->m_curCameraInfo
->focalLength
* 100);
5742 mExifInfo
.focal_length
.den
= EXIF_DEF_FOCAL_LEN_DEN
;
5744 strcpy((char *)mExifInfo
.user_comment
, EXIF_DEF_USERCOMMENTS
);
5745 //3 Color Space information
5746 mExifInfo
.color_space
= EXIF_DEF_COLOR_SPACE
;
5748 mExifInfo
.exposure_mode
= EXIF_DEF_EXPOSURE_MODE
;
5750 //2 0th IFD GPS Info Tags
5751 unsigned char gps_version
[4] = { 0x02, 0x02, 0x00, 0x00 };
5752 memcpy(mExifInfo
.gps_version_id
, gps_version
, sizeof(gps_version
));
5754 //2 1th IFD TIFF Tags
5755 mExifInfo
.compression_scheme
= EXIF_DEF_COMPRESSION
;
5756 mExifInfo
.x_resolution
.num
= EXIF_DEF_RESOLUTION_NUM
;
5757 mExifInfo
.x_resolution
.den
= EXIF_DEF_RESOLUTION_DEN
;
5758 mExifInfo
.y_resolution
.num
= EXIF_DEF_RESOLUTION_NUM
;
5759 mExifInfo
.y_resolution
.den
= EXIF_DEF_RESOLUTION_DEN
;
5760 mExifInfo
.resolution_unit
= EXIF_DEF_RESOLUTION_UNIT
;
5763 void ExynosCameraHWInterface2::m_setExifChangedAttribute(exif_attribute_t
*exifInfo
, ExynosRect
*rect
,
5764 camera2_shot_ext
*currentEntry
)
5766 camera2_dm
*dm
= &(currentEntry
->shot
.dm
);
5767 camera2_ctl
*ctl
= &(currentEntry
->shot
.ctl
);
5769 ALOGV("(%s): framecnt(%d) exp(%lld) iso(%d)", __FUNCTION__
, ctl
->request
.frameCount
, dm
->sensor
.exposureTime
,dm
->aa
.isoValue
);
5770 if (!ctl
->request
.frameCount
)
5772 //2 0th IFD TIFF Tags
5774 exifInfo
->width
= rect
->w
;
5776 exifInfo
->height
= rect
->h
;
5778 switch (ctl
->jpeg
.orientation
) {
5780 exifInfo
->orientation
= EXIF_ORIENTATION_90
;
5783 exifInfo
->orientation
= EXIF_ORIENTATION_180
;
5786 exifInfo
->orientation
= EXIF_ORIENTATION_270
;
5790 exifInfo
->orientation
= EXIF_ORIENTATION_UP
;
5796 struct tm
*timeinfo
;
5798 timeinfo
= localtime(&rawtime
);
5799 strftime((char *)exifInfo
->date_time
, 20, "%Y:%m:%d %H:%M:%S", timeinfo
);
5801 //2 0th IFD Exif Private Tags
5803 int shutterSpeed
= (dm
->sensor
.exposureTime
/1000);
5805 // To display exposure time just above 500ms as 1/2sec, not 1 sec.
5806 if (shutterSpeed
> 500000)
5807 shutterSpeed
-= 100000;
5809 if (shutterSpeed
< 0) {
5813 exifInfo
->exposure_time
.num
= 1;
5815 //exifInfo->exposure_time.den = (uint32_t)(1000000 / shutterSpeed);
5816 exifInfo
->exposure_time
.den
= (uint32_t)((double)1000000 / shutterSpeed
);
5818 //3 ISO Speed Rating
5819 exifInfo
->iso_speed_rating
= dm
->aa
.isoValue
;
5821 uint32_t av
, tv
, bv
, sv
, ev
;
5822 av
= APEX_FNUM_TO_APERTURE((double)exifInfo
->fnumber
.num
/ exifInfo
->fnumber
.den
);
5823 tv
= APEX_EXPOSURE_TO_SHUTTER((double)exifInfo
->exposure_time
.num
/ exifInfo
->exposure_time
.den
);
5824 sv
= APEX_ISO_TO_FILMSENSITIVITY(exifInfo
->iso_speed_rating
);
5827 //ALOGD("Shutter speed=%d us, iso=%d", shutterSpeed, exifInfo->iso_speed_rating);
5828 ALOGV("AV=%d, TV=%d, SV=%d", av
, tv
, sv
);
5831 exifInfo
->shutter_speed
.num
= tv
* EXIF_DEF_APEX_DEN
;
5832 exifInfo
->shutter_speed
.den
= EXIF_DEF_APEX_DEN
;
5834 exifInfo
->brightness
.num
= bv
*EXIF_DEF_APEX_DEN
;
5835 exifInfo
->brightness
.den
= EXIF_DEF_APEX_DEN
;
5837 if (ctl
->aa
.sceneMode
== AA_SCENE_MODE_BEACH
||
5838 ctl
->aa
.sceneMode
== AA_SCENE_MODE_SNOW
) {
5839 exifInfo
->exposure_bias
.num
= EXIF_DEF_APEX_DEN
;
5840 exifInfo
->exposure_bias
.den
= EXIF_DEF_APEX_DEN
;
5842 exifInfo
->exposure_bias
.num
= 0;
5843 exifInfo
->exposure_bias
.den
= 0;
5846 /*switch (m_curCameraInfo->metering) {
5847 case METERING_MODE_CENTER:
5848 exifInfo->metering_mode = EXIF_METERING_CENTER;
5850 case METERING_MODE_MATRIX:
5851 exifInfo->metering_mode = EXIF_METERING_MULTISPOT;
5853 case METERING_MODE_SPOT:
5854 exifInfo->metering_mode = EXIF_METERING_SPOT;
5856 case METERING_MODE_AVERAGE:
5858 exifInfo->metering_mode = EXIF_METERING_AVERAGE;
5861 exifInfo
->metering_mode
= EXIF_METERING_CENTER
;
5864 if (m_ctlInfo
.flash
.m_flashDecisionResult
)
5865 exifInfo
->flash
= 1;
5867 exifInfo
->flash
= EXIF_DEF_FLASH
;
5870 if (currentEntry
->awb_mode_dm
== AA_AWBMODE_WB_AUTO
)
5871 exifInfo
->white_balance
= EXIF_WB_AUTO
;
5873 exifInfo
->white_balance
= EXIF_WB_MANUAL
;
5875 //3 Scene Capture Type
5876 switch (ctl
->aa
.sceneMode
) {
5877 case AA_SCENE_MODE_PORTRAIT
:
5878 exifInfo
->scene_capture_type
= EXIF_SCENE_PORTRAIT
;
5880 case AA_SCENE_MODE_LANDSCAPE
:
5881 exifInfo
->scene_capture_type
= EXIF_SCENE_LANDSCAPE
;
5883 case AA_SCENE_MODE_NIGHT_PORTRAIT
:
5884 exifInfo
->scene_capture_type
= EXIF_SCENE_NIGHT
;
5887 exifInfo
->scene_capture_type
= EXIF_SCENE_STANDARD
;
5891 //2 0th IFD GPS Info Tags
5892 if (ctl
->jpeg
.gpsCoordinates
[0] != 0 && ctl
->jpeg
.gpsCoordinates
[1] != 0) {
5894 if (ctl
->jpeg
.gpsCoordinates
[0] > 0)
5895 strcpy((char *)exifInfo
->gps_latitude_ref
, "N");
5897 strcpy((char *)exifInfo
->gps_latitude_ref
, "S");
5899 if (ctl
->jpeg
.gpsCoordinates
[1] > 0)
5900 strcpy((char *)exifInfo
->gps_longitude_ref
, "E");
5902 strcpy((char *)exifInfo
->gps_longitude_ref
, "W");
5904 if (ctl
->jpeg
.gpsCoordinates
[2] > 0)
5905 exifInfo
->gps_altitude_ref
= 0;
5907 exifInfo
->gps_altitude_ref
= 1;
5909 double latitude
= fabs(ctl
->jpeg
.gpsCoordinates
[0]);
5910 double longitude
= fabs(ctl
->jpeg
.gpsCoordinates
[1]);
5911 double altitude
= fabs(ctl
->jpeg
.gpsCoordinates
[2]);
5913 exifInfo
->gps_latitude
[0].num
= (uint32_t)latitude
;
5914 exifInfo
->gps_latitude
[0].den
= 1;
5915 exifInfo
->gps_latitude
[1].num
= (uint32_t)((latitude
- exifInfo
->gps_latitude
[0].num
) * 60);
5916 exifInfo
->gps_latitude
[1].den
= 1;
5917 exifInfo
->gps_latitude
[2].num
= (uint32_t)round((((latitude
- exifInfo
->gps_latitude
[0].num
) * 60)
5918 - exifInfo
->gps_latitude
[1].num
) * 60);
5919 exifInfo
->gps_latitude
[2].den
= 1;
5921 exifInfo
->gps_longitude
[0].num
= (uint32_t)longitude
;
5922 exifInfo
->gps_longitude
[0].den
= 1;
5923 exifInfo
->gps_longitude
[1].num
= (uint32_t)((longitude
- exifInfo
->gps_longitude
[0].num
) * 60);
5924 exifInfo
->gps_longitude
[1].den
= 1;
5925 exifInfo
->gps_longitude
[2].num
= (uint32_t)round((((longitude
- exifInfo
->gps_longitude
[0].num
) * 60)
5926 - exifInfo
->gps_longitude
[1].num
) * 60);
5927 exifInfo
->gps_longitude
[2].den
= 1;
5929 exifInfo
->gps_altitude
.num
= (uint32_t)round(altitude
);
5930 exifInfo
->gps_altitude
.den
= 1;
5934 timestamp
= (long)ctl
->jpeg
.gpsTimestamp
;
5935 gmtime_r(×tamp
, &tm_data
);
5936 exifInfo
->gps_timestamp
[0].num
= tm_data
.tm_hour
;
5937 exifInfo
->gps_timestamp
[0].den
= 1;
5938 exifInfo
->gps_timestamp
[1].num
= tm_data
.tm_min
;
5939 exifInfo
->gps_timestamp
[1].den
= 1;
5940 exifInfo
->gps_timestamp
[2].num
= tm_data
.tm_sec
;
5941 exifInfo
->gps_timestamp
[2].den
= 1;
5942 snprintf((char*)exifInfo
->gps_datestamp
, sizeof(exifInfo
->gps_datestamp
),
5943 "%04d:%02d:%02d", tm_data
.tm_year
+ 1900, tm_data
.tm_mon
+ 1, tm_data
.tm_mday
);
5945 memset(exifInfo
->gps_processing_method
, 0, 100);
5946 memcpy(exifInfo
->gps_processing_method
, currentEntry
->gpsProcessingMethod
, 32);
5947 exifInfo
->enableGps
= true;
5949 exifInfo
->enableGps
= false;
5952 //2 1th IFD TIFF Tags
5953 exifInfo
->widthThumb
= ctl
->jpeg
.thumbnailSize
[0];
5954 exifInfo
->heightThumb
= ctl
->jpeg
.thumbnailSize
[1];
5957 ExynosCameraHWInterface2::MainThread::~MainThread()
5959 ALOGV("(%s):", __FUNCTION__
);
5962 void ExynosCameraHWInterface2::MainThread::release()
5964 ALOGV("(%s):", __func__
);
5965 SetSignal(SIGNAL_THREAD_RELEASE
);
5968 ExynosCameraHWInterface2::SensorThread::~SensorThread()
5970 ALOGV("(%s):", __FUNCTION__
);
5973 void ExynosCameraHWInterface2::SensorThread::release()
5975 ALOGV("(%s):", __func__
);
5976 SetSignal(SIGNAL_THREAD_RELEASE
);
5979 ExynosCameraHWInterface2::StreamThread::~StreamThread()
5981 ALOGV("(%s):", __FUNCTION__
);
5984 void ExynosCameraHWInterface2::StreamThread::setParameter(stream_parameters_t
* new_parameters
)
5986 ALOGV("DEBUG(%s):", __FUNCTION__
);
5987 memcpy(&m_parameters
, new_parameters
, sizeof(stream_parameters_t
));
5990 void ExynosCameraHWInterface2::StreamThread::release()
5992 ALOGV("(%s):", __func__
);
5993 SetSignal(SIGNAL_THREAD_RELEASE
);
5996 int ExynosCameraHWInterface2::StreamThread::findBufferIndex(void * bufAddr
)
5999 for (index
= 0 ; index
< m_parameters
.numSvcBuffers
; index
++) {
6000 if (m_parameters
.svcBuffers
[index
].virt
.extP
[0] == bufAddr
)
6006 int ExynosCameraHWInterface2::StreamThread::findBufferIndex(buffer_handle_t
* bufHandle
)
6009 for (index
= 0 ; index
< m_parameters
.numSvcBuffers
; index
++) {
6010 if (m_parameters
.svcBufHandle
[index
] == *bufHandle
)
6016 status_t
ExynosCameraHWInterface2::StreamThread::attachSubStream(int stream_id
, int priority
)
6018 ALOGV("(%s): substream_id(%d)", __FUNCTION__
, stream_id
);
6019 int index
, vacantIndex
;
6020 bool vacancy
= false;
6022 for (index
= 0 ; index
< NUM_MAX_SUBSTREAM
; index
++) {
6023 if (!vacancy
&& m_attachedSubStreams
[index
].streamId
== -1) {
6025 vacantIndex
= index
;
6026 } else if (m_attachedSubStreams
[index
].streamId
== stream_id
) {
6032 m_attachedSubStreams
[vacantIndex
].streamId
= stream_id
;
6033 m_attachedSubStreams
[vacantIndex
].priority
= priority
;
6034 m_numRegisteredStream
++;
6038 status_t
ExynosCameraHWInterface2::StreamThread::detachSubStream(int stream_id
)
6040 ALOGV("(%s): substream_id(%d)", __FUNCTION__
, stream_id
);
6044 for (index
= 0 ; index
< NUM_MAX_SUBSTREAM
; index
++) {
6045 if (m_attachedSubStreams
[index
].streamId
== stream_id
) {
6052 m_attachedSubStreams
[index
].streamId
= -1;
6053 m_attachedSubStreams
[index
].priority
= 0;
6054 m_numRegisteredStream
--;
6058 int ExynosCameraHWInterface2::createIonClient(ion_client ionClient
)
6060 if (ionClient
== 0) {
6061 ionClient
= ion_client_create();
6062 if (ionClient
< 0) {
6063 ALOGE("[%s]src ion client create failed, value = %d\n", __FUNCTION__
, ionClient
);
6070 int ExynosCameraHWInterface2::deleteIonClient(ion_client ionClient
)
6072 if (ionClient
!= 0) {
6073 if (ionClient
> 0) {
6074 ion_client_destroy(ionClient
);
6081 int ExynosCameraHWInterface2::allocCameraMemory(ion_client ionClient
, ExynosBuffer
*buf
, int iMemoryNum
)
6083 return allocCameraMemory(ionClient
, buf
, iMemoryNum
, 0);
6086 int ExynosCameraHWInterface2::allocCameraMemory(ion_client ionClient
, ExynosBuffer
*buf
, int iMemoryNum
, int cacheFlag
)
6092 if (ionClient
== 0) {
6093 ALOGE("[%s] ionClient is zero (%d)\n", __FUNCTION__
, ionClient
);
6097 for (i
= 0 ; i
< iMemoryNum
; i
++) {
6098 if (buf
->size
.extS
[i
] == 0) {
6101 if (1 << i
& cacheFlag
)
6102 flag
= ION_FLAG_CACHED
| ION_FLAG_CACHED_NEEDS_SYNC
;
6105 buf
->fd
.extFd
[i
] = ion_alloc(ionClient
, \
6106 buf
->size
.extS
[i
], 0, ION_HEAP_SYSTEM_MASK
, flag
);
6107 if ((buf
->fd
.extFd
[i
] == -1) ||(buf
->fd
.extFd
[i
] == 0)) {
6108 ALOGE("[%s]ion_alloc(%d) failed\n", __FUNCTION__
, buf
->size
.extS
[i
]);
6109 buf
->fd
.extFd
[i
] = -1;
6110 freeCameraMemory(buf
, iMemoryNum
);
6114 buf
->virt
.extP
[i
] = (char *)ion_map(buf
->fd
.extFd
[i
], \
6115 buf
->size
.extS
[i
], 0);
6116 if ((buf
->virt
.extP
[i
] == (char *)MAP_FAILED
) || (buf
->virt
.extP
[i
] == NULL
)) {
6117 ALOGE("[%s]src ion map failed(%d)\n", __FUNCTION__
, buf
->size
.extS
[i
]);
6118 buf
->virt
.extP
[i
] = (char *)MAP_FAILED
;
6119 freeCameraMemory(buf
, iMemoryNum
);
6122 ALOGV("allocCameraMem : [%d][0x%08x] size(%d) flag(%d)", i
, (unsigned int)(buf
->virt
.extP
[i
]), buf
->size
.extS
[i
], flag
);
6128 void ExynosCameraHWInterface2::freeCameraMemory(ExynosBuffer
*buf
, int iMemoryNum
)
6134 for (i
=0;i
<iMemoryNum
;i
++) {
6135 if (buf
->fd
.extFd
[i
] != -1) {
6136 if (buf
->virt
.extP
[i
] != (char *)MAP_FAILED
) {
6137 ret
= ion_unmap(buf
->virt
.extP
[i
], buf
->size
.extS
[i
]);
6139 ALOGE("ERR(%s)", __FUNCTION__
);
6141 ion_free(buf
->fd
.extFd
[i
]);
6142 ALOGV("freeCameraMemory : [%d][0x%08x] size(%d)", i
, (unsigned int)(buf
->virt
.extP
[i
]), buf
->size
.extS
[i
]);
6144 buf
->fd
.extFd
[i
] = -1;
6145 buf
->virt
.extP
[i
] = (char *)MAP_FAILED
;
6146 buf
->size
.extS
[i
] = 0;
6150 void ExynosCameraHWInterface2::initCameraMemory(ExynosBuffer
*buf
, int iMemoryNum
)
6153 for (i
=0;i
<iMemoryNum
;i
++) {
6154 buf
->virt
.extP
[i
] = (char *)MAP_FAILED
;
6155 buf
->fd
.extFd
[i
] = -1;
6156 buf
->size
.extS
[i
] = 0;
// File-scope state shared by the HAL open/close entry points below.
// NOTE(review): "g_camera_vaild" is a long-standing typo for "valid"; it is
// referenced throughout this file, so it is documented here rather than
// renamed.
static camera2_device_t *g_cam2_device = NULL; // singleton device; only one camera open at a time
static bool g_camera_vaild = false;            // true while an opened camera is usable
static Mutex g_camera_mutex;                   // serializes HAL2_camera_device_open/close
ExynosCamera2 * g_camera2[2] = { NULL, NULL }; // per-id static-info providers (0 = back, 1 = front per HAL2_getCameraInfo)
6168 static int HAL2_camera_device_close(struct hw_device_t
* device
)
6170 Mutex::Autolock
lock(g_camera_mutex
);
6171 ALOGD("(%s): ENTER", __FUNCTION__
);
6174 camera2_device_t
*cam_device
= (camera2_device_t
*)device
;
6175 ALOGV("cam_device(0x%08x):", (unsigned int)cam_device
);
6176 ALOGV("g_cam2_device(0x%08x):", (unsigned int)g_cam2_device
);
6177 delete static_cast<ExynosCameraHWInterface2
*>(cam_device
->priv
);
6179 g_camera_vaild
= false;
6180 g_cam2_device
= NULL
;
6183 ALOGD("(%s): EXIT", __FUNCTION__
);
6187 static inline ExynosCameraHWInterface2
*obj(const struct camera2_device
*dev
)
6189 return reinterpret_cast<ExynosCameraHWInterface2
*>(dev
->priv
);
6192 static int HAL2_device_set_request_queue_src_ops(const struct camera2_device
*dev
,
6193 const camera2_request_queue_src_ops_t
*request_src_ops
)
6195 ALOGV("DEBUG(%s):", __FUNCTION__
);
6196 return obj(dev
)->setRequestQueueSrcOps(request_src_ops
);
6199 static int HAL2_device_notify_request_queue_not_empty(const struct camera2_device
*dev
)
6201 ALOGV("DEBUG(%s):", __FUNCTION__
);
6202 return obj(dev
)->notifyRequestQueueNotEmpty();
6205 static int HAL2_device_set_frame_queue_dst_ops(const struct camera2_device
*dev
,
6206 const camera2_frame_queue_dst_ops_t
*frame_dst_ops
)
6208 ALOGV("DEBUG(%s):", __FUNCTION__
);
6209 return obj(dev
)->setFrameQueueDstOps(frame_dst_ops
);
6212 static int HAL2_device_get_in_progress_count(const struct camera2_device
*dev
)
6214 ALOGV("DEBUG(%s):", __FUNCTION__
);
6215 return obj(dev
)->getInProgressCount();
6218 static int HAL2_device_flush_captures_in_progress(const struct camera2_device
*dev
)
6220 ALOGV("DEBUG(%s):", __FUNCTION__
);
6221 return obj(dev
)->flushCapturesInProgress();
6224 static int HAL2_device_construct_default_request(const struct camera2_device
*dev
,
6225 int request_template
, camera_metadata_t
**request
)
6227 ALOGV("DEBUG(%s):", __FUNCTION__
);
6228 return obj(dev
)->constructDefaultRequest(request_template
, request
);
6231 static int HAL2_device_allocate_stream(
6232 const struct camera2_device
*dev
,
6237 const camera2_stream_ops_t
*stream_ops
,
6239 uint32_t *stream_id
,
6240 uint32_t *format_actual
,
6242 uint32_t *max_buffers
)
6244 ALOGV("(%s): ", __FUNCTION__
);
6245 return obj(dev
)->allocateStream(width
, height
, format
, stream_ops
,
6246 stream_id
, format_actual
, usage
, max_buffers
);
6249 static int HAL2_device_register_stream_buffers(const struct camera2_device
*dev
,
6252 buffer_handle_t
*buffers
)
6254 ALOGV("DEBUG(%s):", __FUNCTION__
);
6255 return obj(dev
)->registerStreamBuffers(stream_id
, num_buffers
, buffers
);
6258 static int HAL2_device_release_stream(
6259 const struct camera2_device
*dev
,
6262 ALOGV("DEBUG(%s)(id: %d):", __FUNCTION__
, stream_id
);
6263 if (!g_camera_vaild
)
6265 return obj(dev
)->releaseStream(stream_id
);
6268 static int HAL2_device_allocate_reprocess_stream(
6269 const struct camera2_device
*dev
,
6273 const camera2_stream_in_ops_t
*reprocess_stream_ops
,
6275 uint32_t *stream_id
,
6276 uint32_t *consumer_usage
,
6277 uint32_t *max_buffers
)
6279 ALOGV("DEBUG(%s):", __FUNCTION__
);
6280 return obj(dev
)->allocateReprocessStream(width
, height
, format
, reprocess_stream_ops
,
6281 stream_id
, consumer_usage
, max_buffers
);
6284 static int HAL2_device_allocate_reprocess_stream_from_stream(
6285 const struct camera2_device
*dev
,
6286 uint32_t output_stream_id
,
6287 const camera2_stream_in_ops_t
*reprocess_stream_ops
,
6289 uint32_t *stream_id
)
6291 ALOGV("DEBUG(%s):", __FUNCTION__
);
6292 return obj(dev
)->allocateReprocessStreamFromStream(output_stream_id
,
6293 reprocess_stream_ops
, stream_id
);
6296 static int HAL2_device_release_reprocess_stream(
6297 const struct camera2_device
*dev
,
6300 ALOGV("DEBUG(%s):", __FUNCTION__
);
6301 return obj(dev
)->releaseReprocessStream(stream_id
);
6304 static int HAL2_device_trigger_action(const struct camera2_device
*dev
,
6305 uint32_t trigger_id
,
6309 ALOGV("DEBUG(%s):", __FUNCTION__
);
6310 if (!g_camera_vaild
)
6312 return obj(dev
)->triggerAction(trigger_id
, ext1
, ext2
);
6315 static int HAL2_device_set_notify_callback(const struct camera2_device
*dev
,
6316 camera2_notify_callback notify_cb
,
6319 ALOGV("DEBUG(%s):", __FUNCTION__
);
6320 return obj(dev
)->setNotifyCallback(notify_cb
, user
);
6323 static int HAL2_device_get_metadata_vendor_tag_ops(const struct camera2_device
*dev
,
6324 vendor_tag_query_ops_t
**ops
)
6326 ALOGV("DEBUG(%s):", __FUNCTION__
);
6327 return obj(dev
)->getMetadataVendorTagOps(ops
);
6330 static int HAL2_device_dump(const struct camera2_device
*dev
, int fd
)
6332 ALOGV("DEBUG(%s):", __FUNCTION__
);
6333 return obj(dev
)->dump(fd
);
6340 static int HAL2_getNumberOfCameras()
6342 ALOGV("(%s): returning 2", __FUNCTION__
);
6347 static int HAL2_getCameraInfo(int cameraId
, struct camera_info
*info
)
6349 ALOGV("DEBUG(%s): cameraID: %d", __FUNCTION__
, cameraId
);
6350 static camera_metadata_t
* mCameraInfo
[2] = {NULL
, NULL
};
6354 if (cameraId
== 0) {
6355 info
->facing
= CAMERA_FACING_BACK
;
6357 g_camera2
[0] = new ExynosCamera2(0);
6359 else if (cameraId
== 1) {
6360 info
->facing
= CAMERA_FACING_FRONT
;
6362 g_camera2
[1] = new ExynosCamera2(1);
6367 info
->orientation
= 0;
6368 info
->device_version
= HARDWARE_DEVICE_API_VERSION(2, 0);
6369 if (mCameraInfo
[cameraId
] == NULL
) {
6370 res
= g_camera2
[cameraId
]->constructStaticInfo(&(mCameraInfo
[cameraId
]), cameraId
, true);
6372 ALOGE("%s: Unable to allocate static info: %s (%d)",
6373 __FUNCTION__
, strerror(-res
), res
);
6376 res
= g_camera2
[cameraId
]->constructStaticInfo(&(mCameraInfo
[cameraId
]), cameraId
, false);
6378 ALOGE("%s: Unable to fill in static info: %s (%d)",
6379 __FUNCTION__
, strerror(-res
), res
);
6383 info
->static_camera_characteristics
= mCameraInfo
[cameraId
];
// Expands to a GNU designated initializer ("member : value") wiring each
// camera2_device_ops_t member to its HAL2_device_* trampoline.
#define SET_METHOD(m) m : HAL2_device_##m

// Dispatch table handed to the framework via g_cam2_device->ops.
static camera2_device_ops_t camera2_device_ops = {
        SET_METHOD(set_request_queue_src_ops),
        SET_METHOD(notify_request_queue_not_empty),
        SET_METHOD(set_frame_queue_dst_ops),
        SET_METHOD(get_in_progress_count),
        SET_METHOD(flush_captures_in_progress),
        SET_METHOD(construct_default_request),
        SET_METHOD(allocate_stream),
        SET_METHOD(register_stream_buffers),
        SET_METHOD(release_stream),
        SET_METHOD(allocate_reprocess_stream),
        SET_METHOD(allocate_reprocess_stream_from_stream),
        SET_METHOD(release_reprocess_stream),
        SET_METHOD(trigger_action),
        SET_METHOD(set_notify_callback),
        SET_METHOD(get_metadata_vendor_tag_ops),
        // NOTE(review): the extraction of this chunk dropped at least one
        // entry here (likely SET_METHOD(dump)) and the closing "};" —
        // restore them from the original file before building.
        get_instance_metadata : NULL
// Module-level open() entry point: validates the id, waits for any previous
// device instance to disappear, allocates the singleton camera2_device_t,
// wires up common fields and the ops table, and creates the C++
// implementation object in priv.
// NOTE(review): extraction dropped several lines here (the "id" parameter
// declaration, scope braces, and every return statement); the gaps are
// marked below — restore them from the original file before building.
static int HAL2_camera_device_open(const struct hw_module_t* module,
                  /* [dropped: const char *id parameter line] */
                  struct hw_device_t** device)
    int cameraId = atoi(id);
    int openInvalid = 0;

    // Serialize against HAL2_camera_device_close().
    Mutex::Autolock lock(g_camera_mutex);
    if (g_camera_vaild) {
        // Only one camera may be open at a time.
        ALOGE("ERR(%s): Can't open, other camera is in use", __FUNCTION__);
        /* [dropped: error return] */
    g_camera_vaild = false;
    ALOGD("\n\n>>> I'm Samsung's CameraHAL_2(ID:%d) <<<\n\n", cameraId);
    if (cameraId < 0 || cameraId >= HAL2_getNumberOfCameras()) {
        ALOGE("ERR(%s):Invalid camera ID %s", __FUNCTION__, id);
        /* [dropped: error return] */

    ALOGD("g_cam2_device : 0x%08x", (unsigned int)g_cam2_device);
    if (g_cam2_device) {
        if (obj(g_cam2_device)->getCameraId() == cameraId) {
            // Same camera requested again: reuse the existing instance.
            ALOGD("DEBUG(%s):returning existing camera ID %s", __FUNCTION__, id);
            /* [dropped: jump to the done/return path] */
            // Different camera: busy-wait until close() clears the old one.
            ALOGD("(%s): START waiting for cam device free", __FUNCTION__);
            while (g_cam2_device)
                usleep(SIG_WAITING_TICK);
            ALOGD("(%s): END waiting for cam device free", __FUNCTION__);

    g_cam2_device = (camera2_device_t *)malloc(sizeof(camera2_device_t));
    ALOGV("g_cam2_device : 0x%08x", (unsigned int)g_cam2_device);
    /* [dropped: malloc-failure check/return] */

    g_cam2_device->common.tag = HARDWARE_DEVICE_TAG;
    g_cam2_device->common.version = CAMERA_DEVICE_API_VERSION_2_0;
    g_cam2_device->common.module = const_cast<hw_module_t *>(module);
    g_cam2_device->common.close = HAL2_camera_device_close;

    g_cam2_device->ops = &camera2_device_ops;

    ALOGV("DEBUG(%s):open camera2 %s", __FUNCTION__, id);

    // openInvalid is an out-parameter signalling constructor success.
    g_cam2_device->priv = new ExynosCameraHWInterface2(cameraId, g_cam2_device, g_camera2[cameraId], &openInvalid);
        ALOGE("DEBUG(%s): ExynosCameraHWInterface2 creation failed", __FUNCTION__);
        /* [dropped: error return] */

    *device = (hw_device_t *)g_cam2_device;
    ALOGV("DEBUG(%s):opened camera2 %s (%p)", __FUNCTION__, id, *device);
    g_camera_vaild = true;
    /* [dropped: success return and closing brace] */
// Module method table: the HAL loader only requires open().
// NOTE(review): the closing "};" of this initializer was dropped in
// extraction — restore it from the original file before building.
static hw_module_methods_t camera_module_methods = {
    open : HAL2_camera_device_open
// HAL module descriptor. HAL_MODULE_INFO_SYM is the well-known symbol the
// Android HAL loader dlopen()s and looks up.
// NOTE(review): extraction dropped the "common : {" opener and the trailing
// members of the inner hw_module_t initializer — restore them from the
// original file before building.
struct camera_module HAL_MODULE_INFO_SYM = {
      tag : HARDWARE_MODULE_TAG,
      module_api_version : CAMERA_MODULE_API_VERSION_2_0,
      hal_api_version : HARDWARE_HAL_API_VERSION,
      id : CAMERA_HARDWARE_MODULE_ID,
      name : "Exynos Camera HAL2",
      author : "Samsung Corporation",
      methods : &camera_module_methods,         // provides open()
    get_number_of_cameras : HAL2_getNumberOfCameras,
    get_camera_info : HAL2_getCameraInfo,
    set_callbacks : NULL,                       // no module-level callbacks
    get_vendor_tag_ops : NULL,                  // no vendor metadata tags
}; // namespace android