Commit | Line | Data |
---|---|---|
c15a6b00 JS |
1 | /* |
2 | ** | |
3 | ** Copyright 2008, The Android Open Source Project | |
4 | ** Copyright 2012, Samsung Electronics Co. LTD | |
5 | ** | |
6 | ** Licensed under the Apache License, Version 2.0 (the "License"); | |
7 | ** you may not use this file except in compliance with the License. | |
8 | ** You may obtain a copy of the License at | |
9 | ** | |
10 | ** http://www.apache.org/licenses/LICENSE-2.0 | |
11 | ** | |
12 | ** Unless required by applicable law or agreed to in writing, software | |
13 | ** distributed under the License is distributed on an "AS IS" BASIS, | |
14 | ** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |
15 | ** See the License for the specific language governing permissions and | |
16 | ** limitations under the License. | |
17 | */ | |
18 | ||
19 | /*! | |
20 | * \file ExynosCameraHWInterface2.cpp | |
21 | * \brief source file for Android Camera API 2.0 HAL | |
22 | * \author Sungjoong Kang(sj3.kang@samsung.com) | |
13d8c7b4 | 23 | * \date 2012/07/10 |
c15a6b00 JS |
24 | * |
25 | * <b>Revision History: </b> | |
26 | * - 2012/05/31 : Sungjoong Kang(sj3.kang@samsung.com) \n | |
27 | * Initial Release | |
13d8c7b4 SK |
28 | * |
29 | * - 2012/07/10 : Sungjoong Kang(sj3.kang@samsung.com) \n | |
30 | * 2nd Release | |
31 | * | |
c15a6b00 JS |
32 | */ |
33 | ||
34 | //#define LOG_NDEBUG 0 | |
9dd63e1f | 35 | #define LOG_TAG "ExynosCameraHAL2" |
c15a6b00 | 36 | #include <utils/Log.h> |
3db6fe61 | 37 | #include <math.h> |
c15a6b00 JS |
38 | |
39 | #include "ExynosCameraHWInterface2.h" | |
40 | #include "exynos_format.h" | |
41 | ||
c15a6b00 JS |
42 | namespace android { |
43 | ||
9dd63e1f SK |
44 | void m_savePostView(const char *fname, uint8_t *buf, uint32_t size) |
45 | { | |
46 | int nw; | |
47 | int cnt = 0; | |
48 | uint32_t written = 0; | |
49 | ||
ad37861e | 50 | ALOGV("opening file [%s], address[%x], size(%d)", fname, (unsigned int)buf, size); |
9dd63e1f SK |
51 | int fd = open(fname, O_RDWR | O_CREAT, 0644); |
52 | if (fd < 0) { | |
53 | ALOGE("failed to create file [%s]: %s", fname, strerror(errno)); | |
54 | return; | |
55 | } | |
56 | ||
ad37861e | 57 | ALOGV("writing %d bytes to file [%s]", size, fname); |
9dd63e1f SK |
58 | while (written < size) { |
59 | nw = ::write(fd, buf + written, size - written); | |
60 | if (nw < 0) { | |
61 | ALOGE("failed to write to file %d [%s]: %s",written,fname, strerror(errno)); | |
62 | break; | |
63 | } | |
64 | written += nw; | |
65 | cnt++; | |
66 | } | |
ad37861e | 67 | ALOGV("done writing %d bytes to file [%s] in %d passes",size, fname, cnt); |
9dd63e1f SK |
68 | ::close(fd); |
69 | } | |
70 | ||
c15a6b00 JS |
71 | int get_pixel_depth(uint32_t fmt) |
72 | { | |
73 | int depth = 0; | |
74 | ||
75 | switch (fmt) { | |
76 | case V4L2_PIX_FMT_JPEG: | |
77 | depth = 8; | |
78 | break; | |
79 | ||
80 | case V4L2_PIX_FMT_NV12: | |
81 | case V4L2_PIX_FMT_NV21: | |
82 | case V4L2_PIX_FMT_YUV420: | |
83 | case V4L2_PIX_FMT_YVU420M: | |
84 | case V4L2_PIX_FMT_NV12M: | |
85 | case V4L2_PIX_FMT_NV12MT: | |
86 | depth = 12; | |
87 | break; | |
88 | ||
89 | case V4L2_PIX_FMT_RGB565: | |
90 | case V4L2_PIX_FMT_YUYV: | |
91 | case V4L2_PIX_FMT_YVYU: | |
92 | case V4L2_PIX_FMT_UYVY: | |
93 | case V4L2_PIX_FMT_VYUY: | |
94 | case V4L2_PIX_FMT_NV16: | |
95 | case V4L2_PIX_FMT_NV61: | |
96 | case V4L2_PIX_FMT_YUV422P: | |
97 | case V4L2_PIX_FMT_SBGGR10: | |
98 | case V4L2_PIX_FMT_SBGGR12: | |
99 | case V4L2_PIX_FMT_SBGGR16: | |
100 | depth = 16; | |
101 | break; | |
102 | ||
103 | case V4L2_PIX_FMT_RGB32: | |
104 | depth = 32; | |
105 | break; | |
106 | default: | |
107 | ALOGE("Get depth failed(format : %d)", fmt); | |
108 | break; | |
109 | } | |
110 | ||
111 | return depth; | |
13d8c7b4 | 112 | } |
c15a6b00 JS |
113 | |
114 | int cam_int_s_fmt(node_info_t *node) | |
115 | { | |
116 | struct v4l2_format v4l2_fmt; | |
117 | unsigned int framesize; | |
118 | int ret; | |
119 | ||
120 | memset(&v4l2_fmt, 0, sizeof(struct v4l2_format)); | |
121 | ||
122 | v4l2_fmt.type = node->type; | |
123 | framesize = (node->width * node->height * get_pixel_depth(node->format)) / 8; | |
124 | ||
125 | if (node->planes >= 1) { | |
126 | v4l2_fmt.fmt.pix_mp.width = node->width; | |
127 | v4l2_fmt.fmt.pix_mp.height = node->height; | |
128 | v4l2_fmt.fmt.pix_mp.pixelformat = node->format; | |
129 | v4l2_fmt.fmt.pix_mp.field = V4L2_FIELD_ANY; | |
130 | } else { | |
13d8c7b4 | 131 | ALOGE("%s:S_FMT, Out of bound : Number of element plane",__FUNCTION__); |
c15a6b00 JS |
132 | } |
133 | ||
134 | /* Set up for capture */ | |
135 | ret = exynos_v4l2_s_fmt(node->fd, &v4l2_fmt); | |
136 | ||
137 | if (ret < 0) | |
13d8c7b4 | 138 | ALOGE("%s: exynos_v4l2_s_fmt fail (%d)",__FUNCTION__, ret); |
c15a6b00 | 139 | |
be494d19 | 140 | |
c15a6b00 JS |
141 | return ret; |
142 | } | |
143 | ||
144 | int cam_int_reqbufs(node_info_t *node) | |
145 | { | |
146 | struct v4l2_requestbuffers req; | |
147 | int ret; | |
148 | ||
149 | req.count = node->buffers; | |
150 | req.type = node->type; | |
151 | req.memory = node->memory; | |
152 | ||
153 | ret = exynos_v4l2_reqbufs(node->fd, &req); | |
154 | ||
155 | if (ret < 0) | |
13d8c7b4 | 156 | ALOGE("%s: VIDIOC_REQBUFS (fd:%d) failed (%d)",__FUNCTION__,node->fd, ret); |
c15a6b00 JS |
157 | |
158 | return req.count; | |
159 | } | |
160 | ||
161 | int cam_int_qbuf(node_info_t *node, int index) | |
162 | { | |
163 | struct v4l2_buffer v4l2_buf; | |
164 | struct v4l2_plane planes[VIDEO_MAX_PLANES]; | |
165 | int i; | |
166 | int ret = 0; | |
167 | ||
168 | v4l2_buf.m.planes = planes; | |
169 | v4l2_buf.type = node->type; | |
170 | v4l2_buf.memory = node->memory; | |
171 | v4l2_buf.index = index; | |
172 | v4l2_buf.length = node->planes; | |
173 | ||
174 | for(i = 0; i < node->planes; i++){ | |
13d8c7b4 SK |
175 | v4l2_buf.m.planes[i].m.fd = (int)(node->buffer[index].fd.extFd[i]); |
176 | v4l2_buf.m.planes[i].length = (unsigned long)(node->buffer[index].size.extS[i]); | |
c15a6b00 JS |
177 | } |
178 | ||
179 | ret = exynos_v4l2_qbuf(node->fd, &v4l2_buf); | |
180 | ||
181 | if (ret < 0) | |
13d8c7b4 | 182 | ALOGE("%s: cam_int_qbuf failed (index:%d)(ret:%d)",__FUNCTION__, index, ret); |
c15a6b00 JS |
183 | |
184 | return ret; | |
185 | } | |
186 | ||
187 | int cam_int_streamon(node_info_t *node) | |
188 | { | |
189 | enum v4l2_buf_type type = node->type; | |
190 | int ret; | |
191 | ||
be494d19 | 192 | |
c15a6b00 JS |
193 | ret = exynos_v4l2_streamon(node->fd, type); |
194 | ||
195 | if (ret < 0) | |
ad37861e | 196 | ALOGE("%s: VIDIOC_STREAMON failed [%d] (%d)",__FUNCTION__, node->fd,ret); |
c15a6b00 JS |
197 | |
198 | ALOGV("On streaming I/O... ... fd(%d)", node->fd); | |
199 | ||
200 | return ret; | |
201 | } | |
202 | ||
13d8c7b4 SK |
203 | int cam_int_streamoff(node_info_t *node) |
204 | { | |
ad37861e SK |
205 | enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; |
206 | int ret; | |
13d8c7b4 | 207 | |
be494d19 | 208 | |
ad37861e SK |
209 | ALOGV("Off streaming I/O... fd(%d)", node->fd); |
210 | ret = exynos_v4l2_streamoff(node->fd, type); | |
13d8c7b4 SK |
211 | |
212 | if (ret < 0) | |
213 | ALOGE("%s: VIDIOC_STREAMOFF failed (%d)",__FUNCTION__, ret); | |
214 | ||
ad37861e | 215 | return ret; |
13d8c7b4 SK |
216 | } |
217 | ||
9dd63e1f SK |
218 | int isp_int_streamoff(node_info_t *node) |
219 | { | |
ad37861e SK |
220 | enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; |
221 | int ret; | |
9dd63e1f | 222 | |
ad37861e SK |
223 | ALOGV("Off streaming I/O... fd(%d)", node->fd); |
224 | ret = exynos_v4l2_streamoff(node->fd, type); | |
9dd63e1f SK |
225 | |
226 | if (ret < 0) | |
227 | ALOGE("%s: VIDIOC_STREAMOFF failed (%d)",__FUNCTION__, ret); | |
228 | ||
ad37861e | 229 | return ret; |
9dd63e1f SK |
230 | } |
231 | ||
c15a6b00 JS |
232 | int cam_int_dqbuf(node_info_t *node) |
233 | { | |
234 | struct v4l2_buffer v4l2_buf; | |
235 | struct v4l2_plane planes[VIDEO_MAX_PLANES]; | |
236 | int ret; | |
237 | ||
238 | v4l2_buf.type = node->type; | |
239 | v4l2_buf.memory = node->memory; | |
240 | v4l2_buf.m.planes = planes; | |
241 | v4l2_buf.length = node->planes; | |
242 | ||
243 | ret = exynos_v4l2_dqbuf(node->fd, &v4l2_buf); | |
244 | if (ret < 0) | |
13d8c7b4 | 245 | ALOGE("%s: VIDIOC_DQBUF failed (%d)",__FUNCTION__, ret); |
c15a6b00 JS |
246 | |
247 | return v4l2_buf.index; | |
248 | } | |
249 | ||
feb7df4c SK |
250 | int cam_int_dqbuf(node_info_t *node, int num_plane) |
251 | { | |
252 | struct v4l2_buffer v4l2_buf; | |
253 | struct v4l2_plane planes[VIDEO_MAX_PLANES]; | |
254 | int ret; | |
255 | ||
256 | v4l2_buf.type = node->type; | |
257 | v4l2_buf.memory = node->memory; | |
258 | v4l2_buf.m.planes = planes; | |
259 | v4l2_buf.length = num_plane; | |
260 | ||
261 | ret = exynos_v4l2_dqbuf(node->fd, &v4l2_buf); | |
262 | if (ret < 0) | |
263 | ALOGE("%s: VIDIOC_DQBUF failed (%d)",__FUNCTION__, ret); | |
264 | ||
265 | return v4l2_buf.index; | |
266 | } | |
267 | ||
c15a6b00 JS |
268 | int cam_int_s_input(node_info_t *node, int index) |
269 | { | |
270 | int ret; | |
13d8c7b4 | 271 | |
c15a6b00 JS |
272 | ret = exynos_v4l2_s_input(node->fd, index); |
273 | if (ret < 0) | |
13d8c7b4 | 274 | ALOGE("%s: VIDIOC_S_INPUT failed (%d)",__FUNCTION__, ret); |
c15a6b00 JS |
275 | |
276 | return ret; | |
277 | } | |
278 | ||
279 | ||
280 | gralloc_module_t const* ExynosCameraHWInterface2::m_grallocHal; | |
281 | ||
282 | RequestManager::RequestManager(SignalDrivenThread* main_thread): | |
a3bcc37b | 283 | m_vdisEnable(false), |
2bdec060 SK |
284 | m_lastAeMode(0), |
285 | m_lastAaMode(0), | |
286 | m_lastAwbMode(0), | |
5c88d1f2 | 287 | m_vdisBubbleEn(false), |
2bdec060 | 288 | m_lastAeComp(0), |
041f38de | 289 | m_lastCompletedFrameCnt(-1) |
c15a6b00 JS |
290 | { |
291 | m_metadataConverter = new MetadataConverter; | |
292 | m_mainThread = main_thread; | |
2adfa429 | 293 | ResetEntry(); |
ad37861e | 294 | m_sensorPipelineSkipCnt = 0; |
c15a6b00 JS |
295 | return; |
296 | } | |
297 | ||
298 | RequestManager::~RequestManager() | |
299 | { | |
6d8e5b08 SK |
300 | ALOGV("%s", __FUNCTION__); |
301 | if (m_metadataConverter != NULL) { | |
302 | delete m_metadataConverter; | |
303 | m_metadataConverter = NULL; | |
304 | } | |
305 | ||
52f54308 | 306 | releaseSensorQ(); |
c15a6b00 JS |
307 | return; |
308 | } | |
309 | ||
2adfa429 JS |
310 | void RequestManager::ResetEntry() |
311 | { | |
312 | Mutex::Autolock lock(m_requestMutex); | |
1264ab16 | 313 | Mutex::Autolock lock2(m_numOfEntriesLock); |
2adfa429 JS |
314 | for (int i=0 ; i<NUM_MAX_REQUEST_MGR_ENTRY; i++) { |
315 | memset(&(entries[i]), 0x00, sizeof(request_manager_entry_t)); | |
316 | entries[i].internal_shot.shot.ctl.request.frameCount = -1; | |
317 | } | |
318 | m_numOfEntries = 0; | |
319 | m_entryInsertionIndex = -1; | |
320 | m_entryProcessingIndex = -1; | |
321 | m_entryFrameOutputIndex = -1; | |
322 | } | |
323 | ||
c15a6b00 JS |
324 | int RequestManager::GetNumEntries() |
325 | { | |
1264ab16 | 326 | Mutex::Autolock lock(m_numOfEntriesLock); |
c15a6b00 JS |
327 | return m_numOfEntries; |
328 | } | |
329 | ||
9dd63e1f SK |
330 | void RequestManager::SetDefaultParameters(int cropX) |
331 | { | |
332 | m_cropX = cropX; | |
333 | } | |
334 | ||
c15a6b00 JS |
335 | bool RequestManager::IsRequestQueueFull() |
336 | { | |
337 | Mutex::Autolock lock(m_requestMutex); | |
1264ab16 | 338 | Mutex::Autolock lock2(m_numOfEntriesLock); |
c15a6b00 JS |
339 | if (m_numOfEntries>=NUM_MAX_REQUEST_MGR_ENTRY) |
340 | return true; | |
341 | else | |
342 | return false; | |
343 | } | |
344 | ||
ca714238 | 345 | void RequestManager::RegisterRequest(camera_metadata_t * new_request, int * afMode, uint32_t * afRegion) |
c15a6b00 | 346 | { |
13d8c7b4 SK |
347 | ALOGV("DEBUG(%s):", __FUNCTION__); |
348 | ||
c15a6b00 | 349 | Mutex::Autolock lock(m_requestMutex); |
1264ab16 | 350 | Mutex::Autolock lock2(m_numOfEntriesLock); |
13d8c7b4 | 351 | |
c15a6b00 | 352 | request_manager_entry * newEntry = NULL; |
9dd63e1f | 353 | int newInsertionIndex = GetNextIndex(m_entryInsertionIndex); |
ca714238 | 354 | ALOGV("DEBUG(%s): got lock, new insertIndex(%d), cnt before reg(%d)", __FUNCTION__,newInsertionIndex, m_numOfEntries ); |
13d8c7b4 | 355 | |
c15a6b00 | 356 | |
c15a6b00 JS |
357 | newEntry = &(entries[newInsertionIndex]); |
358 | ||
359 | if (newEntry->status!=EMPTY) { | |
13d8c7b4 SK |
360 | ALOGV("DEBUG(%s): Circular buffer abnormal ", __FUNCTION__); |
361 | return; | |
c15a6b00 JS |
362 | } |
363 | newEntry->status = REGISTERED; | |
364 | newEntry->original_request = new_request; | |
ad37861e | 365 | memset(&(newEntry->internal_shot), 0, sizeof(struct camera2_shot_ext)); |
13d8c7b4 | 366 | m_metadataConverter->ToInternalShot(new_request, &(newEntry->internal_shot)); |
a85ec381 SK |
367 | newEntry->output_stream_count = 0; |
368 | if (newEntry->internal_shot.shot.ctl.request.outputStreams[0] & MASK_OUTPUT_SCP) | |
369 | newEntry->output_stream_count++; | |
370 | ||
371 | if (newEntry->internal_shot.shot.ctl.request.outputStreams[0] & MASK_OUTPUT_SCC) | |
372 | newEntry->output_stream_count++; | |
c15a6b00 JS |
373 | |
374 | m_numOfEntries++; | |
375 | m_entryInsertionIndex = newInsertionIndex; | |
376 | ||
13d8c7b4 | 377 | |
ca714238 SK |
378 | *afMode = (int)(newEntry->internal_shot.shot.ctl.aa.afMode); |
379 | afRegion[0] = newEntry->internal_shot.shot.ctl.aa.afRegions[0]; | |
380 | afRegion[1] = newEntry->internal_shot.shot.ctl.aa.afRegions[1]; | |
381 | afRegion[2] = newEntry->internal_shot.shot.ctl.aa.afRegions[2]; | |
382 | afRegion[3] = newEntry->internal_shot.shot.ctl.aa.afRegions[3]; | |
c15a6b00 | 383 | ALOGV("## RegisterReq DONE num(%d), insert(%d), processing(%d), frame(%d), (frameCnt(%d))", |
be494d19 | 384 | m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex, newEntry->internal_shot.shot.ctl.request.frameCount); |
c15a6b00 JS |
385 | } |
386 | ||
387 | void RequestManager::DeregisterRequest(camera_metadata_t ** deregistered_request) | |
388 | { | |
13d8c7b4 | 389 | ALOGV("DEBUG(%s):", __FUNCTION__); |
ad37861e SK |
390 | int frame_index; |
391 | request_manager_entry * currentEntry; | |
c15a6b00 | 392 | |
ad37861e | 393 | Mutex::Autolock lock(m_requestMutex); |
1264ab16 | 394 | Mutex::Autolock lock2(m_numOfEntriesLock); |
13d8c7b4 | 395 | |
f9a06609 | 396 | frame_index = GetCompletedIndex(); |
ad37861e | 397 | currentEntry = &(entries[frame_index]); |
f9a06609 | 398 | if (currentEntry->status != COMPLETED) { |
5c88d1f2 C |
399 | CAM_LOGD("DBG(%s): Circular buffer abnormal. processing(%d), frame(%d), status(%d) ", __FUNCTION__, |
400 | m_entryProcessingIndex, frame_index,(int)(currentEntry->status)); | |
13d8c7b4 | 401 | return; |
c15a6b00 | 402 | } |
13d8c7b4 SK |
403 | if (deregistered_request) *deregistered_request = currentEntry->original_request; |
404 | ||
041f38de SK |
405 | m_lastCompletedFrameCnt = currentEntry->internal_shot.shot.ctl.request.frameCount; |
406 | ||
c15a6b00 JS |
407 | currentEntry->status = EMPTY; |
408 | currentEntry->original_request = NULL; | |
be494d19 SK |
409 | memset(&(currentEntry->internal_shot), 0, sizeof(struct camera2_shot_ext)); |
410 | currentEntry->internal_shot.shot.ctl.request.frameCount = -1; | |
c15a6b00 JS |
411 | currentEntry->output_stream_count = 0; |
412 | m_numOfEntries--; | |
413 | ALOGV("## DeRegistReq DONE num(%d), insert(%d), processing(%d), frame(%d)", | |
414 | m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex); | |
13d8c7b4 | 415 | |
041f38de | 416 | CheckCompleted(GetNextIndex(frame_index)); |
c15a6b00 | 417 | return; |
c15a6b00 JS |
418 | } |
419 | ||
13d8c7b4 | 420 | bool RequestManager::PrepareFrame(size_t* num_entries, size_t* frame_size, |
0f26b20f | 421 | camera_metadata_t ** prepared_frame, int afState) |
c15a6b00 | 422 | { |
13d8c7b4 | 423 | ALOGV("DEBUG(%s):", __FUNCTION__); |
c15a6b00 JS |
424 | Mutex::Autolock lock(m_requestMutex); |
425 | status_t res = NO_ERROR; | |
f9a06609 | 426 | int tempFrameOutputIndex = GetCompletedIndex(); |
13d8c7b4 SK |
427 | request_manager_entry * currentEntry = &(entries[tempFrameOutputIndex]); |
428 | ALOGV("DEBUG(%s): processing(%d), frameOut(%d), insert(%d) recentlycompleted(%d)", __FUNCTION__, | |
429 | m_entryProcessingIndex, m_entryFrameOutputIndex, m_entryInsertionIndex, m_completedIndex); | |
430 | ||
f9a06609 | 431 | if (currentEntry->status != COMPLETED) { |
ad37861e | 432 | ALOGV("DBG(%s): Circular buffer abnormal status(%d)", __FUNCTION__, (int)(currentEntry->status)); |
13d8c7b4 SK |
433 | |
434 | return false; | |
435 | } | |
436 | m_entryFrameOutputIndex = tempFrameOutputIndex; | |
07b3ad1c | 437 | m_tempFrameMetadata = place_camera_metadata(m_tempFrameMetadataBuf, 2000, 35, 500); //estimated |
0f26b20f | 438 | add_camera_metadata_entry(m_tempFrameMetadata, ANDROID_CONTROL_AF_STATE, &afState, 1); |
13d8c7b4 | 439 | res = m_metadataConverter->ToDynamicMetadata(&(currentEntry->internal_shot), |
c15a6b00 JS |
440 | m_tempFrameMetadata); |
441 | if (res!=NO_ERROR) { | |
13d8c7b4 SK |
442 | ALOGE("ERROR(%s): ToDynamicMetadata (%d) ", __FUNCTION__, res); |
443 | return false; | |
c15a6b00 JS |
444 | } |
445 | *num_entries = get_camera_metadata_entry_count(m_tempFrameMetadata); | |
446 | *frame_size = get_camera_metadata_size(m_tempFrameMetadata); | |
447 | *prepared_frame = m_tempFrameMetadata; | |
5506cebf SK |
448 | ALOGV("## PrepareFrame DONE: frameOut(%d) frameCnt-req(%d) timestamp(%lld)", m_entryFrameOutputIndex, |
449 | currentEntry->internal_shot.shot.ctl.request.frameCount, currentEntry->internal_shot.shot.dm.sensor.timeStamp); | |
9dd63e1f | 450 | // Dump(); |
13d8c7b4 | 451 | return true; |
c15a6b00 JS |
452 | } |
453 | ||
ca714238 | 454 | int RequestManager::MarkProcessingRequest(ExynosBuffer* buf) |
c15a6b00 | 455 | { |
13d8c7b4 | 456 | struct camera2_shot_ext * shot_ext; |
b56dcc00 | 457 | struct camera2_shot_ext * request_shot; |
13d8c7b4 | 458 | int targetStreamIndex = 0; |
ad37861e | 459 | request_manager_entry * newEntry = NULL; |
0f26b20f | 460 | static int count = 0; |
13d8c7b4 | 461 | |
52f54308 | 462 | Mutex::Autolock lock(m_requestMutex); |
1264ab16 | 463 | Mutex::Autolock lock2(m_numOfEntriesLock); |
13d8c7b4 | 464 | if (m_numOfEntries == 0) { |
4aa4d739 | 465 | CAM_LOGD("DEBUG(%s): Request Manager Empty ", __FUNCTION__); |
13d8c7b4 SK |
466 | return -1; |
467 | } | |
468 | ||
469 | if ((m_entryProcessingIndex == m_entryInsertionIndex) | |
be494d19 | 470 | && (entries[m_entryProcessingIndex].status == REQUESTED || entries[m_entryProcessingIndex].status == CAPTURED)) { |
da7ca692 | 471 | ALOGV("## MarkProcReq skipping(request underrun) - num(%d), insert(%d), processing(%d), frame(%d)", |
13d8c7b4 SK |
472 | m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex); |
473 | return -1; | |
474 | } | |
c15a6b00 | 475 | |
9dd63e1f | 476 | int newProcessingIndex = GetNextIndex(m_entryProcessingIndex); |
ad37861e | 477 | ALOGV("DEBUG(%s): index(%d)", __FUNCTION__, newProcessingIndex); |
c15a6b00 | 478 | |
c15a6b00 | 479 | newEntry = &(entries[newProcessingIndex]); |
ad37861e | 480 | request_shot = &(newEntry->internal_shot); |
be494d19 | 481 | if (newEntry->status != REGISTERED) { |
5c88d1f2 C |
482 | CAM_LOGD("DEBUG(%s)(%d): Circular buffer abnormal, numOfEntries(%d), status(%d)", __FUNCTION__, newProcessingIndex, m_numOfEntries, newEntry->status); |
483 | for (int i = 0; i < NUM_MAX_REQUEST_MGR_ENTRY; i++) { | |
484 | CAM_LOGD("DBG: entrie[%d].stream output cnt = %d, framecnt(%d)", i, entries[i].output_stream_count, entries[i].internal_shot.shot.ctl.request.frameCount); | |
485 | } | |
13d8c7b4 | 486 | return -1; |
c15a6b00 | 487 | } |
ad37861e | 488 | |
be494d19 | 489 | newEntry->status = REQUESTED; |
c15a6b00 | 490 | |
ad37861e | 491 | shot_ext = (struct camera2_shot_ext *)buf->virt.extP[1]; |
13d8c7b4 | 492 | |
ad37861e SK |
493 | memset(shot_ext, 0x00, sizeof(struct camera2_shot_ext)); |
494 | shot_ext->shot.ctl.request.frameCount = request_shot->shot.ctl.request.frameCount; | |
13d8c7b4 | 495 | shot_ext->request_sensor = 1; |
9dd63e1f SK |
496 | shot_ext->dis_bypass = 1; |
497 | shot_ext->dnr_bypass = 1; | |
ad37861e SK |
498 | shot_ext->fd_bypass = 1; |
499 | shot_ext->setfile = 0; | |
500 | ||
5506cebf SK |
501 | targetStreamIndex = newEntry->internal_shot.shot.ctl.request.outputStreams[0]; |
502 | shot_ext->shot.ctl.request.outputStreams[0] = targetStreamIndex; | |
503 | if (targetStreamIndex & MASK_OUTPUT_SCP) | |
504 | shot_ext->request_scp = 1; | |
13d8c7b4 | 505 | |
5506cebf SK |
506 | if (targetStreamIndex & MASK_OUTPUT_SCC) |
507 | shot_ext->request_scc = 1; | |
508 | ||
509 | if (shot_ext->shot.ctl.stats.faceDetectMode != FACEDETECT_MODE_OFF) | |
510 | shot_ext->fd_bypass = 0; | |
13d8c7b4 | 511 | |
0f26b20f SK |
512 | if (count == 0){ |
513 | shot_ext->shot.ctl.aa.mode = AA_CONTROL_AUTO; | |
514 | } else | |
515 | shot_ext->shot.ctl.aa.mode = AA_CONTROL_NONE; | |
516 | ||
517 | count++; | |
ad37861e SK |
518 | shot_ext->shot.ctl.request.metadataMode = METADATA_MODE_FULL; |
519 | shot_ext->shot.ctl.stats.faceDetectMode = FACEDETECT_MODE_FULL; | |
520 | shot_ext->shot.magicNumber = 0x23456789; | |
521 | shot_ext->shot.ctl.sensor.exposureTime = 0; | |
522 | shot_ext->shot.ctl.sensor.frameDuration = 33*1000*1000; | |
523 | shot_ext->shot.ctl.sensor.sensitivity = 0; | |
524 | ||
e4657e32 SK |
525 | |
526 | shot_ext->shot.ctl.scaler.cropRegion[0] = newEntry->internal_shot.shot.ctl.scaler.cropRegion[0]; | |
527 | shot_ext->shot.ctl.scaler.cropRegion[1] = newEntry->internal_shot.shot.ctl.scaler.cropRegion[1]; | |
528 | shot_ext->shot.ctl.scaler.cropRegion[2] = newEntry->internal_shot.shot.ctl.scaler.cropRegion[2]; | |
13d8c7b4 | 529 | |
ad37861e SK |
530 | m_entryProcessingIndex = newProcessingIndex; |
531 | return newProcessingIndex; | |
c15a6b00 JS |
532 | } |
533 | ||
2adfa429 | 534 | void RequestManager::NotifyStreamOutput(int frameCnt) |
c15a6b00 | 535 | { |
9dd63e1f SK |
536 | int index; |
537 | ||
2adfa429 JS |
538 | Mutex::Autolock lock(m_requestMutex); |
539 | ALOGV("DEBUG(%s): frameCnt(%d)", __FUNCTION__, frameCnt); | |
9dd63e1f SK |
540 | |
541 | index = FindEntryIndexByFrameCnt(frameCnt); | |
542 | if (index == -1) { | |
543 | ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt); | |
544 | return; | |
545 | } | |
2adfa429 | 546 | ALOGV("DEBUG(%s): frameCnt(%d), last cnt (%d)", __FUNCTION__, frameCnt, entries[index].output_stream_count); |
9dd63e1f | 547 | |
be494d19 SK |
548 | entries[index].output_stream_count--; //TODO : match stream id also |
549 | CheckCompleted(index); | |
13d8c7b4 SK |
550 | } |
551 | ||
552 | void RequestManager::CheckCompleted(int index) | |
553 | { | |
041f38de SK |
554 | if ((entries[index].status == METADONE || entries[index].status == COMPLETED) |
555 | && (entries[index].output_stream_count <= 0)){ | |
556 | ALOGV("(%s): Completed(index:%d)(frameCnt:%d)", __FUNCTION__, | |
2adfa429 | 557 | index, entries[index].internal_shot.shot.ctl.request.frameCount ); |
041f38de SK |
558 | entries[index].status = COMPLETED; |
559 | if (m_lastCompletedFrameCnt + 1 == entries[index].internal_shot.shot.ctl.request.frameCount) | |
560 | m_mainThread->SetSignal(SIGNAL_MAIN_STREAM_OUTPUT_DONE); | |
2adfa429 | 561 | } |
c15a6b00 | 562 | } |
9dd63e1f | 563 | |
f9a06609 | 564 | int RequestManager::GetCompletedIndex() |
ad37861e | 565 | { |
041f38de | 566 | return FindEntryIndexByFrameCnt(m_lastCompletedFrameCnt + 1); |
ad37861e SK |
567 | } |
568 | ||
52f54308 SK |
569 | void RequestManager::pushSensorQ(int index) |
570 | { | |
571 | Mutex::Autolock lock(m_requestMutex); | |
572 | m_sensorQ.push_back(index); | |
573 | } | |
574 | ||
575 | int RequestManager::popSensorQ() | |
576 | { | |
577 | List<int>::iterator sensor_token; | |
578 | int index; | |
579 | ||
580 | Mutex::Autolock lock(m_requestMutex); | |
581 | ||
582 | if(m_sensorQ.size() == 0) | |
583 | return -1; | |
584 | ||
585 | sensor_token = m_sensorQ.begin()++; | |
586 | index = *sensor_token; | |
587 | m_sensorQ.erase(sensor_token); | |
588 | ||
589 | return (index); | |
590 | } | |
591 | ||
592 | void RequestManager::releaseSensorQ() | |
593 | { | |
594 | List<int>::iterator r; | |
595 | ||
596 | Mutex::Autolock lock(m_requestMutex); | |
0eb27a9d | 597 | ALOGV("(%s)m_sensorQ.size : %d", __FUNCTION__, m_sensorQ.size()); |
52f54308 SK |
598 | |
599 | while(m_sensorQ.size() > 0){ | |
600 | r = m_sensorQ.begin()++; | |
601 | m_sensorQ.erase(r); | |
602 | } | |
603 | return; | |
604 | } | |
605 | ||
ad37861e | 606 | void RequestManager::ApplyDynamicMetadata(struct camera2_shot_ext *shot_ext) |
13d8c7b4 | 607 | { |
9dd63e1f | 608 | int index; |
b56dcc00 SK |
609 | struct camera2_shot_ext * request_shot; |
610 | nsecs_t timeStamp; | |
ad37861e | 611 | int i; |
13d8c7b4 | 612 | |
52f54308 | 613 | Mutex::Autolock lock(m_requestMutex); |
ad37861e | 614 | ALOGV("DEBUG(%s): frameCnt(%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount); |
9dd63e1f | 615 | |
ad37861e SK |
616 | for (i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) { |
617 | if((entries[i].internal_shot.shot.ctl.request.frameCount == shot_ext->shot.ctl.request.frameCount) | |
2adfa429 | 618 | && (entries[i].status == CAPTURED)){ |
f9a06609 | 619 | entries[i].status = METADONE; |
ad37861e | 620 | break; |
2adfa429 | 621 | } |
ad37861e SK |
622 | } |
623 | ||
624 | if (i == NUM_MAX_REQUEST_MGR_ENTRY){ | |
625 | ALOGE("[%s] no entry found(framecount:%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount); | |
9dd63e1f | 626 | return; |
13d8c7b4 | 627 | } |
9dd63e1f | 628 | |
ad37861e | 629 | request_manager_entry * newEntry = &(entries[i]); |
b56dcc00 | 630 | request_shot = &(newEntry->internal_shot); |
9dd63e1f | 631 | |
b56dcc00 | 632 | timeStamp = request_shot->shot.dm.sensor.timeStamp; |
ad37861e | 633 | memcpy(&(request_shot->shot.dm), &(shot_ext->shot.dm), sizeof(struct camera2_dm)); |
b56dcc00 | 634 | request_shot->shot.dm.sensor.timeStamp = timeStamp; |
5506cebf | 635 | m_lastTimeStamp = timeStamp; |
ad37861e | 636 | CheckCompleted(i); |
13d8c7b4 SK |
637 | } |
638 | ||
53f62ad9 | 639 | void RequestManager::UpdateIspParameters(struct camera2_shot_ext *shot_ext, int frameCnt, ctl_request_info_t *ctl_info) |
13d8c7b4 | 640 | { |
9dd63e1f | 641 | int index, targetStreamIndex; |
b56dcc00 | 642 | struct camera2_shot_ext * request_shot; |
9dd63e1f SK |
643 | |
644 | ALOGV("DEBUG(%s): updating info with frameCnt(%d)", __FUNCTION__, frameCnt); | |
645 | if (frameCnt < 0) | |
13d8c7b4 | 646 | return; |
9dd63e1f SK |
647 | |
648 | index = FindEntryIndexByFrameCnt(frameCnt); | |
649 | if (index == -1) { | |
650 | ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt); | |
651 | return; | |
652 | } | |
653 | ||
13d8c7b4 | 654 | request_manager_entry * newEntry = &(entries[index]); |
ad37861e | 655 | request_shot = &(newEntry->internal_shot); |
2bdec060 | 656 | memcpy(&(shot_ext->shot.ctl), &(request_shot->shot.ctl), sizeof(struct camera2_ctl)); |
2adfa429 | 657 | shot_ext->shot.ctl.request.frameCount = frameCnt; |
13d8c7b4 | 658 | shot_ext->request_sensor = 1; |
ad37861e SK |
659 | shot_ext->dis_bypass = 1; |
660 | shot_ext->dnr_bypass = 1; | |
661 | shot_ext->fd_bypass = 1; | |
10e122bd | 662 | shot_ext->drc_bypass = 1; |
ad37861e SK |
663 | shot_ext->setfile = 0; |
664 | ||
13d8c7b4 SK |
665 | shot_ext->request_scc = 0; |
666 | shot_ext->request_scp = 0; | |
ad37861e | 667 | |
5506cebf SK |
668 | shot_ext->isReprocessing = request_shot->isReprocessing; |
669 | shot_ext->reprocessInput = request_shot->reprocessInput; | |
9dd63e1f | 670 | shot_ext->shot.ctl.request.outputStreams[0] = 0; |
9dd63e1f | 671 | |
48728d49 SK |
672 | shot_ext->awb_mode_dm = request_shot->awb_mode_dm; |
673 | ||
e4657e32 SK |
674 | shot_ext->shot.ctl.scaler.cropRegion[0] = request_shot->shot.ctl.scaler.cropRegion[0]; |
675 | shot_ext->shot.ctl.scaler.cropRegion[1] = request_shot->shot.ctl.scaler.cropRegion[1]; | |
676 | shot_ext->shot.ctl.scaler.cropRegion[2] = request_shot->shot.ctl.scaler.cropRegion[2]; | |
677 | ||
53f62ad9 YJ |
678 | // mapping flash UI mode from aeMode |
679 | if (request_shot->shot.ctl.aa.aeMode >= AA_AEMODE_ON) { | |
4a9565ae | 680 | if (request_shot->shot.ctl.aa.captureIntent == AA_CAPTURE_INTENT_PREVIEW) |
73f5ad60 | 681 | ctl_info->flash.i_flashMode = request_shot->shot.ctl.aa.aeMode; |
a0648fc7 | 682 | else if (request_shot->shot.ctl.aa.captureIntent == AA_CAPTURE_INTENT_VIDEO_RECORD) |
73f5ad60 | 683 | ctl_info->flash.i_flashMode = request_shot->shot.ctl.aa.aeMode; |
53f62ad9 YJ |
684 | request_shot->shot.ctl.aa.aeMode = AA_AEMODE_ON; |
685 | } | |
53f62ad9 YJ |
686 | |
687 | // Apply ae/awb lock or unlock | |
e117f756 YJ |
688 | if (request_shot->ae_lock == AEMODE_LOCK_ON) |
689 | request_shot->shot.ctl.aa.aeMode = AA_AEMODE_LOCKED; | |
690 | if (request_shot->awb_lock == AWBMODE_LOCK_ON) | |
691 | request_shot->shot.ctl.aa.awbMode = AA_AWBMODE_LOCKED; | |
692 | ||
2bdec060 SK |
693 | if (m_lastAaMode == request_shot->shot.ctl.aa.mode) { |
694 | shot_ext->shot.ctl.aa.mode = (enum aa_mode)(0); | |
695 | } | |
696 | else { | |
697 | shot_ext->shot.ctl.aa.mode = request_shot->shot.ctl.aa.mode; | |
698 | m_lastAaMode = (int)(shot_ext->shot.ctl.aa.mode); | |
699 | } | |
700 | if (m_lastAeMode == request_shot->shot.ctl.aa.aeMode) { | |
701 | shot_ext->shot.ctl.aa.aeMode = (enum aa_aemode)(0); | |
702 | } | |
703 | else { | |
704 | shot_ext->shot.ctl.aa.aeMode = request_shot->shot.ctl.aa.aeMode; | |
705 | m_lastAeMode = (int)(shot_ext->shot.ctl.aa.aeMode); | |
706 | } | |
707 | if (m_lastAwbMode == request_shot->shot.ctl.aa.awbMode) { | |
708 | shot_ext->shot.ctl.aa.awbMode = (enum aa_awbmode)(0); | |
709 | } | |
710 | else { | |
711 | shot_ext->shot.ctl.aa.awbMode = request_shot->shot.ctl.aa.awbMode; | |
712 | m_lastAwbMode = (int)(shot_ext->shot.ctl.aa.awbMode); | |
713 | } | |
714 | if (m_lastAeComp == request_shot->shot.ctl.aa.aeExpCompensation) { | |
715 | shot_ext->shot.ctl.aa.aeExpCompensation = 0; | |
716 | } | |
717 | else { | |
718 | shot_ext->shot.ctl.aa.aeExpCompensation = request_shot->shot.ctl.aa.aeExpCompensation; | |
719 | m_lastAeComp = (int)(shot_ext->shot.ctl.aa.aeExpCompensation); | |
720 | } | |
ef6f83ca | 721 | |
a3bcc37b | 722 | if (request_shot->shot.ctl.aa.videoStabilizationMode && m_vdisEnable) { |
5c88d1f2 C |
723 | m_vdisBubbleEn = true; |
724 | shot_ext->dis_bypass = 0; | |
7ef20f42 | 725 | shot_ext->dnr_bypass = 0; |
5c88d1f2 C |
726 | } else { |
727 | m_vdisBubbleEn = false; | |
728 | shot_ext->dis_bypass = 1; | |
7ef20f42 | 729 | shot_ext->dnr_bypass = 1; |
5c88d1f2 | 730 | } |
5c88d1f2 | 731 | |
ef6f83ca SK |
732 | shot_ext->shot.ctl.aa.afTrigger = 0; |
733 | ||
5506cebf SK |
734 | targetStreamIndex = newEntry->internal_shot.shot.ctl.request.outputStreams[0]; |
735 | shot_ext->shot.ctl.request.outputStreams[0] = targetStreamIndex; | |
736 | if (targetStreamIndex & MASK_OUTPUT_SCP) | |
737 | shot_ext->request_scp = 1; | |
13d8c7b4 | 738 | |
5506cebf SK |
739 | if (targetStreamIndex & MASK_OUTPUT_SCC) |
740 | shot_ext->request_scc = 1; | |
741 | ||
742 | if (shot_ext->shot.ctl.stats.faceDetectMode != FACEDETECT_MODE_OFF) | |
743 | shot_ext->fd_bypass = 0; | |
744 | ||
6ba9ef65 SK |
745 | shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = request_shot->shot.ctl.aa.aeTargetFpsRange[0]; |
746 | shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = request_shot->shot.ctl.aa.aeTargetFpsRange[1]; | |
5506cebf SK |
747 | |
748 | ALOGV("(%s): applied aa(%d) aemode(%d) expComp(%d), awb(%d) afmode(%d), ", __FUNCTION__, | |
749 | (int)(shot_ext->shot.ctl.aa.mode), (int)(shot_ext->shot.ctl.aa.aeMode), | |
750 | (int)(shot_ext->shot.ctl.aa.aeExpCompensation), (int)(shot_ext->shot.ctl.aa.awbMode), | |
751 | (int)(shot_ext->shot.ctl.aa.afMode)); | |
13d8c7b4 SK |
752 | } |
753 | ||
5c88d1f2 C |
754 | bool RequestManager::IsVdisEnable(void) |
755 | { | |
756 | return m_vdisBubbleEn; | |
757 | } | |
5c88d1f2 | 758 | |
9dd63e1f SK |
759 | int RequestManager::FindEntryIndexByFrameCnt(int frameCnt) |
760 | { | |
761 | for (int i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) { | |
be494d19 | 762 | if (entries[i].internal_shot.shot.ctl.request.frameCount == frameCnt) |
9dd63e1f SK |
763 | return i; |
764 | } | |
765 | return -1; | |
766 | } | |
767 | ||
768 | void RequestManager::RegisterTimestamp(int frameCnt, nsecs_t * frameTime) | |
13d8c7b4 | 769 | { |
9dd63e1f SK |
770 | int index = FindEntryIndexByFrameCnt(frameCnt); |
771 | if (index == -1) { | |
772 | ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt); | |
773 | return; | |
774 | } | |
775 | ||
13d8c7b4 | 776 | request_manager_entry * currentEntry = &(entries[index]); |
a8be0011 SK |
777 | if (currentEntry->internal_shot.isReprocessing == 1) { |
778 | ALOGV("DEBUG(%s): REPROCESSING : preserving timestamp for reqIndex(%d) frameCnt(%d) (%lld)", __FUNCTION__, | |
be494d19 | 779 | index, frameCnt, currentEntry->internal_shot.shot.dm.sensor.timeStamp); |
a8be0011 SK |
780 | } else { |
781 | currentEntry->internal_shot.shot.dm.sensor.timeStamp = *((uint64_t*)frameTime); | |
782 | ALOGV("DEBUG(%s): applied timestamp for reqIndex(%d) frameCnt(%d) (%lld)", __FUNCTION__, | |
783 | index, frameCnt, currentEntry->internal_shot.shot.dm.sensor.timeStamp); | |
784 | } | |
13d8c7b4 SK |
785 | } |
786 | ||
5506cebf SK |
787 | |
788 | nsecs_t RequestManager::GetTimestampByFrameCnt(int frameCnt) | |
789 | { | |
790 | int index = FindEntryIndexByFrameCnt(frameCnt); | |
791 | if (index == -1) { | |
792 | ALOGE("ERR(%s): Cannot find entry for frameCnt(%d) returning saved time(%lld)", __FUNCTION__, frameCnt, m_lastTimeStamp); | |
793 | return m_lastTimeStamp; | |
794 | } | |
795 | else | |
796 | return GetTimestamp(index); | |
797 | } | |
798 | ||
799 | nsecs_t RequestManager::GetTimestamp(int index) | |
13d8c7b4 | 800 | { |
5f643a75 | 801 | Mutex::Autolock lock(m_requestMutex); |
54f4971e SK |
802 | if (index < 0 || index >= NUM_MAX_REQUEST_MGR_ENTRY) { |
803 | ALOGE("ERR(%s): Request entry outside of bounds (%d)", __FUNCTION__, index); | |
9dd63e1f SK |
804 | return 0; |
805 | } | |
806 | ||
13d8c7b4 | 807 | request_manager_entry * currentEntry = &(entries[index]); |
5f643a75 | 808 | nsecs_t frameTime = currentEntry->internal_shot.shot.dm.sensor.timeStamp; |
5506cebf SK |
809 | if (frameTime == 0) { |
810 | ALOGV("DEBUG(%s): timestamp null, returning saved value", __FUNCTION__); | |
811 | frameTime = m_lastTimeStamp; | |
812 | } | |
9dd63e1f | 813 | ALOGV("DEBUG(%s): Returning timestamp for reqIndex(%d) (%lld)", __FUNCTION__, index, frameTime); |
13d8c7b4 SK |
814 | return frameTime; |
815 | } | |
816 | ||
2f4d175d SK |
817 | uint8_t RequestManager::GetOutputStreamByFrameCnt(int frameCnt) |
818 | { | |
819 | int index = FindEntryIndexByFrameCnt(frameCnt); | |
820 | if (index == -1) { | |
821 | ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt); | |
822 | return 0; | |
823 | } | |
824 | else | |
825 | return GetOutputStream(index); | |
826 | } | |
827 | ||
828 | uint8_t RequestManager::GetOutputStream(int index) | |
829 | { | |
830 | Mutex::Autolock lock(m_requestMutex); | |
831 | if (index < 0 || index >= NUM_MAX_REQUEST_MGR_ENTRY) { | |
832 | ALOGE("ERR(%s): Request entry outside of bounds (%d)", __FUNCTION__, index); | |
833 | return 0; | |
834 | } | |
835 | ||
836 | request_manager_entry * currentEntry = &(entries[index]); | |
837 | return currentEntry->internal_shot.shot.ctl.request.outputStreams[0]; | |
838 | } | |
839 | ||
69d1e6e9 SK |
840 | camera2_shot_ext * RequestManager::GetInternalShotExtByFrameCnt(int frameCnt) |
841 | { | |
842 | int index = FindEntryIndexByFrameCnt(frameCnt); | |
843 | if (index == -1) { | |
844 | ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt); | |
845 | return 0; | |
846 | } | |
847 | else | |
848 | return GetInternalShotExt(index); | |
849 | } | |
850 | ||
851 | camera2_shot_ext * RequestManager::GetInternalShotExt(int index) | |
852 | { | |
853 | Mutex::Autolock lock(m_requestMutex); | |
854 | if (index < 0 || index >= NUM_MAX_REQUEST_MGR_ENTRY) { | |
855 | ALOGE("ERR(%s): Request entry outside of bounds (%d)", __FUNCTION__, index); | |
856 | return 0; | |
857 | } | |
858 | ||
859 | request_manager_entry * currentEntry = &(entries[index]); | |
860 | return ¤tEntry->internal_shot; | |
861 | } | |
862 | ||
9dd63e1f SK |
863 | int RequestManager::FindFrameCnt(struct camera2_shot_ext * shot_ext) |
864 | { | |
041f38de | 865 | Mutex::Autolock lock(m_requestMutex); |
ad37861e SK |
866 | int i; |
867 | ||
be494d19 | 868 | if (m_numOfEntries == 0) { |
5c88d1f2 | 869 | CAM_LOGD("DBG(%s): No Entry found", __FUNCTION__); |
be494d19 SK |
870 | return -1; |
871 | } | |
ad37861e | 872 | |
be494d19 | 873 | for (i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) { |
ad37861e | 874 | if(entries[i].internal_shot.shot.ctl.request.frameCount != shot_ext->shot.ctl.request.frameCount) |
be494d19 | 875 | continue; |
ad37861e SK |
876 | |
877 | if (entries[i].status == REQUESTED) { | |
878 | entries[i].status = CAPTURED; | |
879 | return entries[i].internal_shot.shot.ctl.request.frameCount; | |
be494d19 | 880 | } |
5c88d1f2 | 881 | CAM_LOGE("ERR(%s): frameCount(%d), index(%d), status(%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount, i, entries[i].status); |
ad37861e | 882 | |
be494d19 | 883 | } |
5c88d1f2 | 884 | CAM_LOGD("(%s): No Entry found frame count(%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount); |
ad37861e | 885 | |
be494d19 | 886 | return -1; |
9dd63e1f | 887 | } |
13d8c7b4 | 888 | |
b5237e6b SK |
889 | void RequestManager::SetInitialSkip(int count) |
890 | { | |
891 | ALOGV("(%s): Pipeline Restarting. setting cnt(%d) - current(%d)", __FUNCTION__, count, m_sensorPipelineSkipCnt); | |
892 | if (count > m_sensorPipelineSkipCnt) | |
893 | m_sensorPipelineSkipCnt = count; | |
894 | } | |
895 | ||
ad37861e SK |
896 | int RequestManager::GetSkipCnt() |
897 | { | |
898 | ALOGV("(%s): skip cnt(%d)", __FUNCTION__, m_sensorPipelineSkipCnt); | |
899 | if (m_sensorPipelineSkipCnt == 0) | |
900 | return m_sensorPipelineSkipCnt; | |
901 | else | |
902 | return --m_sensorPipelineSkipCnt; | |
903 | } | |
904 | ||
13d8c7b4 SK |
905 | void RequestManager::Dump(void) |
906 | { | |
13d8c7b4 SK |
907 | int i = 0; |
908 | request_manager_entry * currentEntry; | |
1264ab16 | 909 | Mutex::Autolock lock(m_numOfEntriesLock); |
ad37861e | 910 | ALOGD("## Dump totalentry(%d), insert(%d), processing(%d), frame(%d)", |
13d8c7b4 SK |
911 | m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex); |
912 | ||
913 | for (i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) { | |
914 | currentEntry = &(entries[i]); | |
5506cebf | 915 | ALOGD("[%2d] status[%d] frameCnt[%3d] numOutput[%d] outstream[0]-%x ", i, |
be494d19 | 916 | currentEntry->status, currentEntry->internal_shot.shot.ctl.request.frameCount, |
ad37861e | 917 | currentEntry->output_stream_count, |
5506cebf | 918 | currentEntry->internal_shot.shot.ctl.request.outputStreams[0]); |
13d8c7b4 SK |
919 | } |
920 | } | |
c15a6b00 | 921 | |
9dd63e1f SK |
922 | int RequestManager::GetNextIndex(int index) |
923 | { | |
924 | index++; | |
925 | if (index >= NUM_MAX_REQUEST_MGR_ENTRY) | |
926 | index = 0; | |
927 | ||
928 | return index; | |
929 | } | |
930 | ||
f9a06609 SK |
931 | int RequestManager::GetPrevIndex(int index) |
932 | { | |
933 | index--; | |
934 | if (index < 0) | |
935 | index = NUM_MAX_REQUEST_MGR_ENTRY-1; | |
936 | ||
937 | return index; | |
938 | } | |
939 | ||
/*
 * Constructor: initializes HAL state, loads the gralloc module, creates the
 * ion client, constructs the bayer/request managers and main thread, then
 * brings up the ISP chain.
 *
 * openInvalid receives the raw return value of InitializeISPChain(); the
 * caller inspects it to decide whether open succeeded.
 * NOTE(review): InitializeISPChain() visibly returns true/false, so the
 * `*openInvalid < 0` cleanup branch below can never be taken — confirm the
 * intended contract with the HAL open path before changing either side.
 */
ExynosCameraHWInterface2::ExynosCameraHWInterface2(int cameraId, camera2_device_t *dev, ExynosCamera2 * camera, int *openInvalid):
    m_requestQueueOps(NULL),
    m_frameQueueOps(NULL),
    m_callbackCookie(NULL),
    m_numOfRemainingReqInSvc(0),
    m_isRequestQueuePending(false),
    m_isRequestQueueNull(true),
    m_isIspStarted(false),
    m_ionCameraClient(0),
    m_zoomRatio(1),
    m_scp_closing(false),
    m_scp_closed(false),
    m_afState(HAL_AFSTATE_INACTIVE),
    m_afMode(NO_CHANGE),
    m_afMode2(NO_CHANGE),
    m_vdisBubbleCnt(0),
    m_vdisDupFrame(0),
    m_IsAfModeUpdateRequired(false),
    m_IsAfTriggerRequired(false),
    m_IsAfLockRequired(false),
    m_serviceAfState(ANDROID_CONTROL_AF_STATE_INACTIVE),
    m_sccLocalBufferValid(false),
    m_wideAspect(false),
    m_scpOutputSignalCnt(0),
    m_scpOutputImageCnt(0),
    m_afTriggerId(0),
    m_afPendingTriggerId(0),
    m_afModeWaitingCnt(0),
    m_jpegEncodingCount(0),
    m_scpForceSuspended(false),
    m_halDevice(dev),
    m_nightCaptureCnt(0),
    m_nightCaptureFrameCnt(0),
    m_lastSceneMode(0),
    m_cameraId(cameraId),
    m_thumbNailW(160),
    m_thumbNailH(120)
{
    ALOGD("(%s): ENTER", __FUNCTION__);
    int ret = 0;
    int res = 0;

    // CSC (color-space conversion) handles are created only on the success
    // path below; start them as NULL so release() can free them safely.
    m_exynosPictureCSC = NULL;
    m_exynosVideoCSC = NULL;

    // Load the gralloc HAL once (m_grallocHal is shared; presumably static —
    // TODO confirm its declaration outside this view).
    if (!m_grallocHal) {
        ret = hw_get_module(GRALLOC_HARDWARE_MODULE_ID, (const hw_module_t **)&m_grallocHal);
        if (ret)
            ALOGE("ERR(%s):Fail on loading gralloc HAL", __FUNCTION__);
    }

    m_camera2 = camera;
    // ion client for DMA buffer allocation; failure is logged but not fatal here.
    m_ionCameraClient = createIonClient(m_ionCameraClient);
    if(m_ionCameraClient == 0)
        ALOGE("ERR(%s):Fail on ion_client_create", __FUNCTION__);


    m_BayerManager = new BayerBufManager();
    m_mainThread    = new MainThread(this);
    // RequestManager signals the main thread when request state changes.
    m_requestManager = new RequestManager((SignalDrivenThread*)(m_mainThread.get()));
    *openInvalid = InitializeISPChain();
    if (*openInvalid < 0) {
        ALOGD("(%s): ISP chain init failed. exiting", __FUNCTION__);
        // clean process
        // 1. close video nodes
        // SCP
        res = exynos_v4l2_close(m_camera_info.scp.fd);
        if (res != NO_ERROR ) {
            ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
        }
        // SCC
        res = exynos_v4l2_close(m_camera_info.capture.fd);
        if (res != NO_ERROR ) {
            ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
        }
        // Sensor
        res = exynos_v4l2_close(m_camera_info.sensor.fd);
        if (res != NO_ERROR ) {
            ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
        }
        // ISP
        res = exynos_v4l2_close(m_camera_info.isp.fd);
        if (res != NO_ERROR ) {
            ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
        }
    } else {
        // Success path: start worker threads and prepare CSC engines.
        m_sensorThread  = new SensorThread(this);
        m_mainThread->Start("MainThread", PRIORITY_DEFAULT, 0);
        m_sensorThread->Start("SensorThread", PRIORITY_DEFAULT, 0);
        ALOGV("DEBUG(%s): created sensorthread ", __FUNCTION__);

        for (int i = 0 ; i < STREAM_ID_LAST+1 ; i++)
            m_subStreams[i].type =  SUBSTREAM_TYPE_NONE;
        // Hardware scaler (GScaler) instances: one for still pictures,
        // one for video, each pinned to its own node.
        CSC_METHOD cscMethod = CSC_METHOD_HW;
        m_exynosPictureCSC = csc_init(cscMethod);
        if (m_exynosPictureCSC == NULL)
            ALOGE("ERR(%s): csc_init() fail", __FUNCTION__);
        csc_set_hw_property(m_exynosPictureCSC, CSC_HW_PROPERTY_FIXED_NODE, PICTURE_GSC_NODE_NUM);
        csc_set_hw_property(m_exynosPictureCSC, CSC_HW_PROPERTY_HW_TYPE, CSC_HW_TYPE_GSCALER);

        m_exynosVideoCSC = csc_init(cscMethod);
        if (m_exynosVideoCSC == NULL)
            ALOGE("ERR(%s): csc_init() fail", __FUNCTION__);
        csc_set_hw_property(m_exynosVideoCSC, CSC_HW_PROPERTY_FIXED_NODE, VIDEO_GSC_NODE_NUM);
        csc_set_hw_property(m_exynosVideoCSC, CSC_HW_PROPERTY_HW_TYPE, CSC_HW_TYPE_GSCALER);

        m_setExifFixedAttribute();

        // contol information clear
        // flash
        m_ctlInfo.flash.i_flashMode = AA_AEMODE_ON;
        m_ctlInfo.flash.m_afFlashDoneFlg= false;
        m_ctlInfo.flash.m_flashEnableFlg = false;
        m_ctlInfo.flash.m_flashFrameCount = 0;
        m_ctlInfo.flash.m_flashCnt = 0;
        m_ctlInfo.flash.m_flashTimeOut = 0;
        m_ctlInfo.flash.m_flashDecisionResult = false;
        m_ctlInfo.flash.m_flashTorchMode = false;
        m_ctlInfo.flash.m_precaptureState = 0;
        m_ctlInfo.flash.m_precaptureTriggerId = 0;
        // ae
        m_ctlInfo.ae.aeStateNoti = AE_STATE_INACTIVE;
        // af
        m_ctlInfo.af.m_afTriggerTimeOut = 0;
        // scene
        m_ctlInfo.scene.prevSceneMode = AA_SCENE_MODE_MAX;
    }
    ALOGD("(%s): EXIT", __FUNCTION__);
}
1069 | ||
1070 | ExynosCameraHWInterface2::~ExynosCameraHWInterface2() | |
1071 | { | |
ed4ad5fe | 1072 | ALOGD("(%s): ENTER", __FUNCTION__); |
c15a6b00 | 1073 | this->release(); |
ed4ad5fe | 1074 | ALOGD("(%s): EXIT", __FUNCTION__); |
c15a6b00 JS |
1075 | } |
1076 | ||
/*
 * Tears down the HAL instance in a fixed order:
 *  1. signal stream/sensor/main threads to terminate,
 *  2. free the CSC engine handles,
 *  3. busy-wait for each thread to actually terminate,
 *  4. delete the request/bayer managers,
 *  5. free sensor and SCC buffers,
 *  6. close the V4L2 nodes and the ion client.
 * The order matters: threads must be stopped before the buffers and nodes
 * they use are released.
 */
void ExynosCameraHWInterface2::release()
{
    int i, res;
    ALOGD("(HAL2::release): ENTER");

    // Phase 1: ask every thread to stop (non-blocking).
    if (m_streamThreads[1] != NULL) {
        m_streamThreads[1]->release();
        m_streamThreads[1]->SetSignal(SIGNAL_THREAD_TERMINATE);
    }

    if (m_streamThreads[0] != NULL) {
        m_streamThreads[0]->release();
        m_streamThreads[0]->SetSignal(SIGNAL_THREAD_TERMINATE);
    }

    if (m_sensorThread != NULL) {
        m_sensorThread->release();
    }

    if (m_mainThread != NULL) {
        m_mainThread->release();
    }

    // Phase 2: free CSC handles (threads no longer submit work to them).
    if (m_exynosPictureCSC)
        csc_deinit(m_exynosPictureCSC);
    m_exynosPictureCSC = NULL;

    if (m_exynosVideoCSC)
        csc_deinit(m_exynosVideoCSC);
    m_exynosVideoCSC = NULL;

    // Phase 3: poll until each thread reports termination, then drop our ref.
    if (m_streamThreads[1] != NULL) {
        ALOGD("(HAL2::release): START Waiting for (indirect) stream thread 1 termination");
        while (!m_streamThreads[1]->IsTerminated())
            usleep(SIG_WAITING_TICK);
        ALOGD("(HAL2::release): END Waiting for (indirect) stream thread 1 termination");
        m_streamThreads[1] = NULL;
    }

    if (m_streamThreads[0] != NULL) {
        ALOGD("(HAL2::release): START Waiting for (indirect) stream thread 0 termination");
        while (!m_streamThreads[0]->IsTerminated())
            usleep(SIG_WAITING_TICK);
        ALOGD("(HAL2::release): END Waiting for (indirect) stream thread 0 termination");
        m_streamThreads[0] = NULL;
    }

    if (m_sensorThread != NULL) {
        ALOGD("(HAL2::release): START Waiting for (indirect) sensor thread termination");
        while (!m_sensorThread->IsTerminated())
            usleep(SIG_WAITING_TICK);
        ALOGD("(HAL2::release): END Waiting for (indirect) sensor thread termination");
        m_sensorThread = NULL;
    }

    if (m_mainThread != NULL) {
        ALOGD("(HAL2::release): START Waiting for (indirect) main thread termination");
        while (!m_mainThread->IsTerminated())
            usleep(SIG_WAITING_TICK);
        ALOGD("(HAL2::release): END Waiting for (indirect) main thread termination");
        m_mainThread = NULL;
    }

    // Phase 4: managers are safe to delete once all threads are gone.
    if (m_requestManager != NULL) {
        delete m_requestManager;
        m_requestManager = NULL;
    }

    if (m_BayerManager != NULL) {
        delete m_BayerManager;
        m_BayerManager = NULL;
    }
    // Phase 5: free DMA buffers (sensor always; SCC depends on ownership).
    for (i = 0; i < NUM_BAYER_BUFFERS; i++)
        freeCameraMemory(&m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes);

    // SCC buffers may be HAL-local copies (m_sccLocalBuffer) or the node's own.
    if (m_sccLocalBufferValid) {
        for (i = 0; i < NUM_SCC_BUFFERS; i++)
#ifdef ENABLE_FRAME_SYNC
            freeCameraMemory(&m_sccLocalBuffer[i], 2);
#else
            freeCameraMemory(&m_sccLocalBuffer[i], 1);
#endif
    }
    else {
        for (i = 0; i < NUM_SCC_BUFFERS; i++)
            freeCameraMemory(&m_camera_info.capture.buffer[i], m_camera_info.capture.planes);
    }

    // Phase 6: close all V4L2 nodes and the ion client.
    ALOGV("DEBUG(%s): calling exynos_v4l2_close - sensor", __FUNCTION__);
    res = exynos_v4l2_close(m_camera_info.sensor.fd);
    if (res != NO_ERROR ) {
        ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
    }

    ALOGV("DEBUG(%s): calling exynos_v4l2_close - isp", __FUNCTION__);
    res = exynos_v4l2_close(m_camera_info.isp.fd);
    if (res != NO_ERROR ) {
        ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
    }

    ALOGV("DEBUG(%s): calling exynos_v4l2_close - capture", __FUNCTION__);
    res = exynos_v4l2_close(m_camera_info.capture.fd);
    if (res != NO_ERROR ) {
        ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
    }

    ALOGV("DEBUG(%s): calling exynos_v4l2_close - scp", __FUNCTION__);
    res = exynos_v4l2_close(m_camera_info.scp.fd);
    if (res != NO_ERROR ) {
        ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
    }
    ALOGV("DEBUG(%s): calling deleteIonClient", __FUNCTION__);
    deleteIonClient(m_ionCameraClient);

    ALOGD("(HAL2::release): EXIT");
}
1193 | ||
/*
 * Opens and configures the sensor/ISP/SCC/SCP V4L2 nodes and starts the
 * sensor streaming. Called once from the constructor.
 *
 * NOTE(review): declared int but returns the booleans true/false. The
 * constructor checks `InitializeISPChain() < 0`, which `false` (0) never
 * satisfies, so that cleanup branch is unreachable; other callers outside
 * this view may rely on the truthiness of the result, so do not change the
 * return values without auditing them.
 */
int ExynosCameraHWInterface2::InitializeISPChain()
{
    char node_name[30];
    int fd = 0;
    int i;
    int ret = 0;

    /* Open Sensor */
    // Node numbers 40/41/42/44 are the fixed Exynos FIMC-IS video nodes for
    // sensor, ISP, ScalerC (capture) and ScalerP (preview) respectively.
    memset(&node_name, 0x00, sizeof(char[30]));
    sprintf(node_name, "%s%d", NODE_PREFIX, 40);
    fd = exynos_v4l2_open(node_name, O_RDWR, 0);

    if (fd < 0) {
        ALOGE("ERR(%s): failed to open sensor video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
    }
    else {
        ALOGV("DEBUG(%s): sensor video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
    }
    m_camera_info.sensor.fd = fd;

    /* Open ISP */
    memset(&node_name, 0x00, sizeof(char[30]));
    sprintf(node_name, "%s%d", NODE_PREFIX, 41);
    fd = exynos_v4l2_open(node_name, O_RDWR, 0);

    if (fd < 0) {
        ALOGE("ERR(%s): failed to open isp video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
    }
    else {
        ALOGV("DEBUG(%s): isp video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
    }
    m_camera_info.isp.fd = fd;

    /* Open ScalerC */
    memset(&node_name, 0x00, sizeof(char[30]));
    sprintf(node_name, "%s%d", NODE_PREFIX, 42);
    fd = exynos_v4l2_open(node_name, O_RDWR, 0);

    if (fd < 0) {
        ALOGE("ERR(%s): failed to open capture video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
    }
    else {
        ALOGV("DEBUG(%s): capture video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
    }
    m_camera_info.capture.fd = fd;

    /* Open ScalerP */
    memset(&node_name, 0x00, sizeof(char[30]));
    sprintf(node_name, "%s%d", NODE_PREFIX, 44);
    fd = exynos_v4l2_open(node_name, O_RDWR, 0);
    if (fd < 0) {
        ALOGE("DEBUG(%s): failed to open preview video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
    }
    else {
        ALOGV("DEBUG(%s): preview video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
    }
    m_camera_info.scp.fd = fd;

    // Camera 0 is the back sensor (S5K4E5), camera 1 the front (S5K6A3).
    if(m_cameraId == 0)
        m_camera_info.sensor_id = SENSOR_NAME_S5K4E5;
    else
        m_camera_info.sensor_id = SENSOR_NAME_S5K6A3;

    // Template shot metadata queued to the driver when no real request exists.
    memset(&m_camera_info.dummy_shot, 0x00, sizeof(struct camera2_shot_ext));
    m_camera_info.dummy_shot.shot.ctl.request.metadataMode = METADATA_MODE_FULL;
    m_camera_info.dummy_shot.shot.magicNumber = 0x23456789;

    // Bypass DIS/DNR/FD by default.
    m_camera_info.dummy_shot.dis_bypass = 1;
    m_camera_info.dummy_shot.dnr_bypass = 1;
    m_camera_info.dummy_shot.fd_bypass = 1;

    /*sensor setting*/
    m_camera_info.dummy_shot.shot.ctl.sensor.exposureTime = 0;
    m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 0;
    m_camera_info.dummy_shot.shot.ctl.sensor.sensitivity = 0;

    m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[0] = 0;
    m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[1] = 0;

    /*request setting*/
    m_camera_info.dummy_shot.request_sensor = 1;
    m_camera_info.dummy_shot.request_scc = 0;
    m_camera_info.dummy_shot.request_scp = 0;
    m_camera_info.dummy_shot.shot.ctl.request.outputStreams[0] = 0;

    // Sensor node: raw Bayer frames at full sensor resolution.
    m_camera_info.sensor.width = m_camera2->getSensorRawW();
    m_camera_info.sensor.height = m_camera2->getSensorRawH();

    m_camera_info.sensor.format = V4L2_PIX_FMT_SBGGR16;
    m_camera_info.sensor.planes = 2;
    m_camera_info.sensor.buffers = NUM_BAYER_BUFFERS;
    m_camera_info.sensor.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    m_camera_info.sensor.memory = V4L2_MEMORY_DMABUF;

    for(i = 0; i < m_camera_info.sensor.buffers; i++){
        int res;
        initCameraMemory(&m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes);
        // Plane 0: 16-bit Bayer payload. Plane 1: shot metadata area.
        m_camera_info.sensor.buffer[i].size.extS[0] = m_camera_info.sensor.width*m_camera_info.sensor.height*2;
        m_camera_info.sensor.buffer[i].size.extS[1] = 8*1024; // HACK, driver use 8*1024, should be use predefined value
        res = allocCameraMemory(m_ionCameraClient, &m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes, 1<<1);
        if (res) {
            ALOGE("ERROR(%s): failed to allocateCameraMemory for sensor buffer %d", __FUNCTION__, i);
            // Free allocated sensor buffers
            for (int j = 0; j < i; j++) {
                freeCameraMemory(&m_camera_info.sensor.buffer[j], m_camera_info.sensor.planes);
            }
            return false;
        }
    }

    // ISP input node shares the sensor node's buffers (same fds/mappings).
    m_camera_info.isp.width = m_camera_info.sensor.width;
    m_camera_info.isp.height = m_camera_info.sensor.height;
    m_camera_info.isp.format = m_camera_info.sensor.format;
    m_camera_info.isp.planes = m_camera_info.sensor.planes;
    m_camera_info.isp.buffers = m_camera_info.sensor.buffers;
    m_camera_info.isp.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    m_camera_info.isp.memory = V4L2_MEMORY_DMABUF;

    for(i = 0; i < m_camera_info.isp.buffers; i++){
        initCameraMemory(&m_camera_info.isp.buffer[i], m_camera_info.isp.planes);
        m_camera_info.isp.buffer[i].size.extS[0]    = m_camera_info.sensor.buffer[i].size.extS[0];
        m_camera_info.isp.buffer[i].size.extS[1]    = m_camera_info.sensor.buffer[i].size.extS[1];
        m_camera_info.isp.buffer[i].fd.extFd[0]     = m_camera_info.sensor.buffer[i].fd.extFd[0];
        m_camera_info.isp.buffer[i].fd.extFd[1]     = m_camera_info.sensor.buffer[i].fd.extFd[1];
        m_camera_info.isp.buffer[i].virt.extP[0]    = m_camera_info.sensor.buffer[i].virt.extP[0];
        m_camera_info.isp.buffer[i].virt.extP[1]    = m_camera_info.sensor.buffer[i].virt.extP[1];
    };

    /* init ISP */
    ret = cam_int_s_input(&(m_camera_info.isp), m_camera_info.sensor_id);
    if (ret < 0) {
        ALOGE("ERR(%s): cam_int_s_input(%d) failed!!!! ", __FUNCTION__, m_camera_info.sensor_id);
        return false;
    }
    cam_int_s_fmt(&(m_camera_info.isp));
    ALOGV("DEBUG(%s): isp calling reqbuf", __FUNCTION__);
    cam_int_reqbufs(&(m_camera_info.isp));
    ALOGV("DEBUG(%s): isp calling querybuf", __FUNCTION__);
    ALOGV("DEBUG(%s): isp mem alloc done", __FUNCTION__);

    /* init Sensor */
    cam_int_s_input(&(m_camera_info.sensor), m_camera_info.sensor_id);
    ALOGV("DEBUG(%s): sensor s_input done", __FUNCTION__);
    if (cam_int_s_fmt(&(m_camera_info.sensor))< 0) {
        ALOGE("ERR(%s): sensor s_fmt fail", __FUNCTION__);
    }
    ALOGV("DEBUG(%s): sensor s_fmt done", __FUNCTION__);
    cam_int_reqbufs(&(m_camera_info.sensor));
    ALOGV("DEBUG(%s): sensor reqbuf done", __FUNCTION__);
    // Seed every sensor buffer's metadata plane with the dummy shot.
    for (i = 0; i < m_camera_info.sensor.buffers; i++) {
        ALOGV("DEBUG(%s): sensor initial QBUF [%d]", __FUNCTION__, i);
        m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 33*1000*1000; // apply from frame #1
        m_camera_info.dummy_shot.shot.ctl.request.frameCount = -1;
        memcpy( m_camera_info.sensor.buffer[i].virt.extP[1], &(m_camera_info.dummy_shot),
                sizeof(struct camera2_shot_ext));
    }

    // Queue only the minimum number of buffers to the driver; keep the rest
    // in the RequestManager's sensor queue for later use.
    for (i = 0; i < NUM_MIN_SENSOR_QBUF; i++)
        cam_int_qbuf(&(m_camera_info.sensor), i);

    for (i = NUM_MIN_SENSOR_QBUF; i < m_camera_info.sensor.buffers; i++)
        m_requestManager->pushSensorQ(i);

    ALOGV("== stream_on :: sensor");
    cam_int_streamon(&(m_camera_info.sensor));
    m_camera_info.sensor.status = true;

    /* init Capture */
    // SCC (capture) node parameters only; the node itself is configured and
    // started later by StartSCCThread().
    m_camera_info.capture.width = m_camera2->getSensorW();
    m_camera_info.capture.height = m_camera2->getSensorH();
    m_camera_info.capture.format = V4L2_PIX_FMT_YUYV;
#ifdef ENABLE_FRAME_SYNC
    m_camera_info.capture.planes = 2;
#else
    m_camera_info.capture.planes = 1;
#endif
    m_camera_info.capture.buffers = NUM_SCC_BUFFERS;
    m_camera_info.capture.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    m_camera_info.capture.memory = V4L2_MEMORY_DMABUF;

    m_camera_info.capture.status = false;

    return true;
}
1378 | ||
/*
 * Creates (or re-attaches to) stream thread 1, the indirect SCC capture
 * stream, configures the SCC V4L2 node, queues its buffers and starts
 * streaming.
 *
 * threadExists: false on first start (thread and local buffers must be
 * created); true on restart (reuse the existing thread and the preserved
 * m_sccLocalBuffer set).
 */
void ExynosCameraHWInterface2::StartSCCThread(bool threadExists)
{
    ALOGV("(%s)", __FUNCTION__);
    StreamThread *AllocatedStream;
    stream_parameters_t newParameters;
    uint32_t format_actual;


    if (!threadExists) {
        m_streamThreads[1]  = new StreamThread(this, 1);
    }
    AllocatedStream = (StreamThread*)(m_streamThreads[1].get());
    if (!threadExists) {
        AllocatedStream->Start("StreamThread", PRIORITY_DEFAULT, 0);
        m_streamThreadInitialize((SignalDrivenThread*)AllocatedStream);
        AllocatedStream->m_numRegisteredStream = 1;
    }
    AllocatedStream->m_index        = 1;

    format_actual                   = HAL_PIXEL_FORMAT_YCbCr_422_I; // YUYV

    // The SCC stream captures at full sensor (processed) resolution, with no
    // service-side stream ops (indirect stream).
    newParameters.width             = m_camera2->getSensorW();
    newParameters.height            = m_camera2->getSensorH();
    newParameters.format            = format_actual;
    newParameters.streamOps         = NULL;
    newParameters.numHwBuffers      = NUM_SCC_BUFFERS;
#ifdef ENABLE_FRAME_SYNC
    newParameters.planes            = 2;
#else
    newParameters.planes            = 1;
#endif

    newParameters.numSvcBufsInHal   = 0;

    newParameters.node              = &m_camera_info.capture;

    AllocatedStream->streamType     = STREAM_TYPE_INDIRECT;
    ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, AllocatedStream->m_numRegisteredStream);

    // Buffer ownership: on first start allocate ion buffers and keep a copy in
    // m_sccLocalBuffer so restarts can reuse them; on restart copy them back.
    if (!threadExists) {
        if (!m_sccLocalBufferValid) {
            for (int i = 0; i < m_camera_info.capture.buffers; i++){
                initCameraMemory(&m_camera_info.capture.buffer[i], newParameters.node->planes);
                m_camera_info.capture.buffer[i].size.extS[0] = m_camera_info.capture.width*m_camera_info.capture.height*2;
#ifdef ENABLE_FRAME_SYNC
                m_camera_info.capture.buffer[i].size.extS[1] = 4*1024; // HACK, driver use 4*1024, should be use predefined value
                allocCameraMemory(m_ionCameraClient, &m_camera_info.capture.buffer[i], m_camera_info.capture.planes, 1<<1);
#else
                allocCameraMemory(m_ionCameraClient, &m_camera_info.capture.buffer[i], m_camera_info.capture.planes);
#endif
                m_sccLocalBuffer[i] = m_camera_info.capture.buffer[i];
            }
            m_sccLocalBufferValid = true;
        }
    } else {
        if (m_sccLocalBufferValid) {
            for (int i = 0; i < m_camera_info.capture.buffers; i++)
                m_camera_info.capture.buffer[i] = m_sccLocalBuffer[i];
        } else {
            ALOGE("(%s): SCC Thread starting with no buffer", __FUNCTION__);
        }
    }
    // Standard V4L2 bring-up: s_input -> s_fmt -> reqbufs -> qbuf -> streamon.
    cam_int_s_input(newParameters.node, m_camera_info.sensor_id);
    m_camera_info.capture.buffers = NUM_SCC_BUFFERS;
    cam_int_s_fmt(newParameters.node);
    ALOGV("DEBUG(%s): capture calling reqbuf", __FUNCTION__);
    cam_int_reqbufs(newParameters.node);
    ALOGV("DEBUG(%s): capture calling querybuf", __FUNCTION__);

    for (int i = 0; i < newParameters.node->buffers; i++) {
        ALOGV("DEBUG(%s): capture initial QBUF [%d]", __FUNCTION__, i);
        cam_int_qbuf(newParameters.node, i);
        newParameters.svcBufStatus[i] = ON_DRIVER;
    }

    ALOGV("== stream_on :: capture");
    if (cam_int_streamon(newParameters.node) < 0) {
        ALOGE("ERR(%s): capture stream on fail", __FUNCTION__);
    } else {
        m_camera_info.capture.status = true;
    }

    AllocatedStream->setParameter(&newParameters);
    AllocatedStream->m_activated    = true;
    AllocatedStream->m_isBufferInit = true;
}
1465 | ||
// Kicks the ISP pipeline into streaming state.
// Order matters here: the ISP video node is started first with VIDIOC_STREAMON,
// and only then is the sensor-side IS firmware told to start streaming through
// the V4L2_CID_IS_S_STREAM control on the sensor fd.
// NOTE(review): neither call's return value is checked — presumably failure is
// caught later by the capture path; confirm against driver behavior.
void ExynosCameraHWInterface2::StartISP()
{
    ALOGV("== stream_on :: isp");
    cam_int_streamon(&(m_camera_info.isp));
    exynos_v4l2_s_ctrl(m_camera_info.sensor.fd, V4L2_CID_IS_S_STREAM, IS_ENABLE_STREAM);
}
1472 | ||
c15a6b00 JS |
// Returns the id of the camera this HAL instance was opened for.
int ExynosCameraHWInterface2::getCameraId() const
{
    return m_cameraId;
}
c15a6b00 JS |
1477 | |
1478 | int ExynosCameraHWInterface2::setRequestQueueSrcOps(const camera2_request_queue_src_ops_t *request_src_ops) | |
1479 | { | |
13d8c7b4 | 1480 | ALOGV("DEBUG(%s):", __FUNCTION__); |
c15a6b00 JS |
1481 | if ((NULL != request_src_ops) && (NULL != request_src_ops->dequeue_request) |
1482 | && (NULL != request_src_ops->free_request) && (NULL != request_src_ops->request_count)) { | |
1483 | m_requestQueueOps = (camera2_request_queue_src_ops_t*)request_src_ops; | |
1484 | return 0; | |
1485 | } | |
1486 | else { | |
13d8c7b4 | 1487 | ALOGE("DEBUG(%s):setRequestQueueSrcOps : NULL arguments", __FUNCTION__); |
c15a6b00 JS |
1488 | return 1; |
1489 | } | |
1490 | } | |
1491 | ||
// camera2 HAL entry point: the framework signals that at least one request is
// waiting in the request queue. On the first call(s) this also performs the
// lazy bring-up of the sensor / ISP / SCC-capture pipeline, then wakes the
// main thread with SIGNAL_MAIN_REQ_Q_NOT_EMPTY.
// Returns 0 always (even when the ops tables are not yet set and the request
// is ignored).
// NOTE(review): the bring-up sequence below is strictly order-dependent
// (s_fmt -> reqbufs -> qbuf -> streamon per node); do not reorder.
int ExynosCameraHWInterface2::notifyRequestQueueNotEmpty()
{
    int i = 0;

    ALOGV("DEBUG(%s):setting [SIGNAL_MAIN_REQ_Q_NOT_EMPTY] current(%d)", __FUNCTION__, m_requestManager->GetNumEntries());
    // Both queue-ops tables must be registered before requests can be served.
    if ((NULL==m_frameQueueOps)|| (NULL==m_requestQueueOps)) {
        ALOGE("DEBUG(%s):queue ops NULL. ignoring request", __FUNCTION__);
        return 0;
    }
    m_isRequestQueueNull = false;
    // When the queue was fully drained, reset the initial-skip counter.
    if (m_requestManager->GetNumEntries() == 0)
        m_requestManager->SetInitialSkip(0);

    if (m_isIspStarted == false) {
        /* isp */
        m_camera_info.sensor.buffers = NUM_BAYER_BUFFERS;
        m_camera_info.isp.buffers = m_camera_info.sensor.buffers;
        cam_int_s_fmt(&(m_camera_info.isp));
        cam_int_reqbufs(&(m_camera_info.isp));

        /* sensor */
        if (m_camera_info.sensor.status == false) {
            cam_int_s_fmt(&(m_camera_info.sensor));
            cam_int_reqbufs(&(m_camera_info.sensor));

            // Seed every sensor buffer's metadata plane (plane 1) with the
            // dummy shot so the driver has valid per-frame controls.
            for (i = 0; i < m_camera_info.sensor.buffers; i++) {
                ALOGV("DEBUG(%s): sensor initial QBUF [%d]", __FUNCTION__, i);
                m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 33*1000*1000; // apply from frame #1
                m_camera_info.dummy_shot.shot.ctl.request.frameCount = -1;
                memcpy( m_camera_info.sensor.buffer[i].virt.extP[1], &(m_camera_info.dummy_shot),
                        sizeof(struct camera2_shot_ext));
            }
            // Queue only the minimum number of buffers to the driver; the
            // remainder are parked in the request manager's sensor queue.
            for (i = 0; i < NUM_MIN_SENSOR_QBUF; i++)
                cam_int_qbuf(&(m_camera_info.sensor), i);

            for (i = NUM_MIN_SENSOR_QBUF; i < m_camera_info.sensor.buffers; i++)
                m_requestManager->pushSensorQ(i);
            ALOGV("DEBUG(%s): calling sensor streamon", __FUNCTION__);
            cam_int_streamon(&(m_camera_info.sensor));
            m_camera_info.sensor.status = true;
        }
    }
    // Make sure the SCC (stream thread 1) side is running as well.
    if (!(m_streamThreads[1].get())) {
        ALOGV("DEBUG(%s): stream thread 1 not exist. starting without stream", __FUNCTION__);
        StartSCCThread(false);
    } else {
        if (m_streamThreads[1]->m_activated == false) {
            ALOGV("DEBUG(%s): stream thread 1 suspended. restarting", __FUNCTION__);
            StartSCCThread(true);
        } else {
            if (m_camera_info.capture.status == false) {
                m_camera_info.capture.buffers = NUM_SCC_BUFFERS;
                cam_int_s_fmt(&(m_camera_info.capture));
                ALOGV("DEBUG(%s): capture calling reqbuf", __FUNCTION__);
                cam_int_reqbufs(&(m_camera_info.capture));
                ALOGV("DEBUG(%s): capture calling querybuf", __FUNCTION__);

                if (m_streamThreads[1]->streamType == STREAM_TYPE_DIRECT) {
                    // ZSL/direct case: requeue the service-provided gralloc
                    // buffers straight to the capture node.
                    StreamThread * targetStream = m_streamThreads[1].get();
                    stream_parameters_t *targetStreamParms = &(targetStream->m_parameters);
                    node_info_t *currentNode = targetStreamParms->node;

                    struct v4l2_buffer v4l2_buf;
                    struct v4l2_plane planes[VIDEO_MAX_PLANES];

                    for (i = 0 ; i < targetStreamParms->numSvcBuffers ; i++) {
                        v4l2_buf.m.planes = planes;
                        v4l2_buf.type = currentNode->type;
                        v4l2_buf.memory = currentNode->memory;

                        v4l2_buf.length = currentNode->planes;
                        v4l2_buf.index = i;
                        ExynosBuffer metaBuf = targetStreamParms->metaBuffers[i];

                        // Only the first `buffers` slots go to the driver;
                        // the rest stay owned by the service.
                        if (i < currentNode->buffers) {
#ifdef ENABLE_FRAME_SYNC
                            // NOTE(review): plane order 0,2,1 mirrors the fd
                            // swizzle used elsewhere in this HAL — presumably
                            // a driver plane-layout quirk; confirm.
                            v4l2_buf.m.planes[0].m.fd = targetStreamParms->svcBuffers[i].fd.extFd[0];
                            v4l2_buf.m.planes[2].m.fd = targetStreamParms->svcBuffers[i].fd.extFd[1];
                            v4l2_buf.m.planes[1].m.fd = targetStreamParms->svcBuffers[i].fd.extFd[2];
                            // Append the metadata plane(s) after the image planes.
                            v4l2_buf.length += targetStreamParms->metaPlanes;
                            v4l2_buf.m.planes[v4l2_buf.length-1].m.fd = metaBuf.fd.extFd[0];
                            v4l2_buf.m.planes[v4l2_buf.length-1].length = metaBuf.size.extS[0];

                            ALOGV("Qbuf metaBuf: fd(%d), length(%d) plane(%d)", metaBuf.fd.extFd[0], metaBuf.size.extS[0], v4l2_buf.length);
#endif
                            if (exynos_v4l2_qbuf(currentNode->fd, &v4l2_buf) < 0) {
                                ALOGE("ERR(%s): exynos_v4l2_qbuf() fail fd(%d)", __FUNCTION__, currentNode->fd);
                            }
                            ALOGV("DEBUG(%s): exynos_v4l2_qbuf() success fd(%d)", __FUNCTION__, currentNode->fd);
                            targetStreamParms->svcBufStatus[i] = REQUIRES_DQ_FROM_SVC;
                        }
                        else {
                            targetStreamParms->svcBufStatus[i] = ON_SERVICE;
                        }

                    }

                } else {
                    // Non-direct case: queue the HAL-owned capture buffers.
                    for (int i = 0; i < m_camera_info.capture.buffers; i++) {
                        ALOGV("DEBUG(%s): capture initial QBUF [%d]", __FUNCTION__, i);
                        cam_int_qbuf(&(m_camera_info.capture), i);
                    }
                }
                ALOGV("== stream_on :: capture");
                if (cam_int_streamon(&(m_camera_info.capture)) < 0) {
                    ALOGE("ERR(%s): capture stream on fail", __FUNCTION__);
                } else {
                    m_camera_info.capture.status = true;
                }
            }
            if (m_scpForceSuspended) {
                m_scpForceSuspended = false;
            }
        }
    }
    if (m_isIspStarted == false) {
        StartISP();
        ALOGV("DEBUG(%s):starting sensor thread", __FUNCTION__);
        // Skip the first few frames while 3A settles after ISP start.
        m_requestManager->SetInitialSkip(6);
        m_sensorThread->Start("SensorThread", PRIORITY_DEFAULT, 0);
        m_isIspStarted = true;
    }
    m_mainThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY);
    return 0;
}
1617 | ||
1618 | int ExynosCameraHWInterface2::setFrameQueueDstOps(const camera2_frame_queue_dst_ops_t *frame_dst_ops) | |
1619 | { | |
13d8c7b4 | 1620 | ALOGV("DEBUG(%s):", __FUNCTION__); |
c15a6b00 JS |
1621 | if ((NULL != frame_dst_ops) && (NULL != frame_dst_ops->dequeue_frame) |
1622 | && (NULL != frame_dst_ops->cancel_frame) && (NULL !=frame_dst_ops->enqueue_frame)) { | |
1623 | m_frameQueueOps = (camera2_frame_queue_dst_ops_t *)frame_dst_ops; | |
1624 | return 0; | |
1625 | } | |
1626 | else { | |
13d8c7b4 | 1627 | ALOGE("DEBUG(%s):setFrameQueueDstOps : NULL arguments", __FUNCTION__); |
c15a6b00 JS |
1628 | return 1; |
1629 | } | |
1630 | } | |
1631 | ||
1632 | int ExynosCameraHWInterface2::getInProgressCount() | |
1633 | { | |
1264ab16 AR |
1634 | int inProgressJpeg; |
1635 | int inProgressCount; | |
1636 | ||
1637 | { | |
1638 | Mutex::Autolock lock(m_jpegEncoderLock); | |
1639 | inProgressJpeg = m_jpegEncodingCount; | |
1640 | inProgressCount = m_requestManager->GetNumEntries(); | |
1641 | } | |
32cf9401 | 1642 | ALOGV("DEBUG(%s): # of dequeued req (%d) jpeg(%d) = (%d)", __FUNCTION__, |
1264ab16 AR |
1643 | inProgressCount, inProgressJpeg, (inProgressCount + inProgressJpeg)); |
1644 | return (inProgressCount + inProgressJpeg); | |
c15a6b00 JS |
1645 | } |
1646 | ||
// camera2 HAL entry point for flushing in-flight captures.
// Stub implementation: always reports success without cancelling anything.
int ExynosCameraHWInterface2::flushCapturesInProgress()
{
    return 0;
}
1651 | ||
c15a6b00 JS |
1652 | int ExynosCameraHWInterface2::constructDefaultRequest(int request_template, camera_metadata_t **request) |
1653 | { | |
13d8c7b4 | 1654 | ALOGV("DEBUG(%s): making template (%d) ", __FUNCTION__, request_template); |
c15a6b00 JS |
1655 | |
1656 | if (request == NULL) return BAD_VALUE; | |
1657 | if (request_template < 0 || request_template >= CAMERA2_TEMPLATE_COUNT) { | |
1658 | return BAD_VALUE; | |
1659 | } | |
1660 | status_t res; | |
1661 | // Pass 1, calculate size and allocate | |
daa1fcd6 | 1662 | res = m_camera2->constructDefaultRequest(request_template, |
c15a6b00 JS |
1663 | request, |
1664 | true); | |
1665 | if (res != OK) { | |
1666 | return res; | |
1667 | } | |
1668 | // Pass 2, build request | |
daa1fcd6 | 1669 | res = m_camera2->constructDefaultRequest(request_template, |
c15a6b00 JS |
1670 | request, |
1671 | false); | |
1672 | if (res != OK) { | |
1673 | ALOGE("Unable to populate new request for template %d", | |
1674 | request_template); | |
1675 | } | |
1676 | ||
1677 | return res; | |
1678 | } | |
1679 | ||
1680 | int ExynosCameraHWInterface2::allocateStream(uint32_t width, uint32_t height, int format, const camera2_stream_ops_t *stream_ops, | |
1681 | uint32_t *stream_id, uint32_t *format_actual, uint32_t *usage, uint32_t *max_buffers) | |
1682 | { | |
ed4ad5fe | 1683 | ALOGD("(%s): stream width(%d) height(%d) format(%x)", __FUNCTION__, width, height, format); |
5506cebf | 1684 | bool useDirectOutput = false; |
13d8c7b4 SK |
1685 | StreamThread *AllocatedStream; |
1686 | stream_parameters_t newParameters; | |
5506cebf SK |
1687 | substream_parameters_t *subParameters; |
1688 | StreamThread *parentStream; | |
1689 | status_t res; | |
1690 | int allocCase = 0; | |
c15a6b00 | 1691 | |
5506cebf SK |
1692 | if ((format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED || format == CAMERA2_HAL_PIXEL_FORMAT_OPAQUE) && |
1693 | m_camera2->isSupportedResolution(width, height)) { | |
9dd63e1f SK |
1694 | if (!(m_streamThreads[0].get())) { |
1695 | ALOGV("DEBUG(%s): stream 0 not exist", __FUNCTION__); | |
1696 | allocCase = 0; | |
13d8c7b4 SK |
1697 | } |
1698 | else { | |
6bbb593a | 1699 | if ((m_streamThreads[0].get())->m_activated == true) { |
9dd63e1f SK |
1700 | ALOGV("DEBUG(%s): stream 0 exists and activated.", __FUNCTION__); |
1701 | allocCase = 1; | |
1702 | } | |
1703 | else { | |
1704 | ALOGV("DEBUG(%s): stream 0 exists and deactivated.", __FUNCTION__); | |
1705 | allocCase = 2; | |
1706 | } | |
13d8c7b4 | 1707 | } |
5506cebf SK |
1708 | |
1709 | // TODO : instead of that, use calculate aspect ratio and selection with calculated ratio. | |
1710 | if ((width == 1920 && height == 1080) || (width == 1280 && height == 720) | |
1711 | || (width == 720 && height == 480) || (width == 1440 && height == 960) | |
1712 | || (width == 1344 && height == 896)) { | |
6d8e5b08 | 1713 | m_wideAspect = true; |
5506cebf | 1714 | } else { |
6d8e5b08 SK |
1715 | m_wideAspect = false; |
1716 | } | |
1717 | ALOGV("DEBUG(%s): m_wideAspect (%d)", __FUNCTION__, m_wideAspect); | |
1718 | ||
9dd63e1f | 1719 | if (allocCase == 0 || allocCase == 2) { |
5506cebf | 1720 | *stream_id = STREAM_ID_PREVIEW; |
9dd63e1f | 1721 | |
5506cebf | 1722 | m_streamThreads[0] = new StreamThread(this, *stream_id); |
9dd63e1f | 1723 | |
5506cebf | 1724 | AllocatedStream = (StreamThread*)(m_streamThreads[0].get()); |
053d38cf | 1725 | AllocatedStream->Start("StreamThread", PRIORITY_DEFAULT, 0); |
5506cebf SK |
1726 | m_streamThreadInitialize((SignalDrivenThread*)AllocatedStream); |
1727 | ||
1728 | *format_actual = HAL_PIXEL_FORMAT_EXYNOS_YV12; | |
1729 | *usage = GRALLOC_USAGE_SW_WRITE_OFTEN; | |
6568c0f1 AR |
1730 | if (m_wideAspect) |
1731 | *usage |= GRALLOC_USAGE_PRIVATE_CHROMA; | |
a0ea191c | 1732 | *max_buffers = 7; |
5506cebf SK |
1733 | |
1734 | newParameters.width = width; | |
1735 | newParameters.height = height; | |
1736 | newParameters.format = *format_actual; | |
1737 | newParameters.streamOps = stream_ops; | |
1738 | newParameters.usage = *usage; | |
ac8c2060 | 1739 | newParameters.numHwBuffers = NUM_SCP_BUFFERS; |
5506cebf SK |
1740 | newParameters.numOwnSvcBuffers = *max_buffers; |
1741 | newParameters.planes = NUM_PLANES(*format_actual); | |
1742 | newParameters.metaPlanes = 1; | |
1743 | newParameters.numSvcBufsInHal = 0; | |
a85ec381 | 1744 | newParameters.minUndequedBuffer = 3; |
bf96172c | 1745 | newParameters.needsIonMap = true; |
5506cebf SK |
1746 | |
1747 | newParameters.node = &m_camera_info.scp; | |
1748 | newParameters.node->type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; | |
1749 | newParameters.node->memory = V4L2_MEMORY_DMABUF; | |
1750 | ||
1751 | AllocatedStream->streamType = STREAM_TYPE_DIRECT; | |
1752 | AllocatedStream->m_index = 0; | |
9dd63e1f SK |
1753 | AllocatedStream->setParameter(&newParameters); |
1754 | AllocatedStream->m_activated = true; | |
5506cebf SK |
1755 | AllocatedStream->m_numRegisteredStream = 1; |
1756 | ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, AllocatedStream->m_numRegisteredStream); | |
ad37861e SK |
1757 | m_requestManager->SetDefaultParameters(m_camera2->getSensorW()); |
1758 | m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[2] = m_camera2->getSensorW(); | |
5506cebf SK |
1759 | if (m_subStreams[STREAM_ID_RECORD].type != SUBSTREAM_TYPE_NONE) |
1760 | AllocatedStream->attachSubStream(STREAM_ID_RECORD, 10); | |
1761 | if (m_subStreams[STREAM_ID_PRVCB].type != SUBSTREAM_TYPE_NONE) | |
1762 | AllocatedStream->attachSubStream(STREAM_ID_PRVCB, 70); | |
a3bcc37b AR |
1763 | |
1764 | // set video stabilization killswitch | |
1765 | m_requestManager->m_vdisEnable = width > 352 && height > 288; | |
1766 | ||
9dd63e1f | 1767 | return 0; |
5506cebf SK |
1768 | } else if (allocCase == 1) { |
1769 | *stream_id = STREAM_ID_RECORD; | |
1770 | ||
1771 | subParameters = &m_subStreams[STREAM_ID_RECORD]; | |
1772 | memset(subParameters, 0, sizeof(substream_parameters_t)); | |
1773 | ||
9dd63e1f SK |
1774 | parentStream = (StreamThread*)(m_streamThreads[0].get()); |
1775 | if (!parentStream) { | |
1776 | return 1; | |
9dd63e1f | 1777 | } |
9dd63e1f | 1778 | |
804236a7 | 1779 | *format_actual = HAL_PIXEL_FORMAT_YCbCr_420_SP; // NV12M |
6bbb593a | 1780 | *usage = GRALLOC_USAGE_SW_WRITE_OFTEN; |
6568c0f1 AR |
1781 | if (m_wideAspect) |
1782 | *usage |= GRALLOC_USAGE_PRIVATE_CHROMA; | |
a0ea191c | 1783 | *max_buffers = 7; |
9dd63e1f | 1784 | |
5506cebf SK |
1785 | subParameters->type = SUBSTREAM_TYPE_RECORD; |
1786 | subParameters->width = width; | |
1787 | subParameters->height = height; | |
1788 | subParameters->format = *format_actual; | |
1789 | subParameters->svcPlanes = NUM_PLANES(*format_actual); | |
1790 | subParameters->streamOps = stream_ops; | |
1791 | subParameters->usage = *usage; | |
1792 | subParameters->numOwnSvcBuffers = *max_buffers; | |
1793 | subParameters->numSvcBufsInHal = 0; | |
1794 | subParameters->needBufferInit = false; | |
1795 | subParameters->minUndequedBuffer = 2; | |
1796 | ||
1797 | res = parentStream->attachSubStream(STREAM_ID_RECORD, 20); | |
1798 | if (res != NO_ERROR) { | |
1799 | ALOGE("(%s): substream attach failed. res(%d)", __FUNCTION__, res); | |
1800 | return 1; | |
1801 | } | |
1802 | ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, parentStream->m_numRegisteredStream); | |
1803 | ALOGV("(%s): Enabling Record", __FUNCTION__); | |
9dd63e1f SK |
1804 | return 0; |
1805 | } | |
13d8c7b4 | 1806 | } |
b8d41ae2 | 1807 | else if ((format == CAMERA2_HAL_PIXEL_FORMAT_ZSL) |
5506cebf | 1808 | && (width == m_camera2->getSensorW()) && (height == m_camera2->getSensorH())) { |
13d8c7b4 | 1809 | |
5506cebf SK |
1810 | if (!(m_streamThreads[1].get())) { |
1811 | ALOGV("DEBUG(%s): stream thread 1 not exist", __FUNCTION__); | |
1812 | useDirectOutput = true; | |
6d8e5b08 SK |
1813 | } |
1814 | else { | |
5506cebf | 1815 | ALOGV("DEBUG(%s): stream thread 1 exists and deactivated.", __FUNCTION__); |
90e439c1 | 1816 | useDirectOutput = false; |
5506cebf SK |
1817 | } |
1818 | if (useDirectOutput) { | |
1819 | *stream_id = STREAM_ID_ZSL; | |
1820 | ||
053d38cf | 1821 | m_streamThreads[1] = new StreamThread(this, *stream_id); |
5506cebf | 1822 | AllocatedStream = (StreamThread*)(m_streamThreads[1].get()); |
053d38cf | 1823 | AllocatedStream->Start("StreamThread", PRIORITY_DEFAULT, 0); |
5506cebf SK |
1824 | m_streamThreadInitialize((SignalDrivenThread*)AllocatedStream); |
1825 | ||
1826 | *format_actual = HAL_PIXEL_FORMAT_EXYNOS_YV12; | |
5506cebf SK |
1827 | |
1828 | *format_actual = HAL_PIXEL_FORMAT_YCbCr_422_I; // YUYV | |
1829 | *usage = GRALLOC_USAGE_SW_WRITE_OFTEN; | |
6568c0f1 AR |
1830 | if (m_wideAspect) |
1831 | *usage |= GRALLOC_USAGE_PRIVATE_CHROMA; | |
a0ea191c | 1832 | *max_buffers = 7; |
5506cebf SK |
1833 | |
1834 | newParameters.width = width; | |
1835 | newParameters.height = height; | |
1836 | newParameters.format = *format_actual; | |
1837 | newParameters.streamOps = stream_ops; | |
1838 | newParameters.usage = *usage; | |
ac8c2060 | 1839 | newParameters.numHwBuffers = NUM_SCC_BUFFERS; |
5506cebf SK |
1840 | newParameters.numOwnSvcBuffers = *max_buffers; |
1841 | newParameters.planes = NUM_PLANES(*format_actual); | |
1842 | newParameters.metaPlanes = 1; | |
1843 | ||
1844 | newParameters.numSvcBufsInHal = 0; | |
a85ec381 | 1845 | newParameters.minUndequedBuffer = 2; |
bf96172c | 1846 | newParameters.needsIonMap = false; |
5506cebf SK |
1847 | |
1848 | newParameters.node = &m_camera_info.capture; | |
1849 | newParameters.node->type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; | |
1850 | newParameters.node->memory = V4L2_MEMORY_DMABUF; | |
1851 | ||
1852 | AllocatedStream->streamType = STREAM_TYPE_DIRECT; | |
1853 | AllocatedStream->m_index = 1; | |
1854 | AllocatedStream->setParameter(&newParameters); | |
1855 | AllocatedStream->m_activated = true; | |
b8d41ae2 | 1856 | AllocatedStream->m_numRegisteredStream = 1; |
5506cebf SK |
1857 | ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, AllocatedStream->m_numRegisteredStream); |
1858 | return 0; | |
90e439c1 SK |
1859 | } else { |
1860 | bool bJpegExists = false; | |
1861 | AllocatedStream = (StreamThread*)(m_streamThreads[1].get()); | |
1862 | subParameters = &m_subStreams[STREAM_ID_JPEG]; | |
1863 | if (subParameters->type == SUBSTREAM_TYPE_JPEG) { | |
1864 | ALOGD("(%s): jpeg stream exists", __FUNCTION__); | |
1865 | bJpegExists = true; | |
1866 | AllocatedStream->detachSubStream(STREAM_ID_JPEG); | |
1867 | } | |
1868 | AllocatedStream->m_releasing = true; | |
1869 | ALOGD("START stream thread 1 release %d", __LINE__); | |
1870 | do { | |
1871 | AllocatedStream->release(); | |
041f38de | 1872 | usleep(SIG_WAITING_TICK); |
90e439c1 SK |
1873 | } while (AllocatedStream->m_releasing); |
1874 | ALOGD("END stream thread 1 release %d", __LINE__); | |
1875 | ||
1876 | *stream_id = STREAM_ID_ZSL; | |
1877 | ||
1878 | m_streamThreadInitialize((SignalDrivenThread*)AllocatedStream); | |
1879 | ||
1880 | *format_actual = HAL_PIXEL_FORMAT_EXYNOS_YV12; | |
90e439c1 SK |
1881 | |
1882 | *format_actual = HAL_PIXEL_FORMAT_YCbCr_422_I; // YUYV | |
1883 | *usage = GRALLOC_USAGE_SW_WRITE_OFTEN; | |
6568c0f1 AR |
1884 | if (m_wideAspect) |
1885 | *usage |= GRALLOC_USAGE_PRIVATE_CHROMA; | |
a0ea191c | 1886 | *max_buffers = 7; |
90e439c1 SK |
1887 | |
1888 | newParameters.width = width; | |
1889 | newParameters.height = height; | |
1890 | newParameters.format = *format_actual; | |
1891 | newParameters.streamOps = stream_ops; | |
1892 | newParameters.usage = *usage; | |
1893 | newParameters.numHwBuffers = NUM_SCC_BUFFERS; | |
1894 | newParameters.numOwnSvcBuffers = *max_buffers; | |
1895 | newParameters.planes = NUM_PLANES(*format_actual); | |
1896 | newParameters.metaPlanes = 1; | |
1897 | ||
1898 | newParameters.numSvcBufsInHal = 0; | |
bf96172c SK |
1899 | newParameters.minUndequedBuffer = 2; |
1900 | newParameters.needsIonMap = false; | |
90e439c1 SK |
1901 | |
1902 | newParameters.node = &m_camera_info.capture; | |
1903 | newParameters.node->type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; | |
1904 | newParameters.node->memory = V4L2_MEMORY_DMABUF; | |
1905 | ||
1906 | AllocatedStream->streamType = STREAM_TYPE_DIRECT; | |
1907 | AllocatedStream->m_index = 1; | |
1908 | AllocatedStream->setParameter(&newParameters); | |
1909 | AllocatedStream->m_activated = true; | |
1910 | AllocatedStream->m_numRegisteredStream = 1; | |
1911 | if (bJpegExists) { | |
1912 | AllocatedStream->attachSubStream(STREAM_ID_JPEG, 10); | |
1913 | } | |
1914 | ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, AllocatedStream->m_numRegisteredStream); | |
1915 | return 0; | |
1916 | ||
6d8e5b08 | 1917 | } |
5506cebf SK |
1918 | } |
1919 | else if (format == HAL_PIXEL_FORMAT_BLOB | |
1920 | && m_camera2->isSupportedJpegResolution(width, height)) { | |
1921 | *stream_id = STREAM_ID_JPEG; | |
6d8e5b08 | 1922 | |
5506cebf SK |
1923 | subParameters = &m_subStreams[*stream_id]; |
1924 | memset(subParameters, 0, sizeof(substream_parameters_t)); | |
9dd63e1f | 1925 | |
5506cebf SK |
1926 | if (!(m_streamThreads[1].get())) { |
1927 | ALOGV("DEBUG(%s): stream thread 1 not exist", __FUNCTION__); | |
1928 | StartSCCThread(false); | |
1929 | } | |
1930 | else if (m_streamThreads[1]->m_activated == false) { | |
1931 | ALOGV("DEBUG(%s): stream thread 1 suspended. restarting", __FUNCTION__); | |
1932 | StartSCCThread(true); | |
1933 | } | |
1934 | parentStream = (StreamThread*)(m_streamThreads[1].get()); | |
13d8c7b4 SK |
1935 | |
1936 | *format_actual = HAL_PIXEL_FORMAT_BLOB; | |
13d8c7b4 | 1937 | *usage = GRALLOC_USAGE_SW_WRITE_OFTEN; |
6568c0f1 AR |
1938 | if (m_wideAspect) |
1939 | *usage |= GRALLOC_USAGE_PRIVATE_CHROMA; | |
a0ea191c | 1940 | *max_buffers = 5; |
13d8c7b4 | 1941 | |
5506cebf SK |
1942 | subParameters->type = SUBSTREAM_TYPE_JPEG; |
1943 | subParameters->width = width; | |
1944 | subParameters->height = height; | |
1945 | subParameters->format = *format_actual; | |
1946 | subParameters->svcPlanes = 1; | |
1947 | subParameters->streamOps = stream_ops; | |
1948 | subParameters->usage = *usage; | |
1949 | subParameters->numOwnSvcBuffers = *max_buffers; | |
1950 | subParameters->numSvcBufsInHal = 0; | |
1951 | subParameters->needBufferInit = false; | |
1952 | subParameters->minUndequedBuffer = 2; | |
1953 | ||
1954 | res = parentStream->attachSubStream(STREAM_ID_JPEG, 10); | |
1955 | if (res != NO_ERROR) { | |
1956 | ALOGE("(%s): substream attach failed. res(%d)", __FUNCTION__, res); | |
1957 | return 1; | |
1958 | } | |
1959 | ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, parentStream->m_numRegisteredStream); | |
1960 | ALOGV("(%s): Enabling Jpeg", __FUNCTION__); | |
13d8c7b4 SK |
1961 | return 0; |
1962 | } | |
74d78ebe | 1963 | else if (format == HAL_PIXEL_FORMAT_YCrCb_420_SP || format == HAL_PIXEL_FORMAT_YV12) { |
5506cebf SK |
1964 | *stream_id = STREAM_ID_PRVCB; |
1965 | ||
1966 | subParameters = &m_subStreams[STREAM_ID_PRVCB]; | |
1967 | memset(subParameters, 0, sizeof(substream_parameters_t)); | |
1968 | ||
74d78ebe SK |
1969 | parentStream = (StreamThread*)(m_streamThreads[0].get()); |
1970 | if (!parentStream) { | |
74d78ebe SK |
1971 | return 1; |
1972 | } | |
74d78ebe SK |
1973 | |
1974 | *format_actual = format; | |
1975 | *usage = GRALLOC_USAGE_SW_WRITE_OFTEN; | |
6568c0f1 AR |
1976 | if (m_wideAspect) |
1977 | *usage |= GRALLOC_USAGE_PRIVATE_CHROMA; | |
a0ea191c | 1978 | *max_buffers = 7; |
5506cebf SK |
1979 | |
1980 | subParameters->type = SUBSTREAM_TYPE_PRVCB; | |
1981 | subParameters->width = width; | |
1982 | subParameters->height = height; | |
1983 | subParameters->format = *format_actual; | |
1984 | subParameters->svcPlanes = NUM_PLANES(*format_actual); | |
1985 | subParameters->streamOps = stream_ops; | |
1986 | subParameters->usage = *usage; | |
1987 | subParameters->numOwnSvcBuffers = *max_buffers; | |
1988 | subParameters->numSvcBufsInHal = 0; | |
1989 | subParameters->needBufferInit = false; | |
1990 | subParameters->minUndequedBuffer = 2; | |
1991 | ||
1992 | if (format == HAL_PIXEL_FORMAT_YCrCb_420_SP) { | |
1993 | subParameters->internalFormat = HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP; | |
1994 | subParameters->internalPlanes = NUM_PLANES(HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP); | |
1995 | } | |
1996 | else { | |
1997 | subParameters->internalFormat = HAL_PIXEL_FORMAT_EXYNOS_YV12; | |
1998 | subParameters->internalPlanes = NUM_PLANES(HAL_PIXEL_FORMAT_EXYNOS_YV12); | |
1999 | } | |
74d78ebe | 2000 | |
5506cebf SK |
2001 | res = parentStream->attachSubStream(STREAM_ID_PRVCB, 20); |
2002 | if (res != NO_ERROR) { | |
2003 | ALOGE("(%s): substream attach failed. res(%d)", __FUNCTION__, res); | |
2004 | return 1; | |
74d78ebe | 2005 | } |
5506cebf SK |
2006 | ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, parentStream->m_numRegisteredStream); |
2007 | ALOGV("(%s): Enabling previewcb", __FUNCTION__); | |
74d78ebe SK |
2008 | return 0; |
2009 | } | |
ed4ad5fe | 2010 | ALOGE("(%s): Unsupported Pixel Format", __FUNCTION__); |
5506cebf | 2011 | return 1; |
c15a6b00 JS |
2012 | } |
2013 | ||
13d8c7b4 SK |
2014 | int ExynosCameraHWInterface2::registerStreamBuffers(uint32_t stream_id, |
2015 | int num_buffers, buffer_handle_t *registeringBuffers) | |
c15a6b00 | 2016 | { |
13d8c7b4 SK |
2017 | int i,j; |
2018 | void *virtAddr[3]; | |
5506cebf SK |
2019 | int plane_index = 0; |
2020 | StreamThread * targetStream; | |
13d8c7b4 SK |
2021 | stream_parameters_t *targetStreamParms; |
2022 | node_info_t *currentNode; | |
2023 | ||
c15a6b00 JS |
2024 | struct v4l2_buffer v4l2_buf; |
2025 | struct v4l2_plane planes[VIDEO_MAX_PLANES]; | |
13d8c7b4 | 2026 | |
ed4ad5fe | 2027 | ALOGD("(%s): stream_id(%d), num_buff(%d), handle(%x) ", __FUNCTION__, |
13d8c7b4 SK |
2028 | stream_id, num_buffers, (uint32_t)registeringBuffers); |
2029 | ||
5506cebf SK |
2030 | if (stream_id == STREAM_ID_PREVIEW && m_streamThreads[0].get()) { |
2031 | targetStream = m_streamThreads[0].get(); | |
13d8c7b4 | 2032 | targetStreamParms = &(m_streamThreads[0]->m_parameters); |
5c88d1f2 | 2033 | |
13d8c7b4 | 2034 | } |
5506cebf SK |
2035 | else if (stream_id == STREAM_ID_JPEG || stream_id == STREAM_ID_RECORD || stream_id == STREAM_ID_PRVCB) { |
2036 | substream_parameters_t *targetParms; | |
2037 | targetParms = &m_subStreams[stream_id]; | |
9dd63e1f | 2038 | |
5506cebf | 2039 | targetParms->numSvcBuffers = num_buffers; |
9dd63e1f | 2040 | |
5506cebf SK |
2041 | for (i = 0 ; i < targetParms->numSvcBuffers ; i++) { |
2042 | ALOGV("(%s): registering substream(%d) Buffers[%d] (%x) ", __FUNCTION__, | |
2043 | i, stream_id, (uint32_t)(registeringBuffers[i])); | |
9dd63e1f SK |
2044 | if (m_grallocHal) { |
2045 | if (m_grallocHal->lock(m_grallocHal, registeringBuffers[i], | |
5506cebf SK |
2046 | targetParms->usage, 0, 0, |
2047 | targetParms->width, targetParms->height, virtAddr) != 0) { | |
9dd63e1f SK |
2048 | ALOGE("ERR(%s): could not obtain gralloc buffer", __FUNCTION__); |
2049 | } | |
2050 | else { | |
2051 | ExynosBuffer currentBuf; | |
2052 | const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(registeringBuffers[i]); | |
5506cebf SK |
2053 | if (targetParms->svcPlanes == 1) { |
2054 | currentBuf.fd.extFd[0] = priv_handle->fd; | |
2055 | currentBuf.size.extS[0] = priv_handle->size; | |
2056 | currentBuf.size.extS[1] = 0; | |
2057 | currentBuf.size.extS[2] = 0; | |
2058 | } else if (targetParms->svcPlanes == 2) { | |
2059 | currentBuf.fd.extFd[0] = priv_handle->fd; | |
2060 | currentBuf.fd.extFd[1] = priv_handle->fd1; | |
2061 | ||
2062 | } else if (targetParms->svcPlanes == 3) { | |
2063 | currentBuf.fd.extFd[0] = priv_handle->fd; | |
2064 | currentBuf.fd.extFd[1] = priv_handle->fd1; | |
2065 | currentBuf.fd.extFd[2] = priv_handle->fd2; | |
2066 | } | |
2067 | for (plane_index = 0 ; plane_index < targetParms->svcPlanes ; plane_index++) { | |
9dd63e1f | 2068 | currentBuf.virt.extP[plane_index] = (char *)virtAddr[plane_index]; |
0d220b42 | 2069 | CAM_LOGV("DEBUG(%s): plane(%d): fd(%d) addr(%x) size(%d)", |
804236a7 | 2070 | __FUNCTION__, plane_index, currentBuf.fd.extFd[plane_index], |
5506cebf | 2071 | (unsigned int)currentBuf.virt.extP[plane_index], currentBuf.size.extS[plane_index]); |
9dd63e1f | 2072 | } |
5506cebf SK |
2073 | targetParms->svcBufStatus[i] = ON_SERVICE; |
2074 | targetParms->svcBuffers[i] = currentBuf; | |
2075 | targetParms->svcBufHandle[i] = registeringBuffers[i]; | |
9dd63e1f SK |
2076 | } |
2077 | } | |
2078 | } | |
5506cebf | 2079 | targetParms->needBufferInit = true; |
9dd63e1f SK |
2080 | return 0; |
2081 | } | |
5506cebf SK |
2082 | else if (stream_id == STREAM_ID_ZSL && m_streamThreads[1].get()) { |
2083 | targetStream = m_streamThreads[1].get(); | |
2084 | targetStreamParms = &(m_streamThreads[1]->m_parameters); | |
74d78ebe | 2085 | } |
13d8c7b4 | 2086 | else { |
ed4ad5fe | 2087 | ALOGE("(%s): unregistered stream id (%d)", __FUNCTION__, stream_id); |
be494d19 | 2088 | return 1; |
13d8c7b4 | 2089 | } |
c15a6b00 | 2090 | |
5506cebf | 2091 | if (targetStream->streamType == STREAM_TYPE_DIRECT) { |
13d8c7b4 SK |
2092 | if (num_buffers < targetStreamParms->numHwBuffers) { |
2093 | ALOGE("ERR(%s) registering insufficient num of buffers (%d) < (%d)", | |
2094 | __FUNCTION__, num_buffers, targetStreamParms->numHwBuffers); | |
be494d19 | 2095 | return 1; |
13d8c7b4 SK |
2096 | } |
2097 | } | |
0d220b42 | 2098 | CAM_LOGV("DEBUG(%s): format(%x) width(%d), height(%d) svcPlanes(%d)", |
5506cebf SK |
2099 | __FUNCTION__, targetStreamParms->format, targetStreamParms->width, |
2100 | targetStreamParms->height, targetStreamParms->planes); | |
13d8c7b4 | 2101 | targetStreamParms->numSvcBuffers = num_buffers; |
5506cebf SK |
2102 | currentNode = targetStreamParms->node; |
2103 | currentNode->width = targetStreamParms->width; | |
2104 | currentNode->height = targetStreamParms->height; | |
2105 | currentNode->format = HAL_PIXEL_FORMAT_2_V4L2_PIX(targetStreamParms->format); | |
2106 | currentNode->planes = targetStreamParms->planes; | |
13d8c7b4 | 2107 | currentNode->buffers = targetStreamParms->numHwBuffers; |
5506cebf SK |
2108 | cam_int_s_input(currentNode, m_camera_info.sensor_id); |
2109 | cam_int_s_fmt(currentNode); | |
2110 | cam_int_reqbufs(currentNode); | |
2111 | for (i = 0 ; i < targetStreamParms->numSvcBuffers ; i++) { | |
13d8c7b4 SK |
2112 | ALOGV("DEBUG(%s): registering Stream Buffers[%d] (%x) ", __FUNCTION__, |
2113 | i, (uint32_t)(registeringBuffers[i])); | |
13d8c7b4 SK |
2114 | v4l2_buf.m.planes = planes; |
2115 | v4l2_buf.type = currentNode->type; | |
2116 | v4l2_buf.memory = currentNode->memory; | |
2117 | v4l2_buf.index = i; | |
2118 | v4l2_buf.length = currentNode->planes; | |
c15a6b00 | 2119 | |
13d8c7b4 | 2120 | ExynosBuffer currentBuf; |
feb7df4c | 2121 | ExynosBuffer metaBuf; |
13d8c7b4 SK |
2122 | const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(registeringBuffers[i]); |
2123 | ||
2124 | m_getAlignedYUVSize(currentNode->format, | |
2125 | currentNode->width, currentNode->height, ¤tBuf); | |
24231221 | 2126 | |
37e122d5 SK |
2127 | ALOGV("DEBUG(%s): ion_size(%d), stride(%d), ", __FUNCTION__, priv_handle->size, priv_handle->stride); |
2128 | if (currentNode->planes == 1) { | |
74d78ebe SK |
2129 | v4l2_buf.m.planes[0].m.fd = priv_handle->fd; |
2130 | currentBuf.fd.extFd[0] = priv_handle->fd; | |
37e122d5 SK |
2131 | currentBuf.size.extS[0] = priv_handle->size; |
2132 | currentBuf.size.extS[1] = 0; | |
2133 | currentBuf.size.extS[2] = 0; | |
74d78ebe SK |
2134 | } else if (currentNode->planes == 2) { |
2135 | v4l2_buf.m.planes[0].m.fd = priv_handle->fd; | |
2136 | v4l2_buf.m.planes[1].m.fd = priv_handle->fd1; | |
2137 | currentBuf.fd.extFd[0] = priv_handle->fd; | |
2138 | currentBuf.fd.extFd[1] = priv_handle->fd1; | |
2139 | ||
2140 | } else if (currentNode->planes == 3) { | |
2141 | v4l2_buf.m.planes[0].m.fd = priv_handle->fd; | |
2142 | v4l2_buf.m.planes[2].m.fd = priv_handle->fd1; | |
2143 | v4l2_buf.m.planes[1].m.fd = priv_handle->fd2; | |
2144 | currentBuf.fd.extFd[0] = priv_handle->fd; | |
2145 | currentBuf.fd.extFd[2] = priv_handle->fd1; | |
2146 | currentBuf.fd.extFd[1] = priv_handle->fd2; | |
37e122d5 | 2147 | } |
0d220b42 | 2148 | |
5506cebf | 2149 | for (plane_index = 0 ; plane_index < (int)v4l2_buf.length ; plane_index++) { |
bf96172c SK |
2150 | if (targetStreamParms->needsIonMap) |
2151 | currentBuf.virt.extP[plane_index] = (char *)ion_map(currentBuf.fd.extFd[plane_index], currentBuf.size.extS[plane_index], 0); | |
13d8c7b4 | 2152 | v4l2_buf.m.planes[plane_index].length = currentBuf.size.extS[plane_index]; |
bf96172c | 2153 | ALOGV("(%s): MAPPING plane(%d): fd(%d) addr(%x), length(%d)", |
13d8c7b4 SK |
2154 | __FUNCTION__, plane_index, v4l2_buf.m.planes[plane_index].m.fd, |
2155 | (unsigned int)currentBuf.virt.extP[plane_index], | |
2156 | v4l2_buf.m.planes[plane_index].length); | |
2157 | } | |
c15a6b00 | 2158 | |
5506cebf | 2159 | if (i < currentNode->buffers) { |
feb7df4c SK |
2160 | |
2161 | ||
2162 | #ifdef ENABLE_FRAME_SYNC | |
5506cebf SK |
2163 | /* add plane for metadata*/ |
2164 | metaBuf.size.extS[0] = 4*1024; | |
2165 | allocCameraMemory(m_ionCameraClient , &metaBuf, 1, 1<<0); | |
feb7df4c | 2166 | |
5506cebf SK |
2167 | v4l2_buf.length += targetStreamParms->metaPlanes; |
2168 | v4l2_buf.m.planes[v4l2_buf.length-1].m.fd = metaBuf.fd.extFd[0]; | |
2169 | v4l2_buf.m.planes[v4l2_buf.length-1].length = metaBuf.size.extS[0]; | |
feb7df4c | 2170 | |
5506cebf | 2171 | ALOGV("Qbuf metaBuf: fd(%d), length(%d) plane(%d)", metaBuf.fd.extFd[0], metaBuf.size.extS[0], v4l2_buf.length); |
feb7df4c | 2172 | #endif |
5506cebf SK |
2173 | if (exynos_v4l2_qbuf(currentNode->fd, &v4l2_buf) < 0) { |
2174 | ALOGE("ERR(%s): stream id(%d) exynos_v4l2_qbuf() fail fd(%d)", | |
2175 | __FUNCTION__, stream_id, currentNode->fd); | |
13d8c7b4 | 2176 | } |
5506cebf SK |
2177 | ALOGV("DEBUG(%s): stream id(%d) exynos_v4l2_qbuf() success fd(%d)", |
2178 | __FUNCTION__, stream_id, currentNode->fd); | |
2179 | targetStreamParms->svcBufStatus[i] = REQUIRES_DQ_FROM_SVC; | |
13d8c7b4 | 2180 | } |
5506cebf | 2181 | else { |
13d8c7b4 | 2182 | targetStreamParms->svcBufStatus[i] = ON_SERVICE; |
c15a6b00 | 2183 | } |
5506cebf | 2184 | |
13d8c7b4 | 2185 | targetStreamParms->svcBuffers[i] = currentBuf; |
feb7df4c | 2186 | targetStreamParms->metaBuffers[i] = metaBuf; |
13d8c7b4 SK |
2187 | targetStreamParms->svcBufHandle[i] = registeringBuffers[i]; |
2188 | } | |
6d8e5b08 | 2189 | |
5506cebf SK |
2190 | ALOGV("DEBUG(%s): calling streamon stream id = %d", __FUNCTION__, stream_id); |
2191 | cam_int_streamon(targetStreamParms->node); | |
ad37861e | 2192 | ALOGV("DEBUG(%s): calling streamon END", __FUNCTION__); |
5506cebf | 2193 | currentNode->status = true; |
13d8c7b4 | 2194 | ALOGV("DEBUG(%s): END registerStreamBuffers", __FUNCTION__); |
ad37861e | 2195 | |
c15a6b00 JS |
2196 | return 0; |
2197 | } | |
2198 | ||
/*
 * Release a stream previously set up via allocateStream.
 *
 * STREAM_ID_PREVIEW and STREAM_ID_ZSL are "main" streams owned by a
 * StreamThread; releasing them decrements the thread's registered-stream
 * count and unmaps any ion-mapped service buffers. STREAM_ID_JPEG /
 * STREAM_ID_RECORD / STREAM_ID_PRVCB are substreams detached from their
 * parent StreamThread; their scratch buffers are freed here.
 *
 * Returns 0 on success, 1 on failure (unknown id or detach failure).
 * A missing stream thread is treated as already-released (NO_ERROR).
 */
int ExynosCameraHWInterface2::releaseStream(uint32_t stream_id)
{
    StreamThread *targetStream;
    status_t res = NO_ERROR;
    ALOGD("(%s): stream_id(%d)", __FUNCTION__, stream_id);
    bool releasingScpMain = false;  // true => preview (SCP) main stream is going away

    if (stream_id == STREAM_ID_PREVIEW) {
        // Main preview stream lives on stream thread 0.
        targetStream = (StreamThread*)(m_streamThreads[0].get());
        if (!targetStream) {
            ALOGW("(%s): Stream Not Exists", __FUNCTION__);
            return NO_ERROR;
        }
        targetStream->m_numRegisteredStream--;
        ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, targetStream->m_numRegisteredStream);
        releasingScpMain = true;
        // Undo the ion_map() done at buffer registration time.
        if (targetStream->m_parameters.needsIonMap) {
            for (int i = 0; i < targetStream->m_parameters.numSvcBuffers; i++) {
                for (int j = 0; j < targetStream->m_parameters.planes; j++) {
                    ion_unmap(targetStream->m_parameters.svcBuffers[i].virt.extP[j],
                                    targetStream->m_parameters.svcBuffers[i].size.extS[j]);
                    ALOGV("(%s) ummap stream buffer[%d], plane(%d), fd %d vaddr %x", __FUNCTION__, i, j,
                          targetStream->m_parameters.svcBuffers[i].fd.extFd[j], (unsigned int)(targetStream->m_parameters.svcBuffers[i].virt.extP[j]));
                }
            }
        }
    } else if (stream_id == STREAM_ID_JPEG) {
        // Free the JPEG resize scratch buffer, if one was allocated.
        if (m_resizeBuf.size.s != 0) {
            freeCameraMemory(&m_resizeBuf, 1);
        }
        memset(&m_subStreams[stream_id], 0, sizeof(substream_parameters_t));

        // JPEG is a substream of stream thread 1.
        targetStream = (StreamThread*)(m_streamThreads[1].get());
        if (!targetStream) {
            ALOGW("(%s): Stream Not Exists", __FUNCTION__);
            return NO_ERROR;
        }

        if (targetStream->detachSubStream(stream_id) != NO_ERROR) {
            ALOGE("(%s): substream detach failed. res(%d)", __FUNCTION__, res);
            return 1;
        }
        ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, targetStream->m_numRegisteredStream);
        return 0;
    } else if (stream_id == STREAM_ID_RECORD) {
        memset(&m_subStreams[stream_id], 0, sizeof(substream_parameters_t));

        // Recording is a substream of the preview (SCP) stream thread.
        targetStream = (StreamThread*)(m_streamThreads[0].get());
        if (!targetStream) {
            ALOGW("(%s): Stream Not Exists", __FUNCTION__);
            return NO_ERROR;
        }

        if (targetStream->detachSubStream(stream_id) != NO_ERROR) {
            ALOGE("(%s): substream detach failed. res(%d)", __FUNCTION__, res);
            return 1;
        }

        // Other streams still registered: the parent thread keeps running.
        if (targetStream->m_numRegisteredStream != 0)
            return 0;
    } else if (stream_id == STREAM_ID_PRVCB) {
        // Free the preview-callback conversion buffer, if one was allocated.
        if (m_previewCbBuf.size.s != 0) {
            freeCameraMemory(&m_previewCbBuf, m_subStreams[stream_id].internalPlanes);
        }
        memset(&m_subStreams[stream_id], 0, sizeof(substream_parameters_t));

        targetStream = (StreamThread*)(m_streamThreads[0].get());
        if (!targetStream) {
            ALOGW("(%s): Stream Not Exists", __FUNCTION__);
            return NO_ERROR;
        }

        if (targetStream->detachSubStream(stream_id) != NO_ERROR) {
            ALOGE("(%s): substream detach failed. res(%d)", __FUNCTION__, res);
            return 1;
        }

        if (targetStream->m_numRegisteredStream != 0)
            return 0;
    } else if (stream_id == STREAM_ID_ZSL) {
        // ZSL is the main stream of stream thread 1.
        targetStream = (StreamThread*)(m_streamThreads[1].get());
        if (!targetStream) {
            ALOGW("(%s): Stream Not Exists", __FUNCTION__);
            return NO_ERROR;
        }

        targetStream->m_numRegisteredStream--;
        ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, targetStream->m_numRegisteredStream);
        // Undo the ion_map() done at buffer registration time.
        if (targetStream->m_parameters.needsIonMap) {
            for (int i = 0; i < targetStream->m_parameters.numSvcBuffers; i++) {
                for (int j = 0; j < targetStream->m_parameters.planes; j++) {
                    ion_unmap(targetStream->m_parameters.svcBuffers[i].virt.extP[j],
                                    targetStream->m_parameters.svcBuffers[i].size.extS[j]);
                    ALOGV("(%s) ummap stream buffer[%d], plane(%d), fd %d vaddr %x", __FUNCTION__, i, j,
                          targetStream->m_parameters.svcBuffers[i].fd.extFd[j], (unsigned int)(targetStream->m_parameters.svcBuffers[i].virt.extP[j]));
                }
            }
        }
    } else {
        ALOGE("ERR:(%s): wrong stream id (%d)", __FUNCTION__, stream_id);
        return 1;
    }

    // Tearing down the preview main stream also stops the sensor thread;
    // busy-wait (polling) until it reports termination.
    if (m_sensorThread != NULL && releasingScpMain) {
        m_sensorThread->release();
        ALOGD("(%s): START Waiting for (indirect) sensor thread termination", __FUNCTION__);
        while (!m_sensorThread->IsTerminated())
            usleep(SIG_WAITING_TICK);
        ALOGD("(%s): END Waiting for (indirect) sensor thread termination", __FUNCTION__);
    }

    // Deactivate stream thread 1 (capture/ZSL) once nothing is registered on it.
    // NOTE(review): m_streamThreads[1] is dereferenced here without a NULL
    // check, unlike m_streamThreads[0] below — confirm it can never be NULL
    // at this point.
    if (m_streamThreads[1]->m_numRegisteredStream == 0 && m_streamThreads[1]->m_activated) {
        ALOGV("(%s): deactivating stream thread 1 ", __FUNCTION__);
        targetStream = (StreamThread*)(m_streamThreads[1].get());
        targetStream->m_releasing = true;
        ALOGD("START stream thread release %d", __LINE__);
        do {
            // Repeat the release signal until the thread clears m_releasing.
            targetStream->release();
            usleep(SIG_WAITING_TICK);
        } while (targetStream->m_releasing);
        m_camera_info.capture.status = false;
        ALOGD("END stream thread release %d", __LINE__);
    }

    // Deactivate stream thread 0 (preview) and fully terminate it.
    if (releasingScpMain || (m_streamThreads[0].get() != NULL && m_streamThreads[0]->m_numRegisteredStream == 0 && m_streamThreads[0]->m_activated)) {
        ALOGV("(%s): deactivating stream thread 0", __FUNCTION__);
        targetStream = (StreamThread*)(m_streamThreads[0].get());
        targetStream->m_releasing = true;
        ALOGD("(%s): START Waiting for (indirect) stream thread release - line(%d)", __FUNCTION__, __LINE__);
        do {
            targetStream->release();
            usleep(SIG_WAITING_TICK);
        } while (targetStream->m_releasing);
        ALOGD("(%s): END Waiting for (indirect) stream thread release - line(%d)", __FUNCTION__, __LINE__);
        targetStream->SetSignal(SIGNAL_THREAD_TERMINATE);

        if (targetStream != NULL) {
            ALOGD("(%s): START Waiting for (indirect) stream thread termination", __FUNCTION__);
            while (!targetStream->IsTerminated())
                usleep(SIG_WAITING_TICK);
            ALOGD("(%s): END Waiting for (indirect) stream thread termination", __FUNCTION__);
            m_streamThreads[0] = NULL;
        }
        // If a capture was still in flight, remember that SCP was force-stopped.
        if (m_camera_info.capture.status == true) {
            m_scpForceSuspended = true;
        }
        m_isIspStarted = false;
    }
    ALOGV("(%s): END", __FUNCTION__);
    return 0;
}
2350 | ||
/*
 * Stub: legacy reprocess-stream allocation is not supported by this HAL.
 * Reprocessing is instead configured through
 * allocateReprocessStreamFromStream(). All parameters are ignored and 0
 * (success) is returned unconditionally.
 */
int ExynosCameraHWInterface2::allocateReprocessStream(
    uint32_t width, uint32_t height, uint32_t format,
    const camera2_stream_in_ops_t *reprocess_stream_ops,
    uint32_t *stream_id, uint32_t *consumer_usage, uint32_t *max_buffers)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return 0;
}
2359 | ||
5506cebf SK |
2360 | int ExynosCameraHWInterface2::allocateReprocessStreamFromStream( |
2361 | uint32_t output_stream_id, | |
2362 | const camera2_stream_in_ops_t *reprocess_stream_ops, | |
2363 | // outputs | |
2364 | uint32_t *stream_id) | |
2365 | { | |
ed4ad5fe | 2366 | ALOGD("(%s): output_stream_id(%d)", __FUNCTION__, output_stream_id); |
5506cebf SK |
2367 | *stream_id = STREAM_ID_JPEG_REPROCESS; |
2368 | ||
2369 | m_reprocessStreamId = *stream_id; | |
2370 | m_reprocessOps = reprocess_stream_ops; | |
2371 | m_reprocessOutputStreamId = output_stream_id; | |
2372 | return 0; | |
2373 | } | |
2374 | ||
c15a6b00 JS |
2375 | int ExynosCameraHWInterface2::releaseReprocessStream(uint32_t stream_id) |
2376 | { | |
ed4ad5fe | 2377 | ALOGD("(%s): stream_id(%d)", __FUNCTION__, stream_id); |
5506cebf SK |
2378 | if (stream_id == STREAM_ID_JPEG_REPROCESS) { |
2379 | m_reprocessStreamId = 0; | |
2380 | m_reprocessOps = NULL; | |
2381 | m_reprocessOutputStreamId = 0; | |
2382 | return 0; | |
2383 | } | |
2384 | return 1; | |
c15a6b00 JS |
2385 | } |
2386 | ||
2387 | int ExynosCameraHWInterface2::triggerAction(uint32_t trigger_id, int ext1, int ext2) | |
2388 | { | |
ca714238 | 2389 | Mutex::Autolock lock(m_afModeTriggerLock); |
0f26b20f SK |
2390 | ALOGV("DEBUG(%s): id(%x), %d, %d", __FUNCTION__, trigger_id, ext1, ext2); |
2391 | ||
2392 | switch (trigger_id) { | |
2393 | case CAMERA2_TRIGGER_AUTOFOCUS: | |
2394 | ALOGV("DEBUG(%s):TRIGGER_AUTOFOCUS id(%d)", __FUNCTION__, ext1); | |
ca714238 | 2395 | OnAfTrigger(ext1); |
0f26b20f SK |
2396 | break; |
2397 | ||
2398 | case CAMERA2_TRIGGER_CANCEL_AUTOFOCUS: | |
2399 | ALOGV("DEBUG(%s):CANCEL_AUTOFOCUS id(%d)", __FUNCTION__, ext1); | |
2400 | OnAfCancel(ext1); | |
2401 | break; | |
e117f756 YJ |
2402 | case CAMERA2_TRIGGER_PRECAPTURE_METERING: |
2403 | ALOGV("DEBUG(%s):CAMERA2_TRIGGER_PRECAPTURE_METERING id(%d)", __FUNCTION__, ext1); | |
2404 | OnPrecaptureMeteringTriggerStart(ext1); | |
2405 | break; | |
0f26b20f SK |
2406 | default: |
2407 | break; | |
2408 | } | |
c15a6b00 JS |
2409 | return 0; |
2410 | } | |
2411 | ||
2412 | int ExynosCameraHWInterface2::setNotifyCallback(camera2_notify_callback notify_cb, void *user) | |
2413 | { | |
0f26b20f | 2414 | ALOGV("DEBUG(%s): cb_addr(%x)", __FUNCTION__, (unsigned int)notify_cb); |
c15a6b00 JS |
2415 | m_notifyCb = notify_cb; |
2416 | m_callbackCookie = user; | |
2417 | return 0; | |
2418 | } | |
2419 | ||
/*
 * Vendor metadata tag query: this HAL defines no vendor-specific tags,
 * which is reported by returning a NULL ops table. Always returns 0.
 */
int ExynosCameraHWInterface2::getMetadataVendorTagOps(vendor_tag_query_ops_t **ops)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    *ops = NULL;
    return 0;
}
2426 | ||
/*
 * Stub: nothing is written to the supplied file descriptor; the debug
 * dump hook is unimplemented. Always returns 0.
 */
int ExynosCameraHWInterface2::dump(int fd)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return 0;
}
2432 | ||
13d8c7b4 SK |
2433 | void ExynosCameraHWInterface2::m_getAlignedYUVSize(int colorFormat, int w, int h, ExynosBuffer *buf) |
2434 | { | |
2435 | switch (colorFormat) { | |
2436 | // 1p | |
2437 | case V4L2_PIX_FMT_RGB565 : | |
2438 | case V4L2_PIX_FMT_YUYV : | |
2439 | case V4L2_PIX_FMT_UYVY : | |
2440 | case V4L2_PIX_FMT_VYUY : | |
2441 | case V4L2_PIX_FMT_YVYU : | |
2442 | buf->size.extS[0] = FRAME_SIZE(V4L2_PIX_2_HAL_PIXEL_FORMAT(colorFormat), w, h); | |
2443 | buf->size.extS[1] = 0; | |
2444 | buf->size.extS[2] = 0; | |
2445 | break; | |
2446 | // 2p | |
2447 | case V4L2_PIX_FMT_NV12 : | |
2448 | case V4L2_PIX_FMT_NV12T : | |
2449 | case V4L2_PIX_FMT_NV21 : | |
2450 | buf->size.extS[0] = ALIGN(w, 16) * ALIGN(h, 16); | |
2451 | buf->size.extS[1] = ALIGN(w/2, 16) * ALIGN(h/2, 16); | |
2452 | buf->size.extS[2] = 0; | |
2453 | break; | |
2454 | case V4L2_PIX_FMT_NV12M : | |
2455 | case V4L2_PIX_FMT_NV12MT_16X16 : | |
9dd63e1f | 2456 | case V4L2_PIX_FMT_NV21M: |
13d8c7b4 SK |
2457 | buf->size.extS[0] = ALIGN(w, 16) * ALIGN(h, 16); |
2458 | buf->size.extS[1] = ALIGN(buf->size.extS[0] / 2, 256); | |
2459 | buf->size.extS[2] = 0; | |
2460 | break; | |
2461 | case V4L2_PIX_FMT_NV16 : | |
2462 | case V4L2_PIX_FMT_NV61 : | |
2463 | buf->size.extS[0] = ALIGN(w, 16) * ALIGN(h, 16); | |
2464 | buf->size.extS[1] = ALIGN(w, 16) * ALIGN(h, 16); | |
2465 | buf->size.extS[2] = 0; | |
2466 | break; | |
2467 | // 3p | |
2468 | case V4L2_PIX_FMT_YUV420 : | |
2469 | case V4L2_PIX_FMT_YVU420 : | |
2470 | buf->size.extS[0] = (w * h); | |
2471 | buf->size.extS[1] = (w * h) >> 2; | |
2472 | buf->size.extS[2] = (w * h) >> 2; | |
2473 | break; | |
2474 | case V4L2_PIX_FMT_YUV420M: | |
2475 | case V4L2_PIX_FMT_YVU420M : | |
4a3f1820 SK |
2476 | buf->size.extS[0] = ALIGN(w, 32) * ALIGN(h, 16); |
2477 | buf->size.extS[1] = ALIGN(w/2, 16) * ALIGN(h/2, 8); | |
2478 | buf->size.extS[2] = ALIGN(w/2, 16) * ALIGN(h/2, 8); | |
2479 | break; | |
13d8c7b4 | 2480 | case V4L2_PIX_FMT_YUV422P : |
0d220b42 | 2481 | buf->size.extS[0] = ALIGN(w, 16) * ALIGN(h, 16); |
13d8c7b4 SK |
2482 | buf->size.extS[1] = ALIGN(w/2, 16) * ALIGN(h/2, 8); |
2483 | buf->size.extS[2] = ALIGN(w/2, 16) * ALIGN(h/2, 8); | |
2484 | break; | |
2485 | default: | |
2486 | ALOGE("ERR(%s):unmatched colorFormat(%d)", __FUNCTION__, colorFormat); | |
2487 | return; | |
2488 | break; | |
2489 | } | |
2490 | } | |
c15a6b00 | 2491 | |
13d8c7b4 SK |
2492 | bool ExynosCameraHWInterface2::m_getRatioSize(int src_w, int src_h, |
2493 | int dst_w, int dst_h, | |
2494 | int *crop_x, int *crop_y, | |
2495 | int *crop_w, int *crop_h, | |
2496 | int zoom) | |
c15a6b00 | 2497 | { |
13d8c7b4 SK |
2498 | *crop_w = src_w; |
2499 | *crop_h = src_h; | |
2500 | ||
2501 | if ( src_w != dst_w | |
2502 | || src_h != dst_h) { | |
2503 | float src_ratio = 1.0f; | |
2504 | float dst_ratio = 1.0f; | |
2505 | ||
2506 | // ex : 1024 / 768 | |
2507 | src_ratio = (float)src_w / (float)src_h; | |
2508 | ||
2509 | // ex : 352 / 288 | |
2510 | dst_ratio = (float)dst_w / (float)dst_h; | |
2511 | ||
2512 | if (dst_w * dst_h < src_w * src_h) { | |
2513 | if (dst_ratio <= src_ratio) { | |
2514 | // shrink w | |
2515 | *crop_w = src_h * dst_ratio; | |
2516 | *crop_h = src_h; | |
2517 | } else { | |
2518 | // shrink h | |
2519 | *crop_w = src_w; | |
2520 | *crop_h = src_w / dst_ratio; | |
c15a6b00 | 2521 | } |
13d8c7b4 SK |
2522 | } else { |
2523 | if (dst_ratio <= src_ratio) { | |
2524 | // shrink w | |
2525 | *crop_w = src_h * dst_ratio; | |
2526 | *crop_h = src_h; | |
2527 | } else { | |
2528 | // shrink h | |
2529 | *crop_w = src_w; | |
2530 | *crop_h = src_w / dst_ratio; | |
c15a6b00 JS |
2531 | } |
2532 | } | |
c15a6b00 JS |
2533 | } |
2534 | ||
13d8c7b4 SK |
2535 | if (zoom != 0) { |
2536 | float zoomLevel = ((float)zoom + 10.0) / 10.0; | |
2537 | *crop_w = (int)((float)*crop_w / zoomLevel); | |
2538 | *crop_h = (int)((float)*crop_h / zoomLevel); | |
2539 | } | |
2540 | ||
2541 | #define CAMERA_CROP_WIDTH_RESTRAIN_NUM (0x2) | |
2542 | unsigned int w_align = (*crop_w & (CAMERA_CROP_WIDTH_RESTRAIN_NUM - 1)); | |
2543 | if (w_align != 0) { | |
2544 | if ( (CAMERA_CROP_WIDTH_RESTRAIN_NUM >> 1) <= w_align | |
2545 | && *crop_w + (CAMERA_CROP_WIDTH_RESTRAIN_NUM - w_align) <= dst_w) { | |
2546 | *crop_w += (CAMERA_CROP_WIDTH_RESTRAIN_NUM - w_align); | |
2547 | } | |
2548 | else | |
2549 | *crop_w -= w_align; | |
2550 | } | |
2551 | ||
2552 | #define CAMERA_CROP_HEIGHT_RESTRAIN_NUM (0x2) | |
2553 | unsigned int h_align = (*crop_h & (CAMERA_CROP_HEIGHT_RESTRAIN_NUM - 1)); | |
2554 | if (h_align != 0) { | |
2555 | if ( (CAMERA_CROP_HEIGHT_RESTRAIN_NUM >> 1) <= h_align | |
2556 | && *crop_h + (CAMERA_CROP_HEIGHT_RESTRAIN_NUM - h_align) <= dst_h) { | |
2557 | *crop_h += (CAMERA_CROP_HEIGHT_RESTRAIN_NUM - h_align); | |
2558 | } | |
2559 | else | |
2560 | *crop_h -= h_align; | |
2561 | } | |
2562 | ||
2563 | *crop_x = (src_w - *crop_w) >> 1; | |
2564 | *crop_y = (src_h - *crop_h) >> 1; | |
2565 | ||
2566 | if (*crop_x & (CAMERA_CROP_WIDTH_RESTRAIN_NUM >> 1)) | |
2567 | *crop_x -= 1; | |
2568 | ||
2569 | if (*crop_y & (CAMERA_CROP_HEIGHT_RESTRAIN_NUM >> 1)) | |
2570 | *crop_y -= 1; | |
2571 | ||
2572 | return true; | |
2573 | } | |
2574 | ||
/*
 * Initialize the bayer buffer ring: every entry starts empty on the HAL
 * side, all queue heads at index 0, and the whole pool counted as
 * HAL-empty. Buffers then cycle HAL-empty -> sensor -> HAL-filled ->
 * ISP -> HAL-empty via the Mark*/Get* methods below.
 */
BayerBufManager::BayerBufManager()
{
    ALOGV("DEBUG(%s): ", __FUNCTION__);
    for (int i = 0; i < NUM_BAYER_BUFFERS ; i++) {
        entries[i].status = BAYER_ON_HAL_EMPTY;
        entries[i].reqFrameCnt = 0;
    }
    sensorEnqueueHead = 0;
    sensorDequeueHead = 0;
    ispEnqueueHead = 0;
    ispDequeueHead = 0;
    numOnSensor = 0;
    numOnIsp = 0;
    numOnHalFilled = 0;
    numOnHalEmpty = NUM_BAYER_BUFFERS;
}
2591 | ||
6d8e5b08 SK |
// Destructor: nothing to release; the entry array is a plain member.
BayerBufManager::~BayerBufManager()
{
    ALOGV("%s", __FUNCTION__);
}
2596 | ||
9dd63e1f | 2597 | int BayerBufManager::GetIndexForSensorEnqueue() |
13d8c7b4 | 2598 | { |
9dd63e1f SK |
2599 | int ret = 0; |
2600 | if (numOnHalEmpty == 0) | |
2601 | ret = -1; | |
2602 | else | |
2603 | ret = sensorEnqueueHead; | |
2604 | ALOGV("DEBUG(%s): returning (%d)", __FUNCTION__, ret); | |
2605 | return ret; | |
13d8c7b4 SK |
2606 | } |
2607 | ||
/*
 * Transition buffer `index` from HAL-empty to on-sensor. The index must
 * equal the current sensorEnqueueHead and the entry must be HAL-empty;
 * otherwise -1 is returned and nothing changes. Advances the enqueue
 * head on success. Returns 0 on success.
 */
int BayerBufManager::MarkSensorEnqueue(int index)
{
    ALOGV("DEBUG(%s)    : BayerIndex[%d] ", __FUNCTION__, index);

    // sanity check - buffers must be enqueued in ring order
    if (index != sensorEnqueueHead) {
        ALOGV("DEBUG(%s)    : Abnormal BayerIndex[%d] - expected[%d]", __FUNCTION__, index, sensorEnqueueHead);
        return -1;
    }
    if (entries[index].status != BAYER_ON_HAL_EMPTY) {
        ALOGV("DEBUG(%s)    : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
            index, entries[index].status, BAYER_ON_HAL_EMPTY);
        return -1;
    }

    entries[index].status = BAYER_ON_SENSOR;
    entries[index].reqFrameCnt = 0;
    numOnHalEmpty--;
    numOnSensor++;
    sensorEnqueueHead = GetNextIndex(index);
    ALOGV("DEBUG(%s) END: HAL-e(%d) HAL-f(%d) Sensor(%d) ISP(%d) ",
        __FUNCTION__, numOnHalEmpty, numOnHalFilled, numOnSensor, numOnIsp);
    return 0;
}
13d8c7b4 | 2632 | |
/*
 * Transition buffer `index` from on-sensor to HAL-filled (the sensor has
 * produced data into it). Returns -1 if the entry is not currently on
 * the sensor, 0 on success.
 *
 * NOTE(review): reqFrameCnt is only logged (the entry's reqFrameCnt is
 * not updated here) and timeStamp is unused — confirm whether callers
 * expect either to be recorded.
 */
int BayerBufManager::MarkSensorDequeue(int index, int reqFrameCnt, nsecs_t *timeStamp)
{
    ALOGV("DEBUG(%s)    : BayerIndex[%d] reqFrameCnt(%d)", __FUNCTION__, index, reqFrameCnt);

    if (entries[index].status != BAYER_ON_SENSOR) {
        ALOGE("DEBUG(%s)    : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
            index, entries[index].status, BAYER_ON_SENSOR);
        return -1;
    }

    entries[index].status = BAYER_ON_HAL_FILLED;
    numOnHalFilled++;
    numOnSensor--;

    return 0;
}
2649 | ||
2650 | int BayerBufManager::GetIndexForIspEnqueue(int *reqFrameCnt) | |
2651 | { | |
2652 | int ret = 0; | |
2653 | if (numOnHalFilled == 0) | |
2654 | ret = -1; | |
2655 | else { | |
2656 | *reqFrameCnt = entries[ispEnqueueHead].reqFrameCnt; | |
2657 | ret = ispEnqueueHead; | |
13d8c7b4 | 2658 | } |
9dd63e1f | 2659 | ALOGV("DEBUG(%s): returning BayerIndex[%d]", __FUNCTION__, ret); |
13d8c7b4 SK |
2660 | return ret; |
2661 | } | |
2662 | ||
9dd63e1f SK |
2663 | int BayerBufManager::GetIndexForIspDequeue(int *reqFrameCnt) |
2664 | { | |
2665 | int ret = 0; | |
2666 | if (numOnIsp == 0) | |
2667 | ret = -1; | |
2668 | else { | |
2669 | *reqFrameCnt = entries[ispDequeueHead].reqFrameCnt; | |
2670 | ret = ispDequeueHead; | |
2671 | } | |
2672 | ALOGV("DEBUG(%s): returning BayerIndex[%d]", __FUNCTION__, ret); | |
2673 | return ret; | |
2674 | } | |
13d8c7b4 | 2675 | |
/*
 * Transition buffer `index` from HAL-filled to on-ISP. The index must
 * equal the current ispEnqueueHead and the entry must be HAL-filled;
 * otherwise -1 is returned and nothing changes. Advances the ISP
 * enqueue head on success. Returns 0 on success.
 */
int BayerBufManager::MarkIspEnqueue(int index)
{
    ALOGV("DEBUG(%s)    : BayerIndex[%d] ", __FUNCTION__, index);

    // sanity check - buffers must be enqueued in ring order
    if (index != ispEnqueueHead) {
        ALOGV("DEBUG(%s)    : Abnormal BayerIndex[%d] - expected[%d]", __FUNCTION__, index, ispEnqueueHead);
        return -1;
    }
    if (entries[index].status != BAYER_ON_HAL_FILLED) {
        ALOGV("DEBUG(%s)    : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
            index, entries[index].status, BAYER_ON_HAL_FILLED);
        return -1;
    }

    entries[index].status = BAYER_ON_ISP;
    numOnHalFilled--;
    numOnIsp++;
    ispEnqueueHead = GetNextIndex(index);
    ALOGV("DEBUG(%s) END: HAL-e(%d) HAL-f(%d) Sensor(%d) ISP(%d) ",
        __FUNCTION__, numOnHalEmpty, numOnHalFilled, numOnSensor, numOnIsp);
    return 0;
}
2699 | ||
/*
 * Transition buffer `index` from on-ISP back to HAL-empty, completing
 * one trip around the ring. The index must equal the current
 * ispDequeueHead and the entry must be on the ISP; otherwise -1 is
 * returned and nothing changes. Clears the entry's reqFrameCnt and
 * advances the ISP dequeue head. Returns 0 on success.
 */
int BayerBufManager::MarkIspDequeue(int index)
{
    ALOGV("DEBUG(%s)    : BayerIndex[%d]", __FUNCTION__, index);

    // sanity check - buffers must be dequeued in ring order
    if (index != ispDequeueHead) {
        ALOGV("DEBUG(%s)    : Abnormal BayerIndex[%d] - expected[%d]", __FUNCTION__, index, ispDequeueHead);
        return -1;
    }
    if (entries[index].status != BAYER_ON_ISP) {
        ALOGV("DEBUG(%s)    : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
            index, entries[index].status, BAYER_ON_ISP);
        return -1;
    }

    entries[index].status = BAYER_ON_HAL_EMPTY;
    entries[index].reqFrameCnt = 0;
    numOnHalEmpty++;
    numOnIsp--;
    ispDequeueHead = GetNextIndex(index);
    ALOGV("DEBUG(%s) END: HAL-e(%d) HAL-f(%d) Sensor(%d) ISP(%d) ",
        __FUNCTION__, numOnHalEmpty, numOnHalFilled, numOnSensor, numOnIsp);
    return 0;
}
13d8c7b4 | 2724 | |
9dd63e1f SK |
// Number of buffers currently queued on the sensor.
int BayerBufManager::GetNumOnSensor()
{
    return numOnSensor;
}
2729 | ||
// Number of buffers filled by the sensor and waiting on the HAL side.
int BayerBufManager::GetNumOnHalFilled()
{
    return numOnHalFilled;
}
2734 | ||
// Number of buffers currently queued on the ISP.
int BayerBufManager::GetNumOnIsp()
{
    return numOnIsp;
}
2739 | ||
2740 | int BayerBufManager::GetNextIndex(int index) | |
2741 | { | |
2742 | index++; | |
2743 | if (index >= NUM_BAYER_BUFFERS) | |
2744 | index = 0; | |
2745 | ||
2746 | return index; | |
2747 | } | |
2748 | ||
/*
 * Main HAL thread body, driven by signals on `self`:
 *  - SIGNAL_THREAD_RELEASE: acknowledge and request thread termination.
 *  - SIGNAL_MAIN_REQ_Q_NOT_EMPTY: pull one request from the service
 *    queue, register it (capturing its AF mode/region under
 *    m_afModeTriggerLock), re-signal itself while more requests remain,
 *    and kick the sensor thread.
 *  - SIGNAL_MAIN_STREAM_OUTPUT_DONE: assemble the result metadata for a
 *    completed request and enqueue it back to the framework frame queue.
 */
void ExynosCameraHWInterface2::m_mainThreadFunc(SignalDrivenThread * self)
{
    camera_metadata_t *currentRequest = NULL;
    camera_metadata_t *currentFrame = NULL;
    size_t numEntries = 0;
    size_t frameSize = 0;
    camera_metadata_t * preparedFrame = NULL;
    camera_metadata_t *deregisteredRequest = NULL;
    uint32_t currentSignal = self->GetProcessingSignal();
    MainThread * selfThread = ((MainThread*)self);
    int res = 0;

    int ret;
    int afMode;
    uint32_t afRegion[4];

    ALOGV("DEBUG(%s): m_mainThreadFunc (%x)", __FUNCTION__, currentSignal);

    if (currentSignal & SIGNAL_THREAD_RELEASE) {
        ALOGV("DEBUG(%s): processing SIGNAL_THREAD_RELEASE", __FUNCTION__);

        ALOGV("DEBUG(%s): processing SIGNAL_THREAD_RELEASE DONE", __FUNCTION__);
        selfThread->SetSignal(SIGNAL_THREAD_TERMINATE);
        return;
    }

    if (currentSignal & SIGNAL_MAIN_REQ_Q_NOT_EMPTY) {
        ALOGV("DEBUG(%s): MainThread processing SIGNAL_MAIN_REQ_Q_NOT_EMPTY", __FUNCTION__);
        if (m_requestManager->IsRequestQueueFull()==false) {
            // Lock so AF mode/region from the request can't race triggerAction().
            Mutex::Autolock lock(m_afModeTriggerLock);
            m_requestQueueOps->dequeue_request(m_requestQueueOps, &currentRequest);
            if (NULL == currentRequest) {
                ALOGD("DEBUG(%s)(0x%x): No more service requests left in the queue ", __FUNCTION__, currentSignal);
                m_isRequestQueueNull = true;
                // With VDIS active, inject one bubble frame while idle.
                if (m_requestManager->IsVdisEnable())
                    m_vdisBubbleCnt = 1;
            }
            else {
                // Registration extracts the request's AF settings; apply them now.
                m_requestManager->RegisterRequest(currentRequest, &afMode, afRegion);

                SetAfMode((enum aa_afmode)afMode);
                SetAfRegion(afRegion);

                m_numOfRemainingReqInSvc = m_requestQueueOps->request_count(m_requestQueueOps);
                ALOGV("DEBUG(%s): remaining req cnt (%d)", __FUNCTION__, m_numOfRemainingReqInSvc);
                if (m_requestManager->IsRequestQueueFull()==false)
                    selfThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY); // dequeue repeatedly

                m_sensorThread->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING);
            }
        }
        else {
            // Internal queue full: remember to resume dequeuing later.
            m_isRequestQueuePending = true;
        }
    }

    if (currentSignal & SIGNAL_MAIN_STREAM_OUTPUT_DONE) {
        ALOGV("DEBUG(%s): MainThread processing SIGNAL_MAIN_STREAM_OUTPUT_DONE", __FUNCTION__);
        /*while (1)*/ {
            // Build the result metadata for the oldest completed request.
            ret = m_requestManager->PrepareFrame(&numEntries, &frameSize, &preparedFrame, GetAfStateForService());
            if (ret == false)
                CAM_LOGE("ERR(%s): PrepareFrame ret = %d", __FUNCTION__, ret);

            m_requestManager->DeregisterRequest(&deregisteredRequest);

            // Hand the original request buffer back to the service.
            ret = m_requestQueueOps->free_request(m_requestQueueOps, deregisteredRequest);
            if (ret < 0)
                CAM_LOGE("ERR(%s): free_request ret = %d", __FUNCTION__, ret);

            // Get an empty frame buffer from the service to fill with results.
            ret = m_frameQueueOps->dequeue_frame(m_frameQueueOps, numEntries, frameSize, &currentFrame);
            if (ret < 0)
                CAM_LOGE("ERR(%s): dequeue_frame ret = %d", __FUNCTION__, ret);

            if (currentFrame==NULL) {
                ALOGV("DBG(%s): frame dequeue returned NULL",__FUNCTION__ );
            }
            else {
                ALOGV("DEBUG(%s): frame dequeue done. numEntries(%d) frameSize(%d)",__FUNCTION__ , numEntries, frameSize);
            }
            // NOTE(review): currentFrame may still be NULL here; confirm
            // append_camera_metadata tolerates a NULL destination.
            res = append_camera_metadata(currentFrame, preparedFrame);
            if (res==0) {
                ALOGV("DEBUG(%s): frame metadata append success",__FUNCTION__);
                m_frameQueueOps->enqueue_frame(m_frameQueueOps, currentFrame);
            }
            else {
                ALOGE("ERR(%s): frame metadata append fail (%d)",__FUNCTION__, res);
            }
        }
        // Keep draining the service request queue if it wasn't empty.
        if (!m_isRequestQueueNull) {
            selfThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY);
        }

        // Requests still in flight: keep the sensor thread working.
        if (getInProgressCount()>0) {
            ALOGV("DEBUG(%s): STREAM_OUTPUT_DONE and signalling REQ_PROCESSING",__FUNCTION__);
            m_sensorThread->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING);
        }
    }
    ALOGV("DEBUG(%s): MainThread Exit", __FUNCTION__);
    return;
}
c15a6b00 | 2849 | |
13d8c7b4 SK |
/*
 * Debug helper: dump the interesting fields of a camera2_shot_ext to the
 * log — the magic number, the control (ctl) section (exposure, aperture,
 * ISO, AWB), the per-stream output request flags, and the dynamic
 * metadata (dm) section echoed back by the ISP. Log-only; no state is
 * modified.
 */
void ExynosCameraHWInterface2::DumpInfoWithShot(struct camera2_shot_ext * shot_ext)
{
    ALOGD("#### common Section");
    ALOGD("#### magic(%x) ",
        shot_ext->shot.magicNumber);
    ALOGD("#### ctl Section");
    ALOGD("####     meta(%d) aper(%f) exp(%lld) duration(%lld) ISO(%d) AWB(%d)",
        shot_ext->shot.ctl.request.metadataMode,
        shot_ext->shot.ctl.lens.aperture,
        shot_ext->shot.ctl.sensor.exposureTime,
        shot_ext->shot.ctl.sensor.frameDuration,
        shot_ext->shot.ctl.sensor.sensitivity,
        shot_ext->shot.ctl.aa.awbMode);

    ALOGD("#### OutputStream Sensor(%d) SCP(%d) SCC(%d) streams(%x)",
        shot_ext->request_sensor, shot_ext->request_scp, shot_ext->request_scc,
        shot_ext->shot.ctl.request.outputStreams[0]);

    ALOGD("#### DM Section");
    ALOGD("####     meta(%d) aper(%f) exp(%lld) duration(%lld) ISO(%d) timestamp(%lld) AWB(%d) cnt(%d)",
        shot_ext->shot.dm.request.metadataMode,
        shot_ext->shot.dm.lens.aperture,
        shot_ext->shot.dm.sensor.exposureTime,
        shot_ext->shot.dm.sensor.frameDuration,
        shot_ext->shot.dm.sensor.sensitivity,
        shot_ext->shot.dm.sensor.timeStamp,
        shot_ext->shot.dm.aa.awbMode,
        shot_ext->shot.dm.request.frameCount );
}
2879 | ||
/*
 * m_preCaptureSetter - drive the flash pre-capture state machine by writing
 * flash-related controls into the outgoing shot.
 *
 * Dispatches on m_ctlInfo.flash.m_flashCnt (the current flash state) and
 * updates shot_ext->shot.ctl.aa.aeflashMode, the AE/AWB lock controls, the
 * SCC/SCP output requests, and the next state in m_flashCnt.
 *
 * NOTE(review): state transitions observed here are
 *   ON -> ON_WAIT -> ON_DONE -> AUTO_AE_AWB_LOCK -> AE_AWB_LOCK_WAIT
 *   and CAPTURE -> CAPTURE_WAIT -> CAPTURE_JPEG -> CAPTURE_END;
 * the *_WAIT states are advanced by the listener callbacks, not here.
 */
void ExynosCameraHWInterface2::m_preCaptureSetter(struct camera2_shot_ext * shot_ext)
{
    // Flash
    switch (m_ctlInfo.flash.m_flashCnt) {
    case IS_FLASH_STATE_ON:
        ALOGV("(%s): [Flash] Flash ON for Capture (%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
        // check AF locked
        if (m_ctlInfo.flash.m_precaptureTriggerId > 0) {
            // Pre-capture trigger pending: wait m_flashTimeOut frames before
            // turning the flash on, then transition to ON_WAIT.
            if (m_ctlInfo.flash.m_flashTimeOut == 0) {
                if (m_ctlInfo.flash.i_flashMode == AA_AEMODE_ON_ALWAYS_FLASH) {
                    shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_ON_ALWAYS;
                    m_ctlInfo.flash.m_flashTimeOut = 5;
                } else
                    shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_ON;
                m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON_WAIT;
            } else {
                m_ctlInfo.flash.m_flashTimeOut--;
            }
        } else {
            // No pre-capture trigger: turn the flash on immediately.
            if (m_ctlInfo.flash.i_flashMode == AA_AEMODE_ON_ALWAYS_FLASH) {
                shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_ON_ALWAYS;
                m_ctlInfo.flash.m_flashTimeOut = 5;
            } else
                shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_ON;
            m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON_WAIT;
        }
        break;
    case IS_FLASH_STATE_ON_WAIT:
        // Waiting for the ISP flash decision (advanced in m_preCaptureListenerISP).
        break;
    case IS_FLASH_STATE_ON_DONE:
        if (!m_ctlInfo.flash.m_afFlashDoneFlg)
            // auto transition at pre-capture trigger
            m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_AE_AWB_LOCK;
        break;
    case IS_FLASH_STATE_AUTO_AE_AWB_LOCK:
        ALOGV("(%s): [Flash] IS_FLASH_AF_AUTO_AE_AWB_LOCK (%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
        // Lock AWB (AE lock line kept disabled below) before the main capture.
        shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_AUTO;
        //shot_ext->shot.ctl.aa.aeMode = AA_AEMODE_LOCKED;
        shot_ext->shot.ctl.aa.awbMode = AA_AWBMODE_LOCKED;
        m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AE_AWB_LOCK_WAIT;
        break;
    case IS_FLASH_STATE_AE_AWB_LOCK_WAIT:
    case IS_FLASH_STATE_AUTO_WAIT:
        // Keep AE/AWB mode controls neutral (0 == no change) while waiting.
        shot_ext->shot.ctl.aa.aeMode =(enum aa_aemode)0;
        shot_ext->shot.ctl.aa.awbMode = (enum aa_awbmode)0;
        break;
    case IS_FLASH_STATE_AUTO_DONE:
        ALOGV("(%s): [Flash] IS_FLASH_AF_AUTO DONE (%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
        shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_OFF;
        break;
    case IS_FLASH_STATE_AUTO_OFF:
        ALOGV("(%s): [Flash] IS_FLASH_AF_AUTO Clear (%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
        shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_OFF;
        // Flash sequence finished; disable flash handling for further frames.
        m_ctlInfo.flash.m_flashEnableFlg = false;
        break;
    case IS_FLASH_STATE_CAPTURE:
        ALOGV("(%s): [Flash] IS_FLASH_CAPTURE (%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
        // Start the main-flash capture: suppress both outputs until the
        // flash firing is stable (checked in m_preCaptureListenerISP).
        m_ctlInfo.flash.m_flashTimeOut = FLASH_STABLE_WAIT_TIMEOUT;
        shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_CAPTURE;
        shot_ext->request_scc = 0;
        shot_ext->request_scp = 0;
        m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE_WAIT; // auto transition
        break;
    case IS_FLASH_STATE_CAPTURE_WAIT:
        // Still waiting for stable flash output; keep both outputs suppressed.
        shot_ext->request_scc = 0;
        shot_ext->request_scp = 0;
        break;
    case IS_FLASH_STATE_CAPTURE_JPEG:
        ALOGV("(%s): [Flash] Flash Capture (%d)!!!!!", __FUNCTION__, (FLASH_STABLE_WAIT_TIMEOUT -m_ctlInfo.flash.m_flashTimeOut));
        // Flash is stable: request both SCC (still) and SCP (preview) output.
        shot_ext->request_scc = 1;
        shot_ext->request_scp = 1;
        m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE_END; // auto transition
        break;
    case IS_FLASH_STATE_CAPTURE_END:
        ALOGV("(%s): [Flash] Flash Capture END (%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
        // Capture complete: turn flash off and reset all sequence state.
        shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_OFF;
        shot_ext->request_scc = 0;
        shot_ext->request_scp = 0;
        m_ctlInfo.flash.m_flashEnableFlg = false;
        m_ctlInfo.flash.m_flashCnt = 0;
        m_ctlInfo.flash.m_afFlashDoneFlg= false;
        break;
    case IS_FLASH_STATE_NONE:
        break;
    default:
        ALOGE("(%s): [Flash] flash state error!! (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt);
    }
}
2968 | ||
e117f756 | 2969 | void ExynosCameraHWInterface2::m_preCaptureListenerSensor(struct camera2_shot_ext * shot_ext) |
6f19b6cf | 2970 | { |
e117f756 YJ |
2971 | // Flash |
2972 | switch (m_ctlInfo.flash.m_flashCnt) { | |
2973 | case IS_FLASH_STATE_AUTO_WAIT: | |
2974 | if (m_ctlInfo.flash.m_flashDecisionResult) { | |
2975 | if (shot_ext->shot.dm.flash.flashMode == CAM2_FLASH_MODE_OFF) { | |
2976 | m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_DONE; | |
73f5ad60 | 2977 | ALOGV("(%s): [Flash] Lis : AUTO -> OFF (%d)", __FUNCTION__, shot_ext->shot.dm.flash.flashMode); |
9257e29e | 2978 | } else { |
73f5ad60 | 2979 | ALOGV("(%s): [Flash] Waiting : AUTO -> OFF", __FUNCTION__); |
9257e29e | 2980 | } |
e117f756 YJ |
2981 | } else { |
2982 | //If flash isn't activated at flash auto mode, skip flash auto control | |
2983 | m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_DONE; | |
73f5ad60 | 2984 | ALOGV("(%s): [Flash] Skip : AUTO -> OFF", __FUNCTION__); |
9257e29e | 2985 | } |
e117f756 | 2986 | break; |
9257e29e | 2987 | } |
9257e29e YJ |
2988 | } |
2989 | ||
/*
 * m_preCaptureListenerISP - advance the flash state machine from ISP-side
 * dynamic metadata (flash decision, AWB lock, firing stability).
 *
 * States handled:
 *  - ON_WAIT:          latch the ISP's flash decision (decision == 2 means
 *                      "flash not needed") and move to ON_DONE; a countdown
 *                      in m_flashTimeOut guards against a missing decision.
 *  - AE_AWB_LOCK_WAIT: wait until AWB reports locked, then go to AUTO_WAIT.
 *  - CAPTURE_WAIT:     wait for firingStable before allowing the JPEG
 *                      capture, with the same style of timeout fallback.
 */
void ExynosCameraHWInterface2::m_preCaptureListenerISP(struct camera2_shot_ext * shot_ext)
{
    // Flash
    switch (m_ctlInfo.flash.m_flashCnt) {
    case IS_FLASH_STATE_ON_WAIT:
        if (shot_ext->shot.dm.flash.decision > 0) {
            // store decision result to skip capture sequenece
            ALOGV("(%s): [Flash] IS_FLASH_ON, decision - %d", __FUNCTION__, shot_ext->shot.dm.flash.decision);
            if (shot_ext->shot.dm.flash.decision == 2)
                m_ctlInfo.flash.m_flashDecisionResult = false;
            else
                m_ctlInfo.flash.m_flashDecisionResult = true;
            m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON_DONE;
        } else {
            // No decision yet: count down; on timeout assume flash not needed.
            if (m_ctlInfo.flash.m_flashTimeOut == 0) {
                ALOGV("(%s): [Flash] Timeout IS_FLASH_ON, decision is false setting", __FUNCTION__);
                m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON_DONE;
                m_ctlInfo.flash.m_flashDecisionResult = false;
            } else {
                m_ctlInfo.flash.m_flashTimeOut--;
            }
        }
        break;
    case IS_FLASH_STATE_AE_AWB_LOCK_WAIT:
        if (shot_ext->shot.dm.aa.awbMode == AA_AWBMODE_LOCKED) {
            ALOGV("(%s): [Flash] FLASH_AUTO_AE_AWB_LOCK_WAIT - %d", __FUNCTION__, shot_ext->shot.dm.aa.awbMode);
            m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_WAIT;
        } else {
            ALOGV("(%s): [Flash] Waiting : AA_AWBMODE_LOCKED", __FUNCTION__);
        }
        break;
    case IS_FLASH_STATE_CAPTURE_WAIT:
        if (m_ctlInfo.flash.m_flashDecisionResult) {
            if (shot_ext->shot.dm.flash.firingStable) {
                m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE_JPEG;
            } else {
                // Flash not yet stable: count down; on timeout capture anyway.
                if (m_ctlInfo.flash.m_flashTimeOut == 0) {
                    ALOGE("(%s): [Flash] Wait firingStable time-out!!", __FUNCTION__);
                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE_JPEG;
                } else {
                    ALOGV("(%s): [Flash] Wait firingStable - %d", __FUNCTION__, m_ctlInfo.flash.m_flashTimeOut);
                    m_ctlInfo.flash.m_flashTimeOut--;
                }
            }
        } else {
            // Flash was judged unnecessary: proceed straight to capture.
            m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE_JPEG;
        }
        break;
    }
}
3040 | ||
cdd53a9f YJ |
3041 | void ExynosCameraHWInterface2::m_preCaptureAeState(struct camera2_shot_ext * shot_ext) |
3042 | { | |
3043 | switch (m_ctlInfo.flash.i_flashMode) { | |
3044 | case AA_AEMODE_ON: | |
3045 | // At flash off mode, capture can be done as zsl capture | |
3046 | shot_ext->shot.dm.aa.aeState = AE_STATE_CONVERGED; | |
3047 | break; | |
3048 | case AA_AEMODE_ON_AUTO_FLASH: | |
3049 | // At flash auto mode, main flash have to be done if pre-flash was done. | |
3050 | if (m_ctlInfo.flash.m_flashDecisionResult && m_ctlInfo.flash.m_afFlashDoneFlg) | |
3051 | shot_ext->shot.dm.aa.aeState = AE_STATE_FLASH_REQUIRED; | |
3052 | break; | |
3053 | } | |
3054 | } | |
3055 | ||
4a9565ae YJ |
3056 | void ExynosCameraHWInterface2::m_updateAfRegion(struct camera2_shot_ext * shot_ext) |
3057 | { | |
ca714238 SK |
3058 | shot_ext->shot.ctl.aa.afRegions[0] = currentAfRegion[0]; |
3059 | shot_ext->shot.ctl.aa.afRegions[1] = currentAfRegion[1]; | |
3060 | shot_ext->shot.ctl.aa.afRegions[2] = currentAfRegion[2]; | |
3061 | shot_ext->shot.ctl.aa.afRegions[3] = currentAfRegion[3]; | |
3062 | } | |
3063 | ||
3064 | void ExynosCameraHWInterface2::SetAfRegion(uint32_t * afRegion) | |
3065 | { | |
3066 | currentAfRegion[0] = afRegion[0]; | |
3067 | currentAfRegion[1] = afRegion[1]; | |
3068 | currentAfRegion[2] = afRegion[2]; | |
3069 | currentAfRegion[3] = afRegion[3]; | |
4a9565ae YJ |
3070 | } |
3071 | ||
8a3fc5dd | 3072 | void ExynosCameraHWInterface2::m_afTrigger(struct camera2_shot_ext * shot_ext, int mode) |
4a9565ae YJ |
3073 | { |
3074 | if (m_afState == HAL_AFSTATE_SCANNING) { | |
3075 | ALOGD("(%s): restarting trigger ", __FUNCTION__); | |
8a3fc5dd | 3076 | } else if (!mode) { |
4a9565ae YJ |
3077 | if (m_afState != HAL_AFSTATE_NEEDS_COMMAND) |
3078 | ALOGD("(%s): wrong trigger state %d", __FUNCTION__, m_afState); | |
3079 | else | |
40acdcc8 | 3080 | m_afState = HAL_AFSTATE_STARTED; |
4a9565ae | 3081 | } |
40acdcc8 | 3082 | ALOGD("### AF Triggering with mode (%d) (%d)", m_afMode, m_afState); |
4a9565ae YJ |
3083 | shot_ext->shot.ctl.aa.afTrigger = 1; |
3084 | shot_ext->shot.ctl.aa.afMode = m_afMode; | |
3085 | m_IsAfTriggerRequired = false; | |
3086 | } | |
3087 | ||
13d8c7b4 SK |
3088 | void ExynosCameraHWInterface2::m_sensorThreadFunc(SignalDrivenThread * self) |
3089 | { | |
3090 | uint32_t currentSignal = self->GetProcessingSignal(); | |
3091 | SensorThread * selfThread = ((SensorThread*)self); | |
3092 | int index; | |
ad37861e | 3093 | int index_isp; |
13d8c7b4 SK |
3094 | status_t res; |
3095 | nsecs_t frameTime; | |
3096 | int bayersOnSensor = 0, bayersOnIsp = 0; | |
ad37861e SK |
3097 | int j = 0; |
3098 | bool isCapture = false; | |
13d8c7b4 SK |
3099 | ALOGV("DEBUG(%s): m_sensorThreadFunc (%x)", __FUNCTION__, currentSignal); |
3100 | ||
3101 | if (currentSignal & SIGNAL_THREAD_RELEASE) { | |
2c872806 | 3102 | CAM_LOGD("(%s): ENTER processing SIGNAL_THREAD_RELEASE", __FUNCTION__); |
13d8c7b4 | 3103 | |
9dd63e1f | 3104 | ALOGV("(%s): calling sensor streamoff", __FUNCTION__); |
13d8c7b4 | 3105 | cam_int_streamoff(&(m_camera_info.sensor)); |
9dd63e1f | 3106 | ALOGV("(%s): calling sensor streamoff done", __FUNCTION__); |
b5237e6b SK |
3107 | |
3108 | m_camera_info.sensor.buffers = 0; | |
3109 | ALOGV("DEBUG(%s): sensor calling reqbuf 0 ", __FUNCTION__); | |
3110 | cam_int_reqbufs(&(m_camera_info.sensor)); | |
3111 | ALOGV("DEBUG(%s): sensor calling reqbuf 0 done", __FUNCTION__); | |
5506cebf | 3112 | m_camera_info.sensor.status = false; |
ad37861e | 3113 | |
9dd63e1f SK |
3114 | ALOGV("(%s): calling ISP streamoff", __FUNCTION__); |
3115 | isp_int_streamoff(&(m_camera_info.isp)); | |
3116 | ALOGV("(%s): calling ISP streamoff done", __FUNCTION__); | |
ad37861e | 3117 | |
b5237e6b SK |
3118 | m_camera_info.isp.buffers = 0; |
3119 | ALOGV("DEBUG(%s): isp calling reqbuf 0 ", __FUNCTION__); | |
3120 | cam_int_reqbufs(&(m_camera_info.isp)); | |
3121 | ALOGV("DEBUG(%s): isp calling reqbuf 0 done", __FUNCTION__); | |
3122 | ||
13d8c7b4 | 3123 | exynos_v4l2_s_ctrl(m_camera_info.sensor.fd, V4L2_CID_IS_S_STREAM, IS_DISABLE_STREAM); |
13d8c7b4 | 3124 | |
52f54308 | 3125 | m_requestManager->releaseSensorQ(); |
2adfa429 | 3126 | m_requestManager->ResetEntry(); |
ad37861e | 3127 | ALOGV("(%s): EXIT processing SIGNAL_THREAD_RELEASE", __FUNCTION__); |
13d8c7b4 SK |
3128 | selfThread->SetSignal(SIGNAL_THREAD_TERMINATE); |
3129 | return; | |
3130 | } | |
3131 | ||
3132 | if (currentSignal & SIGNAL_SENSOR_START_REQ_PROCESSING) | |
3133 | { | |
3134 | ALOGV("DEBUG(%s): SensorThread processing SIGNAL_SENSOR_START_REQ_PROCESSING", __FUNCTION__); | |
9dd63e1f | 3135 | int targetStreamIndex = 0, i=0; |
ad37861e | 3136 | int matchedFrameCnt = -1, processingReqIndex; |
13d8c7b4 | 3137 | struct camera2_shot_ext *shot_ext; |
ad37861e | 3138 | struct camera2_shot_ext *shot_ext_capture; |
0f26b20f | 3139 | bool triggered = false; |
9dd63e1f | 3140 | |
ad37861e | 3141 | /* dqbuf from sensor */ |
5506cebf | 3142 | ALOGV("Sensor DQbuf start"); |
13d8c7b4 | 3143 | index = cam_int_dqbuf(&(m_camera_info.sensor)); |
52f54308 SK |
3144 | m_requestManager->pushSensorQ(index); |
3145 | ALOGV("Sensor DQbuf done(%d)", index); | |
9dd63e1f | 3146 | shot_ext = (struct camera2_shot_ext *)(m_camera_info.sensor.buffer[index].virt.extP[1]); |
ad37861e | 3147 | |
a15b4e3f SK |
3148 | if (m_nightCaptureCnt != 0) { |
3149 | matchedFrameCnt = m_nightCaptureFrameCnt; | |
e117f756 | 3150 | } else if (m_ctlInfo.flash.m_flashCnt >= IS_FLASH_STATE_CAPTURE) { |
9a710a45 | 3151 | matchedFrameCnt = m_ctlInfo.flash.m_flashFrameCount; |
caea49e6 | 3152 | ALOGV("Skip frame, request is fixed at %d", matchedFrameCnt); |
6f19b6cf | 3153 | } else { |
a15b4e3f SK |
3154 | matchedFrameCnt = m_requestManager->FindFrameCnt(shot_ext); |
3155 | } | |
ad37861e | 3156 | |
5c88d1f2 C |
3157 | if (matchedFrameCnt == -1 && m_vdisBubbleCnt > 0) { |
3158 | matchedFrameCnt = m_vdisDupFrame; | |
3159 | } | |
5c88d1f2 | 3160 | |
a07cbd98 | 3161 | if (matchedFrameCnt != -1) { |
ce77365b | 3162 | if (m_vdisBubbleCnt == 0 || m_vdisDupFrame != matchedFrameCnt) { |
5c88d1f2 C |
3163 | frameTime = systemTime(); |
3164 | m_requestManager->RegisterTimestamp(matchedFrameCnt, &frameTime); | |
3165 | m_requestManager->UpdateIspParameters(shot_ext, matchedFrameCnt, &m_ctlInfo); | |
ce77365b HC |
3166 | } else { |
3167 | ALOGV("bubble for vids: m_vdisBubbleCnt %d, matchedFrameCnt %d", m_vdisDupFrame, matchedFrameCnt); | |
5c88d1f2 | 3168 | } |
9a710a45 | 3169 | |
40acdcc8 YJ |
3170 | // face af mode setting in case of face priority scene mode |
3171 | if (m_ctlInfo.scene.prevSceneMode != shot_ext->shot.ctl.aa.sceneMode) { | |
3172 | ALOGV("(%s): Scene mode changed (%d)", __FUNCTION__, shot_ext->shot.ctl.aa.sceneMode); | |
3173 | m_ctlInfo.scene.prevSceneMode = shot_ext->shot.ctl.aa.sceneMode; | |
3174 | } | |
3175 | ||
308291de | 3176 | m_zoomRatio = (float)m_camera2->getSensorW() / (float)shot_ext->shot.ctl.scaler.cropRegion[2]; |
e4657e32 SK |
3177 | float zoomLeft, zoomTop, zoomWidth, zoomHeight; |
3178 | int crop_x = 0, crop_y = 0, crop_w = 0, crop_h = 0; | |
3179 | ||
3180 | m_getRatioSize(m_camera2->getSensorW(), m_camera2->getSensorH(), | |
5506cebf | 3181 | m_streamThreads[0]->m_parameters.width, m_streamThreads[0]->m_parameters.height, |
e4657e32 SK |
3182 | &crop_x, &crop_y, |
3183 | &crop_w, &crop_h, | |
3184 | 0); | |
3185 | ||
5506cebf | 3186 | if (m_streamThreads[0]->m_parameters.width >= m_streamThreads[0]->m_parameters.height) { |
308291de | 3187 | zoomWidth = m_camera2->getSensorW() / m_zoomRatio; |
e4657e32 | 3188 | zoomHeight = zoomWidth * |
5506cebf | 3189 | m_streamThreads[0]->m_parameters.height / m_streamThreads[0]->m_parameters.width; |
e4657e32 | 3190 | } else { |
308291de | 3191 | zoomHeight = m_camera2->getSensorH() / m_zoomRatio; |
e4657e32 | 3192 | zoomWidth = zoomHeight * |
5506cebf | 3193 | m_streamThreads[0]->m_parameters.width / m_streamThreads[0]->m_parameters.height; |
e4657e32 SK |
3194 | } |
3195 | zoomLeft = (crop_w - zoomWidth) / 2; | |
3196 | zoomTop = (crop_h - zoomHeight) / 2; | |
3197 | ||
3198 | int32_t new_cropRegion[3] = { zoomLeft, zoomTop, zoomWidth }; | |
3199 | ||
408f6161 HC |
3200 | int cropCompensation = (new_cropRegion[0] * 2 + new_cropRegion[2]) - ALIGN(crop_w, 4); |
3201 | if (cropCompensation) | |
3202 | new_cropRegion[2] -= cropCompensation; | |
308291de | 3203 | |
e4657e32 SK |
3204 | shot_ext->shot.ctl.scaler.cropRegion[0] = new_cropRegion[0]; |
3205 | shot_ext->shot.ctl.scaler.cropRegion[1] = new_cropRegion[1]; | |
3206 | shot_ext->shot.ctl.scaler.cropRegion[2] = new_cropRegion[2]; | |
8a3fc5dd | 3207 | if (m_IsAfModeUpdateRequired && (m_ctlInfo.flash.m_precaptureTriggerId == 0)) { |
ca714238 | 3208 | ALOGD("### Applying AF Mode change(Mode %d) ", m_afMode); |
0f26b20f SK |
3209 | shot_ext->shot.ctl.aa.afMode = m_afMode; |
3210 | if (m_afMode == AA_AFMODE_CONTINUOUS_VIDEO || m_afMode == AA_AFMODE_CONTINUOUS_PICTURE) { | |
ed4ad5fe | 3211 | ALOGD("### With Automatic triger for continuous modes"); |
0f26b20f SK |
3212 | m_afState = HAL_AFSTATE_STARTED; |
3213 | shot_ext->shot.ctl.aa.afTrigger = 1; | |
3214 | triggered = true; | |
40acdcc8 YJ |
3215 | if ((m_ctlInfo.scene.prevSceneMode == AA_SCENE_MODE_UNSUPPORTED) || |
3216 | (m_ctlInfo.scene.prevSceneMode == AA_SCENE_MODE_FACE_PRIORITY)) { | |
3217 | switch (m_afMode) { | |
3218 | case AA_AFMODE_CONTINUOUS_PICTURE: | |
3219 | shot_ext->shot.ctl.aa.afMode = AA_AFMODE_CONTINUOUS_PICTURE_FACE; | |
3220 | ALOGD("### Face AF Mode change (Mode %d) ", shot_ext->shot.ctl.aa.afMode); | |
3221 | break; | |
40acdcc8 YJ |
3222 | } |
3223 | } | |
cdd53a9f YJ |
3224 | // reset flash result |
3225 | if (m_ctlInfo.flash.m_afFlashDoneFlg) { | |
3226 | m_ctlInfo.flash.m_flashEnableFlg = false; | |
3227 | m_ctlInfo.flash.m_afFlashDoneFlg = false; | |
3228 | m_ctlInfo.flash.m_flashDecisionResult = false; | |
3229 | m_ctlInfo.flash.m_flashCnt = 0; | |
3230 | } | |
40acdcc8 | 3231 | m_ctlInfo.af.m_afTriggerTimeOut = 1; |
0f26b20f | 3232 | } |
40acdcc8 | 3233 | |
0f26b20f | 3234 | m_IsAfModeUpdateRequired = false; |
311d52eb | 3235 | // support inifinity focus mode |
fdbaf5d2 | 3236 | if ((m_afMode == AA_AFMODE_MANUAL) && ( shot_ext->shot.ctl.lens.focusDistance == 0)) { |
311d52eb YJ |
3237 | shot_ext->shot.ctl.aa.afMode = AA_AFMODE_INFINITY; |
3238 | shot_ext->shot.ctl.aa.afTrigger = 1; | |
3239 | triggered = true; | |
3240 | } | |
0f26b20f SK |
3241 | if (m_afMode2 != NO_CHANGE) { |
3242 | enum aa_afmode tempAfMode = m_afMode2; | |
3243 | m_afMode2 = NO_CHANGE; | |
3244 | SetAfMode(tempAfMode); | |
3245 | } | |
3246 | } | |
3247 | else { | |
3248 | shot_ext->shot.ctl.aa.afMode = NO_CHANGE; | |
3249 | } | |
3250 | if (m_IsAfTriggerRequired) { | |
e117f756 | 3251 | if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) { |
4a9565ae | 3252 | // flash case |
e117f756 | 3253 | if (m_ctlInfo.flash.m_flashCnt == IS_FLASH_STATE_ON_DONE) { |
4a9565ae YJ |
3254 | if ((m_afMode != AA_AFMODE_AUTO) && (m_afMode != AA_AFMODE_MACRO)) { |
3255 | // Flash is enabled and start AF | |
8a3fc5dd | 3256 | m_afTrigger(shot_ext, 1); |
6f19b6cf | 3257 | } else { |
ca714238 | 3258 | m_afTrigger(shot_ext, 0); |
6f19b6cf | 3259 | } |
6f19b6cf YJ |
3260 | } |
3261 | } else { | |
4a9565ae | 3262 | // non-flash case |
ca714238 | 3263 | m_afTrigger(shot_ext, 0); |
9900d0c4 | 3264 | } |
4a9565ae | 3265 | } else { |
8e2c2fdb | 3266 | shot_ext->shot.ctl.aa.afTrigger = 0; |
0f26b20f | 3267 | } |
5506cebf SK |
3268 | |
3269 | if (m_wideAspect) { | |
3270 | shot_ext->setfile = ISS_SUB_SCENARIO_VIDEO; | |
5506cebf SK |
3271 | } else { |
3272 | shot_ext->setfile = ISS_SUB_SCENARIO_STILL; | |
0f26b20f SK |
3273 | } |
3274 | if (triggered) | |
3275 | shot_ext->shot.ctl.aa.afTrigger = 1; | |
3276 | ||
3277 | // TODO : check collision with AFMode Update | |
3278 | if (m_IsAfLockRequired) { | |
3279 | shot_ext->shot.ctl.aa.afMode = AA_AFMODE_OFF; | |
3280 | m_IsAfLockRequired = false; | |
3281 | } | |
4ed2f103 | 3282 | ALOGV("### Isp Qbuf start(%d) count (%d), SCP(%d) SCC(%d) DIS(%d) shot_size(%d)", |
ad37861e SK |
3283 | index, |
3284 | shot_ext->shot.ctl.request.frameCount, | |
3285 | shot_ext->request_scp, | |
3286 | shot_ext->request_scc, | |
3287 | shot_ext->dis_bypass, sizeof(camera2_shot)); | |
4a9565ae YJ |
3288 | |
3289 | // update AF region | |
3290 | m_updateAfRegion(shot_ext); | |
3291 | ||
572470e2 | 3292 | m_lastSceneMode = shot_ext->shot.ctl.aa.sceneMode; |
6bf36b60 SK |
3293 | if (shot_ext->shot.ctl.aa.sceneMode == AA_SCENE_MODE_NIGHT |
3294 | && shot_ext->shot.ctl.aa.aeMode == AA_AEMODE_LOCKED) | |
3295 | shot_ext->shot.ctl.aa.aeMode = AA_AEMODE_ON; | |
a15b4e3f | 3296 | if (m_nightCaptureCnt == 0) { |
5506cebf | 3297 | if (shot_ext->shot.ctl.aa.captureIntent == AA_CAPTURE_INTENT_STILL_CAPTURE |
a15b4e3f SK |
3298 | && shot_ext->shot.ctl.aa.sceneMode == AA_SCENE_MODE_NIGHT) { |
3299 | shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_NIGHT_CAPTURE; | |
1c5e692d | 3300 | shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 2; |
a15b4e3f SK |
3301 | shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30; |
3302 | m_nightCaptureCnt = 4; | |
3303 | m_nightCaptureFrameCnt = matchedFrameCnt; | |
3304 | shot_ext->request_scc = 0; | |
3305 | } | |
3306 | } | |
3307 | else if (m_nightCaptureCnt == 1) { | |
3308 | shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_NIGHT_CAPTURE; | |
1c5e692d SK |
3309 | shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 30; |
3310 | shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30; | |
a15b4e3f | 3311 | m_nightCaptureCnt--; |
2f4d175d | 3312 | m_nightCaptureFrameCnt = 0; |
a15b4e3f SK |
3313 | shot_ext->request_scc = 1; |
3314 | } | |
6bf36b60 SK |
3315 | else if (m_nightCaptureCnt == 2) { |
3316 | shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_NIGHT_CAPTURE; | |
1c5e692d SK |
3317 | shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 2; |
3318 | shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30; | |
6bf36b60 SK |
3319 | m_nightCaptureCnt--; |
3320 | shot_ext->request_scc = 0; | |
3321 | } | |
1c5e692d | 3322 | else if (m_nightCaptureCnt == 3) { |
a15b4e3f | 3323 | shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_NIGHT_CAPTURE; |
1c5e692d SK |
3324 | shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 2; |
3325 | shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30; | |
3326 | m_nightCaptureCnt--; | |
3327 | shot_ext->request_scc = 0; | |
3328 | } | |
3329 | else if (m_nightCaptureCnt == 4) { | |
3330 | shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_NIGHT_CAPTURE; | |
3331 | shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 2; | |
3332 | shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30; | |
a15b4e3f SK |
3333 | m_nightCaptureCnt--; |
3334 | shot_ext->request_scc = 0; | |
3335 | } | |
6f19b6cf | 3336 | |
3c17a3f7 SK |
3337 | switch (shot_ext->shot.ctl.aa.aeTargetFpsRange[1]) { |
3338 | case 15: | |
3339 | shot_ext->shot.ctl.sensor.frameDuration = (66666 * 1000); | |
3340 | break; | |
3341 | ||
3342 | case 24: | |
3343 | shot_ext->shot.ctl.sensor.frameDuration = (41666 * 1000); | |
3344 | break; | |
3345 | ||
3346 | case 25: | |
3347 | shot_ext->shot.ctl.sensor.frameDuration = (40000 * 1000); | |
3348 | break; | |
3349 | ||
3350 | case 30: | |
3351 | default: | |
3352 | shot_ext->shot.ctl.sensor.frameDuration = (33333 * 1000); | |
3353 | break; | |
3354 | } | |
3355 | shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30; | |
3356 | ||
6f19b6cf YJ |
3357 | // Flash mode |
3358 | // Keep and Skip request_scc = 1 at flash enable mode to operate flash sequence | |
e117f756 YJ |
3359 | if ((m_ctlInfo.flash.i_flashMode >= AA_AEMODE_ON_AUTO_FLASH) |
3360 | && (shot_ext->shot.ctl.aa.captureIntent == AA_CAPTURE_INTENT_STILL_CAPTURE) | |
6f19b6cf | 3361 | && (m_cameraId == 0)) { |
e117f756 YJ |
3362 | if (!m_ctlInfo.flash.m_flashDecisionResult) { |
3363 | m_ctlInfo.flash.m_flashEnableFlg = false; | |
3364 | m_ctlInfo.flash.m_afFlashDoneFlg = false; | |
3365 | m_ctlInfo.flash.m_flashCnt = 0; | |
cdd53a9f YJ |
3366 | } else if ((m_ctlInfo.flash.m_flashCnt == IS_FLASH_STATE_AUTO_DONE) || |
3367 | (m_ctlInfo.flash.m_flashCnt == IS_FLASH_STATE_AUTO_OFF)) { | |
40acdcc8 | 3368 | ALOGD("(%s): [Flash] Flash capture start : skip request scc 1#####", __FUNCTION__); |
6f19b6cf | 3369 | shot_ext->request_scc = 0; |
9a710a45 YJ |
3370 | m_ctlInfo.flash.m_flashFrameCount = matchedFrameCnt; |
3371 | m_ctlInfo.flash.m_flashEnableFlg = true; | |
e117f756 YJ |
3372 | m_ctlInfo.flash.m_afFlashDoneFlg = false; |
3373 | m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE; | |
73f5ad60 | 3374 | } else if (m_ctlInfo.flash.m_flashCnt < IS_FLASH_STATE_AUTO_DONE) { |
40acdcc8 YJ |
3375 | ALOGE("(%s): [Flash] Flash capture Error- wrong state !!!!!! (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt); |
3376 | shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_OFF; | |
3377 | m_ctlInfo.flash.m_flashEnableFlg = false; | |
40acdcc8 | 3378 | m_ctlInfo.flash.m_afFlashDoneFlg= false; |
cdd53a9f | 3379 | m_ctlInfo.flash.m_flashCnt = 0; |
caea49e6 | 3380 | } |
4a9565ae YJ |
3381 | } else if (shot_ext->shot.ctl.aa.captureIntent == AA_CAPTURE_INTENT_STILL_CAPTURE) { |
3382 | m_ctlInfo.flash.m_flashDecisionResult = false; | |
caea49e6 YJ |
3383 | } |
3384 | ||
34d2b94a SK |
3385 | if (shot_ext->shot.ctl.flash.flashMode == CAM2_FLASH_MODE_TORCH) { |
3386 | if (m_ctlInfo.flash.m_flashTorchMode == false) { | |
3387 | m_ctlInfo.flash.m_flashTorchMode = true; | |
3388 | } | |
caea49e6 | 3389 | } else { |
34d2b94a | 3390 | if (m_ctlInfo.flash.m_flashTorchMode == true) { |
caea49e6 YJ |
3391 | shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_OFF; |
3392 | shot_ext->shot.ctl.flash.firingPower = 0; | |
3393 | m_ctlInfo.flash.m_flashTorchMode = false; | |
3394 | } else { | |
3395 | shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_NOP; | |
6f19b6cf YJ |
3396 | } |
3397 | } | |
3398 | ||
5506cebf | 3399 | if (shot_ext->isReprocessing) { |
69d1e6e9 | 3400 | ALOGV("(%s): Sending signal for Reprocess request", __FUNCTION__); |
5506cebf SK |
3401 | m_currentReprocessOutStreams = shot_ext->shot.ctl.request.outputStreams[0]; |
3402 | shot_ext->request_scp = 0; | |
3403 | shot_ext->request_scc = 0; | |
3404 | m_reprocessingFrameCnt = shot_ext->shot.ctl.request.frameCount; | |
69d1e6e9 SK |
3405 | m_ctlInfo.flash.m_flashDecisionResult = false; |
3406 | memcpy(&m_jpegMetadata, (void*)(m_requestManager->GetInternalShotExtByFrameCnt(m_reprocessingFrameCnt)), | |
3407 | sizeof(struct camera2_shot_ext)); | |
5506cebf | 3408 | m_streamThreads[1]->SetSignal(SIGNAL_STREAM_REPROCESSING_START); |
cdd53a9f YJ |
3409 | m_ctlInfo.flash.m_flashEnableFlg = false; |
3410 | } | |
3411 | ||
3412 | if (m_ctlInfo.flash.m_flashEnableFlg) { | |
3413 | m_preCaptureListenerSensor(shot_ext); | |
3414 | m_preCaptureSetter(shot_ext); | |
5506cebf | 3415 | } |
fdbaf5d2 | 3416 | |
8e2c2fdb SK |
3417 | ALOGV("(%s): queued aa(%d) aemode(%d) awb(%d) afmode(%d) trigger(%d)", __FUNCTION__, |
3418 | (int)(shot_ext->shot.ctl.aa.mode), (int)(shot_ext->shot.ctl.aa.aeMode), | |
3419 | (int)(shot_ext->shot.ctl.aa.awbMode), (int)(shot_ext->shot.ctl.aa.afMode), | |
3420 | (int)(shot_ext->shot.ctl.aa.afTrigger)); | |
7d0efb59 | 3421 | |
5c88d1f2 C |
3422 | if (m_vdisBubbleCnt > 0 && m_vdisDupFrame == matchedFrameCnt) { |
3423 | shot_ext->dis_bypass = 1; | |
9c046e3a | 3424 | shot_ext->dnr_bypass = 1; |
5c88d1f2 C |
3425 | shot_ext->request_scp = 0; |
3426 | shot_ext->request_scc = 0; | |
3427 | m_vdisBubbleCnt--; | |
3428 | matchedFrameCnt = -1; | |
3429 | } else { | |
3430 | m_vdisDupFrame = matchedFrameCnt; | |
3431 | } | |
f9a06609 SK |
3432 | if (m_scpForceSuspended) |
3433 | shot_ext->request_scc = 0; | |
5c88d1f2 | 3434 | |
5506cebf | 3435 | uint32_t current_scp = shot_ext->request_scp; |
a85ec381 | 3436 | uint32_t current_scc = shot_ext->request_scc; |
7d0efb59 | 3437 | |
c0b6e17e | 3438 | if (shot_ext->shot.dm.request.frameCount == 0) { |
4aa4d739 | 3439 | CAM_LOGE("ERR(%s): dm.request.frameCount = %d", __FUNCTION__, shot_ext->shot.dm.request.frameCount); |
c0b6e17e C |
3440 | } |
3441 | ||
ad37861e | 3442 | cam_int_qbuf(&(m_camera_info.isp), index); |
13d8c7b4 | 3443 | |
ad37861e SK |
3444 | ALOGV("### isp DQBUF start"); |
3445 | index_isp = cam_int_dqbuf(&(m_camera_info.isp)); | |
ef6f83ca | 3446 | |
ad37861e | 3447 | shot_ext = (struct camera2_shot_ext *)(m_camera_info.isp.buffer[index_isp].virt.extP[1]); |
13d8c7b4 | 3448 | |
e117f756 YJ |
3449 | if (m_ctlInfo.flash.m_flashEnableFlg) |
3450 | m_preCaptureListenerISP(shot_ext); | |
9a710a45 | 3451 | |
7ef20f42 | 3452 | ALOGV("### Isp DQbuf done(%d) count (%d), SCP(%d) SCC(%d) dis_bypass(%d) dnr_bypass(%d) shot_size(%d)", |
ad37861e SK |
3453 | index, |
3454 | shot_ext->shot.ctl.request.frameCount, | |
3455 | shot_ext->request_scp, | |
3456 | shot_ext->request_scc, | |
7ef20f42 HC |
3457 | shot_ext->dis_bypass, |
3458 | shot_ext->dnr_bypass, sizeof(camera2_shot)); | |
fdbaf5d2 | 3459 | |
ef6f83ca | 3460 | ALOGV("(%s): DM aa(%d) aemode(%d) awb(%d) afmode(%d)", __FUNCTION__, |
8e2c2fdb SK |
3461 | (int)(shot_ext->shot.dm.aa.mode), (int)(shot_ext->shot.dm.aa.aeMode), |
3462 | (int)(shot_ext->shot.dm.aa.awbMode), | |
3463 | (int)(shot_ext->shot.dm.aa.afMode)); | |
13d8c7b4 | 3464 | |
2f4d175d | 3465 | #ifndef ENABLE_FRAME_SYNC |
5506cebf | 3466 | m_currentOutputStreams = shot_ext->shot.ctl.request.outputStreams[0]; |
2f4d175d | 3467 | #endif |
2adfa429 | 3468 | |
fd2d78a2 SK |
3469 | if (!shot_ext->fd_bypass) { |
3470 | /* FD orientation axis transformation */ | |
3471 | for (int i=0; i < CAMERA2_MAX_FACES; i++) { | |
3472 | if (shot_ext->shot.dm.stats.faceRectangles[i][0] > 0) | |
3473 | shot_ext->shot.dm.stats.faceRectangles[i][0] = (m_camera2->m_curCameraInfo->sensorW | |
ef6f83ca | 3474 | * shot_ext->shot.dm.stats.faceRectangles[i][0]) |
5506cebf | 3475 | / m_streamThreads[0].get()->m_parameters.width; |
fd2d78a2 SK |
3476 | if (shot_ext->shot.dm.stats.faceRectangles[i][1] > 0) |
3477 | shot_ext->shot.dm.stats.faceRectangles[i][1] = (m_camera2->m_curCameraInfo->sensorH | |
ef6f83ca | 3478 | * shot_ext->shot.dm.stats.faceRectangles[i][1]) |
5506cebf | 3479 | / m_streamThreads[0].get()->m_parameters.height; |
fd2d78a2 SK |
3480 | if (shot_ext->shot.dm.stats.faceRectangles[i][2] > 0) |
3481 | shot_ext->shot.dm.stats.faceRectangles[i][2] = (m_camera2->m_curCameraInfo->sensorW | |
ef6f83ca | 3482 | * shot_ext->shot.dm.stats.faceRectangles[i][2]) |
5506cebf | 3483 | / m_streamThreads[0].get()->m_parameters.width; |
fd2d78a2 SK |
3484 | if (shot_ext->shot.dm.stats.faceRectangles[i][3] > 0) |
3485 | shot_ext->shot.dm.stats.faceRectangles[i][3] = (m_camera2->m_curCameraInfo->sensorH | |
ef6f83ca | 3486 | * shot_ext->shot.dm.stats.faceRectangles[i][3]) |
5506cebf | 3487 | / m_streamThreads[0].get()->m_parameters.height; |
fd2d78a2 SK |
3488 | } |
3489 | } | |
cdd53a9f YJ |
3490 | // aeState control |
3491 | if (shot_ext->shot.ctl.aa.sceneMode != AA_SCENE_MODE_NIGHT) | |
3492 | m_preCaptureAeState(shot_ext); | |
47d3a1ea | 3493 | |
275c9744 | 3494 | // At scene mode face priority |
40acdcc8 YJ |
3495 | if (shot_ext->shot.dm.aa.afMode == AA_AFMODE_CONTINUOUS_PICTURE_FACE) |
3496 | shot_ext->shot.dm.aa.afMode = AA_AFMODE_CONTINUOUS_PICTURE; | |
275c9744 | 3497 | |
48728d49 | 3498 | if (matchedFrameCnt != -1 && m_nightCaptureCnt == 0 && (m_ctlInfo.flash.m_flashCnt < IS_FLASH_STATE_CAPTURE)) { |
a15b4e3f SK |
3499 | m_requestManager->ApplyDynamicMetadata(shot_ext); |
3500 | } | |
69d1e6e9 SK |
3501 | |
3502 | if (current_scc != shot_ext->request_scc) { | |
3503 | ALOGD("(%s): scc frame drop1 request_scc(%d to %d)", | |
3504 | __FUNCTION__, current_scc, shot_ext->request_scc); | |
3505 | m_requestManager->NotifyStreamOutput(shot_ext->shot.ctl.request.frameCount); | |
3506 | } | |
3507 | if (shot_ext->request_scc) { | |
3508 | ALOGV("send SIGNAL_STREAM_DATA_COMING (SCC)"); | |
3509 | if (shot_ext->shot.ctl.request.outputStreams[0] & STREAM_MASK_JPEG) { | |
3510 | if (m_ctlInfo.flash.m_flashCnt < IS_FLASH_STATE_CAPTURE) | |
3511 | memcpy(&m_jpegMetadata, (void*)(m_requestManager->GetInternalShotExtByFrameCnt(shot_ext->shot.ctl.request.frameCount)), | |
3512 | sizeof(struct camera2_shot_ext)); | |
3513 | else | |
3514 | memcpy(&m_jpegMetadata, (void*)shot_ext, sizeof(struct camera2_shot_ext)); | |
3515 | } | |
3516 | m_streamThreads[1]->SetSignal(SIGNAL_STREAM_DATA_COMING); | |
3517 | } | |
3518 | if (current_scp != shot_ext->request_scp) { | |
3519 | ALOGD("(%s): scp frame drop1 request_scp(%d to %d)", | |
3520 | __FUNCTION__, current_scp, shot_ext->request_scp); | |
3521 | m_requestManager->NotifyStreamOutput(shot_ext->shot.ctl.request.frameCount); | |
3522 | } | |
3523 | if (shot_ext->request_scp) { | |
3524 | ALOGV("send SIGNAL_STREAM_DATA_COMING (SCP)"); | |
3525 | m_streamThreads[0]->SetSignal(SIGNAL_STREAM_DATA_COMING); | |
3526 | } | |
3527 | ||
3528 | ALOGV("(%s): SCP_CLOSING check sensor(%d) scc(%d) scp(%d) ", __FUNCTION__, | |
3529 | shot_ext->request_sensor, shot_ext->request_scc, shot_ext->request_scp); | |
3530 | if (shot_ext->request_scc + shot_ext->request_scp + shot_ext->request_sensor == 0) { | |
3531 | ALOGV("(%s): SCP_CLOSING check OK ", __FUNCTION__); | |
3532 | m_scp_closed = true; | |
3533 | } | |
3534 | else | |
3535 | m_scp_closed = false; | |
3536 | ||
0f26b20f | 3537 | OnAfNotification(shot_ext->shot.dm.aa.afState); |
10e122bd SK |
3538 | OnPrecaptureMeteringNotificationISP(); |
3539 | } else { | |
8b5b8078 | 3540 | memcpy(&shot_ext->shot.ctl, &m_camera_info.dummy_shot.shot.ctl, sizeof(struct camera2_ctl)); |
10e122bd SK |
3541 | shot_ext->shot.ctl.request.frameCount = 0xfffffffe; |
3542 | shot_ext->request_sensor = 1; | |
3543 | shot_ext->dis_bypass = 1; | |
3544 | shot_ext->dnr_bypass = 1; | |
3545 | shot_ext->fd_bypass = 1; | |
3546 | shot_ext->drc_bypass = 1; | |
3547 | shot_ext->request_scc = 0; | |
3548 | shot_ext->request_scp = 0; | |
3549 | if (m_wideAspect) { | |
3550 | shot_ext->setfile = ISS_SUB_SCENARIO_VIDEO; | |
10e122bd SK |
3551 | } else { |
3552 | shot_ext->setfile = ISS_SUB_SCENARIO_STILL; | |
3553 | } | |
572470e2 SK |
3554 | shot_ext->shot.ctl.aa.sceneMode = (enum aa_scene_mode)m_lastSceneMode; |
3555 | if (shot_ext->shot.ctl.aa.sceneMode == AA_SCENE_MODE_NIGHT_CAPTURE || shot_ext->shot.ctl.aa.sceneMode == AA_SCENE_MODE_NIGHT) { | |
3556 | shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 8; | |
3557 | shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30; | |
3558 | } | |
10e122bd | 3559 | shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_OFF; |
cdd53a9f | 3560 | shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_OFF; |
10e122bd | 3561 | ALOGV("### isp QBUF start (bubble)"); |
8b5b8078 HC |
3562 | ALOGV("bubble: queued aa(%d) aemode(%d) awb(%d) afmode(%d) trigger(%d)", |
3563 | (int)(shot_ext->shot.ctl.aa.mode), (int)(shot_ext->shot.ctl.aa.aeMode), | |
3564 | (int)(shot_ext->shot.ctl.aa.awbMode), (int)(shot_ext->shot.ctl.aa.afMode), | |
3565 | (int)(shot_ext->shot.ctl.aa.afTrigger)); | |
3566 | ||
10e122bd SK |
3567 | cam_int_qbuf(&(m_camera_info.isp), index); |
3568 | ALOGV("### isp DQBUF start (bubble)"); | |
3569 | index_isp = cam_int_dqbuf(&(m_camera_info.isp)); | |
3570 | shot_ext = (struct camera2_shot_ext *)(m_camera_info.isp.buffer[index_isp].virt.extP[1]); | |
8b5b8078 HC |
3571 | ALOGV("bubble: DM aa(%d) aemode(%d) awb(%d) afmode(%d)", |
3572 | (int)(shot_ext->shot.dm.aa.mode), (int)(shot_ext->shot.dm.aa.aeMode), | |
3573 | (int)(shot_ext->shot.dm.aa.awbMode), | |
3574 | (int)(shot_ext->shot.dm.aa.afMode)); | |
3575 | ||
10e122bd | 3576 | OnAfNotification(shot_ext->shot.dm.aa.afState); |
ad37861e | 3577 | } |
13d8c7b4 | 3578 | |
52f54308 SK |
3579 | index = m_requestManager->popSensorQ(); |
3580 | if(index < 0){ | |
3581 | ALOGE("sensorQ is empty"); | |
3582 | return; | |
3583 | } | |
3584 | ||
ca714238 | 3585 | processingReqIndex = m_requestManager->MarkProcessingRequest(&(m_camera_info.sensor.buffer[index])); |
ad37861e SK |
3586 | shot_ext = (struct camera2_shot_ext *)(m_camera_info.sensor.buffer[index].virt.extP[1]); |
3587 | if (m_scp_closing || m_scp_closed) { | |
3588 | ALOGD("(%s): SCP_CLOSING(%d) SCP_CLOSED(%d)", __FUNCTION__, m_scp_closing, m_scp_closed); | |
3589 | shot_ext->request_scc = 0; | |
3590 | shot_ext->request_scp = 0; | |
3591 | shot_ext->request_sensor = 0; | |
3592 | } | |
ad37861e | 3593 | cam_int_qbuf(&(m_camera_info.sensor), index); |
52f54308 | 3594 | ALOGV("Sensor Qbuf done(%d)", index); |
c15a6b00 | 3595 | |
ef6f83ca SK |
3596 | if (!m_scp_closing |
3597 | && ((matchedFrameCnt == -1) || (processingReqIndex == -1))){ | |
da7ca692 | 3598 | ALOGV("make bubble shot: matchedFramcnt(%d) processingReqIndex(%d)", |
ef6f83ca | 3599 | matchedFrameCnt, processingReqIndex); |
ad37861e SK |
3600 | selfThread->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING); |
3601 | } | |
c15a6b00 | 3602 | } |
ad37861e SK |
3603 | return; |
3604 | } | |
9dd63e1f | 3605 | |
86646da4 SK |
3606 | void ExynosCameraHWInterface2::m_streamBufferInit(SignalDrivenThread *self) |
3607 | { | |
3608 | uint32_t currentSignal = self->GetProcessingSignal(); | |
3609 | StreamThread * selfThread = ((StreamThread*)self); | |
3610 | stream_parameters_t *selfStreamParms = &(selfThread->m_parameters); | |
5506cebf SK |
3611 | node_info_t *currentNode = selfStreamParms->node; |
3612 | substream_parameters_t *subParms; | |
86646da4 SK |
3613 | buffer_handle_t * buf = NULL; |
3614 | status_t res; | |
3615 | void *virtAddr[3]; | |
3616 | int i, j; | |
3617 | int index; | |
3618 | nsecs_t timestamp; | |
3619 | ||
3620 | if (!(selfThread->m_isBufferInit)) | |
3621 | { | |
3622 | for ( i=0 ; i < selfStreamParms->numSvcBuffers; i++) { | |
3623 | res = selfStreamParms->streamOps->dequeue_buffer(selfStreamParms->streamOps, &buf); | |
3624 | if (res != NO_ERROR || buf == NULL) { | |
3625 | ALOGE("ERR(%s): Init: unable to dequeue buffer : %d",__FUNCTION__ , res); | |
3626 | return; | |
3627 | } | |
3628 | ALOGV("DEBUG(%s): got buf(%x) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf), | |
3629 | ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts); | |
3630 | ||
5506cebf | 3631 | index = selfThread->findBufferIndex(buf); |
86646da4 SK |
3632 | if (index == -1) { |
3633 | ALOGE("ERR(%s): could not find buffer index", __FUNCTION__); | |
3634 | } | |
3635 | else { | |
3636 | ALOGV("DEBUG(%s): found buffer index[%d] - status(%d)", | |
3637 | __FUNCTION__, index, selfStreamParms->svcBufStatus[index]); | |
3638 | if (selfStreamParms->svcBufStatus[index]== REQUIRES_DQ_FROM_SVC) | |
3639 | selfStreamParms->svcBufStatus[index] = ON_DRIVER; | |
3640 | else if (selfStreamParms->svcBufStatus[index]== ON_SERVICE) | |
3641 | selfStreamParms->svcBufStatus[index] = ON_HAL; | |
3642 | else { | |
3643 | ALOGV("DBG(%s): buffer status abnormal (%d) " | |
3644 | , __FUNCTION__, selfStreamParms->svcBufStatus[index]); | |
3645 | } | |
3646 | selfStreamParms->numSvcBufsInHal++; | |
86646da4 | 3647 | } |
5506cebf | 3648 | selfStreamParms->bufIndex = 0; |
86646da4 SK |
3649 | } |
3650 | selfThread->m_isBufferInit = true; | |
3651 | } | |
5506cebf SK |
3652 | for (int i = 0 ; i < NUM_MAX_SUBSTREAM ; i++) { |
3653 | if (selfThread->m_attachedSubStreams[i].streamId == -1) | |
3654 | continue; | |
86646da4 | 3655 | |
5506cebf SK |
3656 | subParms = &m_subStreams[selfThread->m_attachedSubStreams[i].streamId]; |
3657 | if (subParms->type && subParms->needBufferInit) { | |
3658 | ALOGV("(%s): [subStream] (id:%d) Buffer Initialization numsvcbuf(%d)", | |
3659 | __FUNCTION__, selfThread->m_attachedSubStreams[i].streamId, subParms->numSvcBuffers); | |
74d78ebe SK |
3660 | int checkingIndex = 0; |
3661 | bool found = false; | |
5506cebf SK |
3662 | for ( i = 0 ; i < subParms->numSvcBuffers; i++) { |
3663 | res = subParms->streamOps->dequeue_buffer(subParms->streamOps, &buf); | |
74d78ebe SK |
3664 | if (res != NO_ERROR || buf == NULL) { |
3665 | ALOGE("ERR(%s): Init: unable to dequeue buffer : %d",__FUNCTION__ , res); | |
3666 | return; | |
3667 | } | |
5506cebf SK |
3668 | subParms->numSvcBufsInHal++; |
3669 | ALOGV("DEBUG(%s): [subStream] got buf(%x) bufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf), | |
3670 | subParms->numSvcBufsInHal, ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts); | |
74d78ebe SK |
3671 | |
3672 | if (m_grallocHal->lock(m_grallocHal, *buf, | |
5506cebf SK |
3673 | subParms->usage, 0, 0, |
3674 | subParms->width, subParms->height, virtAddr) != 0) { | |
74d78ebe SK |
3675 | ALOGE("ERR(%s): could not obtain gralloc buffer", __FUNCTION__); |
3676 | } | |
3677 | else { | |
5506cebf | 3678 | ALOGV("DEBUG(%s): [subStream] locked img buf plane0(%x) plane1(%x) plane2(%x)", |
74d78ebe SK |
3679 | __FUNCTION__, (unsigned int)virtAddr[0], (unsigned int)virtAddr[1], (unsigned int)virtAddr[2]); |
3680 | } | |
3681 | found = false; | |
5506cebf SK |
3682 | for (checkingIndex = 0; checkingIndex < subParms->numSvcBuffers ; checkingIndex++) { |
3683 | if (subParms->svcBufHandle[checkingIndex] == *buf ) { | |
74d78ebe SK |
3684 | found = true; |
3685 | break; | |
3686 | } | |
3687 | } | |
5506cebf | 3688 | ALOGV("DEBUG(%s): [subStream] found(%d) - index[%d]", __FUNCTION__, found, checkingIndex); |
74d78ebe SK |
3689 | if (!found) break; |
3690 | ||
3691 | index = checkingIndex; | |
3692 | ||
3693 | if (index == -1) { | |
3694 | ALOGV("ERR(%s): could not find buffer index", __FUNCTION__); | |
3695 | } | |
3696 | else { | |
3697 | ALOGV("DEBUG(%s): found buffer index[%d] - status(%d)", | |
5506cebf SK |
3698 | __FUNCTION__, index, subParms->svcBufStatus[index]); |
3699 | if (subParms->svcBufStatus[index]== ON_SERVICE) | |
3700 | subParms->svcBufStatus[index] = ON_HAL; | |
74d78ebe SK |
3701 | else { |
3702 | ALOGV("DBG(%s): buffer status abnormal (%d) " | |
5506cebf | 3703 | , __FUNCTION__, subParms->svcBufStatus[index]); |
74d78ebe | 3704 | } |
5506cebf | 3705 | if (*buf != subParms->svcBufHandle[index]) |
74d78ebe SK |
3706 | ALOGV("DBG(%s): different buf_handle index ", __FUNCTION__); |
3707 | else | |
3708 | ALOGV("DEBUG(%s): same buf_handle index", __FUNCTION__); | |
3709 | } | |
5506cebf | 3710 | subParms->svcBufIndex = 0; |
74d78ebe | 3711 | } |
5506cebf SK |
3712 | if (subParms->type == SUBSTREAM_TYPE_JPEG) { |
3713 | m_resizeBuf.size.extS[0] = ALIGN(subParms->width, 16) * ALIGN(subParms->height, 16) * 2; | |
3714 | m_resizeBuf.size.extS[1] = 0; | |
3715 | m_resizeBuf.size.extS[2] = 0; | |
3716 | ||
3717 | if (allocCameraMemory(m_ionCameraClient, &m_resizeBuf, 1) == -1) { | |
3718 | ALOGE("ERR(%s): Failed to allocate resize buf", __FUNCTION__); | |
3719 | } | |
3720 | } | |
3721 | if (subParms->type == SUBSTREAM_TYPE_PRVCB) { | |
3722 | m_getAlignedYUVSize(HAL_PIXEL_FORMAT_2_V4L2_PIX(subParms->internalFormat), subParms->width, | |
3723 | subParms->height, &m_previewCbBuf); | |
86646da4 | 3724 | |
5506cebf SK |
3725 | if (allocCameraMemory(m_ionCameraClient, &m_previewCbBuf, subParms->internalPlanes) == -1) { |
3726 | ALOGE("ERR(%s): Failed to allocate prvcb buf", __FUNCTION__); | |
3727 | } | |
3728 | } | |
3729 | subParms->needBufferInit= false; | |
3730 | } | |
3731 | } | |
86646da4 SK |
3732 | } |
3733 | ||
c15a6b00 JS |
3734 | void ExynosCameraHWInterface2::m_streamThreadInitialize(SignalDrivenThread * self) |
3735 | { | |
13d8c7b4 SK |
3736 | StreamThread * selfThread = ((StreamThread*)self); |
3737 | ALOGV("DEBUG(%s): ", __FUNCTION__ ); | |
3738 | memset(&(selfThread->m_parameters), 0, sizeof(stream_parameters_t)); | |
3739 | selfThread->m_isBufferInit = false; | |
5506cebf SK |
3740 | for (int i = 0 ; i < NUM_MAX_SUBSTREAM ; i++) { |
3741 | selfThread->m_attachedSubStreams[i].streamId = -1; | |
3742 | selfThread->m_attachedSubStreams[i].priority = 0; | |
3743 | } | |
c15a6b00 JS |
3744 | return; |
3745 | } | |
3746 | ||
5506cebf SK |
3747 | int ExynosCameraHWInterface2::m_runSubStreamFunc(StreamThread *selfThread, ExynosBuffer *srcImageBuf, |
3748 | int stream_id, nsecs_t frameTimeStamp) | |
c15a6b00 | 3749 | { |
5506cebf SK |
3750 | substream_parameters_t *subParms = &m_subStreams[stream_id]; |
3751 | ||
3752 | switch (stream_id) { | |
c15a6b00 | 3753 | |
5506cebf SK |
3754 | case STREAM_ID_JPEG: |
3755 | return m_jpegCreator(selfThread, srcImageBuf, frameTimeStamp); | |
c15a6b00 | 3756 | |
5506cebf SK |
3757 | case STREAM_ID_RECORD: |
3758 | return m_recordCreator(selfThread, srcImageBuf, frameTimeStamp); | |
c15a6b00 | 3759 | |
5506cebf SK |
3760 | case STREAM_ID_PRVCB: |
3761 | return m_prvcbCreator(selfThread, srcImageBuf, frameTimeStamp); | |
3762 | ||
3763 | default: | |
3764 | return 0; | |
c15a6b00 | 3765 | } |
5506cebf SK |
3766 | } |
3767 | void ExynosCameraHWInterface2::m_streamFunc_direct(SignalDrivenThread *self) | |
3768 | { | |
3769 | uint32_t currentSignal = self->GetProcessingSignal(); | |
3770 | StreamThread * selfThread = ((StreamThread*)self); | |
3771 | stream_parameters_t *selfStreamParms = &(selfThread->m_parameters); | |
3772 | node_info_t *currentNode = selfStreamParms->node; | |
3773 | int i = 0; | |
3774 | nsecs_t frameTimeStamp; | |
13d8c7b4 | 3775 | |
b55ed664 | 3776 | if (currentSignal & SIGNAL_THREAD_RELEASE) { |
5506cebf | 3777 | CAM_LOGD("(%s): [%d] START SIGNAL_THREAD_RELEASE", __FUNCTION__, selfThread->m_index); |
b55ed664 SK |
3778 | |
3779 | if (selfThread->m_isBufferInit) { | |
a8be0011 SK |
3780 | if (!(currentNode->fd == m_camera_info.capture.fd && m_camera_info.capture.status == false)) { |
3781 | ALOGV("(%s): [%d] calling streamoff (fd:%d)", __FUNCTION__, | |
5506cebf | 3782 | selfThread->m_index, currentNode->fd); |
a8be0011 SK |
3783 | if (cam_int_streamoff(currentNode) < 0 ) { |
3784 | ALOGE("ERR(%s): stream off fail", __FUNCTION__); | |
3785 | } | |
3786 | ALOGV("(%s): [%d] streamoff done and calling reqbuf 0 (fd:%d)", __FUNCTION__, | |
3787 | selfThread->m_index, currentNode->fd); | |
3788 | currentNode->buffers = 0; | |
3789 | cam_int_reqbufs(currentNode); | |
3790 | ALOGV("(%s): [%d] reqbuf 0 DONE (fd:%d)", __FUNCTION__, | |
3791 | selfThread->m_index, currentNode->fd); | |
3792 | } | |
b55ed664 SK |
3793 | } |
3794 | #ifdef ENABLE_FRAME_SYNC | |
3795 | // free metabuffers | |
5506cebf SK |
3796 | for (i = 0; i < NUM_MAX_CAMERA_BUFFERS; i++) |
3797 | if (selfStreamParms->metaBuffers[i].fd.extFd[0] != 0) { | |
b55ed664 SK |
3798 | freeCameraMemory(&(selfStreamParms->metaBuffers[i]), 1); |
3799 | selfStreamParms->metaBuffers[i].fd.extFd[0] = 0; | |
3800 | selfStreamParms->metaBuffers[i].size.extS[0] = 0; | |
3801 | } | |
3802 | #endif | |
3803 | selfThread->m_isBufferInit = false; | |
b55ed664 | 3804 | selfThread->m_releasing = false; |
5506cebf SK |
3805 | selfThread->m_activated = false; |
3806 | ALOGV("(%s): [%d] END SIGNAL_THREAD_RELEASE", __FUNCTION__, selfThread->m_index); | |
3807 | return; | |
3808 | } | |
3809 | if (currentSignal & SIGNAL_STREAM_REPROCESSING_START) { | |
3810 | status_t res; | |
3811 | buffer_handle_t * buf = NULL; | |
3812 | bool found = false; | |
3813 | ALOGV("(%s): streamthread[%d] START SIGNAL_STREAM_REPROCESSING_START", | |
3814 | __FUNCTION__, selfThread->m_index); | |
3815 | res = m_reprocessOps->acquire_buffer(m_reprocessOps, &buf); | |
3816 | if (res != NO_ERROR || buf == NULL) { | |
3817 | ALOGE("ERR(%s): [reprocess] unable to acquire_buffer : %d",__FUNCTION__ , res); | |
3818 | return; | |
3819 | } | |
3820 | const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf); | |
3821 | int checkingIndex = 0; | |
3822 | for (checkingIndex = 0; checkingIndex < selfStreamParms->numSvcBuffers ; checkingIndex++) { | |
3823 | if (priv_handle->fd == selfStreamParms->svcBuffers[checkingIndex].fd.extFd[0] ) { | |
3824 | found = true; | |
3825 | break; | |
3826 | } | |
3827 | } | |
3828 | ALOGV("DEBUG(%s): dequeued buf %x => found(%d) index(%d) ", | |
3829 | __FUNCTION__, (unsigned int)buf, found, checkingIndex); | |
b55ed664 | 3830 | |
5506cebf SK |
3831 | if (!found) return; |
3832 | ||
3833 | for (int i = 0 ; i < NUM_MAX_SUBSTREAM ; i++) { | |
3834 | if (selfThread->m_attachedSubStreams[i].streamId == -1) | |
3835 | continue; | |
3836 | ||
3837 | #ifdef ENABLE_FRAME_SYNC | |
a8be0011 | 3838 | frameTimeStamp = m_requestManager->GetTimestampByFrameCnt(m_reprocessingFrameCnt); |
5506cebf SK |
3839 | m_requestManager->NotifyStreamOutput(m_reprocessingFrameCnt); |
3840 | #else | |
3841 | frameTimeStamp = m_requestManager->GetTimestamp(m_requestManager->GetFrameIndex()); | |
3842 | #endif | |
3843 | if (m_currentReprocessOutStreams & (1<<selfThread->m_attachedSubStreams[i].streamId)) | |
3844 | m_runSubStreamFunc(selfThread, &(selfStreamParms->svcBuffers[checkingIndex]), | |
3845 | selfThread->m_attachedSubStreams[i].streamId, frameTimeStamp); | |
3846 | } | |
3847 | ||
3848 | res = m_reprocessOps->release_buffer(m_reprocessOps, buf); | |
3849 | if (res != NO_ERROR) { | |
3850 | ALOGE("ERR(%s): [reprocess] unable to release_buffer : %d",__FUNCTION__ , res); | |
3851 | return; | |
3852 | } | |
3853 | ALOGV("(%s): streamthread[%d] END SIGNAL_STREAM_REPROCESSING_START", | |
3854 | __FUNCTION__,selfThread->m_index); | |
b55ed664 SK |
3855 | |
3856 | return; | |
3857 | } | |
13d8c7b4 | 3858 | if (currentSignal & SIGNAL_STREAM_DATA_COMING) { |
c15a6b00 | 3859 | buffer_handle_t * buf = NULL; |
5506cebf | 3860 | status_t res = 0; |
c15a6b00 JS |
3861 | int i, j; |
3862 | int index; | |
ad37861e | 3863 | nsecs_t timestamp; |
5506cebf | 3864 | #ifdef ENABLE_FRAME_SYNC |
feb7df4c | 3865 | camera2_stream *frame; |
2f4d175d | 3866 | uint8_t currentOutputStreams; |
a85ec381 | 3867 | bool directOutputEnabled = false; |
5506cebf | 3868 | #endif |
c0b6e17e | 3869 | int numOfUndqbuf = 0; |
c0b6e17e | 3870 | |
5506cebf | 3871 | ALOGV("(%s): streamthread[%d] START SIGNAL_STREAM_DATA_COMING", __FUNCTION__,selfThread->m_index); |
ad37861e | 3872 | |
86646da4 | 3873 | m_streamBufferInit(self); |
c15a6b00 | 3874 | |
b5237e6b | 3875 | do { |
5506cebf SK |
3876 | ALOGV("DEBUG(%s): streamthread[%d] type(%d) DQBUF START ",__FUNCTION__, |
3877 | selfThread->m_index, selfThread->streamType); | |
b5237e6b | 3878 | |
feb7df4c | 3879 | #ifdef ENABLE_FRAME_SYNC |
5506cebf SK |
3880 | selfStreamParms->bufIndex = cam_int_dqbuf(currentNode, selfStreamParms->planes + selfStreamParms->metaPlanes); |
3881 | frame = (struct camera2_stream *)(selfStreamParms->metaBuffers[selfStreamParms->bufIndex].virt.extP[0]); | |
5506cebf | 3882 | frameTimeStamp = m_requestManager->GetTimestampByFrameCnt(frame->rcount); |
2f4d175d SK |
3883 | currentOutputStreams = m_requestManager->GetOutputStreamByFrameCnt(frame->rcount); |
3884 | ALOGV("frame count streamthread[%d] : %d, outputStream(%x)", selfThread->m_index, frame->rcount, currentOutputStreams); | |
a85ec381 SK |
3885 | if (((currentOutputStreams & STREAM_MASK_PREVIEW) && selfThread->m_index == 0)|| |
3886 | ((currentOutputStreams & STREAM_MASK_ZSL) && selfThread->m_index == 1)) { | |
3887 | directOutputEnabled = true; | |
3888 | } | |
3889 | if (!directOutputEnabled) { | |
3890 | if (!m_nightCaptureFrameCnt) | |
3891 | m_requestManager->NotifyStreamOutput(frame->rcount); | |
3892 | } | |
feb7df4c | 3893 | #else |
5506cebf SK |
3894 | selfStreamParms->bufIndex = cam_int_dqbuf(currentNode); |
3895 | frameTimeStamp = m_requestManager->GetTimestamp(m_requestManager->GetFrameIndex()) | |
feb7df4c | 3896 | #endif |
5506cebf SK |
3897 | ALOGV("DEBUG(%s): streamthread[%d] DQBUF done index(%d) sigcnt(%d)",__FUNCTION__, |
3898 | selfThread->m_index, selfStreamParms->bufIndex, m_scpOutputSignalCnt); | |
804236a7 | 3899 | |
5506cebf | 3900 | if (selfStreamParms->svcBufStatus[selfStreamParms->bufIndex] != ON_DRIVER) |
86646da4 | 3901 | ALOGV("DBG(%s): DQed buffer status abnormal (%d) ", |
5506cebf SK |
3902 | __FUNCTION__, selfStreamParms->svcBufStatus[selfStreamParms->bufIndex]); |
3903 | selfStreamParms->svcBufStatus[selfStreamParms->bufIndex] = ON_HAL; | |
b5237e6b | 3904 | |
5506cebf SK |
3905 | for (int i = 0 ; i < NUM_MAX_SUBSTREAM ; i++) { |
3906 | if (selfThread->m_attachedSubStreams[i].streamId == -1) | |
3907 | continue; | |
5506cebf | 3908 | #ifdef ENABLE_FRAME_SYNC |
2f4d175d | 3909 | if (currentOutputStreams & (1<<selfThread->m_attachedSubStreams[i].streamId)) { |
5506cebf SK |
3910 | m_runSubStreamFunc(selfThread, &(selfStreamParms->svcBuffers[selfStreamParms->bufIndex]), |
3911 | selfThread->m_attachedSubStreams[i].streamId, frameTimeStamp); | |
b5237e6b | 3912 | } |
2f4d175d SK |
3913 | #else |
3914 | if (m_currentOutputStreams & (1<<selfThread->m_attachedSubStreams[i].streamId)) { | |
3915 | m_runSubStreamFunc(selfThread, &(selfStreamParms->svcBuffers[selfStreamParms->bufIndex]), | |
3916 | selfThread->m_attachedSubStreams[i].streamId, frameTimeStamp); | |
3917 | } | |
3918 | #endif | |
86646da4 | 3919 | } |
c0b6e17e | 3920 | |
5506cebf | 3921 | if (m_requestManager->GetSkipCnt() <= 0) { |
5506cebf | 3922 | #ifdef ENABLE_FRAME_SYNC |
2f4d175d | 3923 | if ((currentOutputStreams & STREAM_MASK_PREVIEW) && selfThread->m_index == 0) { |
5506cebf | 3924 | ALOGV("** Display Preview(frameCnt:%d)", frame->rcount); |
2f4d175d SK |
3925 | res = selfStreamParms->streamOps->enqueue_buffer(selfStreamParms->streamOps, |
3926 | frameTimeStamp, | |
3927 | &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex])); | |
3928 | } | |
3929 | else if ((currentOutputStreams & STREAM_MASK_ZSL) && selfThread->m_index == 1) { | |
c48f0170 | 3930 | ALOGV("** SCC output (frameCnt:%d)", frame->rcount); |
2f4d175d SK |
3931 | res = selfStreamParms->streamOps->enqueue_buffer(selfStreamParms->streamOps, |
3932 | frameTimeStamp, | |
3933 | &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex])); | |
3934 | } | |
a85ec381 SK |
3935 | else { |
3936 | res = selfStreamParms->streamOps->cancel_buffer(selfStreamParms->streamOps, | |
3937 | &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex])); | |
3938 | ALOGV("DEBUG(%s): streamthread[%d] cancel_buffer to svc done res(%d)", __FUNCTION__, selfThread->m_index, res); | |
3939 | } | |
5506cebf | 3940 | #else |
2f4d175d | 3941 | if ((m_currentOutputStreams & STREAM_MASK_PREVIEW) && selfThread->m_index == 0) { |
5506cebf | 3942 | ALOGV("** Display Preview(frameCnt:%d)", m_requestManager->GetFrameIndex()); |
5506cebf SK |
3943 | res = selfStreamParms->streamOps->enqueue_buffer(selfStreamParms->streamOps, |
3944 | frameTimeStamp, | |
3945 | &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex])); | |
74d78ebe | 3946 | } |
5506cebf | 3947 | else if ((m_currentOutputStreams & STREAM_MASK_ZSL) && selfThread->m_index == 1) { |
5506cebf | 3948 | ALOGV("** SCC output (frameCnt:%d), last(%d)", m_requestManager->GetFrameIndex()); |
5506cebf SK |
3949 | res = selfStreamParms->streamOps->enqueue_buffer(selfStreamParms->streamOps, |
3950 | frameTimeStamp, | |
3951 | &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex])); | |
74d78ebe | 3952 | } |
2f4d175d | 3953 | #endif |
5506cebf | 3954 | ALOGV("DEBUG(%s): streamthread[%d] enqueue_buffer to svc done res(%d)", __FUNCTION__, selfThread->m_index, res); |
86646da4 SK |
3955 | } |
3956 | else { | |
3957 | res = selfStreamParms->streamOps->cancel_buffer(selfStreamParms->streamOps, | |
5506cebf SK |
3958 | &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex])); |
3959 | ALOGV("DEBUG(%s): streamthread[%d] cancel_buffer to svc done res(%d)", __FUNCTION__, selfThread->m_index, res); | |
86646da4 | 3960 | } |
2f4d175d | 3961 | #ifdef ENABLE_FRAME_SYNC |
a85ec381 SK |
3962 | if (directOutputEnabled) { |
3963 | if (!m_nightCaptureFrameCnt) | |
3964 | m_requestManager->NotifyStreamOutput(frame->rcount); | |
3965 | } | |
2f4d175d | 3966 | #endif |
86646da4 | 3967 | if (res == 0) { |
5506cebf | 3968 | selfStreamParms->svcBufStatus[selfStreamParms->bufIndex] = ON_SERVICE; |
86646da4 SK |
3969 | selfStreamParms->numSvcBufsInHal--; |
3970 | } | |
3971 | else { | |
5506cebf | 3972 | selfStreamParms->svcBufStatus[selfStreamParms->bufIndex] = ON_HAL; |
b5237e6b | 3973 | } |
86646da4 | 3974 | |
c15a6b00 | 3975 | } |
ce8e830a | 3976 | while(0); |
b5237e6b | 3977 | |
a85ec381 SK |
3978 | while ((selfStreamParms->numSvcBufsInHal - (selfStreamParms->numSvcBuffers - NUM_SCP_BUFFERS)) |
3979 | < selfStreamParms->minUndequedBuffer) { | |
86646da4 SK |
3980 | res = selfStreamParms->streamOps->dequeue_buffer(selfStreamParms->streamOps, &buf); |
3981 | if (res != NO_ERROR || buf == NULL) { | |
a85ec381 | 3982 | ALOGV("DEBUG(%s): streamthread[%d] dequeue_buffer fail res(%d) numInHal(%d)",__FUNCTION__ , selfThread->m_index, res, selfStreamParms->numSvcBufsInHal); |
86646da4 SK |
3983 | break; |
3984 | } | |
3985 | selfStreamParms->numSvcBufsInHal++; | |
5506cebf | 3986 | ALOGV("DEBUG(%s): streamthread[%d] got buf(%x) numInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, |
86646da4 SK |
3987 | selfThread->m_index, (uint32_t)(*buf), selfStreamParms->numSvcBufsInHal, |
3988 | ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts); | |
3989 | const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf); | |
3990 | ||
3991 | bool found = false; | |
3992 | int checkingIndex = 0; | |
3993 | for (checkingIndex = 0; checkingIndex < selfStreamParms->numSvcBuffers ; checkingIndex++) { | |
3994 | if (priv_handle->fd == selfStreamParms->svcBuffers[checkingIndex].fd.extFd[0] ) { | |
3995 | found = true; | |
9dd63e1f SK |
3996 | break; |
3997 | } | |
86646da4 | 3998 | } |
86646da4 | 3999 | if (!found) break; |
5506cebf SK |
4000 | selfStreamParms->bufIndex = checkingIndex; |
4001 | if (selfStreamParms->bufIndex < selfStreamParms->numHwBuffers) { | |
86646da4 | 4002 | uint32_t plane_index = 0; |
5506cebf | 4003 | ExynosBuffer* currentBuf = &(selfStreamParms->svcBuffers[selfStreamParms->bufIndex]); |
86646da4 SK |
4004 | struct v4l2_buffer v4l2_buf; |
4005 | struct v4l2_plane planes[VIDEO_MAX_PLANES]; | |
13d8c7b4 | 4006 | |
86646da4 SK |
4007 | v4l2_buf.m.planes = planes; |
4008 | v4l2_buf.type = currentNode->type; | |
4009 | v4l2_buf.memory = currentNode->memory; | |
5506cebf | 4010 | v4l2_buf.index = selfStreamParms->bufIndex; |
86646da4 SK |
4011 | v4l2_buf.length = currentNode->planes; |
4012 | ||
5506cebf SK |
4013 | v4l2_buf.m.planes[0].m.fd = priv_handle->fd; |
4014 | v4l2_buf.m.planes[2].m.fd = priv_handle->fd1; | |
4015 | v4l2_buf.m.planes[1].m.fd = priv_handle->fd2; | |
4016 | for (plane_index=0 ; plane_index < v4l2_buf.length ; plane_index++) { | |
4017 | v4l2_buf.m.planes[plane_index].length = currentBuf->size.extS[plane_index]; | |
4018 | } | |
4019 | #ifdef ENABLE_FRAME_SYNC | |
4020 | /* add plane for metadata*/ | |
4021 | v4l2_buf.length += selfStreamParms->metaPlanes; | |
4022 | v4l2_buf.m.planes[v4l2_buf.length-1].m.fd = selfStreamParms->metaBuffers[selfStreamParms->bufIndex].fd.extFd[0]; | |
4023 | v4l2_buf.m.planes[v4l2_buf.length-1].length = selfStreamParms->metaBuffers[selfStreamParms->bufIndex].size.extS[0]; | |
4024 | #endif | |
4025 | if (exynos_v4l2_qbuf(currentNode->fd, &v4l2_buf) < 0) { | |
4026 | ALOGE("ERR(%s): streamthread[%d] exynos_v4l2_qbuf() fail", | |
4027 | __FUNCTION__, selfThread->m_index); | |
4028 | return; | |
4029 | } | |
4030 | selfStreamParms->svcBufStatus[selfStreamParms->bufIndex] = ON_DRIVER; | |
4031 | ALOGV("DEBUG(%s): streamthread[%d] QBUF done index(%d)", | |
4032 | __FUNCTION__, selfThread->m_index, selfStreamParms->bufIndex); | |
4033 | } | |
4034 | } | |
4035 | ||
4036 | ALOGV("(%s): streamthread[%d] END SIGNAL_STREAM_DATA_COMING", __FUNCTION__,selfThread->m_index); | |
4037 | } | |
4038 | return; | |
4039 | } | |
4040 | ||
4041 | void ExynosCameraHWInterface2::m_streamFunc_indirect(SignalDrivenThread *self) | |
4042 | { | |
4043 | uint32_t currentSignal = self->GetProcessingSignal(); | |
4044 | StreamThread * selfThread = ((StreamThread*)self); | |
4045 | stream_parameters_t *selfStreamParms = &(selfThread->m_parameters); | |
4046 | node_info_t *currentNode = selfStreamParms->node; | |
4047 | ||
4048 | ||
4049 | if (currentSignal & SIGNAL_THREAD_RELEASE) { | |
4050 | CAM_LOGV("(%s): [%d] START SIGNAL_THREAD_RELEASE", __FUNCTION__, selfThread->m_index); | |
4051 | ||
4052 | if (selfThread->m_isBufferInit) { | |
4053 | if (currentNode->fd == m_camera_info.capture.fd) { | |
4054 | if (m_camera_info.capture.status == true) { | |
4055 | ALOGV("DEBUG(%s): calling streamthread[%d] streamoff (fd:%d)", __FUNCTION__, | |
4056 | selfThread->m_index, currentNode->fd); | |
4057 | if (cam_int_streamoff(currentNode) < 0 ){ | |
4058 | ALOGE("ERR(%s): stream off fail", __FUNCTION__); | |
4059 | } else { | |
4060 | m_camera_info.capture.status = false; | |
4061 | } | |
4062 | } | |
4063 | } else { | |
4064 | ALOGV("DEBUG(%s): calling streamthread[%d] streamoff (fd:%d)", __FUNCTION__, | |
4065 | selfThread->m_index, currentNode->fd); | |
4066 | if (cam_int_streamoff(currentNode) < 0 ){ | |
4067 | ALOGE("ERR(%s): stream off fail", __FUNCTION__); | |
4068 | } | |
4069 | } | |
4070 | ALOGV("DEBUG(%s): calling streamthread[%d] streamoff done", __FUNCTION__, selfThread->m_index); | |
4071 | ALOGV("DEBUG(%s): calling streamthread[%d] reqbuf 0 (fd:%d)", __FUNCTION__, | |
4072 | selfThread->m_index, currentNode->fd); | |
4073 | currentNode->buffers = 0; | |
4074 | cam_int_reqbufs(currentNode); | |
4075 | ALOGV("DEBUG(%s): calling streamthread[%d] reqbuf 0 DONE(fd:%d)", __FUNCTION__, | |
4076 | selfThread->m_index, currentNode->fd); | |
4077 | } | |
4078 | ||
4079 | selfThread->m_isBufferInit = false; | |
4080 | selfThread->m_releasing = false; | |
4081 | selfThread->m_activated = false; | |
4082 | ALOGV("(%s): [%d] END SIGNAL_THREAD_RELEASE", __FUNCTION__, selfThread->m_index); | |
4083 | return; | |
4084 | } | |
4085 | ||
4086 | if (currentSignal & SIGNAL_STREAM_DATA_COMING) { | |
4087 | #ifdef ENABLE_FRAME_SYNC | |
4088 | camera2_stream *frame; | |
2f4d175d | 4089 | uint8_t currentOutputStreams; |
5506cebf SK |
4090 | #endif |
4091 | nsecs_t frameTimeStamp; | |
4092 | ||
4093 | ALOGV("DEBUG(%s): streamthread[%d] processing SIGNAL_STREAM_DATA_COMING", | |
4094 | __FUNCTION__,selfThread->m_index); | |
4095 | ||
4096 | m_streamBufferInit(self); | |
4097 | ||
ed4ad5fe | 4098 | ALOGV("DEBUG(%s): streamthread[%d] DQBUF START", __FUNCTION__, selfThread->m_index); |
5506cebf | 4099 | selfStreamParms->bufIndex = cam_int_dqbuf(currentNode); |
ed4ad5fe | 4100 | ALOGV("DEBUG(%s): streamthread[%d] DQBUF done index(%d)",__FUNCTION__, |
5506cebf SK |
4101 | selfThread->m_index, selfStreamParms->bufIndex); |
4102 | ||
4103 | #ifdef ENABLE_FRAME_SYNC | |
4104 | frame = (struct camera2_stream *)(currentNode->buffer[selfStreamParms->bufIndex].virt.extP[selfStreamParms->planes -1]); | |
5506cebf | 4105 | frameTimeStamp = m_requestManager->GetTimestampByFrameCnt(frame->rcount); |
2f4d175d SK |
4106 | currentOutputStreams = m_requestManager->GetOutputStreamByFrameCnt(frame->rcount); |
4107 | ALOGV("frame count(SCC) : %d outputStream(%x)", frame->rcount, currentOutputStreams); | |
5506cebf SK |
4108 | #else |
4109 | frameTimeStamp = m_requestManager->GetTimestamp(m_requestManager->GetFrameIndex()); | |
4110 | #endif | |
4111 | ||
4112 | for (int i = 0 ; i < NUM_MAX_SUBSTREAM ; i++) { | |
4113 | if (selfThread->m_attachedSubStreams[i].streamId == -1) | |
4114 | continue; | |
feb7df4c | 4115 | #ifdef ENABLE_FRAME_SYNC |
2f4d175d | 4116 | if (currentOutputStreams & (1<<selfThread->m_attachedSubStreams[i].streamId)) { |
5506cebf | 4117 | m_requestManager->NotifyStreamOutput(frame->rcount); |
5506cebf SK |
4118 | m_runSubStreamFunc(selfThread, &(currentNode->buffer[selfStreamParms->bufIndex]), |
4119 | selfThread->m_attachedSubStreams[i].streamId, frameTimeStamp); | |
be494d19 | 4120 | } |
2f4d175d SK |
4121 | #else |
4122 | if (m_currentOutputStreams & (1<<selfThread->m_attachedSubStreams[i].streamId)) { | |
4123 | m_runSubStreamFunc(selfThread, &(currentNode->buffer[selfStreamParms->bufIndex]), | |
4124 | selfThread->m_attachedSubStreams[i].streamId, frameTimeStamp); | |
4125 | } | |
4126 | #endif | |
be494d19 | 4127 | } |
5506cebf SK |
4128 | cam_int_qbuf(currentNode, selfStreamParms->bufIndex); |
4129 | ALOGV("DEBUG(%s): streamthread[%d] QBUF DONE", __FUNCTION__, selfThread->m_index); | |
86646da4 | 4130 | |
5506cebf SK |
4131 | |
4132 | ||
4133 | ALOGV("DEBUG(%s): streamthread[%d] processing SIGNAL_STREAM_DATA_COMING DONE", | |
4134 | __FUNCTION__, selfThread->m_index); | |
86646da4 | 4135 | } |
5506cebf SK |
4136 | |
4137 | ||
86646da4 SK |
4138 | return; |
4139 | } | |
4140 | ||
5506cebf | 4141 | void ExynosCameraHWInterface2::m_streamThreadFunc(SignalDrivenThread * self) |
86646da4 SK |
4142 | { |
4143 | uint32_t currentSignal = self->GetProcessingSignal(); | |
4144 | StreamThread * selfThread = ((StreamThread*)self); | |
4145 | stream_parameters_t *selfStreamParms = &(selfThread->m_parameters); | |
5506cebf SK |
4146 | node_info_t *currentNode = selfStreamParms->node; |
4147 | ||
4148 | ALOGV("DEBUG(%s): m_streamThreadFunc[%d] (%x)", __FUNCTION__, selfThread->m_index, currentSignal); | |
86646da4 | 4149 | |
5506cebf SK |
4150 | // Do something in Child thread handler |
4151 | // Should change function to class that inherited StreamThread class to support dynamic stream allocation | |
4152 | if (selfThread->streamType == STREAM_TYPE_DIRECT) { | |
4153 | m_streamFunc_direct(self); | |
4154 | } else if (selfThread->streamType == STREAM_TYPE_INDIRECT) { | |
4155 | m_streamFunc_indirect(self); | |
4156 | } | |
86646da4 | 4157 | |
5506cebf SK |
4158 | return; |
4159 | } | |
4160 | int ExynosCameraHWInterface2::m_jpegCreator(StreamThread *selfThread, ExynosBuffer *srcImageBuf, nsecs_t frameTimeStamp) | |
4161 | { | |
4162 | stream_parameters_t *selfStreamParms = &(selfThread->m_parameters); | |
4163 | substream_parameters_t *subParms = &m_subStreams[STREAM_ID_JPEG]; | |
4164 | status_t res; | |
4165 | ExynosRect jpegRect; | |
4166 | bool found = false; | |
de48e362 | 4167 | int srcW, srcH, srcCropX, srcCropY; |
5506cebf SK |
4168 | int pictureW, pictureH, pictureFramesize = 0; |
4169 | int pictureFormat; | |
4170 | int cropX, cropY, cropW, cropH = 0; | |
4171 | ExynosBuffer resizeBufInfo; | |
4172 | ExynosRect m_jpegPictureRect; | |
4173 | buffer_handle_t * buf = NULL; | |
c06b3290 SK |
4174 | camera2_jpeg_blob * jpegBlob = NULL; |
4175 | int jpegBufSize = 0; | |
86646da4 | 4176 | |
5506cebf SK |
4177 | ALOGV("DEBUG(%s): index(%d)",__FUNCTION__, subParms->svcBufIndex); |
4178 | for (int i = 0 ; subParms->numSvcBuffers ; i++) { | |
4179 | if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_HAL) { | |
4180 | found = true; | |
4181 | break; | |
86646da4 | 4182 | } |
5506cebf SK |
4183 | subParms->svcBufIndex++; |
4184 | if (subParms->svcBufIndex >= subParms->numSvcBuffers) | |
4185 | subParms->svcBufIndex = 0; | |
4186 | } | |
4187 | if (!found) { | |
4188 | ALOGE("(%s): cannot find free svc buffer", __FUNCTION__); | |
4189 | subParms->svcBufIndex++; | |
4190 | return 1; | |
86646da4 SK |
4191 | } |
4192 | ||
1264ab16 AR |
4193 | { |
4194 | Mutex::Autolock lock(m_jpegEncoderLock); | |
4195 | m_jpegEncodingCount++; | |
4196 | } | |
32cf9401 | 4197 | |
de48e362 SK |
4198 | m_getRatioSize(selfStreamParms->width, selfStreamParms->height, |
4199 | m_streamThreads[0]->m_parameters.width, m_streamThreads[0]->m_parameters.height, | |
4200 | &srcCropX, &srcCropY, | |
4201 | &srcW, &srcH, | |
4202 | 0); | |
4203 | ||
5506cebf SK |
4204 | m_jpegPictureRect.w = subParms->width; |
4205 | m_jpegPictureRect.h = subParms->height; | |
7d0efb59 | 4206 | |
5506cebf SK |
4207 | ALOGV("DEBUG(%s):w = %d, h = %d, w = %d, h = %d", |
4208 | __FUNCTION__, selfStreamParms->width, selfStreamParms->height, | |
4209 | m_jpegPictureRect.w, m_jpegPictureRect.h); | |
7d0efb59 | 4210 | |
de48e362 | 4211 | m_getRatioSize(srcW, srcH, |
5506cebf SK |
4212 | m_jpegPictureRect.w, m_jpegPictureRect.h, |
4213 | &cropX, &cropY, | |
4214 | &pictureW, &pictureH, | |
4215 | 0); | |
4216 | pictureFormat = V4L2_PIX_FMT_YUYV; | |
4217 | pictureFramesize = FRAME_SIZE(V4L2_PIX_2_HAL_PIXEL_FORMAT(pictureFormat), pictureW, pictureH); | |
4218 | ||
4219 | if (m_exynosPictureCSC) { | |
4220 | float zoom_w = 0, zoom_h = 0; | |
4221 | if (m_zoomRatio == 0) | |
4222 | m_zoomRatio = 1; | |
4223 | ||
4224 | if (m_jpegPictureRect.w >= m_jpegPictureRect.h) { | |
4225 | zoom_w = pictureW / m_zoomRatio; | |
4226 | zoom_h = zoom_w * m_jpegPictureRect.h / m_jpegPictureRect.w; | |
4227 | } else { | |
4228 | zoom_h = pictureH / m_zoomRatio; | |
4229 | zoom_w = zoom_h * m_jpegPictureRect.w / m_jpegPictureRect.h; | |
7d0efb59 | 4230 | } |
de48e362 SK |
4231 | cropX = (srcW - zoom_w) / 2; |
4232 | cropY = (srcH - zoom_h) / 2; | |
5506cebf SK |
4233 | cropW = zoom_w; |
4234 | cropH = zoom_h; | |
4235 | ||
4236 | ALOGV("DEBUG(%s):cropX = %d, cropY = %d, cropW = %d, cropH = %d", | |
4237 | __FUNCTION__, cropX, cropY, cropW, cropH); | |
4238 | ||
4239 | csc_set_src_format(m_exynosPictureCSC, | |
de48e362 | 4240 | ALIGN(srcW, 16), ALIGN(srcH, 16), |
5506cebf SK |
4241 | cropX, cropY, cropW, cropH, |
4242 | V4L2_PIX_2_HAL_PIXEL_FORMAT(pictureFormat), | |
4243 | 0); | |
7d0efb59 | 4244 | |
5506cebf SK |
4245 | csc_set_dst_format(m_exynosPictureCSC, |
4246 | m_jpegPictureRect.w, m_jpegPictureRect.h, | |
4247 | 0, 0, m_jpegPictureRect.w, m_jpegPictureRect.h, | |
4248 | V4L2_PIX_2_HAL_PIXEL_FORMAT(V4L2_PIX_FMT_NV16), | |
4249 | 0); | |
4250 | for (int i = 0 ; i < 3 ; i++) | |
4251 | ALOGV("DEBUG(%s): m_pictureBuf.fd.extFd[%d]=%d ", | |
4252 | __FUNCTION__, i, srcImageBuf->fd.extFd[i]); | |
4253 | csc_set_src_buffer(m_exynosPictureCSC, | |
4254 | (void **)&srcImageBuf->fd.fd); | |
7d0efb59 | 4255 | |
5506cebf SK |
4256 | csc_set_dst_buffer(m_exynosPictureCSC, |
4257 | (void **)&m_resizeBuf.fd.fd); | |
4258 | for (int i = 0 ; i < 3 ; i++) | |
4259 | ALOGV("DEBUG(%s): m_resizeBuf.virt.extP[%d]=%d m_resizeBuf.size.extS[%d]=%d", | |
4260 | __FUNCTION__, i, m_resizeBuf.fd.extFd[i], i, m_resizeBuf.size.extS[i]); | |
7d0efb59 | 4261 | |
5506cebf SK |
4262 | if (csc_convert(m_exynosPictureCSC) != 0) |
4263 | ALOGE("ERR(%s): csc_convert() fail", __FUNCTION__); | |
4264 | ||
4265 | } | |
4266 | else { | |
4267 | ALOGE("ERR(%s): m_exynosPictureCSC == NULL", __FUNCTION__); | |
7d0efb59 C |
4268 | } |
4269 | ||
5506cebf | 4270 | resizeBufInfo = m_resizeBuf; |
86646da4 | 4271 | |
5506cebf | 4272 | m_getAlignedYUVSize(V4L2_PIX_FMT_NV16, m_jpegPictureRect.w, m_jpegPictureRect.h, &m_resizeBuf); |
86646da4 | 4273 | |
5506cebf SK |
4274 | for (int i = 1; i < 3; i++) { |
4275 | if (m_resizeBuf.size.extS[i] != 0) | |
4276 | m_resizeBuf.fd.extFd[i] = m_resizeBuf.fd.extFd[i-1] + m_resizeBuf.size.extS[i-1]; | |
86646da4 | 4277 | |
5506cebf SK |
4278 | ALOGV("(%s): m_resizeBuf.size.extS[%d] = %d", __FUNCTION__, i, m_resizeBuf.size.extS[i]); |
4279 | } | |
2adfa429 | 4280 | |
5506cebf SK |
4281 | jpegRect.w = m_jpegPictureRect.w; |
4282 | jpegRect.h = m_jpegPictureRect.h; | |
4283 | jpegRect.colorFormat = V4L2_PIX_FMT_NV16; | |
86646da4 | 4284 | |
5506cebf SK |
4285 | for (int j = 0 ; j < 3 ; j++) |
4286 | ALOGV("DEBUG(%s): dest buf node fd.extFd[%d]=%d size=%d virt=%x ", | |
4287 | __FUNCTION__, j, subParms->svcBuffers[subParms->svcBufIndex].fd.extFd[j], | |
4288 | (unsigned int)subParms->svcBuffers[subParms->svcBufIndex].size.extS[j], | |
4289 | (unsigned int)subParms->svcBuffers[subParms->svcBufIndex].virt.extP[j]); | |
86646da4 | 4290 | |
c06b3290 SK |
4291 | jpegBufSize = subParms->svcBuffers[subParms->svcBufIndex].size.extS[0]; |
4292 | if (yuv2Jpeg(&m_resizeBuf, &subParms->svcBuffers[subParms->svcBufIndex], &jpegRect) == false) { | |
5506cebf | 4293 | ALOGE("ERR(%s):yuv2Jpeg() fail", __FUNCTION__); |
c06b3290 SK |
4294 | } else { |
4295 | m_resizeBuf = resizeBufInfo; | |
2adfa429 | 4296 | |
c06b3290 SK |
4297 | int jpegSize = subParms->svcBuffers[subParms->svcBufIndex].size.s; |
4298 | ALOGD("(%s): (%d x %d) jpegbuf size(%d) encoded size(%d)", __FUNCTION__, | |
4299 | m_jpegPictureRect.w, m_jpegPictureRect.h, jpegBufSize, jpegSize); | |
4300 | char * jpegBuffer = (char*)(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0]); | |
4301 | jpegBlob = (camera2_jpeg_blob*)(&jpegBuffer[jpegBufSize - sizeof(camera2_jpeg_blob)]); | |
2adfa429 | 4302 | |
c06b3290 SK |
4303 | if (jpegBuffer[jpegSize-1] == 0) |
4304 | jpegSize--; | |
4305 | jpegBlob->jpeg_size = jpegSize; | |
4306 | jpegBlob->jpeg_blob_id = CAMERA2_JPEG_BLOB_ID; | |
4307 | } | |
cf593314 | 4308 | subParms->svcBuffers[subParms->svcBufIndex].size.extS[0] = jpegBufSize; |
5506cebf | 4309 | res = subParms->streamOps->enqueue_buffer(subParms->streamOps, frameTimeStamp, &(subParms->svcBufHandle[subParms->svcBufIndex])); |
be494d19 | 4310 | |
5506cebf SK |
4311 | ALOGV("DEBUG(%s): streamthread[%d] enqueue_buffer index(%d) to svc done res(%d)", |
4312 | __FUNCTION__, selfThread->m_index, subParms->svcBufIndex, res); | |
4313 | if (res == 0) { | |
4314 | subParms->svcBufStatus[subParms->svcBufIndex] = ON_SERVICE; | |
4315 | subParms->numSvcBufsInHal--; | |
4316 | } | |
4317 | else { | |
4318 | subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL; | |
4319 | } | |
86646da4 | 4320 | |
5506cebf SK |
4321 | while (subParms->numSvcBufsInHal <= subParms->minUndequedBuffer) |
4322 | { | |
4323 | bool found = false; | |
4324 | int checkingIndex = 0; | |
86646da4 | 4325 | |
5506cebf SK |
4326 | ALOGV("DEBUG(%s): jpeg currentBuf#(%d)", __FUNCTION__ , subParms->numSvcBufsInHal); |
4327 | ||
4328 | res = subParms->streamOps->dequeue_buffer(subParms->streamOps, &buf); | |
4329 | if (res != NO_ERROR || buf == NULL) { | |
4330 | ALOGV("DEBUG(%s): jpeg stream(%d) dequeue_buffer fail res(%d)",__FUNCTION__ , selfThread->m_index, res); | |
4331 | break; | |
4332 | } | |
4333 | const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf); | |
4334 | subParms->numSvcBufsInHal ++; | |
4335 | ALOGV("DEBUG(%s): jpeg got buf(%x) numBufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf), | |
4336 | subParms->numSvcBufsInHal, ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts); | |
4337 | ||
4338 | ||
4339 | for (checkingIndex = 0; checkingIndex < subParms->numSvcBuffers ; checkingIndex++) { | |
4340 | if (priv_handle->fd == subParms->svcBuffers[checkingIndex].fd.extFd[0] ) { | |
4341 | found = true; | |
4342 | break; | |
86646da4 | 4343 | } |
5506cebf SK |
4344 | } |
4345 | ALOGV("DEBUG(%s): jpeg dequeueed_buffer found index(%d)", __FUNCTION__, found); | |
be494d19 | 4346 | |
5506cebf SK |
4347 | if (!found) { |
4348 | break; | |
4349 | } | |
4350 | ||
4351 | subParms->svcBufIndex = checkingIndex; | |
4352 | if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_SERVICE) { | |
4353 | subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL; | |
4354 | } | |
4355 | else { | |
4356 | ALOGV("DEBUG(%s): jpeg bufstatus abnormal [%d] status = %d", __FUNCTION__, | |
4357 | subParms->svcBufIndex, subParms->svcBufStatus[subParms->svcBufIndex]); | |
4358 | } | |
4359 | } | |
1264ab16 AR |
4360 | { |
4361 | Mutex::Autolock lock(m_jpegEncoderLock); | |
4362 | m_jpegEncodingCount--; | |
4363 | } | |
5506cebf SK |
4364 | return 0; |
4365 | } | |
86646da4 | 4366 | |
5506cebf SK |
4367 | int ExynosCameraHWInterface2::m_recordCreator(StreamThread *selfThread, ExynosBuffer *srcImageBuf, nsecs_t frameTimeStamp) |
4368 | { | |
4369 | stream_parameters_t *selfStreamParms = &(selfThread->m_parameters); | |
4370 | substream_parameters_t *subParms = &m_subStreams[STREAM_ID_RECORD]; | |
4371 | status_t res; | |
4372 | ExynosRect jpegRect; | |
4373 | bool found = false; | |
4374 | int cropX, cropY, cropW, cropH = 0; | |
4375 | buffer_handle_t * buf = NULL; | |
86646da4 | 4376 | |
5506cebf SK |
4377 | ALOGV("DEBUG(%s): index(%d)",__FUNCTION__, subParms->svcBufIndex); |
4378 | for (int i = 0 ; subParms->numSvcBuffers ; i++) { | |
4379 | if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_HAL) { | |
4380 | found = true; | |
4381 | break; | |
4382 | } | |
4383 | subParms->svcBufIndex++; | |
4384 | if (subParms->svcBufIndex >= subParms->numSvcBuffers) | |
4385 | subParms->svcBufIndex = 0; | |
4386 | } | |
4387 | if (!found) { | |
4388 | ALOGE("(%s): cannot find free svc buffer", __FUNCTION__); | |
4389 | subParms->svcBufIndex++; | |
4390 | return 1; | |
4391 | } | |
86646da4 | 4392 | |
5506cebf SK |
4393 | if (m_exynosVideoCSC) { |
4394 | int videoW = subParms->width, videoH = subParms->height; | |
4395 | int cropX, cropY, cropW, cropH = 0; | |
4396 | int previewW = selfStreamParms->width, previewH = selfStreamParms->height; | |
4397 | m_getRatioSize(previewW, previewH, | |
4398 | videoW, videoH, | |
4399 | &cropX, &cropY, | |
4400 | &cropW, &cropH, | |
4401 | 0); | |
86646da4 | 4402 | |
5506cebf SK |
4403 | ALOGV("DEBUG(%s):cropX = %d, cropY = %d, cropW = %d, cropH = %d", |
4404 | __FUNCTION__, cropX, cropY, cropW, cropH); | |
86646da4 | 4405 | |
5506cebf | 4406 | csc_set_src_format(m_exynosVideoCSC, |
4a3f1820 | 4407 | ALIGN(previewW, 32), previewH, |
5506cebf SK |
4408 | cropX, cropY, cropW, cropH, |
4409 | selfStreamParms->format, | |
4410 | 0); | |
86646da4 | 4411 | |
5506cebf SK |
4412 | csc_set_dst_format(m_exynosVideoCSC, |
4413 | videoW, videoH, | |
4414 | 0, 0, videoW, videoH, | |
4415 | subParms->format, | |
4416 | 1); | |
86646da4 | 4417 | |
5506cebf SK |
4418 | csc_set_src_buffer(m_exynosVideoCSC, |
4419 | (void **)&srcImageBuf->fd.fd); | |
86646da4 | 4420 | |
5506cebf SK |
4421 | csc_set_dst_buffer(m_exynosVideoCSC, |
4422 | (void **)(&(subParms->svcBuffers[subParms->svcBufIndex].fd.fd))); | |
4423 | ||
4424 | if (csc_convert(m_exynosVideoCSC) != 0) { | |
4425 | ALOGE("ERR(%s):csc_convert() fail", __FUNCTION__); | |
4426 | } | |
4427 | else { | |
4428 | ALOGV("(%s):csc_convert() SUCCESS", __FUNCTION__); | |
86646da4 | 4429 | } |
5506cebf SK |
4430 | } |
4431 | else { | |
4432 | ALOGE("ERR(%s):m_exynosVideoCSC == NULL", __FUNCTION__); | |
4433 | } | |
86646da4 | 4434 | |
5506cebf | 4435 | res = subParms->streamOps->enqueue_buffer(subParms->streamOps, frameTimeStamp, &(subParms->svcBufHandle[subParms->svcBufIndex])); |
86646da4 | 4436 | |
5506cebf SK |
4437 | ALOGV("DEBUG(%s): streamthread[%d] enqueue_buffer index(%d) to svc done res(%d)", |
4438 | __FUNCTION__, selfThread->m_index, subParms->svcBufIndex, res); | |
4439 | if (res == 0) { | |
4440 | subParms->svcBufStatus[subParms->svcBufIndex] = ON_SERVICE; | |
4441 | subParms->numSvcBufsInHal--; | |
4442 | } | |
4443 | else { | |
4444 | subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL; | |
4445 | } | |
86646da4 | 4446 | |
5506cebf SK |
4447 | while (subParms->numSvcBufsInHal <= subParms->minUndequedBuffer) |
4448 | { | |
4449 | bool found = false; | |
4450 | int checkingIndex = 0; | |
86646da4 | 4451 | |
5506cebf SK |
4452 | ALOGV("DEBUG(%s): record currentBuf#(%d)", __FUNCTION__ , subParms->numSvcBufsInHal); |
4453 | ||
4454 | res = subParms->streamOps->dequeue_buffer(subParms->streamOps, &buf); | |
4455 | if (res != NO_ERROR || buf == NULL) { | |
4456 | ALOGV("DEBUG(%s): record stream(%d) dequeue_buffer fail res(%d)",__FUNCTION__ , selfThread->m_index, res); | |
4457 | break; | |
4458 | } | |
4459 | const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf); | |
4460 | subParms->numSvcBufsInHal ++; | |
4461 | ALOGV("DEBUG(%s): record got buf(%x) numBufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf), | |
4462 | subParms->numSvcBufsInHal, ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts); | |
4463 | ||
4464 | for (checkingIndex = 0; checkingIndex < subParms->numSvcBuffers ; checkingIndex++) { | |
4465 | if (priv_handle->fd == subParms->svcBuffers[checkingIndex].fd.extFd[0] ) { | |
4466 | found = true; | |
4467 | break; | |
86646da4 | 4468 | } |
13d8c7b4 | 4469 | } |
5506cebf | 4470 | ALOGV("DEBUG(%s): record dequeueed_buffer found(%d) index = %d", __FUNCTION__, found, checkingIndex); |
86646da4 | 4471 | |
5506cebf SK |
4472 | if (!found) { |
4473 | break; | |
4474 | } | |
86646da4 | 4475 | |
5506cebf SK |
4476 | subParms->svcBufIndex = checkingIndex; |
4477 | if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_SERVICE) { | |
4478 | subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL; | |
4479 | } | |
4480 | else { | |
4481 | ALOGV("DEBUG(%s): record bufstatus abnormal [%d] status = %d", __FUNCTION__, | |
4482 | subParms->svcBufIndex, subParms->svcBufStatus[subParms->svcBufIndex]); | |
4483 | } | |
4484 | } | |
4485 | return 0; | |
86646da4 SK |
4486 | } |
4487 | ||
5506cebf | 4488 | int ExynosCameraHWInterface2::m_prvcbCreator(StreamThread *selfThread, ExynosBuffer *srcImageBuf, nsecs_t frameTimeStamp) |
86646da4 | 4489 | { |
5506cebf SK |
4490 | stream_parameters_t *selfStreamParms = &(selfThread->m_parameters); |
4491 | substream_parameters_t *subParms = &m_subStreams[STREAM_ID_PRVCB]; | |
4492 | status_t res; | |
4493 | bool found = false; | |
4494 | int cropX, cropY, cropW, cropH = 0; | |
4495 | buffer_handle_t * buf = NULL; | |
86646da4 | 4496 | |
5506cebf SK |
4497 | ALOGV("DEBUG(%s): index(%d)",__FUNCTION__, subParms->svcBufIndex); |
4498 | for (int i = 0 ; subParms->numSvcBuffers ; i++) { | |
4499 | if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_HAL) { | |
4500 | found = true; | |
4501 | break; | |
4502 | } | |
4503 | subParms->svcBufIndex++; | |
4504 | if (subParms->svcBufIndex >= subParms->numSvcBuffers) | |
4505 | subParms->svcBufIndex = 0; | |
4506 | } | |
4507 | if (!found) { | |
4508 | ALOGE("(%s): cannot find free svc buffer", __FUNCTION__); | |
4509 | subParms->svcBufIndex++; | |
4510 | return 1; | |
4511 | } | |
86646da4 | 4512 | |
5506cebf SK |
4513 | if (subParms->format == HAL_PIXEL_FORMAT_YCrCb_420_SP) { |
4514 | if (m_exynosVideoCSC) { | |
4515 | int previewCbW = subParms->width, previewCbH = subParms->height; | |
4516 | int cropX, cropY, cropW, cropH = 0; | |
4517 | int previewW = selfStreamParms->width, previewH = selfStreamParms->height; | |
4518 | m_getRatioSize(previewW, previewH, | |
4519 | previewCbW, previewCbH, | |
4520 | &cropX, &cropY, | |
4521 | &cropW, &cropH, | |
4522 | 0); | |
86646da4 | 4523 | |
5506cebf SK |
4524 | ALOGV("DEBUG(%s):cropX = %d, cropY = %d, cropW = %d, cropH = %d", |
4525 | __FUNCTION__, cropX, cropY, cropW, cropH); | |
4526 | csc_set_src_format(m_exynosVideoCSC, | |
4a3f1820 | 4527 | ALIGN(previewW, 32), previewH, |
5506cebf SK |
4528 | cropX, cropY, cropW, cropH, |
4529 | selfStreamParms->format, | |
4530 | 0); | |
86646da4 | 4531 | |
5506cebf SK |
4532 | csc_set_dst_format(m_exynosVideoCSC, |
4533 | previewCbW, previewCbH, | |
4534 | 0, 0, previewCbW, previewCbH, | |
4535 | subParms->internalFormat, | |
4536 | 1); | |
86646da4 | 4537 | |
5506cebf SK |
4538 | csc_set_src_buffer(m_exynosVideoCSC, |
4539 | (void **)&srcImageBuf->fd.fd); | |
4540 | ||
4541 | csc_set_dst_buffer(m_exynosVideoCSC, | |
4542 | (void **)(&(m_previewCbBuf.fd.fd))); | |
4543 | ||
4544 | if (csc_convert(m_exynosVideoCSC) != 0) { | |
4545 | ALOGE("ERR(%s):previewcb csc_convert() fail", __FUNCTION__); | |
4546 | } | |
4547 | else { | |
4548 | ALOGV("(%s):previewcb csc_convert() SUCCESS", __FUNCTION__); | |
4549 | } | |
4550 | if (previewCbW == ALIGN(previewCbW, 16)) { | |
4551 | memcpy(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0], | |
4552 | m_previewCbBuf.virt.extP[0], previewCbW * previewCbH); | |
4553 | memcpy(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0] + previewCbW * previewCbH, | |
4554 | m_previewCbBuf.virt.extP[1], previewCbW * previewCbH / 2 ); | |
4555 | } | |
4556 | else { | |
4557 | // TODO : copy line by line ? | |
4558 | } | |
4559 | } | |
4560 | else { | |
4561 | ALOGE("ERR(%s):m_exynosVideoCSC == NULL", __FUNCTION__); | |
4562 | } | |
86646da4 | 4563 | } |
5506cebf SK |
4564 | else if (subParms->format == HAL_PIXEL_FORMAT_YV12) { |
4565 | int previewCbW = subParms->width, previewCbH = subParms->height; | |
4566 | int stride = ALIGN(previewCbW, 16); | |
4a3f1820 | 4567 | int uv_stride = ALIGN(previewCbW/2, 16); |
0d220b42 | 4568 | int c_stride = ALIGN(stride / 2, 16); |
4a3f1820 SK |
4569 | |
4570 | if (previewCbW == ALIGN(previewCbW, 32)) { | |
4571 | memcpy(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0], | |
4572 | srcImageBuf->virt.extP[0], stride * previewCbH); | |
4573 | memcpy(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0] + stride * previewCbH, | |
4574 | srcImageBuf->virt.extP[1], c_stride * previewCbH / 2 ); | |
4575 | memcpy(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0] + (stride * previewCbH) + (c_stride * previewCbH / 2), | |
4576 | srcImageBuf->virt.extP[2], c_stride * previewCbH / 2 ); | |
4577 | } else { | |
4578 | char * dstAddr = (char *)(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0]); | |
4579 | char * srcAddr = (char *)(srcImageBuf->virt.extP[0]); | |
4580 | for (int i = 0 ; i < previewCbH ; i++) { | |
4581 | memcpy(dstAddr, srcAddr, previewCbW); | |
4582 | dstAddr += stride; | |
4583 | srcAddr += ALIGN(stride, 32); | |
4584 | } | |
4585 | dstAddr = (char *)(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0] + stride * previewCbH); | |
4586 | srcAddr = (char *)(srcImageBuf->virt.extP[1]); | |
4587 | for (int i = 0 ; i < previewCbH/2 ; i++) { | |
4588 | memcpy(dstAddr, srcAddr, previewCbW/2); | |
4589 | dstAddr += c_stride; | |
4590 | srcAddr += uv_stride; | |
4591 | } | |
4592 | srcAddr = (char *)(srcImageBuf->virt.extP[2]); | |
4593 | for (int i = 0 ; i < previewCbH/2 ; i++) { | |
4594 | memcpy(dstAddr, srcAddr, previewCbW/2); | |
4595 | dstAddr += c_stride; | |
4596 | srcAddr += uv_stride; | |
4597 | } | |
4598 | } | |
5506cebf SK |
4599 | } |
4600 | res = subParms->streamOps->enqueue_buffer(subParms->streamOps, frameTimeStamp, &(subParms->svcBufHandle[subParms->svcBufIndex])); | |
4601 | ||
4602 | ALOGV("DEBUG(%s): streamthread[%d] enqueue_buffer index(%d) to svc done res(%d)", | |
4603 | __FUNCTION__, selfThread->m_index, subParms->svcBufIndex, res); | |
4604 | if (res == 0) { | |
4605 | subParms->svcBufStatus[subParms->svcBufIndex] = ON_SERVICE; | |
4606 | subParms->numSvcBufsInHal--; | |
4607 | } | |
4608 | else { | |
4609 | subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL; | |
86646da4 SK |
4610 | } |
4611 | ||
5506cebf SK |
4612 | while (subParms->numSvcBufsInHal <= subParms->minUndequedBuffer) |
4613 | { | |
4614 | bool found = false; | |
4615 | int checkingIndex = 0; | |
86646da4 | 4616 | |
5506cebf | 4617 | ALOGV("DEBUG(%s): prvcb currentBuf#(%d)", __FUNCTION__ , subParms->numSvcBufsInHal); |
86646da4 | 4618 | |
5506cebf SK |
4619 | res = subParms->streamOps->dequeue_buffer(subParms->streamOps, &buf); |
4620 | if (res != NO_ERROR || buf == NULL) { | |
4621 | ALOGV("DEBUG(%s): prvcb stream(%d) dequeue_buffer fail res(%d)",__FUNCTION__ , selfThread->m_index, res); | |
4622 | break; | |
4623 | } | |
4624 | const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf); | |
4625 | subParms->numSvcBufsInHal ++; | |
4626 | ALOGV("DEBUG(%s): prvcb got buf(%x) numBufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf), | |
4627 | subParms->numSvcBufsInHal, ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts); | |
86646da4 | 4628 | |
86646da4 | 4629 | |
5506cebf SK |
4630 | for (checkingIndex = 0; checkingIndex < subParms->numSvcBuffers ; checkingIndex++) { |
4631 | if (priv_handle->fd == subParms->svcBuffers[checkingIndex].fd.extFd[0] ) { | |
4632 | found = true; | |
4633 | break; | |
4634 | } | |
4635 | } | |
4636 | ALOGV("DEBUG(%s): prvcb dequeueed_buffer found(%d) index = %d", __FUNCTION__, found, checkingIndex); | |
4637 | ||
4638 | if (!found) { | |
4639 | break; | |
4640 | } | |
4641 | ||
4642 | subParms->svcBufIndex = checkingIndex; | |
4643 | if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_SERVICE) { | |
4644 | subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL; | |
4645 | } | |
4646 | else { | |
4647 | ALOGV("DEBUG(%s): prvcb bufstatus abnormal [%d] status = %d", __FUNCTION__, | |
4648 | subParms->svcBufIndex, subParms->svcBufStatus[subParms->svcBufIndex]); | |
4649 | } | |
4650 | } | |
4651 | return 0; | |
c15a6b00 JS |
4652 | } |
4653 | ||
2d5e6ec2 SK |
4654 | bool ExynosCameraHWInterface2::m_checkThumbnailSize(int w, int h) |
4655 | { | |
4656 | int sizeOfSupportList; | |
4657 | ||
4658 | //REAR Camera | |
4659 | if(this->getCameraId() == 0) { | |
4660 | sizeOfSupportList = sizeof(SUPPORT_THUMBNAIL_REAR_SIZE) / (sizeof(int)*2); | |
4661 | ||
4662 | for(int i = 0; i < sizeOfSupportList; i++) { | |
4663 | if((SUPPORT_THUMBNAIL_REAR_SIZE[i][0] == w) &&(SUPPORT_THUMBNAIL_REAR_SIZE[i][1] == h)) | |
4664 | return true; | |
4665 | } | |
4666 | ||
4667 | } | |
4668 | else { | |
4669 | sizeOfSupportList = sizeof(SUPPORT_THUMBNAIL_FRONT_SIZE) / (sizeof(int)*2); | |
4670 | ||
4671 | for(int i = 0; i < sizeOfSupportList; i++) { | |
4672 | if((SUPPORT_THUMBNAIL_FRONT_SIZE[i][0] == w) &&(SUPPORT_THUMBNAIL_FRONT_SIZE[i][1] == h)) | |
4673 | return true; | |
4674 | } | |
4675 | } | |
4676 | ||
4677 | return false; | |
4678 | } | |
13d8c7b4 SK |
4679 | bool ExynosCameraHWInterface2::yuv2Jpeg(ExynosBuffer *yuvBuf, |
4680 | ExynosBuffer *jpegBuf, | |
4681 | ExynosRect *rect) | |
4682 | { | |
4683 | unsigned char *addr; | |
4684 | ||
4685 | ExynosJpegEncoderForCamera jpegEnc; | |
4686 | bool ret = false; | |
4687 | int res = 0; | |
4688 | ||
4689 | unsigned int *yuvSize = yuvBuf->size.extS; | |
4690 | ||
4691 | if (jpegEnc.create()) { | |
9dd63e1f | 4692 | ALOGE("ERR(%s):jpegEnc.create() fail", __FUNCTION__); |
13d8c7b4 SK |
4693 | goto jpeg_encode_done; |
4694 | } | |
4695 | ||
87423e56 | 4696 | if (jpegEnc.setQuality(m_jpegMetadata.shot.ctl.jpeg.quality)) { |
9dd63e1f | 4697 | ALOGE("ERR(%s):jpegEnc.setQuality() fail", __FUNCTION__); |
13d8c7b4 SK |
4698 | goto jpeg_encode_done; |
4699 | } | |
4700 | ||
4701 | if (jpegEnc.setSize(rect->w, rect->h)) { | |
9dd63e1f | 4702 | ALOGE("ERR(%s):jpegEnc.setSize() fail", __FUNCTION__); |
13d8c7b4 SK |
4703 | goto jpeg_encode_done; |
4704 | } | |
4705 | ALOGV("%s : width = %d , height = %d\n", __FUNCTION__, rect->w, rect->h); | |
4706 | ||
4707 | if (jpegEnc.setColorFormat(rect->colorFormat)) { | |
9dd63e1f | 4708 | ALOGE("ERR(%s):jpegEnc.setColorFormat() fail", __FUNCTION__); |
13d8c7b4 SK |
4709 | goto jpeg_encode_done; |
4710 | } | |
13d8c7b4 SK |
4711 | |
4712 | if (jpegEnc.setJpegFormat(V4L2_PIX_FMT_JPEG_422)) { | |
9dd63e1f | 4713 | ALOGE("ERR(%s):jpegEnc.setJpegFormat() fail", __FUNCTION__); |
13d8c7b4 SK |
4714 | goto jpeg_encode_done; |
4715 | } | |
13d8c7b4 | 4716 | |
48728d49 | 4717 | if((m_jpegMetadata.shot.ctl.jpeg.thumbnailSize[0] != 0) && (m_jpegMetadata.shot.ctl.jpeg.thumbnailSize[1] != 0)) { |
2d5e6ec2 | 4718 | mExifInfo.enableThumb = true; |
48728d49 | 4719 | if(!m_checkThumbnailSize(m_jpegMetadata.shot.ctl.jpeg.thumbnailSize[0], m_jpegMetadata.shot.ctl.jpeg.thumbnailSize[1])) { |
4017b08c SK |
4720 | // in the case of unsupported parameter, disable thumbnail |
4721 | mExifInfo.enableThumb = false; | |
2d5e6ec2 | 4722 | } else { |
48728d49 SK |
4723 | m_thumbNailW = m_jpegMetadata.shot.ctl.jpeg.thumbnailSize[0]; |
4724 | m_thumbNailH = m_jpegMetadata.shot.ctl.jpeg.thumbnailSize[1]; | |
2d5e6ec2 SK |
4725 | } |
4726 | ||
4727 | ALOGV("(%s) m_thumbNailW = %d, m_thumbNailH = %d", __FUNCTION__, m_thumbNailW, m_thumbNailH); | |
4728 | ||
4729 | } else { | |
4730 | mExifInfo.enableThumb = false; | |
4731 | } | |
54f4971e | 4732 | |
2d5e6ec2 SK |
4733 | if (jpegEnc.setThumbnailSize(m_thumbNailW, m_thumbNailH)) { |
4734 | ALOGE("ERR(%s):jpegEnc.setThumbnailSize(%d, %d) fail", __FUNCTION__, m_thumbNailH, m_thumbNailH); | |
54f4971e SK |
4735 | goto jpeg_encode_done; |
4736 | } | |
4737 | ||
2d5e6ec2 | 4738 | ALOGV("(%s):jpegEnc.setThumbnailSize(%d, %d) ", __FUNCTION__, m_thumbNailW, m_thumbNailW); |
87423e56 | 4739 | if (jpegEnc.setThumbnailQuality(m_jpegMetadata.shot.ctl.jpeg.thumbnailQuality)) { |
54f4971e SK |
4740 | ALOGE("ERR(%s):jpegEnc.setThumbnailQuality fail", __FUNCTION__); |
4741 | goto jpeg_encode_done; | |
4742 | } | |
13d8c7b4 | 4743 | |
54f4971e | 4744 | m_setExifChangedAttribute(&mExifInfo, rect, &m_jpegMetadata); |
9dd63e1f | 4745 | ALOGV("DEBUG(%s):calling jpegEnc.setInBuf() yuvSize(%d)", __FUNCTION__, *yuvSize); |
54f4971e | 4746 | if (jpegEnc.setInBuf((int *)&(yuvBuf->fd.fd), &(yuvBuf->virt.p), (int *)yuvSize)) { |
9dd63e1f | 4747 | ALOGE("ERR(%s):jpegEnc.setInBuf() fail", __FUNCTION__); |
13d8c7b4 SK |
4748 | goto jpeg_encode_done; |
4749 | } | |
54f4971e | 4750 | if (jpegEnc.setOutBuf(jpegBuf->fd.fd, jpegBuf->virt.p, jpegBuf->size.extS[0] + jpegBuf->size.extS[1] + jpegBuf->size.extS[2])) { |
9dd63e1f | 4751 | ALOGE("ERR(%s):jpegEnc.setOutBuf() fail", __FUNCTION__); |
13d8c7b4 SK |
4752 | goto jpeg_encode_done; |
4753 | } | |
13d8c7b4 SK |
4754 | |
4755 | if (jpegEnc.updateConfig()) { | |
9dd63e1f | 4756 | ALOGE("ERR(%s):jpegEnc.updateConfig() fail", __FUNCTION__); |
13d8c7b4 SK |
4757 | goto jpeg_encode_done; |
4758 | } | |
4759 | ||
54f4971e | 4760 | if (res = jpegEnc.encode((int *)&jpegBuf->size.s, &mExifInfo)) { |
9dd63e1f | 4761 | ALOGE("ERR(%s):jpegEnc.encode() fail ret(%d)", __FUNCTION__, res); |
13d8c7b4 SK |
4762 | goto jpeg_encode_done; |
4763 | } | |
4764 | ||
4765 | ret = true; | |
4766 | ||
4767 | jpeg_encode_done: | |
4768 | ||
4769 | if (jpegEnc.flagCreate() == true) | |
4770 | jpegEnc.destroy(); | |
4771 | ||
4772 | return ret; | |
4773 | } | |
4774 | ||
e117f756 YJ |
/*
 * Handles a precapture metering trigger from the framework.
 *
 * Stores the trigger id (presumably echoed back in precapture notifications
 * — confirm against OnPrecaptureMeteringNotificationSensor), resets the AE
 * notification state, and decides whether a flash sequence must run before
 * the still capture. Flash is only armed on the rear camera (m_cameraId == 0)
 * when the AE mode requests it (>= AA_AEMODE_ON_AUTO_FLASH).
 */
void ExynosCameraHWInterface2::OnPrecaptureMeteringTriggerStart(int id)
{
    m_ctlInfo.flash.m_precaptureTriggerId = id;
    m_ctlInfo.ae.aeStateNoti = AE_STATE_INACTIVE;
    if ((m_ctlInfo.flash.i_flashMode >= AA_AEMODE_ON_AUTO_FLASH) && (m_cameraId == 0)) {
        // flash is required
        switch (m_ctlInfo.flash.m_flashCnt) {
        case IS_FLASH_STATE_AUTO_DONE:
        case IS_FLASH_STATE_AUTO_OFF:
            // Flash capture sequence, AF flash was executed before:
            // keep the current flash state machine untouched.
            break;
        default:
            // Full flash sequence: turn the flash state machine on and
            // restart its timeout counter.
            m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON;
            m_ctlInfo.flash.m_flashEnableFlg = true;
            m_ctlInfo.flash.m_flashTimeOut = 0;
        }
    } else {
        // Skip pre-capture in case of non-flash.
        ALOGV("[PreCap] Flash OFF mode ");
        m_ctlInfo.flash.m_flashEnableFlg = false;
        m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_NONE;
    }
    ALOGV("[PreCap] OnPrecaptureMeteringTriggerStart (ID %d) (flag : %d) (cnt : %d)", id, m_ctlInfo.flash.m_flashEnableFlg, m_ctlInfo.flash.m_flashCnt);
    OnPrecaptureMeteringNotificationSensor();
}
13d8c7b4 | 4801 | |
0f26b20f SK |
4802 | void ExynosCameraHWInterface2::OnAfTrigger(int id) |
4803 | { | |
8e2c2fdb | 4804 | m_afTriggerId = id; |
6f19b6cf | 4805 | |
0f26b20f SK |
4806 | switch (m_afMode) { |
4807 | case AA_AFMODE_AUTO: | |
4808 | case AA_AFMODE_MACRO: | |
34d2b94a | 4809 | case AA_AFMODE_MANUAL: |
73f5ad60 | 4810 | ALOGV("[AF] OnAfTrigger - AUTO,MACRO,OFF (Mode %d) ", m_afMode); |
caea49e6 YJ |
4811 | // If flash is enable, Flash operation is executed before triggering AF |
4812 | if ((m_ctlInfo.flash.i_flashMode >= AA_AEMODE_ON_AUTO_FLASH) | |
e117f756 | 4813 | && (m_ctlInfo.flash.m_flashEnableFlg == false) |
caea49e6 | 4814 | && (m_cameraId == 0)) { |
73f5ad60 | 4815 | ALOGV("[Flash] AF Flash start with Mode (%d)", m_afMode); |
e117f756 YJ |
4816 | m_ctlInfo.flash.m_flashEnableFlg = true; |
4817 | m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON; | |
caea49e6 | 4818 | m_ctlInfo.flash.m_flashDecisionResult = false; |
e117f756 | 4819 | m_ctlInfo.flash.m_afFlashDoneFlg = true; |
caea49e6 | 4820 | } |
0f26b20f SK |
4821 | OnAfTriggerAutoMacro(id); |
4822 | break; | |
4823 | case AA_AFMODE_CONTINUOUS_VIDEO: | |
73f5ad60 | 4824 | ALOGV("[AF] OnAfTrigger - AA_AFMODE_CONTINUOUS_VIDEO (Mode %d) ", m_afMode); |
0f26b20f SK |
4825 | OnAfTriggerCAFVideo(id); |
4826 | break; | |
4827 | case AA_AFMODE_CONTINUOUS_PICTURE: | |
73f5ad60 | 4828 | ALOGV("[AF] OnAfTrigger - AA_AFMODE_CONTINUOUS_PICTURE (Mode %d) ", m_afMode); |
0f26b20f SK |
4829 | OnAfTriggerCAFPicture(id); |
4830 | break; | |
8e2c2fdb | 4831 | |
34d2b94a | 4832 | case AA_AFMODE_OFF: |
0f26b20f SK |
4833 | default: |
4834 | break; | |
4835 | } | |
4836 | } | |
4837 | ||
4838 | void ExynosCameraHWInterface2::OnAfTriggerAutoMacro(int id) | |
4839 | { | |
4840 | int nextState = NO_TRANSITION; | |
0f26b20f SK |
4841 | |
4842 | switch (m_afState) { | |
4843 | case HAL_AFSTATE_INACTIVE: | |
6caa0c80 SK |
4844 | case HAL_AFSTATE_PASSIVE_FOCUSED: |
4845 | case HAL_AFSTATE_SCANNING: | |
0f26b20f SK |
4846 | nextState = HAL_AFSTATE_NEEDS_COMMAND; |
4847 | m_IsAfTriggerRequired = true; | |
4848 | break; | |
4849 | case HAL_AFSTATE_NEEDS_COMMAND: | |
4850 | nextState = NO_TRANSITION; | |
4851 | break; | |
4852 | case HAL_AFSTATE_STARTED: | |
4853 | nextState = NO_TRANSITION; | |
4854 | break; | |
0f26b20f SK |
4855 | case HAL_AFSTATE_LOCKED: |
4856 | nextState = HAL_AFSTATE_NEEDS_COMMAND; | |
4857 | m_IsAfTriggerRequired = true; | |
4858 | break; | |
4859 | case HAL_AFSTATE_FAILED: | |
4860 | nextState = HAL_AFSTATE_NEEDS_COMMAND; | |
4861 | m_IsAfTriggerRequired = true; | |
4862 | break; | |
4863 | default: | |
4864 | break; | |
4865 | } | |
4866 | ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState); | |
4867 | if (nextState != NO_TRANSITION) | |
4868 | m_afState = nextState; | |
4869 | } | |
4870 | ||
// AF trigger while in continuous-picture AF mode.
// The trigger "harvests" the current passive scan result: a finished scan
// locks (or fails) immediately; a running scan moves to NEEDS_DETERMINATION
// so the next HW notification decides the outcome.
void ExynosCameraHWInterface2::OnAfTriggerCAFPicture(int id)
{
    int nextState = NO_TRANSITION;

    switch (m_afState) {
    case HAL_AFSTATE_INACTIVE:
        // No passive scan has run yet: report not-focused immediately.
        nextState = HAL_AFSTATE_FAILED;
        SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
        break;
    case HAL_AFSTATE_NEEDS_COMMAND:
        // not used
        break;
    case HAL_AFSTATE_STARTED:
        // Scan requested but no HW result yet: wait for determination.
        nextState = HAL_AFSTATE_NEEDS_DETERMINATION;
        m_AfHwStateFailed = false;
        break;
    case HAL_AFSTATE_SCANNING:
        nextState = HAL_AFSTATE_NEEDS_DETERMINATION;
        m_AfHwStateFailed = false;
        // If flash is enable, Flash operation is executed before triggering AF
        if ((m_ctlInfo.flash.i_flashMode >= AA_AEMODE_ON_AUTO_FLASH)
                && (m_ctlInfo.flash.m_flashEnableFlg == false)
                && (m_cameraId == 0)) {
            ALOGV("[AF Flash] AF Flash start with Mode (%d) state (%d) id (%d)", m_afMode, m_afState, id);
            m_ctlInfo.flash.m_flashEnableFlg = true;
            m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON;
            m_ctlInfo.flash.m_flashDecisionResult = false;
            m_ctlInfo.flash.m_afFlashDoneFlg = true;
        }
        break;
    case HAL_AFSTATE_NEEDS_DETERMINATION:
        // Determination already pending: duplicate trigger is ignored.
        nextState = NO_TRANSITION;
        break;
    case HAL_AFSTATE_PASSIVE_FOCUSED:
        // Passive scan finished: lock the lens and report the last HW result.
        m_IsAfLockRequired = true;
        if (m_AfHwStateFailed) {
            ALOGE("(%s): [CAF] LAST : fail", __FUNCTION__);
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
            nextState = HAL_AFSTATE_FAILED;
        }
        else {
            ALOGV("(%s): [CAF] LAST : success", __FUNCTION__);
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
            nextState = HAL_AFSTATE_LOCKED;
        }
        m_AfHwStateFailed = false;
        break;
    case HAL_AFSTATE_LOCKED:
        nextState = NO_TRANSITION;
        break;
    case HAL_AFSTATE_FAILED:
        nextState = NO_TRANSITION;
        break;
    default:
        break;
    }
    ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
    if (nextState != NO_TRANSITION)
        m_afState = nextState;
}
4931 | ||
4932 | ||
4933 | void ExynosCameraHWInterface2::OnAfTriggerCAFVideo(int id) | |
4934 | { | |
4935 | int nextState = NO_TRANSITION; | |
0f26b20f SK |
4936 | |
4937 | switch (m_afState) { | |
4938 | case HAL_AFSTATE_INACTIVE: | |
4939 | nextState = HAL_AFSTATE_FAILED; | |
4940 | SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED); | |
4941 | break; | |
4942 | case HAL_AFSTATE_NEEDS_COMMAND: | |
4943 | // not used | |
4944 | break; | |
4945 | case HAL_AFSTATE_STARTED: | |
4946 | m_IsAfLockRequired = true; | |
4947 | nextState = HAL_AFSTATE_FAILED; | |
4948 | SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED); | |
4949 | break; | |
4950 | case HAL_AFSTATE_SCANNING: | |
4951 | m_IsAfLockRequired = true; | |
4952 | nextState = HAL_AFSTATE_FAILED; | |
4953 | SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED); | |
4954 | break; | |
4955 | case HAL_AFSTATE_NEEDS_DETERMINATION: | |
4956 | // not used | |
4957 | break; | |
4958 | case HAL_AFSTATE_PASSIVE_FOCUSED: | |
4959 | m_IsAfLockRequired = true; | |
4960 | SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED); | |
4961 | nextState = HAL_AFSTATE_LOCKED; | |
4962 | break; | |
4963 | case HAL_AFSTATE_LOCKED: | |
4964 | nextState = NO_TRANSITION; | |
4965 | break; | |
4966 | case HAL_AFSTATE_FAILED: | |
4967 | nextState = NO_TRANSITION; | |
4968 | break; | |
4969 | default: | |
4970 | break; | |
4971 | } | |
4972 | ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState); | |
4973 | if (nextState != NO_TRANSITION) | |
4974 | m_afState = nextState; | |
4975 | } | |
4976 | ||
73f5ad60 YJ |
4977 | void ExynosCameraHWInterface2::OnPrecaptureMeteringNotificationSensor() |
4978 | { | |
4979 | if (m_ctlInfo.flash.m_precaptureTriggerId > 0) { | |
4980 | // Just noti of pre-capture start | |
4981 | if (m_ctlInfo.ae.aeStateNoti != AE_STATE_PRECAPTURE) { | |
4982 | m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE, | |
4983 | ANDROID_CONTROL_AE_STATE_PRECAPTURE, | |
4984 | m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie); | |
4985 | ALOGV("(%s) ANDROID_CONTROL_AE_STATE_PRECAPTURE (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt); | |
4986 | m_notifyCb(CAMERA2_MSG_AUTOWB, | |
4987 | ANDROID_CONTROL_AWB_STATE_CONVERGED, | |
4988 | m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie); | |
4989 | m_ctlInfo.ae.aeStateNoti = AE_STATE_PRECAPTURE; | |
4990 | } | |
4991 | } | |
4992 | } | |
4993 | ||
// Drive pre-capture metering notifications from the ISP side.
// Sends AE PRECAPTURE/CONVERGED and AWB CONVERGED notifications to the
// framework; clearing m_precaptureTriggerId marks the sequence finished.
void ExynosCameraHWInterface2::OnPrecaptureMeteringNotificationISP()
{
    if (m_ctlInfo.flash.m_precaptureTriggerId > 0) {
        if (m_ctlInfo.flash.m_flashEnableFlg) {
            // flash case
            switch (m_ctlInfo.flash.m_flashCnt) {
            case IS_FLASH_STATE_AUTO_DONE:
            case IS_FLASH_STATE_AUTO_OFF:
                if (m_ctlInfo.ae.aeStateNoti == AE_STATE_PRECAPTURE) {
                    // End notification
                    m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE,
                                ANDROID_CONTROL_AE_STATE_CONVERGED,
                                m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
                    ALOGV("(%s) ANDROID_CONTROL_AE_STATE_CONVERGED (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt);
                    m_notifyCb(CAMERA2_MSG_AUTOWB,
                                ANDROID_CONTROL_AWB_STATE_CONVERGED,
                                m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
                    // Sequence complete: clear the trigger id.
                    m_ctlInfo.flash.m_precaptureTriggerId = 0;
                } else {
                    // Start notification was not sent yet: send it now so the
                    // framework sees PRECAPTURE before CONVERGED.
                    m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE,
                                ANDROID_CONTROL_AE_STATE_PRECAPTURE,
                                m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
                    ALOGV("(%s) ANDROID_CONTROL_AE_STATE_PRECAPTURE (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt);
                    m_notifyCb(CAMERA2_MSG_AUTOWB,
                                ANDROID_CONTROL_AWB_STATE_CONVERGED,
                                m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
                    m_ctlInfo.ae.aeStateNoti = AE_STATE_PRECAPTURE;
                }
                break;
            case IS_FLASH_STATE_CAPTURE:
            case IS_FLASH_STATE_CAPTURE_WAIT:
            case IS_FLASH_STATE_CAPTURE_JPEG:
            case IS_FLASH_STATE_CAPTURE_END:
                // A capture-phase flash state during pre-capture is
                // unexpected: force AUTO_DONE and finish the sequence.
                ALOGV("(%s) INVALID flash state count. (%d)", __FUNCTION__, (int)m_ctlInfo.flash.m_flashCnt);
                m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_DONE;
                m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE,
                            ANDROID_CONTROL_AE_STATE_CONVERGED,
                            m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
                m_notifyCb(CAMERA2_MSG_AUTOWB,
                            ANDROID_CONTROL_AWB_STATE_CONVERGED,
                            m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
                m_ctlInfo.flash.m_precaptureTriggerId = 0;
                break;
            }
        } else {
            // non-flash case
            if (m_ctlInfo.ae.aeStateNoti == AE_STATE_PRECAPTURE) {
                // Start was already notified (sensor side): send CONVERGED
                // end notifications and finish the sequence.
                m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE,
                            ANDROID_CONTROL_AE_STATE_CONVERGED,
                            m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
                ALOGV("(%s) ANDROID_CONTROL_AE_STATE_CONVERGED (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt);
                m_notifyCb(CAMERA2_MSG_AUTOWB,
                            ANDROID_CONTROL_AWB_STATE_CONVERGED,
                            m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
                m_ctlInfo.flash.m_precaptureTriggerId = 0;
            }
        }
    }
}
5053 | ||
0f26b20f SK |
5054 | void ExynosCameraHWInterface2::OnAfNotification(enum aa_afstate noti) |
5055 | { | |
5056 | switch (m_afMode) { | |
5057 | case AA_AFMODE_AUTO: | |
5058 | case AA_AFMODE_MACRO: | |
5059 | OnAfNotificationAutoMacro(noti); | |
5060 | break; | |
5061 | case AA_AFMODE_CONTINUOUS_VIDEO: | |
5062 | OnAfNotificationCAFVideo(noti); | |
5063 | break; | |
5064 | case AA_AFMODE_CONTINUOUS_PICTURE: | |
5065 | OnAfNotificationCAFPicture(noti); | |
5066 | break; | |
5067 | case AA_AFMODE_OFF: | |
5068 | default: | |
5069 | break; | |
5070 | } | |
5071 | } | |
5072 | ||
// AF state-machine step for AUTO/MACRO modes, driven by ISP notifications.
// Computes a HAL AF state transition from (m_afState, noti); notifications
// that are impossible for the current state are logged and dropped.
void ExynosCameraHWInterface2::OnAfNotificationAutoMacro(enum aa_afstate noti)
{
    int nextState = NO_TRANSITION;
    bool bWrongTransition = false;

    if (m_afState == HAL_AFSTATE_INACTIVE || m_afState == HAL_AFSTATE_NEEDS_COMMAND) {
        // Nothing to track before the AF command is issued.
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
        case AA_AFSTATE_ACTIVE_SCAN:
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
        case AA_AFSTATE_AF_FAILED_FOCUS:
        default:
            nextState = NO_TRANSITION;
            break;
        }
    }
    else if (m_afState == HAL_AFSTATE_STARTED) {
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
            nextState = NO_TRANSITION;
            break;
        case AA_AFSTATE_ACTIVE_SCAN:
            // HW started sweeping: report ACTIVE_SCAN to the framework.
            nextState = HAL_AFSTATE_SCANNING;
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN);
            break;
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
            nextState = NO_TRANSITION;
            break;
        case AA_AFSTATE_AF_FAILED_FOCUS:
            nextState = NO_TRANSITION;
            break;
        default:
            bWrongTransition = true;
            break;
        }
    }
    else if (m_afState == HAL_AFSTATE_SCANNING) {
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
            bWrongTransition = true;
            break;
        case AA_AFSTATE_ACTIVE_SCAN:
            nextState = NO_TRANSITION;
            break;
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
            // If Flash mode is enable, after AF execute pre-capture metering
            if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
                // Delay the LOCKED report until the AF flash sequence ends.
                switch (m_ctlInfo.flash.m_flashCnt) {
                case IS_FLASH_STATE_ON_DONE:
                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_AE_AWB_LOCK;
                    nextState = NO_TRANSITION;
                    break;
                case IS_FLASH_STATE_AUTO_DONE:
                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF;
                    nextState = HAL_AFSTATE_LOCKED;
                    SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
                    break;
                default:
                    nextState = NO_TRANSITION;
                }
            } else {
                nextState = HAL_AFSTATE_LOCKED;
                SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
            }
            break;
        case AA_AFSTATE_AF_FAILED_FOCUS:
            // If Flash mode is enable, after AF execute pre-capture metering
            if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
                // Same flash-sequencing as the success path, but reports
                // NOT_FOCUSED_LOCKED once the flash sequence completes.
                switch (m_ctlInfo.flash.m_flashCnt) {
                case IS_FLASH_STATE_ON_DONE:
                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_AE_AWB_LOCK;
                    nextState = NO_TRANSITION;
                    break;
                case IS_FLASH_STATE_AUTO_DONE:
                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF;
                    nextState = HAL_AFSTATE_FAILED;
                    SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
                    break;
                default:
                    nextState = NO_TRANSITION;
                }
            } else {
                nextState = HAL_AFSTATE_FAILED;
                SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
            }
            break;
        default:
            bWrongTransition = true;
            break;
        }
    }
    else if (m_afState == HAL_AFSTATE_LOCKED) {
        // Only a repeated ACQUIRED_FOCUS is valid while locked.
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
        case AA_AFSTATE_ACTIVE_SCAN:
            bWrongTransition = true;
            break;
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
            nextState = NO_TRANSITION;
            break;
        case AA_AFSTATE_AF_FAILED_FOCUS:
        default:
            bWrongTransition = true;
            break;
        }
    }
    else if (m_afState == HAL_AFSTATE_FAILED) {
        // Only a repeated FAILED_FOCUS is valid after failure.
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
        case AA_AFSTATE_ACTIVE_SCAN:
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
            bWrongTransition = true;
            break;
        case AA_AFSTATE_AF_FAILED_FOCUS:
            nextState = NO_TRANSITION;
            break;
        default:
            bWrongTransition = true;
            break;
        }
    }
    if (bWrongTransition) {
        ALOGV("(%s): Wrong Transition state(%d) noti(%d)", __FUNCTION__, m_afState, noti);
        return;
    }
    ALOGV("(%s): State (%d) -> (%d) by (%d)", __FUNCTION__, m_afState, nextState, noti);
    if (nextState != NO_TRANSITION)
        m_afState = nextState;
}
5202 | ||
5203 | void ExynosCameraHWInterface2::OnAfNotificationCAFPicture(enum aa_afstate noti) | |
5204 | { | |
5205 | int nextState = NO_TRANSITION; | |
5206 | bool bWrongTransition = false; | |
5207 | ||
5208 | if (m_afState == HAL_AFSTATE_INACTIVE) { | |
5209 | switch (noti) { | |
5210 | case AA_AFSTATE_INACTIVE: | |
5211 | case AA_AFSTATE_ACTIVE_SCAN: | |
5212 | case AA_AFSTATE_AF_ACQUIRED_FOCUS: | |
5213 | case AA_AFSTATE_AF_FAILED_FOCUS: | |
5214 | default: | |
5215 | nextState = NO_TRANSITION; | |
5216 | break; | |
5217 | } | |
40acdcc8 YJ |
5218 | // Check AF notification after triggering |
5219 | if (m_ctlInfo.af.m_afTriggerTimeOut > 0) { | |
5220 | if (m_ctlInfo.af.m_afTriggerTimeOut > 5) { | |
5221 | ALOGE("(%s) AF notification error - try to re-trigger mode (%)", __FUNCTION__, m_afMode); | |
5222 | SetAfMode(AA_AFMODE_OFF); | |
5223 | SetAfMode(m_afMode); | |
5224 | m_ctlInfo.af.m_afTriggerTimeOut = 0; | |
5225 | } else { | |
5226 | m_ctlInfo.af.m_afTriggerTimeOut++; | |
5227 | } | |
5228 | } | |
0f26b20f SK |
5229 | } |
5230 | else if (m_afState == HAL_AFSTATE_STARTED) { | |
5231 | switch (noti) { | |
5232 | case AA_AFSTATE_INACTIVE: | |
5233 | nextState = NO_TRANSITION; | |
5234 | break; | |
5235 | case AA_AFSTATE_ACTIVE_SCAN: | |
5236 | nextState = HAL_AFSTATE_SCANNING; | |
5237 | SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN); | |
40acdcc8 | 5238 | m_ctlInfo.af.m_afTriggerTimeOut = 0; |
0f26b20f SK |
5239 | break; |
5240 | case AA_AFSTATE_AF_ACQUIRED_FOCUS: | |
5241 | nextState = HAL_AFSTATE_PASSIVE_FOCUSED; | |
5242 | SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED); | |
40acdcc8 | 5243 | m_ctlInfo.af.m_afTriggerTimeOut = 0; |
0f26b20f SK |
5244 | break; |
5245 | case AA_AFSTATE_AF_FAILED_FOCUS: | |
cd13bb78 SK |
5246 | //nextState = HAL_AFSTATE_FAILED; |
5247 | //SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED); | |
5248 | nextState = NO_TRANSITION; | |
0f26b20f SK |
5249 | break; |
5250 | default: | |
5251 | bWrongTransition = true; | |
5252 | break; | |
5253 | } | |
5254 | } | |
5255 | else if (m_afState == HAL_AFSTATE_SCANNING) { | |
5256 | switch (noti) { | |
5257 | case AA_AFSTATE_INACTIVE: | |
9900d0c4 | 5258 | nextState = NO_TRANSITION; |
0f26b20f SK |
5259 | break; |
5260 | case AA_AFSTATE_ACTIVE_SCAN: | |
5261 | nextState = NO_TRANSITION; | |
9900d0c4 | 5262 | m_AfHwStateFailed = false; |
0f26b20f SK |
5263 | break; |
5264 | case AA_AFSTATE_AF_ACQUIRED_FOCUS: | |
5265 | nextState = HAL_AFSTATE_PASSIVE_FOCUSED; | |
9900d0c4 | 5266 | m_AfHwStateFailed = false; |
0f26b20f SK |
5267 | SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED); |
5268 | break; | |
5269 | case AA_AFSTATE_AF_FAILED_FOCUS: | |
9900d0c4 SK |
5270 | nextState = HAL_AFSTATE_PASSIVE_FOCUSED; |
5271 | m_AfHwStateFailed = true; | |
5272 | SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED); | |
0f26b20f SK |
5273 | break; |
5274 | default: | |
5275 | bWrongTransition = true; | |
5276 | break; | |
5277 | } | |
5278 | } | |
5279 | else if (m_afState == HAL_AFSTATE_PASSIVE_FOCUSED) { | |
5280 | switch (noti) { | |
5281 | case AA_AFSTATE_INACTIVE: | |
9900d0c4 | 5282 | nextState = NO_TRANSITION; |
0f26b20f SK |
5283 | break; |
5284 | case AA_AFSTATE_ACTIVE_SCAN: | |
5285 | nextState = HAL_AFSTATE_SCANNING; | |
9900d0c4 | 5286 | m_AfHwStateFailed = false; |
0f26b20f SK |
5287 | SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN); |
5288 | break; | |
5289 | case AA_AFSTATE_AF_ACQUIRED_FOCUS: | |
5290 | nextState = NO_TRANSITION; | |
9900d0c4 | 5291 | m_AfHwStateFailed = false; |
0f26b20f SK |
5292 | break; |
5293 | case AA_AFSTATE_AF_FAILED_FOCUS: | |
9900d0c4 SK |
5294 | nextState = NO_TRANSITION; |
5295 | m_AfHwStateFailed = true; | |
0f26b20f SK |
5296 | break; |
5297 | default: | |
5298 | bWrongTransition = true; | |
5299 | break; | |
5300 | } | |
5301 | } | |
5302 | else if (m_afState == HAL_AFSTATE_NEEDS_DETERMINATION) { | |
f7f8d321 YJ |
5303 | //Skip notification in case of flash, wait the end of flash on |
5304 | if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) { | |
5305 | if (m_ctlInfo.flash.m_flashCnt < IS_FLASH_STATE_ON_DONE) | |
5306 | return; | |
5307 | } | |
0f26b20f SK |
5308 | switch (noti) { |
5309 | case AA_AFSTATE_INACTIVE: | |
9900d0c4 | 5310 | nextState = NO_TRANSITION; |
0f26b20f SK |
5311 | break; |
5312 | case AA_AFSTATE_ACTIVE_SCAN: | |
5313 | nextState = NO_TRANSITION; | |
5314 | break; | |
5315 | case AA_AFSTATE_AF_ACQUIRED_FOCUS: | |
caea49e6 | 5316 | // If Flash mode is enable, after AF execute pre-capture metering |
e117f756 | 5317 | if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) { |
e117f756 YJ |
5318 | switch (m_ctlInfo.flash.m_flashCnt) { |
5319 | case IS_FLASH_STATE_ON_DONE: | |
f7f8d321 | 5320 | ALOGV("[AF Flash] AUTO start with Mode (%d) state (%d) noti (%d)", m_afMode, m_afState, (int)noti); |
e117f756 | 5321 | m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_AE_AWB_LOCK; |
4a9565ae | 5322 | nextState = NO_TRANSITION; |
e117f756 YJ |
5323 | break; |
5324 | case IS_FLASH_STATE_AUTO_DONE: | |
f7f8d321 | 5325 | ALOGV("[AF Flash] AUTO end with Mode (%d) state (%d) noti (%d)", m_afMode, m_afState, (int)noti); |
4a9565ae | 5326 | m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF; |
caea49e6 YJ |
5327 | m_IsAfLockRequired = true; |
5328 | nextState = HAL_AFSTATE_LOCKED; | |
5329 | SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED); | |
e117f756 YJ |
5330 | break; |
5331 | default: | |
caea49e6 YJ |
5332 | nextState = NO_TRANSITION; |
5333 | } | |
5334 | } else { | |
5335 | m_IsAfLockRequired = true; | |
5336 | nextState = HAL_AFSTATE_LOCKED; | |
5337 | SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED); | |
5338 | } | |
0f26b20f SK |
5339 | break; |
5340 | case AA_AFSTATE_AF_FAILED_FOCUS: | |
caea49e6 | 5341 | // If Flash mode is enable, after AF execute pre-capture metering |
e117f756 | 5342 | if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) { |
e117f756 YJ |
5343 | switch (m_ctlInfo.flash.m_flashCnt) { |
5344 | case IS_FLASH_STATE_ON_DONE: | |
f7f8d321 | 5345 | ALOGV("[AF Flash] AUTO start with Mode (%d) state (%d) noti (%d)", m_afMode, m_afState, (int)noti); |
e117f756 | 5346 | m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_AE_AWB_LOCK; |
4a9565ae | 5347 | nextState = NO_TRANSITION; |
e117f756 YJ |
5348 | break; |
5349 | case IS_FLASH_STATE_AUTO_DONE: | |
f7f8d321 | 5350 | ALOGV("[AF Flash] AUTO end with Mode (%d) state (%d) noti (%d)", m_afMode, m_afState, (int)noti); |
4a9565ae | 5351 | m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF; |
caea49e6 YJ |
5352 | m_IsAfLockRequired = true; |
5353 | nextState = HAL_AFSTATE_FAILED; | |
5354 | SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED); | |
e117f756 YJ |
5355 | break; |
5356 | default: | |
caea49e6 YJ |
5357 | nextState = NO_TRANSITION; |
5358 | } | |
5359 | } else { | |
5360 | m_IsAfLockRequired = true; | |
5361 | nextState = HAL_AFSTATE_FAILED; | |
5362 | SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED); | |
5363 | } | |
0f26b20f SK |
5364 | break; |
5365 | default: | |
5366 | bWrongTransition = true; | |
5367 | break; | |
5368 | } | |
5369 | } | |
5370 | else if (m_afState == HAL_AFSTATE_LOCKED) { | |
5371 | switch (noti) { | |
5372 | case AA_AFSTATE_INACTIVE: | |
5373 | nextState = NO_TRANSITION; | |
5374 | break; | |
5375 | case AA_AFSTATE_ACTIVE_SCAN: | |
5376 | bWrongTransition = true; | |
5377 | break; | |
5378 | case AA_AFSTATE_AF_ACQUIRED_FOCUS: | |
5379 | nextState = NO_TRANSITION; | |
5380 | break; | |
5381 | case AA_AFSTATE_AF_FAILED_FOCUS: | |
5382 | default: | |
5383 | bWrongTransition = true; | |
5384 | break; | |
5385 | } | |
5386 | } | |
5387 | else if (m_afState == HAL_AFSTATE_FAILED) { | |
5388 | switch (noti) { | |
5389 | case AA_AFSTATE_INACTIVE: | |
9900d0c4 SK |
5390 | bWrongTransition = true; |
5391 | break; | |
0f26b20f | 5392 | case AA_AFSTATE_ACTIVE_SCAN: |
9900d0c4 SK |
5393 | nextState = HAL_AFSTATE_SCANNING; |
5394 | break; | |
0f26b20f SK |
5395 | case AA_AFSTATE_AF_ACQUIRED_FOCUS: |
5396 | bWrongTransition = true; | |
5397 | break; | |
5398 | case AA_AFSTATE_AF_FAILED_FOCUS: | |
5399 | nextState = NO_TRANSITION; | |
5400 | break; | |
5401 | default: | |
5402 | bWrongTransition = true; | |
5403 | break; | |
5404 | } | |
5405 | } | |
5406 | if (bWrongTransition) { | |
5407 | ALOGV("(%s): Wrong Transition state(%d) noti(%d)", __FUNCTION__, m_afState, noti); | |
5408 | return; | |
5409 | } | |
5410 | ALOGV("(%s): State (%d) -> (%d) by (%d)", __FUNCTION__, m_afState, nextState, noti); | |
5411 | if (nextState != NO_TRANSITION) | |
5412 | m_afState = nextState; | |
5413 | } | |
5414 | ||
// AF state-machine step for continuous-video AF, driven by ISP
// notifications. Computes a HAL AF state transition from (m_afState, noti);
// notifications impossible for the current state are logged and dropped.
void ExynosCameraHWInterface2::OnAfNotificationCAFVideo(enum aa_afstate noti)
{
    int nextState = NO_TRANSITION;
    bool bWrongTransition = false;

    if (m_afState == HAL_AFSTATE_INACTIVE) {
        // Nothing to track before CAF starts.
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
        case AA_AFSTATE_ACTIVE_SCAN:
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
        case AA_AFSTATE_AF_FAILED_FOCUS:
        default:
            nextState = NO_TRANSITION;
            break;
        }
    }
    else if (m_afState == HAL_AFSTATE_STARTED) {
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
            nextState = NO_TRANSITION;
            break;
        case AA_AFSTATE_ACTIVE_SCAN:
            nextState = HAL_AFSTATE_SCANNING;
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN);
            break;
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
            nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
            break;
        case AA_AFSTATE_AF_FAILED_FOCUS:
            nextState = HAL_AFSTATE_FAILED;
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
            break;
        default:
            bWrongTransition = true;
            break;
        }
    }
    else if (m_afState == HAL_AFSTATE_SCANNING) {
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
            bWrongTransition = true;
            break;
        case AA_AFSTATE_ACTIVE_SCAN:
            nextState = NO_TRANSITION;
            break;
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
            nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
            break;
        case AA_AFSTATE_AF_FAILED_FOCUS:
            // Unlike CAF-picture, a failed scan keeps scanning here.
            nextState = NO_TRANSITION;
            break;
        default:
            bWrongTransition = true;
            break;
        }
    }
    else if (m_afState == HAL_AFSTATE_PASSIVE_FOCUSED) {
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
            bWrongTransition = true;
            break;
        case AA_AFSTATE_ACTIVE_SCAN:
            // HW restarted a passive scan.
            nextState = HAL_AFSTATE_SCANNING;
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN);
            break;
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
            nextState = NO_TRANSITION;
            break;
        case AA_AFSTATE_AF_FAILED_FOCUS:
            nextState = HAL_AFSTATE_FAILED;
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
            // TODO : needs NO_TRANSITION ?
            break;
        default:
            bWrongTransition = true;
            break;
        }
    }
    else if (m_afState == HAL_AFSTATE_NEEDS_DETERMINATION) {
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
            bWrongTransition = true;
            break;
        case AA_AFSTATE_ACTIVE_SCAN:
            nextState = NO_TRANSITION;
            break;
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
            // Determination resolved as focused: lock the lens.
            m_IsAfLockRequired = true;
            nextState = HAL_AFSTATE_LOCKED;
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
            break;
        case AA_AFSTATE_AF_FAILED_FOCUS:
            nextState = HAL_AFSTATE_FAILED;
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
            break;
        default:
            bWrongTransition = true;
            break;
        }
    }
    else if (m_afState == HAL_AFSTATE_LOCKED) {
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
            nextState = NO_TRANSITION;
            break;
        case AA_AFSTATE_ACTIVE_SCAN:
            bWrongTransition = true;
            break;
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
            nextState = NO_TRANSITION;
            break;
        case AA_AFSTATE_AF_FAILED_FOCUS:
        default:
            bWrongTransition = true;
            break;
        }
    }
    else if (m_afState == HAL_AFSTATE_FAILED) {
        // Only a repeated FAILED_FOCUS is valid after failure.
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
        case AA_AFSTATE_ACTIVE_SCAN:
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
            bWrongTransition = true;
            break;
        case AA_AFSTATE_AF_FAILED_FOCUS:
            nextState = NO_TRANSITION;
            break;
        default:
            bWrongTransition = true;
            break;
        }
    }
    if (bWrongTransition) {
        ALOGV("(%s): Wrong Transition state(%d) noti(%d)", __FUNCTION__, m_afState, noti);
        return;
    }
    ALOGV("(%s): State (%d) -> (%d) by (%d)", __FUNCTION__, m_afState, nextState, noti);
    if (nextState != NO_TRANSITION)
        m_afState = nextState;
}
5557 | ||
5558 | void ExynosCameraHWInterface2::OnAfCancel(int id) | |
5559 | { | |
8e2c2fdb SK |
5560 | m_afTriggerId = id; |
5561 | ||
0f26b20f SK |
5562 | switch (m_afMode) { |
5563 | case AA_AFMODE_AUTO: | |
5564 | case AA_AFMODE_MACRO: | |
8e2c2fdb | 5565 | case AA_AFMODE_OFF: |
c48f0170 | 5566 | case AA_AFMODE_MANUAL: |
0f26b20f SK |
5567 | OnAfCancelAutoMacro(id); |
5568 | break; | |
5569 | case AA_AFMODE_CONTINUOUS_VIDEO: | |
5570 | OnAfCancelCAFVideo(id); | |
5571 | break; | |
5572 | case AA_AFMODE_CONTINUOUS_PICTURE: | |
5573 | OnAfCancelCAFPicture(id); | |
5574 | break; | |
0f26b20f SK |
5575 | default: |
5576 | break; | |
5577 | } | |
5578 | } | |
5579 | ||
5580 | void ExynosCameraHWInterface2::OnAfCancelAutoMacro(int id) | |
5581 | { | |
5582 | int nextState = NO_TRANSITION; | |
0f26b20f | 5583 | |
e117f756 YJ |
5584 | if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) { |
5585 | m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF; | |
6f19b6cf | 5586 | } |
0f26b20f SK |
5587 | switch (m_afState) { |
5588 | case HAL_AFSTATE_INACTIVE: | |
5589 | nextState = NO_TRANSITION; | |
cd13bb78 | 5590 | SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE); |
0f26b20f SK |
5591 | break; |
5592 | case HAL_AFSTATE_NEEDS_COMMAND: | |
5593 | case HAL_AFSTATE_STARTED: | |
5594 | case HAL_AFSTATE_SCANNING: | |
5595 | case HAL_AFSTATE_LOCKED: | |
5596 | case HAL_AFSTATE_FAILED: | |
5597 | SetAfMode(AA_AFMODE_OFF); | |
5598 | SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE); | |
5599 | nextState = HAL_AFSTATE_INACTIVE; | |
5600 | break; | |
5601 | default: | |
5602 | break; | |
5603 | } | |
5604 | ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState); | |
5605 | if (nextState != NO_TRANSITION) | |
5606 | m_afState = nextState; | |
5607 | } | |
5608 | ||
5609 | void ExynosCameraHWInterface2::OnAfCancelCAFPicture(int id) | |
5610 | { | |
5611 | int nextState = NO_TRANSITION; | |
0f26b20f SK |
5612 | |
5613 | switch (m_afState) { | |
5614 | case HAL_AFSTATE_INACTIVE: | |
5615 | nextState = NO_TRANSITION; | |
5616 | break; | |
5617 | case HAL_AFSTATE_NEEDS_COMMAND: | |
5618 | case HAL_AFSTATE_STARTED: | |
5619 | case HAL_AFSTATE_SCANNING: | |
5620 | case HAL_AFSTATE_LOCKED: | |
5621 | case HAL_AFSTATE_FAILED: | |
5622 | case HAL_AFSTATE_NEEDS_DETERMINATION: | |
5623 | case HAL_AFSTATE_PASSIVE_FOCUSED: | |
5624 | SetAfMode(AA_AFMODE_OFF); | |
5625 | SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE); | |
5626 | SetAfMode(AA_AFMODE_CONTINUOUS_PICTURE); | |
5627 | nextState = HAL_AFSTATE_INACTIVE; | |
5628 | break; | |
5629 | default: | |
5630 | break; | |
5631 | } | |
5632 | ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState); | |
5633 | if (nextState != NO_TRANSITION) | |
5634 | m_afState = nextState; | |
5635 | } | |
5636 | ||
5637 | void ExynosCameraHWInterface2::OnAfCancelCAFVideo(int id) | |
5638 | { | |
5639 | int nextState = NO_TRANSITION; | |
0f26b20f SK |
5640 | |
5641 | switch (m_afState) { | |
5642 | case HAL_AFSTATE_INACTIVE: | |
5643 | nextState = NO_TRANSITION; | |
5644 | break; | |
5645 | case HAL_AFSTATE_NEEDS_COMMAND: | |
5646 | case HAL_AFSTATE_STARTED: | |
5647 | case HAL_AFSTATE_SCANNING: | |
5648 | case HAL_AFSTATE_LOCKED: | |
5649 | case HAL_AFSTATE_FAILED: | |
5650 | case HAL_AFSTATE_NEEDS_DETERMINATION: | |
5651 | case HAL_AFSTATE_PASSIVE_FOCUSED: | |
5652 | SetAfMode(AA_AFMODE_OFF); | |
5653 | SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE); | |
5654 | SetAfMode(AA_AFMODE_CONTINUOUS_VIDEO); | |
5655 | nextState = HAL_AFSTATE_INACTIVE; | |
5656 | break; | |
5657 | default: | |
5658 | break; | |
5659 | } | |
5660 | ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState); | |
5661 | if (nextState != NO_TRANSITION) | |
5662 | m_afState = nextState; | |
5663 | } | |
5664 | ||
5665 | void ExynosCameraHWInterface2::SetAfStateForService(int newState) | |
5666 | { | |
8e2c2fdb SK |
5667 | if (m_serviceAfState != newState || newState == 0) |
5668 | m_notifyCb(CAMERA2_MSG_AUTOFOCUS, newState, m_afTriggerId, 0, m_callbackCookie); | |
0f26b20f | 5669 | m_serviceAfState = newState; |
0f26b20f SK |
5670 | } |
5671 | ||
// Returns the AF state most recently reported to the camera service.
int ExynosCameraHWInterface2::GetAfStateForService()
{
    return m_serviceAfState;
}
5676 | ||
5677 | void ExynosCameraHWInterface2::SetAfMode(enum aa_afmode afMode) | |
5678 | { | |
5679 | if (m_afMode != afMode) { | |
ca714238 | 5680 | if (m_IsAfModeUpdateRequired && m_afMode != AA_AFMODE_OFF) { |
0f26b20f SK |
5681 | m_afMode2 = afMode; |
5682 | ALOGV("(%s): pending(%d) and new(%d)", __FUNCTION__, m_afMode, afMode); | |
5683 | } | |
5684 | else { | |
5685 | ALOGV("(%s): current(%d) new(%d)", __FUNCTION__, m_afMode, afMode); | |
5686 | m_IsAfModeUpdateRequired = true; | |
5687 | m_afMode = afMode; | |
c48f0170 SK |
5688 | SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE); |
5689 | m_afState = HAL_AFSTATE_INACTIVE; | |
8e2c2fdb | 5690 | } |
0f26b20f SK |
5691 | } |
5692 | } | |
5693 | ||
54f4971e SK |
5694 | void ExynosCameraHWInterface2::m_setExifFixedAttribute(void) |
5695 | { | |
5696 | char property[PROPERTY_VALUE_MAX]; | |
5697 | ||
5698 | //2 0th IFD TIFF Tags | |
5699 | //3 Maker | |
5700 | property_get("ro.product.brand", property, EXIF_DEF_MAKER); | |
5701 | strncpy((char *)mExifInfo.maker, property, | |
5702 | sizeof(mExifInfo.maker) - 1); | |
5703 | mExifInfo.maker[sizeof(mExifInfo.maker) - 1] = '\0'; | |
5704 | //3 Model | |
5705 | property_get("ro.product.model", property, EXIF_DEF_MODEL); | |
5706 | strncpy((char *)mExifInfo.model, property, | |
5707 | sizeof(mExifInfo.model) - 1); | |
5708 | mExifInfo.model[sizeof(mExifInfo.model) - 1] = '\0'; | |
5709 | //3 Software | |
5710 | property_get("ro.build.id", property, EXIF_DEF_SOFTWARE); | |
5711 | strncpy((char *)mExifInfo.software, property, | |
5712 | sizeof(mExifInfo.software) - 1); | |
5713 | mExifInfo.software[sizeof(mExifInfo.software) - 1] = '\0'; | |
5714 | ||
5715 | //3 YCbCr Positioning | |
5716 | mExifInfo.ycbcr_positioning = EXIF_DEF_YCBCR_POSITIONING; | |
5717 | ||
5718 | //2 0th IFD Exif Private Tags | |
5719 | //3 F Number | |
e00f6591 | 5720 | mExifInfo.fnumber.num = (uint32_t)(m_camera2->m_curCameraInfo->fnumber * EXIF_DEF_FNUMBER_DEN); |
54f4971e SK |
5721 | mExifInfo.fnumber.den = EXIF_DEF_FNUMBER_DEN; |
5722 | //3 Exposure Program | |
5723 | mExifInfo.exposure_program = EXIF_DEF_EXPOSURE_PROGRAM; | |
5724 | //3 Exif Version | |
5725 | memcpy(mExifInfo.exif_version, EXIF_DEF_EXIF_VERSION, sizeof(mExifInfo.exif_version)); | |
5726 | //3 Aperture | |
e00f6591 SK |
5727 | double av = APEX_FNUM_TO_APERTURE((double)mExifInfo.fnumber.num/mExifInfo.fnumber.den); |
5728 | mExifInfo.aperture.num = (uint32_t)(av*EXIF_DEF_APEX_DEN); | |
54f4971e SK |
5729 | mExifInfo.aperture.den = EXIF_DEF_APEX_DEN; |
5730 | //3 Maximum lens aperture | |
5731 | mExifInfo.max_aperture.num = mExifInfo.aperture.num; | |
5732 | mExifInfo.max_aperture.den = mExifInfo.aperture.den; | |
5733 | //3 Lens Focal Length | |
e00f6591 SK |
5734 | mExifInfo.focal_length.num = (uint32_t)(m_camera2->m_curCameraInfo->focalLength * 100); |
5735 | ||
54f4971e SK |
5736 | mExifInfo.focal_length.den = EXIF_DEF_FOCAL_LEN_DEN; |
5737 | //3 User Comments | |
5738 | strcpy((char *)mExifInfo.user_comment, EXIF_DEF_USERCOMMENTS); | |
5739 | //3 Color Space information | |
5740 | mExifInfo.color_space = EXIF_DEF_COLOR_SPACE; | |
5741 | //3 Exposure Mode | |
5742 | mExifInfo.exposure_mode = EXIF_DEF_EXPOSURE_MODE; | |
5743 | ||
5744 | //2 0th IFD GPS Info Tags | |
5745 | unsigned char gps_version[4] = { 0x02, 0x02, 0x00, 0x00 }; | |
5746 | memcpy(mExifInfo.gps_version_id, gps_version, sizeof(gps_version)); | |
5747 | ||
5748 | //2 1th IFD TIFF Tags | |
5749 | mExifInfo.compression_scheme = EXIF_DEF_COMPRESSION; | |
5750 | mExifInfo.x_resolution.num = EXIF_DEF_RESOLUTION_NUM; | |
5751 | mExifInfo.x_resolution.den = EXIF_DEF_RESOLUTION_DEN; | |
5752 | mExifInfo.y_resolution.num = EXIF_DEF_RESOLUTION_NUM; | |
5753 | mExifInfo.y_resolution.den = EXIF_DEF_RESOLUTION_DEN; | |
5754 | mExifInfo.resolution_unit = EXIF_DEF_RESOLUTION_UNIT; | |
5755 | } | |
5756 | ||
5757 | void ExynosCameraHWInterface2::m_setExifChangedAttribute(exif_attribute_t *exifInfo, ExynosRect *rect, | |
48728d49 | 5758 | camera2_shot_ext *currentEntry) |
54f4971e | 5759 | { |
48728d49 SK |
5760 | camera2_dm *dm = &(currentEntry->shot.dm); |
5761 | camera2_ctl *ctl = &(currentEntry->shot.ctl); | |
54f4971e SK |
5762 | |
5763 | ALOGV("(%s): framecnt(%d) exp(%lld) iso(%d)", __FUNCTION__, ctl->request.frameCount, dm->sensor.exposureTime,dm->aa.isoValue ); | |
5764 | if (!ctl->request.frameCount) | |
5765 | return; | |
5766 | //2 0th IFD TIFF Tags | |
5767 | //3 Width | |
5768 | exifInfo->width = rect->w; | |
5769 | //3 Height | |
5770 | exifInfo->height = rect->h; | |
5771 | //3 Orientation | |
5772 | switch (ctl->jpeg.orientation) { | |
5773 | case 90: | |
5774 | exifInfo->orientation = EXIF_ORIENTATION_90; | |
5775 | break; | |
5776 | case 180: | |
5777 | exifInfo->orientation = EXIF_ORIENTATION_180; | |
5778 | break; | |
5779 | case 270: | |
5780 | exifInfo->orientation = EXIF_ORIENTATION_270; | |
5781 | break; | |
5782 | case 0: | |
5783 | default: | |
5784 | exifInfo->orientation = EXIF_ORIENTATION_UP; | |
5785 | break; | |
5786 | } | |
5787 | ||
5788 | //3 Date time | |
5789 | time_t rawtime; | |
5790 | struct tm *timeinfo; | |
5791 | time(&rawtime); | |
5792 | timeinfo = localtime(&rawtime); | |
5793 | strftime((char *)exifInfo->date_time, 20, "%Y:%m:%d %H:%M:%S", timeinfo); | |
5794 | ||
5795 | //2 0th IFD Exif Private Tags | |
5796 | //3 Exposure Time | |
5797 | int shutterSpeed = (dm->sensor.exposureTime/1000); | |
5798 | ||
9a77d67e SK |
5799 | // To display exposure time just above 500ms as 1/2sec, not 1 sec. |
5800 | if (shutterSpeed > 500000) | |
5801 | shutterSpeed -= 100000; | |
5802 | ||
54f4971e SK |
5803 | if (shutterSpeed < 0) { |
5804 | shutterSpeed = 100; | |
5805 | } | |
5806 | ||
5807 | exifInfo->exposure_time.num = 1; | |
5808 | // x us -> 1/x s */ | |
5809 | //exifInfo->exposure_time.den = (uint32_t)(1000000 / shutterSpeed); | |
5810 | exifInfo->exposure_time.den = (uint32_t)((double)1000000 / shutterSpeed); | |
5811 | ||
5812 | //3 ISO Speed Rating | |
5813 | exifInfo->iso_speed_rating = dm->aa.isoValue; | |
5814 | ||
5815 | uint32_t av, tv, bv, sv, ev; | |
5816 | av = APEX_FNUM_TO_APERTURE((double)exifInfo->fnumber.num / exifInfo->fnumber.den); | |
5817 | tv = APEX_EXPOSURE_TO_SHUTTER((double)exifInfo->exposure_time.num / exifInfo->exposure_time.den); | |
5818 | sv = APEX_ISO_TO_FILMSENSITIVITY(exifInfo->iso_speed_rating); | |
5819 | bv = av + tv - sv; | |
5820 | ev = av + tv; | |
5821 | //ALOGD("Shutter speed=%d us, iso=%d", shutterSpeed, exifInfo->iso_speed_rating); | |
c06b3290 | 5822 | ALOGV("AV=%d, TV=%d, SV=%d", av, tv, sv); |
54f4971e SK |
5823 | |
5824 | //3 Shutter Speed | |
5825 | exifInfo->shutter_speed.num = tv * EXIF_DEF_APEX_DEN; | |
5826 | exifInfo->shutter_speed.den = EXIF_DEF_APEX_DEN; | |
5827 | //3 Brightness | |
5828 | exifInfo->brightness.num = bv*EXIF_DEF_APEX_DEN; | |
5829 | exifInfo->brightness.den = EXIF_DEF_APEX_DEN; | |
5830 | //3 Exposure Bias | |
5831 | if (ctl->aa.sceneMode== AA_SCENE_MODE_BEACH|| | |
5832 | ctl->aa.sceneMode== AA_SCENE_MODE_SNOW) { | |
5833 | exifInfo->exposure_bias.num = EXIF_DEF_APEX_DEN; | |
5834 | exifInfo->exposure_bias.den = EXIF_DEF_APEX_DEN; | |
5835 | } else { | |
5836 | exifInfo->exposure_bias.num = 0; | |
5837 | exifInfo->exposure_bias.den = 0; | |
5838 | } | |
5839 | //3 Metering Mode | |
5840 | /*switch (m_curCameraInfo->metering) { | |
5841 | case METERING_MODE_CENTER: | |
5842 | exifInfo->metering_mode = EXIF_METERING_CENTER; | |
5843 | break; | |
5844 | case METERING_MODE_MATRIX: | |
5845 | exifInfo->metering_mode = EXIF_METERING_MULTISPOT; | |
5846 | break; | |
5847 | case METERING_MODE_SPOT: | |
5848 | exifInfo->metering_mode = EXIF_METERING_SPOT; | |
5849 | break; | |
5850 | case METERING_MODE_AVERAGE: | |
5851 | default: | |
5852 | exifInfo->metering_mode = EXIF_METERING_AVERAGE; | |
5853 | break; | |
5854 | }*/ | |
5855 | exifInfo->metering_mode = EXIF_METERING_CENTER; | |
5856 | ||
5857 | //3 Flash | |
9257e29e YJ |
5858 | if (m_ctlInfo.flash.m_flashDecisionResult) |
5859 | exifInfo->flash = 1; | |
5860 | else | |
5861 | exifInfo->flash = EXIF_DEF_FLASH; | |
54f4971e SK |
5862 | |
5863 | //3 White Balance | |
48728d49 | 5864 | if (currentEntry->awb_mode_dm == AA_AWBMODE_WB_AUTO) |
54f4971e SK |
5865 | exifInfo->white_balance = EXIF_WB_AUTO; |
5866 | else | |
5867 | exifInfo->white_balance = EXIF_WB_MANUAL; | |
5868 | ||
5869 | //3 Scene Capture Type | |
5870 | switch (ctl->aa.sceneMode) { | |
5871 | case AA_SCENE_MODE_PORTRAIT: | |
5872 | exifInfo->scene_capture_type = EXIF_SCENE_PORTRAIT; | |
5873 | break; | |
5874 | case AA_SCENE_MODE_LANDSCAPE: | |
5875 | exifInfo->scene_capture_type = EXIF_SCENE_LANDSCAPE; | |
5876 | break; | |
5877 | case AA_SCENE_MODE_NIGHT_PORTRAIT: | |
5878 | exifInfo->scene_capture_type = EXIF_SCENE_NIGHT; | |
5879 | break; | |
5880 | default: | |
5881 | exifInfo->scene_capture_type = EXIF_SCENE_STANDARD; | |
5882 | break; | |
5883 | } | |
5884 | ||
5885 | //2 0th IFD GPS Info Tags | |
5886 | if (ctl->jpeg.gpsCoordinates[0] != 0 && ctl->jpeg.gpsCoordinates[1] != 0) { | |
5887 | ||
5888 | if (ctl->jpeg.gpsCoordinates[0] > 0) | |
5889 | strcpy((char *)exifInfo->gps_latitude_ref, "N"); | |
5890 | else | |
5891 | strcpy((char *)exifInfo->gps_latitude_ref, "S"); | |
5892 | ||
5893 | if (ctl->jpeg.gpsCoordinates[1] > 0) | |
5894 | strcpy((char *)exifInfo->gps_longitude_ref, "E"); | |
5895 | else | |
5896 | strcpy((char *)exifInfo->gps_longitude_ref, "W"); | |
5897 | ||
5898 | if (ctl->jpeg.gpsCoordinates[2] > 0) | |
5899 | exifInfo->gps_altitude_ref = 0; | |
5900 | else | |
5901 | exifInfo->gps_altitude_ref = 1; | |
5902 | ||
0066b2cb SK |
5903 | double latitude = fabs(ctl->jpeg.gpsCoordinates[0]); |
5904 | double longitude = fabs(ctl->jpeg.gpsCoordinates[1]); | |
5905 | double altitude = fabs(ctl->jpeg.gpsCoordinates[2]); | |
54f4971e SK |
5906 | |
5907 | exifInfo->gps_latitude[0].num = (uint32_t)latitude; | |
5908 | exifInfo->gps_latitude[0].den = 1; | |
5909 | exifInfo->gps_latitude[1].num = (uint32_t)((latitude - exifInfo->gps_latitude[0].num) * 60); | |
5910 | exifInfo->gps_latitude[1].den = 1; | |
3db6fe61 | 5911 | exifInfo->gps_latitude[2].num = (uint32_t)round((((latitude - exifInfo->gps_latitude[0].num) * 60) |
54f4971e SK |
5912 | - exifInfo->gps_latitude[1].num) * 60); |
5913 | exifInfo->gps_latitude[2].den = 1; | |
5914 | ||
5915 | exifInfo->gps_longitude[0].num = (uint32_t)longitude; | |
5916 | exifInfo->gps_longitude[0].den = 1; | |
5917 | exifInfo->gps_longitude[1].num = (uint32_t)((longitude - exifInfo->gps_longitude[0].num) * 60); | |
5918 | exifInfo->gps_longitude[1].den = 1; | |
3db6fe61 | 5919 | exifInfo->gps_longitude[2].num = (uint32_t)round((((longitude - exifInfo->gps_longitude[0].num) * 60) |
54f4971e SK |
5920 | - exifInfo->gps_longitude[1].num) * 60); |
5921 | exifInfo->gps_longitude[2].den = 1; | |
5922 | ||
3db6fe61 | 5923 | exifInfo->gps_altitude.num = (uint32_t)round(altitude); |
54f4971e SK |
5924 | exifInfo->gps_altitude.den = 1; |
5925 | ||
5926 | struct tm tm_data; | |
5927 | long timestamp; | |
5928 | timestamp = (long)ctl->jpeg.gpsTimestamp; | |
5929 | gmtime_r(×tamp, &tm_data); | |
5930 | exifInfo->gps_timestamp[0].num = tm_data.tm_hour; | |
5931 | exifInfo->gps_timestamp[0].den = 1; | |
5932 | exifInfo->gps_timestamp[1].num = tm_data.tm_min; | |
5933 | exifInfo->gps_timestamp[1].den = 1; | |
5934 | exifInfo->gps_timestamp[2].num = tm_data.tm_sec; | |
5935 | exifInfo->gps_timestamp[2].den = 1; | |
5936 | snprintf((char*)exifInfo->gps_datestamp, sizeof(exifInfo->gps_datestamp), | |
5937 | "%04d:%02d:%02d", tm_data.tm_year + 1900, tm_data.tm_mon + 1, tm_data.tm_mday); | |
5938 | ||
07ed0359 | 5939 | memset(exifInfo->gps_processing_method, 0, 100); |
0066b2cb | 5940 | memcpy(exifInfo->gps_processing_method, currentEntry->gpsProcessingMethod, 32); |
54f4971e SK |
5941 | exifInfo->enableGps = true; |
5942 | } else { | |
5943 | exifInfo->enableGps = false; | |
5944 | } | |
5945 | ||
5946 | //2 1th IFD TIFF Tags | |
5947 | exifInfo->widthThumb = ctl->jpeg.thumbnailSize[0]; | |
5948 | exifInfo->heightThumb = ctl->jpeg.thumbnailSize[1]; | |
5949 | } | |
5950 | ||
13d8c7b4 SK |
// Log-only destructor; thread shutdown is driven by release().
ExynosCameraHWInterface2::MainThread::~MainThread()
{
    ALOGV("(%s):", __FUNCTION__);
}
5955 | ||
// Asks the main thread loop to exit by raising the release signal.
void ExynosCameraHWInterface2::MainThread::release()
{
    ALOGV("(%s):", __func__);
    SetSignal(SIGNAL_THREAD_RELEASE);
}
5961 | ||
// Log-only destructor; thread shutdown is driven by release().
ExynosCameraHWInterface2::SensorThread::~SensorThread()
{
    ALOGV("(%s):", __FUNCTION__);
}
5966 | ||
// Asks the sensor thread loop to exit by raising the release signal.
void ExynosCameraHWInterface2::SensorThread::release()
{
    ALOGV("(%s):", __func__);
    SetSignal(SIGNAL_THREAD_RELEASE);
}
5972 | ||
13d8c7b4 SK |
// Log-only destructor; thread shutdown is driven by release().
ExynosCameraHWInterface2::StreamThread::~StreamThread()
{
    ALOGV("(%s):", __FUNCTION__);
}
5977 | ||
// Copies the caller's stream configuration into this thread.
// (Plain memcpy: stream_parameters_t is treated as POD here.)
void ExynosCameraHWInterface2::StreamThread::setParameter(stream_parameters_t * new_parameters)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    memcpy(&m_parameters, new_parameters, sizeof(stream_parameters_t));
}
5983 | ||
13d8c7b4 | 5984 | void ExynosCameraHWInterface2::StreamThread::release() |
c15a6b00 | 5985 | { |
9dd63e1f | 5986 | ALOGV("(%s):", __func__); |
13d8c7b4 | 5987 | SetSignal(SIGNAL_THREAD_RELEASE); |
13d8c7b4 SK |
5988 | } |
5989 | ||
5990 | int ExynosCameraHWInterface2::StreamThread::findBufferIndex(void * bufAddr) | |
5991 | { | |
5992 | int index; | |
5993 | for (index = 0 ; index < m_parameters.numSvcBuffers ; index++) { | |
5994 | if (m_parameters.svcBuffers[index].virt.extP[0] == bufAddr) | |
5995 | return index; | |
5996 | } | |
5997 | return -1; | |
c15a6b00 JS |
5998 | } |
5999 | ||
5506cebf SK |
6000 | int ExynosCameraHWInterface2::StreamThread::findBufferIndex(buffer_handle_t * bufHandle) |
6001 | { | |
6002 | int index; | |
6003 | for (index = 0 ; index < m_parameters.numSvcBuffers ; index++) { | |
6004 | if (m_parameters.svcBufHandle[index] == *bufHandle) | |
6005 | return index; | |
6006 | } | |
6007 | return -1; | |
6008 | } | |
6009 | ||
6010 | status_t ExynosCameraHWInterface2::StreamThread::attachSubStream(int stream_id, int priority) | |
9dd63e1f | 6011 | { |
5506cebf SK |
6012 | ALOGV("(%s): substream_id(%d)", __FUNCTION__, stream_id); |
6013 | int index, vacantIndex; | |
6014 | bool vacancy = false; | |
6015 | ||
6016 | for (index = 0 ; index < NUM_MAX_SUBSTREAM ; index++) { | |
6017 | if (!vacancy && m_attachedSubStreams[index].streamId == -1) { | |
6018 | vacancy = true; | |
6019 | vacantIndex = index; | |
6020 | } else if (m_attachedSubStreams[index].streamId == stream_id) { | |
6021 | return BAD_VALUE; | |
6022 | } | |
6023 | } | |
6024 | if (!vacancy) | |
6025 | return NO_MEMORY; | |
6026 | m_attachedSubStreams[vacantIndex].streamId = stream_id; | |
6027 | m_attachedSubStreams[vacantIndex].priority = priority; | |
6028 | m_numRegisteredStream++; | |
6029 | return NO_ERROR; | |
9dd63e1f SK |
6030 | } |
6031 | ||
5506cebf | 6032 | status_t ExynosCameraHWInterface2::StreamThread::detachSubStream(int stream_id) |
74d78ebe | 6033 | { |
5506cebf SK |
6034 | ALOGV("(%s): substream_id(%d)", __FUNCTION__, stream_id); |
6035 | int index; | |
6036 | bool found = false; | |
6037 | ||
6038 | for (index = 0 ; index < NUM_MAX_SUBSTREAM ; index++) { | |
6039 | if (m_attachedSubStreams[index].streamId == stream_id) { | |
6040 | found = true; | |
6041 | break; | |
6042 | } | |
6043 | } | |
6044 | if (!found) | |
6045 | return BAD_VALUE; | |
6046 | m_attachedSubStreams[index].streamId = -1; | |
6047 | m_attachedSubStreams[index].priority = 0; | |
6048 | m_numRegisteredStream--; | |
6049 | return NO_ERROR; | |
74d78ebe SK |
6050 | } |
6051 | ||
c15a6b00 JS |
6052 | int ExynosCameraHWInterface2::createIonClient(ion_client ionClient) |
6053 | { | |
6054 | if (ionClient == 0) { | |
6055 | ionClient = ion_client_create(); | |
6056 | if (ionClient < 0) { | |
13d8c7b4 | 6057 | ALOGE("[%s]src ion client create failed, value = %d\n", __FUNCTION__, ionClient); |
c15a6b00 JS |
6058 | return 0; |
6059 | } | |
6060 | } | |
c15a6b00 JS |
6061 | return ionClient; |
6062 | } | |
6063 | ||
6064 | int ExynosCameraHWInterface2::deleteIonClient(ion_client ionClient) | |
6065 | { | |
6066 | if (ionClient != 0) { | |
6067 | if (ionClient > 0) { | |
6068 | ion_client_destroy(ionClient); | |
6069 | } | |
6070 | ionClient = 0; | |
6071 | } | |
c15a6b00 JS |
6072 | return ionClient; |
6073 | } | |
6074 | ||
13d8c7b4 | 6075 | int ExynosCameraHWInterface2::allocCameraMemory(ion_client ionClient, ExynosBuffer *buf, int iMemoryNum) |
181e425e SK |
6076 | { |
6077 | return allocCameraMemory(ionClient, buf, iMemoryNum, 0); | |
6078 | } | |
6079 | ||
/*
 * Allocates and mmaps up to iMemoryNum ION planes for buf; plane i is
 * allocated cached when bit i of cacheFlag is set. Stops at the first
 * plane whose size is 0. On any failure all planes allocated so far are
 * rolled back via freeCameraMemory() and -1 is returned; 0 on success.
 */
int ExynosCameraHWInterface2::allocCameraMemory(ion_client ionClient, ExynosBuffer *buf, int iMemoryNum, int cacheFlag)
{
    int ret = 0;
    int i = 0;
    int flag = 0;

    if (ionClient == 0) {
        ALOGE("[%s] ionClient is zero (%d)\n", __FUNCTION__, ionClient);
        return -1;
    }

    for (i = 0 ; i < iMemoryNum ; i++) {
        if (buf->size.extS[i] == 0) {
            // A zero-sized plane terminates the plane list.
            break;
        }
        // (1 << i) & cacheFlag: per-plane cacheability request.
        if (1 << i & cacheFlag)
            flag = ION_FLAG_CACHED | ION_FLAG_CACHED_NEEDS_SYNC;
        else
            flag = 0;
        buf->fd.extFd[i] = ion_alloc(ionClient, \
                buf->size.extS[i], 0, ION_HEAP_SYSTEM_MASK, flag);
        if ((buf->fd.extFd[i] == -1) ||(buf->fd.extFd[i] == 0)) {
            ALOGE("[%s]ion_alloc(%d) failed\n", __FUNCTION__, buf->size.extS[i]);
            buf->fd.extFd[i] = -1;
            // Roll back everything allocated so far.
            freeCameraMemory(buf, iMemoryNum);
            return -1;
        }

        buf->virt.extP[i] = (char *)ion_map(buf->fd.extFd[i], \
                buf->size.extS[i], 0);
        if ((buf->virt.extP[i] == (char *)MAP_FAILED) || (buf->virt.extP[i] == NULL)) {
            ALOGE("[%s]src ion map failed(%d)\n", __FUNCTION__, buf->size.extS[i]);
            buf->virt.extP[i] = (char *)MAP_FAILED;
            freeCameraMemory(buf, iMemoryNum);
            return -1;
        }
        ALOGV("allocCameraMem : [%d][0x%08x] size(%d) flag(%d)", i, (unsigned int)(buf->virt.extP[i]), buf->size.extS[i], flag);
    }

    return ret;
}
6121 | ||
13d8c7b4 | 6122 | void ExynosCameraHWInterface2::freeCameraMemory(ExynosBuffer *buf, int iMemoryNum) |
c15a6b00 | 6123 | { |
13d8c7b4 | 6124 | |
5506cebf | 6125 | int i = 0 ; |
6d8e5b08 | 6126 | int ret = 0; |
c15a6b00 JS |
6127 | |
6128 | for (i=0;i<iMemoryNum;i++) { | |
13d8c7b4 SK |
6129 | if (buf->fd.extFd[i] != -1) { |
6130 | if (buf->virt.extP[i] != (char *)MAP_FAILED) { | |
6d8e5b08 SK |
6131 | ret = ion_unmap(buf->virt.extP[i], buf->size.extS[i]); |
6132 | if (ret < 0) | |
6133 | ALOGE("ERR(%s)", __FUNCTION__); | |
c15a6b00 | 6134 | } |
13d8c7b4 | 6135 | ion_free(buf->fd.extFd[i]); |
90e439c1 | 6136 | ALOGV("freeCameraMemory : [%d][0x%08x] size(%d)", i, (unsigned int)(buf->virt.extP[i]), buf->size.extS[i]); |
c15a6b00 | 6137 | } |
13d8c7b4 SK |
6138 | buf->fd.extFd[i] = -1; |
6139 | buf->virt.extP[i] = (char *)MAP_FAILED; | |
6140 | buf->size.extS[i] = 0; | |
c15a6b00 JS |
6141 | } |
6142 | } | |
6143 | ||
13d8c7b4 | 6144 | void ExynosCameraHWInterface2::initCameraMemory(ExynosBuffer *buf, int iMemoryNum) |
c15a6b00 JS |
6145 | { |
6146 | int i =0 ; | |
6147 | for (i=0;i<iMemoryNum;i++) { | |
13d8c7b4 SK |
6148 | buf->virt.extP[i] = (char *)MAP_FAILED; |
6149 | buf->fd.extFd[i] = -1; | |
6150 | buf->size.extS[i] = 0; | |
c15a6b00 JS |
6151 | } |
6152 | } | |
6153 | ||
6154 | ||
13d8c7b4 SK |
6155 | |
6156 | ||
9dd63e1f | 6157 | static camera2_device_t *g_cam2_device = NULL; |
b5237e6b | 6158 | static bool g_camera_vaild = false; |
e43660b0 | 6159 | static Mutex g_camera_mutex; |
daa1fcd6 | 6160 | ExynosCamera2 * g_camera2[2] = { NULL, NULL }; |
c15a6b00 JS |
6161 | |
// hw_device_t close hook: tears down the HAL instance created at open.
// Serialized against open via g_camera_mutex; clears the globals so a
// subsequent open is allowed.
static int HAL2_camera_device_close(struct hw_device_t* device)
{
    Mutex::Autolock lock(g_camera_mutex);
    ALOGD("(%s): ENTER", __FUNCTION__);
    if (device) {

        camera2_device_t *cam_device = (camera2_device_t *)device;
        ALOGV("cam_device(0x%08x):", (unsigned int)cam_device);
        ALOGV("g_cam2_device(0x%08x):", (unsigned int)g_cam2_device);
        // Destroy the C++ object first, then the C wrapper it lived in.
        delete static_cast<ExynosCameraHWInterface2 *>(cam_device->priv);
        free(cam_device);
        g_camera_vaild = false;
        g_cam2_device = NULL;
    }

    ALOGD("(%s): EXIT", __FUNCTION__);
    return 0;
}
6180 | ||
// Recovers the C++ HAL object stashed in the device's priv pointer.
static inline ExynosCameraHWInterface2 *obj(const struct camera2_device *dev)
{
    return reinterpret_cast<ExynosCameraHWInterface2 *>(dev->priv);
}
6185 | ||
// camera2_device_ops trampoline -> ExynosCameraHWInterface2 method.
static int HAL2_device_set_request_queue_src_ops(const struct camera2_device *dev,
            const camera2_request_queue_src_ops_t *request_src_ops)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->setRequestQueueSrcOps(request_src_ops);
}
6192 | ||
// camera2_device_ops trampoline -> ExynosCameraHWInterface2 method.
static int HAL2_device_notify_request_queue_not_empty(const struct camera2_device *dev)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->notifyRequestQueueNotEmpty();
}
6198 | ||
// camera2_device_ops trampoline -> ExynosCameraHWInterface2 method.
static int HAL2_device_set_frame_queue_dst_ops(const struct camera2_device *dev,
            const camera2_frame_queue_dst_ops_t *frame_dst_ops)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->setFrameQueueDstOps(frame_dst_ops);
}
6205 | ||
// camera2_device_ops trampoline -> ExynosCameraHWInterface2 method.
static int HAL2_device_get_in_progress_count(const struct camera2_device *dev)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->getInProgressCount();
}
6211 | ||
// camera2_device_ops trampoline -> ExynosCameraHWInterface2 method.
static int HAL2_device_flush_captures_in_progress(const struct camera2_device *dev)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->flushCapturesInProgress();
}
6217 | ||
// camera2_device_ops trampoline -> ExynosCameraHWInterface2 method.
static int HAL2_device_construct_default_request(const struct camera2_device *dev,
            int request_template, camera_metadata_t **request)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->constructDefaultRequest(request_template, request);
}
6224 | ||
// camera2_device_ops trampoline -> ExynosCameraHWInterface2 method.
static int HAL2_device_allocate_stream(
            const struct camera2_device *dev,
            // inputs
            uint32_t width,
            uint32_t height,
            int format,
            const camera2_stream_ops_t *stream_ops,
            // outputs
            uint32_t *stream_id,
            uint32_t *format_actual,
            uint32_t *usage,
            uint32_t *max_buffers)
{
    ALOGV("(%s): ", __FUNCTION__);
    return obj(dev)->allocateStream(width, height, format, stream_ops,
                    stream_id, format_actual, usage, max_buffers);
}
6242 | ||
c15a6b00 JS |
// camera2_device_ops trampoline -> ExynosCameraHWInterface2 method.
static int HAL2_device_register_stream_buffers(const struct camera2_device *dev,
            uint32_t stream_id,
            int num_buffers,
            buffer_handle_t *buffers)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->registerStreamBuffers(stream_id, num_buffers, buffers);
}
6251 | ||
// camera2_device_ops trampoline. No-op once the camera has been (or is
// being) closed, so a late framework call cannot touch a dead instance.
static int HAL2_device_release_stream(
            const struct camera2_device *dev,
            uint32_t stream_id)
{
    ALOGV("DEBUG(%s)(id: %d):", __FUNCTION__, stream_id);
    if (!g_camera_vaild)
        return 0;
    return obj(dev)->releaseStream(stream_id);
}
6261 | ||
// camera2_device_ops trampoline -> ExynosCameraHWInterface2 method.
static int HAL2_device_allocate_reprocess_stream(
           const struct camera2_device *dev,
            uint32_t width,
            uint32_t height,
            uint32_t format,
            const camera2_stream_in_ops_t *reprocess_stream_ops,
            // outputs
            uint32_t *stream_id,
            uint32_t *consumer_usage,
            uint32_t *max_buffers)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->allocateReprocessStream(width, height, format, reprocess_stream_ops,
           stream_id, consumer_usage, max_buffers);
}
6277 | ||
2b0421d1 EVT |
// camera2_device_ops trampoline -> ExynosCameraHWInterface2 method.
static int HAL2_device_allocate_reprocess_stream_from_stream(
           const struct camera2_device *dev,
            uint32_t output_stream_id,
            const camera2_stream_in_ops_t *reprocess_stream_ops,
            // outputs
            uint32_t *stream_id)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->allocateReprocessStreamFromStream(output_stream_id,
           reprocess_stream_ops, stream_id);
}
6289 | ||
c15a6b00 JS |
// camera2_device_ops trampoline -> ExynosCameraHWInterface2 method.
static int HAL2_device_release_reprocess_stream(
        const struct camera2_device *dev,
            uint32_t stream_id)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->releaseReprocessStream(stream_id);
}
6297 | ||
// camera2_device_ops trampoline. No-op once the camera has been (or is
// being) closed, so a late trigger cannot touch a dead instance.
static int HAL2_device_trigger_action(const struct camera2_device *dev,
           uint32_t trigger_id,
            int ext1,
            int ext2)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    if (!g_camera_vaild)
        return 0;
    return obj(dev)->triggerAction(trigger_id, ext1, ext2);
}
6308 | ||
// camera2_device_ops trampoline -> ExynosCameraHWInterface2 method.
static int HAL2_device_set_notify_callback(const struct camera2_device *dev,
            camera2_notify_callback notify_cb,
            void *user)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->setNotifyCallback(notify_cb, user);
}
6316 | ||
// camera2_device_ops trampoline -> ExynosCameraHWInterface2 method.
static int HAL2_device_get_metadata_vendor_tag_ops(const struct camera2_device*dev,
            vendor_tag_query_ops_t **ops)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->getMetadataVendorTagOps(ops);
}
6323 | ||
// camera2_device_ops trampoline -> ExynosCameraHWInterface2 method.
static int HAL2_device_dump(const struct camera2_device *dev, int fd)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->dump(fd);
}
6329 | ||
6330 | ||
6331 | ||
6332 | ||
6333 | ||
// This HAL always reports two cameras (0 = back, 1 = front — see
// HAL2_getCameraInfo).
static int HAL2_getNumberOfCameras()
{
    ALOGV("(%s): returning 2", __FUNCTION__);
    return 2;
}
6339 | ||
6340 | ||
c15a6b00 JS |
/*
 * camera_module_t info hook. Camera 0 is the back sensor, camera 1 the
 * front sensor; anything else is BAD_VALUE. The static metadata is
 * built once per camera (two constructStaticInfo passes: allocate, then
 * fill) and cached in a function-local static for the process lifetime.
 */
static int HAL2_getCameraInfo(int cameraId, struct camera_info *info)
{
    ALOGV("DEBUG(%s): cameraID: %d", __FUNCTION__, cameraId);
    static camera_metadata_t * mCameraInfo[2] = {NULL, NULL};

    status_t res;

    if (cameraId == 0) {
        info->facing = CAMERA_FACING_BACK;
        if (!g_camera2[0])
            g_camera2[0] = new ExynosCamera2(0);
    }
    else if (cameraId == 1) {
        info->facing = CAMERA_FACING_FRONT;
        if (!g_camera2[1])
            g_camera2[1] = new ExynosCamera2(1);
    }
    else
        return BAD_VALUE;

    info->orientation = 0;
    info->device_version = HARDWARE_DEVICE_API_VERSION(2, 0);
    if (mCameraInfo[cameraId] == NULL) {
        // First pass (true): size and allocate the metadata buffer.
        res = g_camera2[cameraId]->constructStaticInfo(&(mCameraInfo[cameraId]), cameraId, true);
        if (res != OK) {
            ALOGE("%s: Unable to allocate static info: %s (%d)",
                    __FUNCTION__, strerror(-res), res);
            return res;
        }
        // Second pass (false): populate the allocated buffer.
        res = g_camera2[cameraId]->constructStaticInfo(&(mCameraInfo[cameraId]), cameraId, false);
        if (res != OK) {
            ALOGE("%s: Unable to fill in static info: %s (%d)",
                    __FUNCTION__, strerror(-res), res);
            return res;
        }
    }
    info->static_camera_characteristics = mCameraInfo[cameraId];
    return NO_ERROR;
}
6380 | ||
/* Expands to a designated initializer (GCC "name : value" extension) binding
 * a camera2_device_ops slot to its HAL2_device_* wrapper defined above. */
#define SET_METHOD(m) m : HAL2_device_##m

/* Operations vtable handed to the camera service for every opened camera2
 * device; entry order must match the camera2_device_ops_t declaration. */
static camera2_device_ops_t camera2_device_ops = {
        SET_METHOD(set_request_queue_src_ops),
        SET_METHOD(notify_request_queue_not_empty),
        SET_METHOD(set_frame_queue_dst_ops),
        SET_METHOD(get_in_progress_count),
        SET_METHOD(flush_captures_in_progress),
        SET_METHOD(construct_default_request),
        SET_METHOD(allocate_stream),
        SET_METHOD(register_stream_buffers),
        SET_METHOD(release_stream),
        SET_METHOD(allocate_reprocess_stream),
        SET_METHOD(allocate_reprocess_stream_from_stream),
        SET_METHOD(release_reprocess_stream),
        SET_METHOD(trigger_action),
        SET_METHOD(set_notify_callback),
        SET_METHOD(get_metadata_vendor_tag_ops),
        SET_METHOD(dump),
};

/* Helper macro is only needed for the table above. */
#undef SET_METHOD
6403 | ||
6404 | ||
6405 | static int HAL2_camera_device_open(const struct hw_module_t* module, | |
6406 | const char *id, | |
6407 | struct hw_device_t** device) | |
6408 | { | |
c15a6b00 | 6409 | int cameraId = atoi(id); |
6044e509 | 6410 | int openInvalid = 0; |
9dd63e1f | 6411 | |
e43660b0 AR |
6412 | Mutex::Autolock lock(g_camera_mutex); |
6413 | if (g_camera_vaild) { | |
6414 | ALOGE("ERR(%s): Can't open, other camera is in use", __FUNCTION__); | |
a6302fad | 6415 | return -EUSERS; |
e43660b0 | 6416 | } |
b5237e6b | 6417 | g_camera_vaild = false; |
0eb27a9d | 6418 | ALOGD("\n\n>>> I'm Samsung's CameraHAL_2(ID:%d) <<<\n\n", cameraId); |
c15a6b00 | 6419 | if (cameraId < 0 || cameraId >= HAL2_getNumberOfCameras()) { |
13d8c7b4 | 6420 | ALOGE("ERR(%s):Invalid camera ID %s", __FUNCTION__, id); |
c15a6b00 JS |
6421 | return -EINVAL; |
6422 | } | |
6423 | ||
0eb27a9d | 6424 | ALOGD("g_cam2_device : 0x%08x", (unsigned int)g_cam2_device); |
c15a6b00 JS |
6425 | if (g_cam2_device) { |
6426 | if (obj(g_cam2_device)->getCameraId() == cameraId) { | |
0eb27a9d | 6427 | ALOGD("DEBUG(%s):returning existing camera ID %s", __FUNCTION__, id); |
c15a6b00 JS |
6428 | goto done; |
6429 | } else { | |
0eb27a9d | 6430 | ALOGD("(%s): START waiting for cam device free", __FUNCTION__); |
9dd63e1f | 6431 | while (g_cam2_device) |
041f38de | 6432 | usleep(SIG_WAITING_TICK); |
0eb27a9d | 6433 | ALOGD("(%s): END waiting for cam device free", __FUNCTION__); |
c15a6b00 JS |
6434 | } |
6435 | } | |
6436 | ||
6437 | g_cam2_device = (camera2_device_t *)malloc(sizeof(camera2_device_t)); | |
ad37861e | 6438 | ALOGV("g_cam2_device : 0x%08x", (unsigned int)g_cam2_device); |
9dd63e1f | 6439 | |
c15a6b00 JS |
6440 | if (!g_cam2_device) |
6441 | return -ENOMEM; | |
6442 | ||
6443 | g_cam2_device->common.tag = HARDWARE_DEVICE_TAG; | |
6444 | g_cam2_device->common.version = CAMERA_DEVICE_API_VERSION_2_0; | |
6445 | g_cam2_device->common.module = const_cast<hw_module_t *>(module); | |
6446 | g_cam2_device->common.close = HAL2_camera_device_close; | |
6447 | ||
6448 | g_cam2_device->ops = &camera2_device_ops; | |
6449 | ||
13d8c7b4 | 6450 | ALOGV("DEBUG(%s):open camera2 %s", __FUNCTION__, id); |
c15a6b00 | 6451 | |
6044e509 SK |
6452 | g_cam2_device->priv = new ExynosCameraHWInterface2(cameraId, g_cam2_device, g_camera2[cameraId], &openInvalid); |
6453 | if (!openInvalid) { | |
5506cebf | 6454 | ALOGE("DEBUG(%s): ExynosCameraHWInterface2 creation failed", __FUNCTION__); |
6f19b6cf | 6455 | return -ENODEV; |
6044e509 | 6456 | } |
c15a6b00 JS |
6457 | done: |
6458 | *device = (hw_device_t *)g_cam2_device; | |
13d8c7b4 | 6459 | ALOGV("DEBUG(%s):opened camera2 %s (%p)", __FUNCTION__, id, *device); |
b5237e6b | 6460 | g_camera_vaild = true; |
c15a6b00 JS |
6461 | |
6462 | return 0; | |
6463 | } | |
6464 | ||
6465 | ||
/* Module method table; open() is the only hook hw_module_methods_t defines. */
static hw_module_methods_t camera_module_methods = {
            open : HAL2_camera_device_open
};
6469 | ||
/* Module descriptor exported under the well-known symbol HAL_MODULE_INFO_SYM;
 * the hardware loader dlopens this library and looks the symbol up by name,
 * hence the C linkage. */
extern "C" {
    struct camera_module HAL_MODULE_INFO_SYM = {
        common : {
            tag                : HARDWARE_MODULE_TAG,
            module_api_version : CAMERA_MODULE_API_VERSION_2_0,
            hal_api_version    : HARDWARE_HAL_API_VERSION,
            id                 : CAMERA_HARDWARE_MODULE_ID,
            name               : "Exynos Camera HAL2",
            author             : "Samsung Corporation",
            methods            : &camera_module_methods,
            dso:                NULL,
            reserved:           {0},
        },
        get_number_of_cameras : HAL2_getNumberOfCameras,
        get_camera_info       : HAL2_getCameraInfo
    };
}
6487 | ||
6488 | }; // namespace android |