Commit | Line | Data |
---|---|---|
c15a6b00 JS |
1 | /* |
2 | ** | |
3 | ** Copyright 2008, The Android Open Source Project | |
4 | ** Copyright 2012, Samsung Electronics Co. LTD | |
5 | ** | |
6 | ** Licensed under the Apache License, Version 2.0 (the "License"); | |
7 | ** you may not use this file except in compliance with the License. | |
8 | ** You may obtain a copy of the License at | |
9 | ** | |
10 | ** http://www.apache.org/licenses/LICENSE-2.0 | |
11 | ** | |
12 | ** Unless required by applicable law or agreed to in writing, software | |
13 | ** distributed under the License is distributed on an "AS IS" BASIS, | |
14 | ** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |
15 | ** See the License for the specific language governing permissions and | |
16 | ** limitations under the License. | |
17 | */ | |
18 | ||
19 | /*! | |
20 | * \file ExynosCameraHWInterface2.cpp | |
21 | * \brief source file for Android Camera API 2.0 HAL | |
22 | * \author Sungjoong Kang(sj3.kang@samsung.com) | |
13d8c7b4 | 23 | * \date 2012/07/10 |
c15a6b00 JS |
24 | * |
25 | * <b>Revision History: </b> | |
26 | * - 2012/05/31 : Sungjoong Kang(sj3.kang@samsung.com) \n | |
27 | * Initial Release | |
13d8c7b4 SK |
28 | * |
29 | * - 2012/07/10 : Sungjoong Kang(sj3.kang@samsung.com) \n | |
30 | * 2nd Release | |
31 | * | |
c15a6b00 JS |
32 | */ |
33 | ||
34 | //#define LOG_NDEBUG 0 | |
9dd63e1f | 35 | #define LOG_TAG "ExynosCameraHAL2" |
be8daa96 | 36 | #include <sys/time.h> |
c15a6b00 | 37 | #include <utils/Log.h> |
3db6fe61 | 38 | #include <math.h> |
c15a6b00 JS |
39 | |
40 | #include "ExynosCameraHWInterface2.h" | |
41 | #include "exynos_format.h" | |
42 | ||
c15a6b00 JS |
43 | namespace android { |
44 | ||
9dd63e1f SK |
45 | void m_savePostView(const char *fname, uint8_t *buf, uint32_t size) |
46 | { | |
47 | int nw; | |
48 | int cnt = 0; | |
49 | uint32_t written = 0; | |
50 | ||
ad37861e | 51 | ALOGV("opening file [%s], address[%x], size(%d)", fname, (unsigned int)buf, size); |
9dd63e1f SK |
52 | int fd = open(fname, O_RDWR | O_CREAT, 0644); |
53 | if (fd < 0) { | |
54 | ALOGE("failed to create file [%s]: %s", fname, strerror(errno)); | |
55 | return; | |
56 | } | |
57 | ||
ad37861e | 58 | ALOGV("writing %d bytes to file [%s]", size, fname); |
9dd63e1f SK |
59 | while (written < size) { |
60 | nw = ::write(fd, buf + written, size - written); | |
61 | if (nw < 0) { | |
62 | ALOGE("failed to write to file %d [%s]: %s",written,fname, strerror(errno)); | |
63 | break; | |
64 | } | |
65 | written += nw; | |
66 | cnt++; | |
67 | } | |
ad37861e | 68 | ALOGV("done writing %d bytes to file [%s] in %d passes",size, fname, cnt); |
9dd63e1f SK |
69 | ::close(fd); |
70 | } | |
71 | ||
c15a6b00 JS |
72 | int get_pixel_depth(uint32_t fmt) |
73 | { | |
74 | int depth = 0; | |
75 | ||
76 | switch (fmt) { | |
77 | case V4L2_PIX_FMT_JPEG: | |
78 | depth = 8; | |
79 | break; | |
80 | ||
81 | case V4L2_PIX_FMT_NV12: | |
82 | case V4L2_PIX_FMT_NV21: | |
83 | case V4L2_PIX_FMT_YUV420: | |
84 | case V4L2_PIX_FMT_YVU420M: | |
85 | case V4L2_PIX_FMT_NV12M: | |
86 | case V4L2_PIX_FMT_NV12MT: | |
87 | depth = 12; | |
88 | break; | |
89 | ||
90 | case V4L2_PIX_FMT_RGB565: | |
91 | case V4L2_PIX_FMT_YUYV: | |
92 | case V4L2_PIX_FMT_YVYU: | |
93 | case V4L2_PIX_FMT_UYVY: | |
94 | case V4L2_PIX_FMT_VYUY: | |
95 | case V4L2_PIX_FMT_NV16: | |
96 | case V4L2_PIX_FMT_NV61: | |
97 | case V4L2_PIX_FMT_YUV422P: | |
98 | case V4L2_PIX_FMT_SBGGR10: | |
99 | case V4L2_PIX_FMT_SBGGR12: | |
100 | case V4L2_PIX_FMT_SBGGR16: | |
101 | depth = 16; | |
102 | break; | |
103 | ||
104 | case V4L2_PIX_FMT_RGB32: | |
105 | depth = 32; | |
106 | break; | |
107 | default: | |
108 | ALOGE("Get depth failed(format : %d)", fmt); | |
109 | break; | |
110 | } | |
111 | ||
112 | return depth; | |
13d8c7b4 | 113 | } |
c15a6b00 JS |
114 | |
115 | int cam_int_s_fmt(node_info_t *node) | |
116 | { | |
117 | struct v4l2_format v4l2_fmt; | |
118 | unsigned int framesize; | |
119 | int ret; | |
120 | ||
121 | memset(&v4l2_fmt, 0, sizeof(struct v4l2_format)); | |
122 | ||
123 | v4l2_fmt.type = node->type; | |
124 | framesize = (node->width * node->height * get_pixel_depth(node->format)) / 8; | |
125 | ||
126 | if (node->planes >= 1) { | |
127 | v4l2_fmt.fmt.pix_mp.width = node->width; | |
128 | v4l2_fmt.fmt.pix_mp.height = node->height; | |
129 | v4l2_fmt.fmt.pix_mp.pixelformat = node->format; | |
130 | v4l2_fmt.fmt.pix_mp.field = V4L2_FIELD_ANY; | |
131 | } else { | |
13d8c7b4 | 132 | ALOGE("%s:S_FMT, Out of bound : Number of element plane",__FUNCTION__); |
c15a6b00 JS |
133 | } |
134 | ||
135 | /* Set up for capture */ | |
136 | ret = exynos_v4l2_s_fmt(node->fd, &v4l2_fmt); | |
137 | ||
138 | if (ret < 0) | |
13d8c7b4 | 139 | ALOGE("%s: exynos_v4l2_s_fmt fail (%d)",__FUNCTION__, ret); |
c15a6b00 | 140 | |
be494d19 | 141 | |
c15a6b00 JS |
142 | return ret; |
143 | } | |
144 | ||
145 | int cam_int_reqbufs(node_info_t *node) | |
146 | { | |
147 | struct v4l2_requestbuffers req; | |
148 | int ret; | |
149 | ||
150 | req.count = node->buffers; | |
151 | req.type = node->type; | |
152 | req.memory = node->memory; | |
153 | ||
154 | ret = exynos_v4l2_reqbufs(node->fd, &req); | |
155 | ||
156 | if (ret < 0) | |
13d8c7b4 | 157 | ALOGE("%s: VIDIOC_REQBUFS (fd:%d) failed (%d)",__FUNCTION__,node->fd, ret); |
c15a6b00 JS |
158 | |
159 | return req.count; | |
160 | } | |
161 | ||
162 | int cam_int_qbuf(node_info_t *node, int index) | |
163 | { | |
164 | struct v4l2_buffer v4l2_buf; | |
165 | struct v4l2_plane planes[VIDEO_MAX_PLANES]; | |
166 | int i; | |
167 | int ret = 0; | |
168 | ||
169 | v4l2_buf.m.planes = planes; | |
170 | v4l2_buf.type = node->type; | |
171 | v4l2_buf.memory = node->memory; | |
172 | v4l2_buf.index = index; | |
173 | v4l2_buf.length = node->planes; | |
174 | ||
175 | for(i = 0; i < node->planes; i++){ | |
13d8c7b4 SK |
176 | v4l2_buf.m.planes[i].m.fd = (int)(node->buffer[index].fd.extFd[i]); |
177 | v4l2_buf.m.planes[i].length = (unsigned long)(node->buffer[index].size.extS[i]); | |
c15a6b00 JS |
178 | } |
179 | ||
180 | ret = exynos_v4l2_qbuf(node->fd, &v4l2_buf); | |
181 | ||
182 | if (ret < 0) | |
13d8c7b4 | 183 | ALOGE("%s: cam_int_qbuf failed (index:%d)(ret:%d)",__FUNCTION__, index, ret); |
c15a6b00 JS |
184 | |
185 | return ret; | |
186 | } | |
187 | ||
188 | int cam_int_streamon(node_info_t *node) | |
189 | { | |
190 | enum v4l2_buf_type type = node->type; | |
191 | int ret; | |
192 | ||
be494d19 | 193 | |
c15a6b00 JS |
194 | ret = exynos_v4l2_streamon(node->fd, type); |
195 | ||
196 | if (ret < 0) | |
ad37861e | 197 | ALOGE("%s: VIDIOC_STREAMON failed [%d] (%d)",__FUNCTION__, node->fd,ret); |
c15a6b00 JS |
198 | |
199 | ALOGV("On streaming I/O... ... fd(%d)", node->fd); | |
200 | ||
201 | return ret; | |
202 | } | |
203 | ||
13d8c7b4 SK |
/**
 * Stop streaming I/O on a capture node (VIDIOC_STREAMOFF).
 *
 * NOTE(review): the buffer type is hardcoded to CAPTURE_MPLANE rather
 * than taken from node->type — presumably all callers are capture
 * nodes; confirm before reusing this for other node types.
 */
int cam_int_streamoff(node_info_t *node)
{
    enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    int ret;


    ALOGV("Off streaming I/O... fd(%d)", node->fd);
    ret = exynos_v4l2_streamoff(node->fd, type);

    if (ret < 0)
        ALOGE("%s: VIDIOC_STREAMOFF failed (%d)",__FUNCTION__, ret);

    return ret;
}
218 | ||
9dd63e1f SK |
/**
 * Stop streaming I/O on the ISP output node (VIDIOC_STREAMOFF).
 *
 * Identical to cam_int_streamoff() except the buffer type is hardcoded
 * to OUTPUT_MPLANE, matching the ISP's mem-to-mem output queue.
 */
int isp_int_streamoff(node_info_t *node)
{
    enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
    int ret;

    ALOGV("Off streaming I/O... fd(%d)", node->fd);
    ret = exynos_v4l2_streamoff(node->fd, type);

    if (ret < 0)
        ALOGE("%s: VIDIOC_STREAMOFF failed (%d)",__FUNCTION__, ret);

    return ret;
}
232 | ||
c15a6b00 JS |
233 | int cam_int_dqbuf(node_info_t *node) |
234 | { | |
235 | struct v4l2_buffer v4l2_buf; | |
236 | struct v4l2_plane planes[VIDEO_MAX_PLANES]; | |
237 | int ret; | |
238 | ||
239 | v4l2_buf.type = node->type; | |
240 | v4l2_buf.memory = node->memory; | |
241 | v4l2_buf.m.planes = planes; | |
242 | v4l2_buf.length = node->planes; | |
243 | ||
244 | ret = exynos_v4l2_dqbuf(node->fd, &v4l2_buf); | |
245 | if (ret < 0) | |
13d8c7b4 | 246 | ALOGE("%s: VIDIOC_DQBUF failed (%d)",__FUNCTION__, ret); |
c15a6b00 JS |
247 | |
248 | return v4l2_buf.index; | |
249 | } | |
250 | ||
feb7df4c SK |
251 | int cam_int_dqbuf(node_info_t *node, int num_plane) |
252 | { | |
253 | struct v4l2_buffer v4l2_buf; | |
254 | struct v4l2_plane planes[VIDEO_MAX_PLANES]; | |
255 | int ret; | |
256 | ||
257 | v4l2_buf.type = node->type; | |
258 | v4l2_buf.memory = node->memory; | |
259 | v4l2_buf.m.planes = planes; | |
260 | v4l2_buf.length = num_plane; | |
261 | ||
262 | ret = exynos_v4l2_dqbuf(node->fd, &v4l2_buf); | |
263 | if (ret < 0) | |
264 | ALOGE("%s: VIDIOC_DQBUF failed (%d)",__FUNCTION__, ret); | |
265 | ||
266 | return v4l2_buf.index; | |
267 | } | |
268 | ||
c15a6b00 JS |
269 | int cam_int_s_input(node_info_t *node, int index) |
270 | { | |
271 | int ret; | |
13d8c7b4 | 272 | |
c15a6b00 JS |
273 | ret = exynos_v4l2_s_input(node->fd, index); |
274 | if (ret < 0) | |
13d8c7b4 | 275 | ALOGE("%s: VIDIOC_S_INPUT failed (%d)",__FUNCTION__, ret); |
c15a6b00 JS |
276 | |
277 | return ret; | |
278 | } | |
279 | ||
280 | ||
// Out-of-class definition of the static gralloc module handle shared by
// all ExynosCameraHWInterface2 instances (looked up once via hw_get_module).
gralloc_module_t const* ExynosCameraHWInterface2::m_grallocHal;
282 | ||
/**
 * Construct the request manager for the given main signal thread.
 * Clears the circular entry buffer and resets all "last applied"
 * 3A-mode caches so the first request always pushes fresh settings.
 */
RequestManager::RequestManager(SignalDrivenThread* main_thread):
    m_vdisEnable(false),
    m_lastCompletedFrameCnt(-1),   // -1: no frame completed yet
    m_lastAeMode(0),
    m_lastAaMode(0),
    m_lastAwbMode(0),
    m_lastAeComp(0),
    m_vdisBubbleEn(false)
{
    m_metadataConverter = new MetadataConverter;
    m_mainThread = main_thread;
    ResetEntry();
    m_sensorPipelineSkipCnt = 0;
    return;
}
298 | ||
299 | RequestManager::~RequestManager() | |
300 | { | |
6d8e5b08 SK |
301 | ALOGV("%s", __FUNCTION__); |
302 | if (m_metadataConverter != NULL) { | |
303 | delete m_metadataConverter; | |
304 | m_metadataConverter = NULL; | |
305 | } | |
306 | ||
52f54308 | 307 | releaseSensorQ(); |
c15a6b00 JS |
308 | return; |
309 | } | |
310 | ||
2adfa429 JS |
311 | void RequestManager::ResetEntry() |
312 | { | |
313 | Mutex::Autolock lock(m_requestMutex); | |
1264ab16 | 314 | Mutex::Autolock lock2(m_numOfEntriesLock); |
2adfa429 JS |
315 | for (int i=0 ; i<NUM_MAX_REQUEST_MGR_ENTRY; i++) { |
316 | memset(&(entries[i]), 0x00, sizeof(request_manager_entry_t)); | |
317 | entries[i].internal_shot.shot.ctl.request.frameCount = -1; | |
318 | } | |
319 | m_numOfEntries = 0; | |
320 | m_entryInsertionIndex = -1; | |
321 | m_entryProcessingIndex = -1; | |
322 | m_entryFrameOutputIndex = -1; | |
323 | } | |
324 | ||
c15a6b00 JS |
325 | int RequestManager::GetNumEntries() |
326 | { | |
1264ab16 | 327 | Mutex::Autolock lock(m_numOfEntriesLock); |
c15a6b00 JS |
328 | return m_numOfEntries; |
329 | } | |
330 | ||
9dd63e1f SK |
// Cache the default horizontal crop offset used when building shots.
// NOTE(review): only the X component is stored here — confirm the other
// crop components are derived elsewhere by the callers.
void RequestManager::SetDefaultParameters(int cropX)
{
    m_cropX = cropX;
}
335 | ||
c15a6b00 JS |
336 | bool RequestManager::IsRequestQueueFull() |
337 | { | |
338 | Mutex::Autolock lock(m_requestMutex); | |
1264ab16 | 339 | Mutex::Autolock lock2(m_numOfEntriesLock); |
c15a6b00 JS |
340 | if (m_numOfEntries>=NUM_MAX_REQUEST_MGR_ENTRY) |
341 | return true; | |
342 | else | |
343 | return false; | |
344 | } | |
345 | ||
ca714238 | 346 | void RequestManager::RegisterRequest(camera_metadata_t * new_request, int * afMode, uint32_t * afRegion) |
c15a6b00 | 347 | { |
13d8c7b4 SK |
348 | ALOGV("DEBUG(%s):", __FUNCTION__); |
349 | ||
c15a6b00 | 350 | Mutex::Autolock lock(m_requestMutex); |
1264ab16 | 351 | Mutex::Autolock lock2(m_numOfEntriesLock); |
13d8c7b4 | 352 | |
c15a6b00 | 353 | request_manager_entry * newEntry = NULL; |
9dd63e1f | 354 | int newInsertionIndex = GetNextIndex(m_entryInsertionIndex); |
ca714238 | 355 | ALOGV("DEBUG(%s): got lock, new insertIndex(%d), cnt before reg(%d)", __FUNCTION__,newInsertionIndex, m_numOfEntries ); |
13d8c7b4 | 356 | |
c15a6b00 | 357 | |
c15a6b00 JS |
358 | newEntry = &(entries[newInsertionIndex]); |
359 | ||
360 | if (newEntry->status!=EMPTY) { | |
13d8c7b4 SK |
361 | ALOGV("DEBUG(%s): Circular buffer abnormal ", __FUNCTION__); |
362 | return; | |
c15a6b00 JS |
363 | } |
364 | newEntry->status = REGISTERED; | |
365 | newEntry->original_request = new_request; | |
ad37861e | 366 | memset(&(newEntry->internal_shot), 0, sizeof(struct camera2_shot_ext)); |
13d8c7b4 | 367 | m_metadataConverter->ToInternalShot(new_request, &(newEntry->internal_shot)); |
a85ec381 SK |
368 | newEntry->output_stream_count = 0; |
369 | if (newEntry->internal_shot.shot.ctl.request.outputStreams[0] & MASK_OUTPUT_SCP) | |
370 | newEntry->output_stream_count++; | |
371 | ||
372 | if (newEntry->internal_shot.shot.ctl.request.outputStreams[0] & MASK_OUTPUT_SCC) | |
373 | newEntry->output_stream_count++; | |
c15a6b00 JS |
374 | |
375 | m_numOfEntries++; | |
376 | m_entryInsertionIndex = newInsertionIndex; | |
377 | ||
13d8c7b4 | 378 | |
ca714238 SK |
379 | *afMode = (int)(newEntry->internal_shot.shot.ctl.aa.afMode); |
380 | afRegion[0] = newEntry->internal_shot.shot.ctl.aa.afRegions[0]; | |
381 | afRegion[1] = newEntry->internal_shot.shot.ctl.aa.afRegions[1]; | |
382 | afRegion[2] = newEntry->internal_shot.shot.ctl.aa.afRegions[2]; | |
383 | afRegion[3] = newEntry->internal_shot.shot.ctl.aa.afRegions[3]; | |
c15a6b00 | 384 | ALOGV("## RegisterReq DONE num(%d), insert(%d), processing(%d), frame(%d), (frameCnt(%d))", |
be494d19 | 385 | m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex, newEntry->internal_shot.shot.ctl.request.frameCount); |
c15a6b00 JS |
386 | } |
387 | ||
388 | void RequestManager::DeregisterRequest(camera_metadata_t ** deregistered_request) | |
389 | { | |
13d8c7b4 | 390 | ALOGV("DEBUG(%s):", __FUNCTION__); |
ad37861e SK |
391 | int frame_index; |
392 | request_manager_entry * currentEntry; | |
c15a6b00 | 393 | |
ad37861e | 394 | Mutex::Autolock lock(m_requestMutex); |
1264ab16 | 395 | Mutex::Autolock lock2(m_numOfEntriesLock); |
13d8c7b4 | 396 | |
f9a06609 | 397 | frame_index = GetCompletedIndex(); |
ad37861e | 398 | currentEntry = &(entries[frame_index]); |
f9a06609 | 399 | if (currentEntry->status != COMPLETED) { |
5c88d1f2 C |
400 | CAM_LOGD("DBG(%s): Circular buffer abnormal. processing(%d), frame(%d), status(%d) ", __FUNCTION__, |
401 | m_entryProcessingIndex, frame_index,(int)(currentEntry->status)); | |
13d8c7b4 | 402 | return; |
c15a6b00 | 403 | } |
13d8c7b4 SK |
404 | if (deregistered_request) *deregistered_request = currentEntry->original_request; |
405 | ||
041f38de SK |
406 | m_lastCompletedFrameCnt = currentEntry->internal_shot.shot.ctl.request.frameCount; |
407 | ||
c15a6b00 JS |
408 | currentEntry->status = EMPTY; |
409 | currentEntry->original_request = NULL; | |
be494d19 SK |
410 | memset(&(currentEntry->internal_shot), 0, sizeof(struct camera2_shot_ext)); |
411 | currentEntry->internal_shot.shot.ctl.request.frameCount = -1; | |
c15a6b00 JS |
412 | currentEntry->output_stream_count = 0; |
413 | m_numOfEntries--; | |
414 | ALOGV("## DeRegistReq DONE num(%d), insert(%d), processing(%d), frame(%d)", | |
415 | m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex); | |
13d8c7b4 | 416 | |
041f38de | 417 | CheckCompleted(GetNextIndex(frame_index)); |
c15a6b00 | 418 | return; |
c15a6b00 JS |
419 | } |
420 | ||
13d8c7b4 | 421 | bool RequestManager::PrepareFrame(size_t* num_entries, size_t* frame_size, |
0f26b20f | 422 | camera_metadata_t ** prepared_frame, int afState) |
c15a6b00 | 423 | { |
13d8c7b4 | 424 | ALOGV("DEBUG(%s):", __FUNCTION__); |
c15a6b00 JS |
425 | Mutex::Autolock lock(m_requestMutex); |
426 | status_t res = NO_ERROR; | |
f9a06609 | 427 | int tempFrameOutputIndex = GetCompletedIndex(); |
13d8c7b4 SK |
428 | request_manager_entry * currentEntry = &(entries[tempFrameOutputIndex]); |
429 | ALOGV("DEBUG(%s): processing(%d), frameOut(%d), insert(%d) recentlycompleted(%d)", __FUNCTION__, | |
430 | m_entryProcessingIndex, m_entryFrameOutputIndex, m_entryInsertionIndex, m_completedIndex); | |
431 | ||
f9a06609 | 432 | if (currentEntry->status != COMPLETED) { |
ad37861e | 433 | ALOGV("DBG(%s): Circular buffer abnormal status(%d)", __FUNCTION__, (int)(currentEntry->status)); |
13d8c7b4 SK |
434 | |
435 | return false; | |
436 | } | |
437 | m_entryFrameOutputIndex = tempFrameOutputIndex; | |
07b3ad1c | 438 | m_tempFrameMetadata = place_camera_metadata(m_tempFrameMetadataBuf, 2000, 35, 500); //estimated |
0f26b20f | 439 | add_camera_metadata_entry(m_tempFrameMetadata, ANDROID_CONTROL_AF_STATE, &afState, 1); |
13d8c7b4 | 440 | res = m_metadataConverter->ToDynamicMetadata(&(currentEntry->internal_shot), |
c15a6b00 JS |
441 | m_tempFrameMetadata); |
442 | if (res!=NO_ERROR) { | |
13d8c7b4 SK |
443 | ALOGE("ERROR(%s): ToDynamicMetadata (%d) ", __FUNCTION__, res); |
444 | return false; | |
c15a6b00 JS |
445 | } |
446 | *num_entries = get_camera_metadata_entry_count(m_tempFrameMetadata); | |
447 | *frame_size = get_camera_metadata_size(m_tempFrameMetadata); | |
448 | *prepared_frame = m_tempFrameMetadata; | |
5506cebf SK |
449 | ALOGV("## PrepareFrame DONE: frameOut(%d) frameCnt-req(%d) timestamp(%lld)", m_entryFrameOutputIndex, |
450 | currentEntry->internal_shot.shot.ctl.request.frameCount, currentEntry->internal_shot.shot.dm.sensor.timeStamp); | |
9dd63e1f | 451 | // Dump(); |
13d8c7b4 | 452 | return true; |
c15a6b00 JS |
453 | } |
454 | ||
ca714238 | 455 | int RequestManager::MarkProcessingRequest(ExynosBuffer* buf) |
c15a6b00 | 456 | { |
13d8c7b4 | 457 | struct camera2_shot_ext * shot_ext; |
b56dcc00 | 458 | struct camera2_shot_ext * request_shot; |
13d8c7b4 | 459 | int targetStreamIndex = 0; |
ad37861e | 460 | request_manager_entry * newEntry = NULL; |
0f26b20f | 461 | static int count = 0; |
13d8c7b4 | 462 | |
52f54308 | 463 | Mutex::Autolock lock(m_requestMutex); |
1264ab16 | 464 | Mutex::Autolock lock2(m_numOfEntriesLock); |
13d8c7b4 | 465 | if (m_numOfEntries == 0) { |
4aa4d739 | 466 | CAM_LOGD("DEBUG(%s): Request Manager Empty ", __FUNCTION__); |
13d8c7b4 SK |
467 | return -1; |
468 | } | |
469 | ||
470 | if ((m_entryProcessingIndex == m_entryInsertionIndex) | |
be494d19 | 471 | && (entries[m_entryProcessingIndex].status == REQUESTED || entries[m_entryProcessingIndex].status == CAPTURED)) { |
da7ca692 | 472 | ALOGV("## MarkProcReq skipping(request underrun) - num(%d), insert(%d), processing(%d), frame(%d)", |
13d8c7b4 SK |
473 | m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex); |
474 | return -1; | |
475 | } | |
c15a6b00 | 476 | |
9dd63e1f | 477 | int newProcessingIndex = GetNextIndex(m_entryProcessingIndex); |
ad37861e | 478 | ALOGV("DEBUG(%s): index(%d)", __FUNCTION__, newProcessingIndex); |
c15a6b00 | 479 | |
c15a6b00 | 480 | newEntry = &(entries[newProcessingIndex]); |
ad37861e | 481 | request_shot = &(newEntry->internal_shot); |
be494d19 | 482 | if (newEntry->status != REGISTERED) { |
5c88d1f2 C |
483 | CAM_LOGD("DEBUG(%s)(%d): Circular buffer abnormal, numOfEntries(%d), status(%d)", __FUNCTION__, newProcessingIndex, m_numOfEntries, newEntry->status); |
484 | for (int i = 0; i < NUM_MAX_REQUEST_MGR_ENTRY; i++) { | |
485 | CAM_LOGD("DBG: entrie[%d].stream output cnt = %d, framecnt(%d)", i, entries[i].output_stream_count, entries[i].internal_shot.shot.ctl.request.frameCount); | |
486 | } | |
13d8c7b4 | 487 | return -1; |
c15a6b00 | 488 | } |
ad37861e | 489 | |
be494d19 | 490 | newEntry->status = REQUESTED; |
c15a6b00 | 491 | |
ad37861e | 492 | shot_ext = (struct camera2_shot_ext *)buf->virt.extP[1]; |
13d8c7b4 | 493 | |
ad37861e SK |
494 | memset(shot_ext, 0x00, sizeof(struct camera2_shot_ext)); |
495 | shot_ext->shot.ctl.request.frameCount = request_shot->shot.ctl.request.frameCount; | |
13d8c7b4 | 496 | shot_ext->request_sensor = 1; |
9dd63e1f SK |
497 | shot_ext->dis_bypass = 1; |
498 | shot_ext->dnr_bypass = 1; | |
ad37861e SK |
499 | shot_ext->fd_bypass = 1; |
500 | shot_ext->setfile = 0; | |
501 | ||
5506cebf SK |
502 | targetStreamIndex = newEntry->internal_shot.shot.ctl.request.outputStreams[0]; |
503 | shot_ext->shot.ctl.request.outputStreams[0] = targetStreamIndex; | |
504 | if (targetStreamIndex & MASK_OUTPUT_SCP) | |
505 | shot_ext->request_scp = 1; | |
13d8c7b4 | 506 | |
5506cebf SK |
507 | if (targetStreamIndex & MASK_OUTPUT_SCC) |
508 | shot_ext->request_scc = 1; | |
509 | ||
510 | if (shot_ext->shot.ctl.stats.faceDetectMode != FACEDETECT_MODE_OFF) | |
511 | shot_ext->fd_bypass = 0; | |
13d8c7b4 | 512 | |
0f26b20f SK |
513 | if (count == 0){ |
514 | shot_ext->shot.ctl.aa.mode = AA_CONTROL_AUTO; | |
515 | } else | |
516 | shot_ext->shot.ctl.aa.mode = AA_CONTROL_NONE; | |
517 | ||
518 | count++; | |
ad37861e SK |
519 | shot_ext->shot.ctl.request.metadataMode = METADATA_MODE_FULL; |
520 | shot_ext->shot.ctl.stats.faceDetectMode = FACEDETECT_MODE_FULL; | |
521 | shot_ext->shot.magicNumber = 0x23456789; | |
522 | shot_ext->shot.ctl.sensor.exposureTime = 0; | |
523 | shot_ext->shot.ctl.sensor.frameDuration = 33*1000*1000; | |
524 | shot_ext->shot.ctl.sensor.sensitivity = 0; | |
525 | ||
e4657e32 SK |
526 | |
527 | shot_ext->shot.ctl.scaler.cropRegion[0] = newEntry->internal_shot.shot.ctl.scaler.cropRegion[0]; | |
528 | shot_ext->shot.ctl.scaler.cropRegion[1] = newEntry->internal_shot.shot.ctl.scaler.cropRegion[1]; | |
529 | shot_ext->shot.ctl.scaler.cropRegion[2] = newEntry->internal_shot.shot.ctl.scaler.cropRegion[2]; | |
13d8c7b4 | 530 | |
ad37861e SK |
531 | m_entryProcessingIndex = newProcessingIndex; |
532 | return newProcessingIndex; | |
c15a6b00 JS |
533 | } |
534 | ||
2adfa429 | 535 | void RequestManager::NotifyStreamOutput(int frameCnt) |
c15a6b00 | 536 | { |
9dd63e1f SK |
537 | int index; |
538 | ||
2adfa429 JS |
539 | Mutex::Autolock lock(m_requestMutex); |
540 | ALOGV("DEBUG(%s): frameCnt(%d)", __FUNCTION__, frameCnt); | |
9dd63e1f SK |
541 | |
542 | index = FindEntryIndexByFrameCnt(frameCnt); | |
543 | if (index == -1) { | |
544 | ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt); | |
545 | return; | |
546 | } | |
2adfa429 | 547 | ALOGV("DEBUG(%s): frameCnt(%d), last cnt (%d)", __FUNCTION__, frameCnt, entries[index].output_stream_count); |
9dd63e1f | 548 | |
be494d19 SK |
549 | entries[index].output_stream_count--; //TODO : match stream id also |
550 | CheckCompleted(index); | |
13d8c7b4 SK |
551 | } |
552 | ||
/**
 * Promote the entry at `index` to COMPLETED once its metadata has
 * arrived (METADONE) and every targeted output stream has reported in
 * (output_stream_count <= 0). Signals the main thread only when the
 * completed frame is the next one expected in order, preserving
 * in-order frame delivery to the framework.
 *
 * NOTE(review): callers appear to hold m_requestMutex already — confirm
 * before adding new call sites.
 */
void RequestManager::CheckCompleted(int index)
{
    if ((entries[index].status == METADONE || entries[index].status == COMPLETED)
        && (entries[index].output_stream_count <= 0)){
        ALOGV("(%s): Completed(index:%d)(frameCnt:%d)", __FUNCTION__,
            index, entries[index].internal_shot.shot.ctl.request.frameCount );
        entries[index].status = COMPLETED;
        // Only wake the main thread for the next in-order frame.
        if (m_lastCompletedFrameCnt + 1 == (int)entries[index].internal_shot.shot.ctl.request.frameCount)
            m_mainThread->SetSignal(SIGNAL_MAIN_STREAM_OUTPUT_DONE);
    }
}
9dd63e1f | 564 | |
f9a06609 | 565 | int RequestManager::GetCompletedIndex() |
ad37861e | 566 | { |
041f38de | 567 | return FindEntryIndexByFrameCnt(m_lastCompletedFrameCnt + 1); |
ad37861e SK |
568 | } |
569 | ||
52f54308 SK |
// Queue an entry index for the sensor thread (FIFO, guarded by m_requestMutex).
void RequestManager::pushSensorQ(int index)
{
    Mutex::Autolock lock(m_requestMutex);
    m_sensorQ.push_back(index);
}
575 | ||
576 | int RequestManager::popSensorQ() | |
577 | { | |
578 | List<int>::iterator sensor_token; | |
579 | int index; | |
580 | ||
581 | Mutex::Autolock lock(m_requestMutex); | |
582 | ||
583 | if(m_sensorQ.size() == 0) | |
584 | return -1; | |
585 | ||
586 | sensor_token = m_sensorQ.begin()++; | |
587 | index = *sensor_token; | |
588 | m_sensorQ.erase(sensor_token); | |
589 | ||
590 | return (index); | |
591 | } | |
592 | ||
593 | void RequestManager::releaseSensorQ() | |
594 | { | |
595 | List<int>::iterator r; | |
596 | ||
597 | Mutex::Autolock lock(m_requestMutex); | |
0eb27a9d | 598 | ALOGV("(%s)m_sensorQ.size : %d", __FUNCTION__, m_sensorQ.size()); |
52f54308 SK |
599 | |
600 | while(m_sensorQ.size() > 0){ | |
601 | r = m_sensorQ.begin()++; | |
602 | m_sensorQ.erase(r); | |
603 | } | |
604 | return; | |
605 | } | |
606 | ||
ad37861e | 607 | void RequestManager::ApplyDynamicMetadata(struct camera2_shot_ext *shot_ext) |
13d8c7b4 | 608 | { |
9dd63e1f | 609 | int index; |
b56dcc00 SK |
610 | struct camera2_shot_ext * request_shot; |
611 | nsecs_t timeStamp; | |
ad37861e | 612 | int i; |
13d8c7b4 | 613 | |
52f54308 | 614 | Mutex::Autolock lock(m_requestMutex); |
ad37861e | 615 | ALOGV("DEBUG(%s): frameCnt(%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount); |
9dd63e1f | 616 | |
ad37861e SK |
617 | for (i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) { |
618 | if((entries[i].internal_shot.shot.ctl.request.frameCount == shot_ext->shot.ctl.request.frameCount) | |
2adfa429 | 619 | && (entries[i].status == CAPTURED)){ |
f9a06609 | 620 | entries[i].status = METADONE; |
ad37861e | 621 | break; |
2adfa429 | 622 | } |
ad37861e SK |
623 | } |
624 | ||
625 | if (i == NUM_MAX_REQUEST_MGR_ENTRY){ | |
626 | ALOGE("[%s] no entry found(framecount:%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount); | |
9dd63e1f | 627 | return; |
13d8c7b4 | 628 | } |
9dd63e1f | 629 | |
ad37861e | 630 | request_manager_entry * newEntry = &(entries[i]); |
b56dcc00 | 631 | request_shot = &(newEntry->internal_shot); |
9dd63e1f | 632 | |
b56dcc00 | 633 | timeStamp = request_shot->shot.dm.sensor.timeStamp; |
ad37861e | 634 | memcpy(&(request_shot->shot.dm), &(shot_ext->shot.dm), sizeof(struct camera2_dm)); |
b56dcc00 | 635 | request_shot->shot.dm.sensor.timeStamp = timeStamp; |
5506cebf | 636 | m_lastTimeStamp = timeStamp; |
ad37861e | 637 | CheckCompleted(i); |
13d8c7b4 SK |
638 | } |
639 | ||
53f62ad9 | 640 | void RequestManager::UpdateIspParameters(struct camera2_shot_ext *shot_ext, int frameCnt, ctl_request_info_t *ctl_info) |
13d8c7b4 | 641 | { |
9dd63e1f | 642 | int index, targetStreamIndex; |
b56dcc00 | 643 | struct camera2_shot_ext * request_shot; |
9dd63e1f SK |
644 | |
645 | ALOGV("DEBUG(%s): updating info with frameCnt(%d)", __FUNCTION__, frameCnt); | |
646 | if (frameCnt < 0) | |
13d8c7b4 | 647 | return; |
9dd63e1f SK |
648 | |
649 | index = FindEntryIndexByFrameCnt(frameCnt); | |
650 | if (index == -1) { | |
651 | ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt); | |
652 | return; | |
653 | } | |
654 | ||
13d8c7b4 | 655 | request_manager_entry * newEntry = &(entries[index]); |
ad37861e | 656 | request_shot = &(newEntry->internal_shot); |
2bdec060 | 657 | memcpy(&(shot_ext->shot.ctl), &(request_shot->shot.ctl), sizeof(struct camera2_ctl)); |
2adfa429 | 658 | shot_ext->shot.ctl.request.frameCount = frameCnt; |
13d8c7b4 | 659 | shot_ext->request_sensor = 1; |
ad37861e SK |
660 | shot_ext->dis_bypass = 1; |
661 | shot_ext->dnr_bypass = 1; | |
662 | shot_ext->fd_bypass = 1; | |
10e122bd | 663 | shot_ext->drc_bypass = 1; |
ad37861e SK |
664 | shot_ext->setfile = 0; |
665 | ||
13d8c7b4 SK |
666 | shot_ext->request_scc = 0; |
667 | shot_ext->request_scp = 0; | |
ad37861e | 668 | |
5506cebf SK |
669 | shot_ext->isReprocessing = request_shot->isReprocessing; |
670 | shot_ext->reprocessInput = request_shot->reprocessInput; | |
9dd63e1f | 671 | shot_ext->shot.ctl.request.outputStreams[0] = 0; |
9dd63e1f | 672 | |
48728d49 SK |
673 | shot_ext->awb_mode_dm = request_shot->awb_mode_dm; |
674 | ||
e4657e32 SK |
675 | shot_ext->shot.ctl.scaler.cropRegion[0] = request_shot->shot.ctl.scaler.cropRegion[0]; |
676 | shot_ext->shot.ctl.scaler.cropRegion[1] = request_shot->shot.ctl.scaler.cropRegion[1]; | |
677 | shot_ext->shot.ctl.scaler.cropRegion[2] = request_shot->shot.ctl.scaler.cropRegion[2]; | |
678 | ||
53f62ad9 YJ |
679 | // mapping flash UI mode from aeMode |
680 | if (request_shot->shot.ctl.aa.aeMode >= AA_AEMODE_ON) { | |
4a9565ae | 681 | if (request_shot->shot.ctl.aa.captureIntent == AA_CAPTURE_INTENT_PREVIEW) |
73f5ad60 | 682 | ctl_info->flash.i_flashMode = request_shot->shot.ctl.aa.aeMode; |
a0648fc7 | 683 | else if (request_shot->shot.ctl.aa.captureIntent == AA_CAPTURE_INTENT_VIDEO_RECORD) |
73f5ad60 | 684 | ctl_info->flash.i_flashMode = request_shot->shot.ctl.aa.aeMode; |
53f62ad9 YJ |
685 | request_shot->shot.ctl.aa.aeMode = AA_AEMODE_ON; |
686 | } | |
53f62ad9 YJ |
687 | |
688 | // Apply ae/awb lock or unlock | |
e117f756 YJ |
689 | if (request_shot->ae_lock == AEMODE_LOCK_ON) |
690 | request_shot->shot.ctl.aa.aeMode = AA_AEMODE_LOCKED; | |
691 | if (request_shot->awb_lock == AWBMODE_LOCK_ON) | |
692 | request_shot->shot.ctl.aa.awbMode = AA_AWBMODE_LOCKED; | |
693 | ||
2bdec060 SK |
694 | if (m_lastAaMode == request_shot->shot.ctl.aa.mode) { |
695 | shot_ext->shot.ctl.aa.mode = (enum aa_mode)(0); | |
696 | } | |
697 | else { | |
698 | shot_ext->shot.ctl.aa.mode = request_shot->shot.ctl.aa.mode; | |
699 | m_lastAaMode = (int)(shot_ext->shot.ctl.aa.mode); | |
700 | } | |
701 | if (m_lastAeMode == request_shot->shot.ctl.aa.aeMode) { | |
702 | shot_ext->shot.ctl.aa.aeMode = (enum aa_aemode)(0); | |
703 | } | |
704 | else { | |
705 | shot_ext->shot.ctl.aa.aeMode = request_shot->shot.ctl.aa.aeMode; | |
706 | m_lastAeMode = (int)(shot_ext->shot.ctl.aa.aeMode); | |
707 | } | |
708 | if (m_lastAwbMode == request_shot->shot.ctl.aa.awbMode) { | |
709 | shot_ext->shot.ctl.aa.awbMode = (enum aa_awbmode)(0); | |
710 | } | |
711 | else { | |
712 | shot_ext->shot.ctl.aa.awbMode = request_shot->shot.ctl.aa.awbMode; | |
713 | m_lastAwbMode = (int)(shot_ext->shot.ctl.aa.awbMode); | |
714 | } | |
715 | if (m_lastAeComp == request_shot->shot.ctl.aa.aeExpCompensation) { | |
716 | shot_ext->shot.ctl.aa.aeExpCompensation = 0; | |
717 | } | |
718 | else { | |
719 | shot_ext->shot.ctl.aa.aeExpCompensation = request_shot->shot.ctl.aa.aeExpCompensation; | |
720 | m_lastAeComp = (int)(shot_ext->shot.ctl.aa.aeExpCompensation); | |
721 | } | |
ef6f83ca | 722 | |
a3bcc37b | 723 | if (request_shot->shot.ctl.aa.videoStabilizationMode && m_vdisEnable) { |
5c88d1f2 C |
724 | m_vdisBubbleEn = true; |
725 | shot_ext->dis_bypass = 0; | |
7ef20f42 | 726 | shot_ext->dnr_bypass = 0; |
5c88d1f2 C |
727 | } else { |
728 | m_vdisBubbleEn = false; | |
729 | shot_ext->dis_bypass = 1; | |
7ef20f42 | 730 | shot_ext->dnr_bypass = 1; |
5c88d1f2 | 731 | } |
5c88d1f2 | 732 | |
ef6f83ca SK |
733 | shot_ext->shot.ctl.aa.afTrigger = 0; |
734 | ||
5506cebf SK |
735 | targetStreamIndex = newEntry->internal_shot.shot.ctl.request.outputStreams[0]; |
736 | shot_ext->shot.ctl.request.outputStreams[0] = targetStreamIndex; | |
737 | if (targetStreamIndex & MASK_OUTPUT_SCP) | |
738 | shot_ext->request_scp = 1; | |
13d8c7b4 | 739 | |
5506cebf SK |
740 | if (targetStreamIndex & MASK_OUTPUT_SCC) |
741 | shot_ext->request_scc = 1; | |
742 | ||
743 | if (shot_ext->shot.ctl.stats.faceDetectMode != FACEDETECT_MODE_OFF) | |
744 | shot_ext->fd_bypass = 0; | |
745 | ||
6ba9ef65 SK |
746 | shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = request_shot->shot.ctl.aa.aeTargetFpsRange[0]; |
747 | shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = request_shot->shot.ctl.aa.aeTargetFpsRange[1]; | |
5506cebf SK |
748 | |
749 | ALOGV("(%s): applied aa(%d) aemode(%d) expComp(%d), awb(%d) afmode(%d), ", __FUNCTION__, | |
750 | (int)(shot_ext->shot.ctl.aa.mode), (int)(shot_ext->shot.ctl.aa.aeMode), | |
751 | (int)(shot_ext->shot.ctl.aa.aeExpCompensation), (int)(shot_ext->shot.ctl.aa.awbMode), | |
752 | (int)(shot_ext->shot.ctl.aa.afMode)); | |
13d8c7b4 SK |
753 | } |
754 | ||
5c88d1f2 C |
755 | bool RequestManager::IsVdisEnable(void) |
756 | { | |
757 | return m_vdisBubbleEn; | |
758 | } | |
5c88d1f2 | 759 | |
9dd63e1f SK |
760 | int RequestManager::FindEntryIndexByFrameCnt(int frameCnt) |
761 | { | |
762 | for (int i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) { | |
1422aff9 | 763 | if ((int)entries[i].internal_shot.shot.ctl.request.frameCount == frameCnt) |
9dd63e1f SK |
764 | return i; |
765 | } | |
766 | return -1; | |
767 | } | |
768 | ||
769 | void RequestManager::RegisterTimestamp(int frameCnt, nsecs_t * frameTime) | |
13d8c7b4 | 770 | { |
9dd63e1f SK |
771 | int index = FindEntryIndexByFrameCnt(frameCnt); |
772 | if (index == -1) { | |
773 | ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt); | |
774 | return; | |
775 | } | |
776 | ||
13d8c7b4 | 777 | request_manager_entry * currentEntry = &(entries[index]); |
a8be0011 SK |
778 | if (currentEntry->internal_shot.isReprocessing == 1) { |
779 | ALOGV("DEBUG(%s): REPROCESSING : preserving timestamp for reqIndex(%d) frameCnt(%d) (%lld)", __FUNCTION__, | |
be494d19 | 780 | index, frameCnt, currentEntry->internal_shot.shot.dm.sensor.timeStamp); |
a8be0011 SK |
781 | } else { |
782 | currentEntry->internal_shot.shot.dm.sensor.timeStamp = *((uint64_t*)frameTime); | |
783 | ALOGV("DEBUG(%s): applied timestamp for reqIndex(%d) frameCnt(%d) (%lld)", __FUNCTION__, | |
784 | index, frameCnt, currentEntry->internal_shot.shot.dm.sensor.timeStamp); | |
785 | } | |
13d8c7b4 SK |
786 | } |
787 | ||
5506cebf SK |
788 | |
789 | nsecs_t RequestManager::GetTimestampByFrameCnt(int frameCnt) | |
790 | { | |
791 | int index = FindEntryIndexByFrameCnt(frameCnt); | |
792 | if (index == -1) { | |
793 | ALOGE("ERR(%s): Cannot find entry for frameCnt(%d) returning saved time(%lld)", __FUNCTION__, frameCnt, m_lastTimeStamp); | |
794 | return m_lastTimeStamp; | |
795 | } | |
796 | else | |
797 | return GetTimestamp(index); | |
798 | } | |
799 | ||
800 | nsecs_t RequestManager::GetTimestamp(int index) | |
13d8c7b4 | 801 | { |
5f643a75 | 802 | Mutex::Autolock lock(m_requestMutex); |
54f4971e SK |
803 | if (index < 0 || index >= NUM_MAX_REQUEST_MGR_ENTRY) { |
804 | ALOGE("ERR(%s): Request entry outside of bounds (%d)", __FUNCTION__, index); | |
9dd63e1f SK |
805 | return 0; |
806 | } | |
807 | ||
13d8c7b4 | 808 | request_manager_entry * currentEntry = &(entries[index]); |
5f643a75 | 809 | nsecs_t frameTime = currentEntry->internal_shot.shot.dm.sensor.timeStamp; |
5506cebf SK |
810 | if (frameTime == 0) { |
811 | ALOGV("DEBUG(%s): timestamp null, returning saved value", __FUNCTION__); | |
812 | frameTime = m_lastTimeStamp; | |
813 | } | |
9dd63e1f | 814 | ALOGV("DEBUG(%s): Returning timestamp for reqIndex(%d) (%lld)", __FUNCTION__, index, frameTime); |
13d8c7b4 SK |
815 | return frameTime; |
816 | } | |
817 | ||
2f4d175d SK |
818 | uint8_t RequestManager::GetOutputStreamByFrameCnt(int frameCnt) |
819 | { | |
820 | int index = FindEntryIndexByFrameCnt(frameCnt); | |
821 | if (index == -1) { | |
822 | ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt); | |
823 | return 0; | |
824 | } | |
825 | else | |
826 | return GetOutputStream(index); | |
827 | } | |
828 | ||
829 | uint8_t RequestManager::GetOutputStream(int index) | |
830 | { | |
831 | Mutex::Autolock lock(m_requestMutex); | |
832 | if (index < 0 || index >= NUM_MAX_REQUEST_MGR_ENTRY) { | |
833 | ALOGE("ERR(%s): Request entry outside of bounds (%d)", __FUNCTION__, index); | |
834 | return 0; | |
835 | } | |
836 | ||
837 | request_manager_entry * currentEntry = &(entries[index]); | |
838 | return currentEntry->internal_shot.shot.ctl.request.outputStreams[0]; | |
839 | } | |
840 | ||
69d1e6e9 SK |
841 | camera2_shot_ext * RequestManager::GetInternalShotExtByFrameCnt(int frameCnt) |
842 | { | |
843 | int index = FindEntryIndexByFrameCnt(frameCnt); | |
844 | if (index == -1) { | |
845 | ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt); | |
846 | return 0; | |
847 | } | |
848 | else | |
849 | return GetInternalShotExt(index); | |
850 | } | |
851 | ||
852 | camera2_shot_ext * RequestManager::GetInternalShotExt(int index) | |
853 | { | |
854 | Mutex::Autolock lock(m_requestMutex); | |
855 | if (index < 0 || index >= NUM_MAX_REQUEST_MGR_ENTRY) { | |
856 | ALOGE("ERR(%s): Request entry outside of bounds (%d)", __FUNCTION__, index); | |
857 | return 0; | |
858 | } | |
859 | ||
860 | request_manager_entry * currentEntry = &(entries[index]); | |
861 | return ¤tEntry->internal_shot; | |
862 | } | |
863 | ||
9dd63e1f SK |
864 | int RequestManager::FindFrameCnt(struct camera2_shot_ext * shot_ext) |
865 | { | |
041f38de | 866 | Mutex::Autolock lock(m_requestMutex); |
ad37861e SK |
867 | int i; |
868 | ||
be494d19 | 869 | if (m_numOfEntries == 0) { |
5c88d1f2 | 870 | CAM_LOGD("DBG(%s): No Entry found", __FUNCTION__); |
be494d19 SK |
871 | return -1; |
872 | } | |
ad37861e | 873 | |
be494d19 | 874 | for (i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) { |
ad37861e | 875 | if(entries[i].internal_shot.shot.ctl.request.frameCount != shot_ext->shot.ctl.request.frameCount) |
be494d19 | 876 | continue; |
ad37861e SK |
877 | |
878 | if (entries[i].status == REQUESTED) { | |
879 | entries[i].status = CAPTURED; | |
880 | return entries[i].internal_shot.shot.ctl.request.frameCount; | |
be494d19 | 881 | } |
5c88d1f2 | 882 | CAM_LOGE("ERR(%s): frameCount(%d), index(%d), status(%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount, i, entries[i].status); |
ad37861e | 883 | |
be494d19 | 884 | } |
5c88d1f2 | 885 | CAM_LOGD("(%s): No Entry found frame count(%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount); |
ad37861e | 886 | |
be494d19 | 887 | return -1; |
9dd63e1f | 888 | } |
13d8c7b4 | 889 | |
b5237e6b SK |
890 | void RequestManager::SetInitialSkip(int count) |
891 | { | |
892 | ALOGV("(%s): Pipeline Restarting. setting cnt(%d) - current(%d)", __FUNCTION__, count, m_sensorPipelineSkipCnt); | |
893 | if (count > m_sensorPipelineSkipCnt) | |
894 | m_sensorPipelineSkipCnt = count; | |
895 | } | |
896 | ||
ad37861e SK |
897 | int RequestManager::GetSkipCnt() |
898 | { | |
899 | ALOGV("(%s): skip cnt(%d)", __FUNCTION__, m_sensorPipelineSkipCnt); | |
900 | if (m_sensorPipelineSkipCnt == 0) | |
901 | return m_sensorPipelineSkipCnt; | |
902 | else | |
903 | return --m_sensorPipelineSkipCnt; | |
904 | } | |
905 | ||
13d8c7b4 SK |
906 | void RequestManager::Dump(void) |
907 | { | |
13d8c7b4 SK |
908 | int i = 0; |
909 | request_manager_entry * currentEntry; | |
1264ab16 | 910 | Mutex::Autolock lock(m_numOfEntriesLock); |
ad37861e | 911 | ALOGD("## Dump totalentry(%d), insert(%d), processing(%d), frame(%d)", |
13d8c7b4 SK |
912 | m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex); |
913 | ||
914 | for (i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) { | |
915 | currentEntry = &(entries[i]); | |
5506cebf | 916 | ALOGD("[%2d] status[%d] frameCnt[%3d] numOutput[%d] outstream[0]-%x ", i, |
be494d19 | 917 | currentEntry->status, currentEntry->internal_shot.shot.ctl.request.frameCount, |
ad37861e | 918 | currentEntry->output_stream_count, |
5506cebf | 919 | currentEntry->internal_shot.shot.ctl.request.outputStreams[0]); |
13d8c7b4 SK |
920 | } |
921 | } | |
c15a6b00 | 922 | |
9dd63e1f SK |
923 | int RequestManager::GetNextIndex(int index) |
924 | { | |
925 | index++; | |
926 | if (index >= NUM_MAX_REQUEST_MGR_ENTRY) | |
927 | index = 0; | |
928 | ||
929 | return index; | |
930 | } | |
931 | ||
f9a06609 SK |
932 | int RequestManager::GetPrevIndex(int index) |
933 | { | |
934 | index--; | |
935 | if (index < 0) | |
936 | index = NUM_MAX_REQUEST_MGR_ENTRY-1; | |
937 | ||
938 | return index; | |
939 | } | |
940 | ||
/* Constructs the HAL2 instance for 'cameraId'.
 * Acquires the gralloc module and an ION client, creates the bayer/request
 * managers and main thread, then initializes the ISP chain. On ISP-chain
 * failure (*openInvalid < 0) the already-opened V4L2 nodes are closed and no
 * worker threads are started; on success the sensor thread, CSC contexts,
 * EXIF defaults, and control-info state are set up.
 * openInvalid [out]: receives the InitializeISPChain() result so the caller
 * can reject the open. */
ExynosCameraHWInterface2::ExynosCameraHWInterface2(int cameraId, camera2_device_t *dev, ExynosCamera2 * camera, int *openInvalid):
    m_requestQueueOps(NULL),
    m_frameQueueOps(NULL),
    m_callbackCookie(NULL),
    m_numOfRemainingReqInSvc(0),
    m_isRequestQueuePending(false),
    m_isRequestQueueNull(true),
    m_halDevice(dev),
    m_ionCameraClient(0),
    m_isIspStarted(false),
    m_sccLocalBufferValid(false),
    m_cameraId(cameraId),
    m_scp_closing(false),
    m_scp_closed(false),
    m_wideAspect(false),
    m_zoomRatio(1),
    m_vdisBubbleCnt(0),
    m_vdisDupFrame(0),
    m_jpegEncodingCount(0),
    m_scpForceSuspended(false),
    m_afState(HAL_AFSTATE_INACTIVE),
    m_afTriggerId(0),
    m_afMode(NO_CHANGE),
    m_afMode2(NO_CHANGE),
    m_IsAfModeUpdateRequired(false),
    m_IsAfTriggerRequired(false),
    m_IsAfLockRequired(false),
    m_serviceAfState(ANDROID_CONTROL_AF_STATE_INACTIVE),
    m_afPendingTriggerId(0),
    m_afModeWaitingCnt(0),
    m_scpOutputSignalCnt(0),
    m_scpOutputImageCnt(0),
    m_nightCaptureCnt(0),
    m_nightCaptureFrameCnt(0),
    m_lastSceneMode(0),
    m_thumbNailW(160),
    m_thumbNailH(120)
{
    ALOGD("(%s): ENTER", __FUNCTION__);
    int ret = 0;
    int res = 0;

    m_exynosPictureCSC = NULL;
    m_exynosVideoCSC = NULL;

    // Load the gralloc HAL once (m_grallocHal is shared/static state).
    if (!m_grallocHal) {
        ret = hw_get_module(GRALLOC_HARDWARE_MODULE_ID, (const hw_module_t **)&m_grallocHal);
        if (ret)
            ALOGE("ERR(%s):Fail on loading gralloc HAL", __FUNCTION__);
    }

    m_camera2 = camera;
    // ION client used for all DMABUF camera-buffer allocations below.
    m_ionCameraClient = createIonClient(m_ionCameraClient);
    if(m_ionCameraClient == 0)
        ALOGE("ERR(%s):Fail on ion_client_create", __FUNCTION__);


    m_BayerManager = new BayerBufManager();
    m_mainThread = new MainThread(this);
    // RequestManager signals the main thread when work arrives.
    m_requestManager = new RequestManager((SignalDrivenThread*)(m_mainThread.get()));
    *openInvalid = InitializeISPChain();
    if (*openInvalid < 0) {
        ALOGD("(%s): ISP chain init failed. exiting", __FUNCTION__);
        // clean process
        // 1. close video nodes
        // SCP
        res = exynos_v4l2_close(m_camera_info.scp.fd);
        if (res != NO_ERROR ) {
            ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
        }
        // SCC
        res = exynos_v4l2_close(m_camera_info.capture.fd);
        if (res != NO_ERROR ) {
            ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
        }
        // Sensor
        res = exynos_v4l2_close(m_camera_info.sensor.fd);
        if (res != NO_ERROR ) {
            ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
        }
        // ISP
        res = exynos_v4l2_close(m_camera_info.isp.fd);
        if (res != NO_ERROR ) {
            ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
        }
    } else {
        m_sensorThread = new SensorThread(this);
        m_mainThread->Start("MainThread", PRIORITY_DEFAULT, 0);
        m_sensorThread->Start("SensorThread", PRIORITY_DEFAULT, 0);
        ALOGV("DEBUG(%s): created sensorthread ", __FUNCTION__);

        for (int i = 0 ; i < STREAM_ID_LAST+1 ; i++)
            m_subStreams[i].type = SUBSTREAM_TYPE_NONE;
        // Hardware color-space converters: one for still pictures, one for
        // video, each pinned to its own GScaler node.
        CSC_METHOD cscMethod = CSC_METHOD_HW;
        m_exynosPictureCSC = csc_init(cscMethod);
        if (m_exynosPictureCSC == NULL)
            ALOGE("ERR(%s): csc_init() fail", __FUNCTION__);
        csc_set_hw_property(m_exynosPictureCSC, CSC_HW_PROPERTY_FIXED_NODE, PICTURE_GSC_NODE_NUM);
        csc_set_hw_property(m_exynosPictureCSC, CSC_HW_PROPERTY_HW_TYPE, CSC_HW_TYPE_GSCALER);

        m_exynosVideoCSC = csc_init(cscMethod);
        if (m_exynosVideoCSC == NULL)
            ALOGE("ERR(%s): csc_init() fail", __FUNCTION__);
        csc_set_hw_property(m_exynosVideoCSC, CSC_HW_PROPERTY_FIXED_NODE, VIDEO_GSC_NODE_NUM);
        csc_set_hw_property(m_exynosVideoCSC, CSC_HW_PROPERTY_HW_TYPE, CSC_HW_TYPE_GSCALER);

        m_setExifFixedAttribute();

        // contol information clear
        // flash
        // NOTE(review): i_flashMode is initialized with an AE-mode constant
        // (AA_AEMODE_ON) -- looks intentional in this HAL's flash state
        // machine, but verify against the flash-control code.
        m_ctlInfo.flash.i_flashMode = AA_AEMODE_ON;
        m_ctlInfo.flash.m_afFlashDoneFlg= false;
        m_ctlInfo.flash.m_flashEnableFlg = false;
        m_ctlInfo.flash.m_flashFrameCount = 0;
        m_ctlInfo.flash.m_flashCnt = 0;
        m_ctlInfo.flash.m_flashTimeOut = 0;
        m_ctlInfo.flash.m_flashDecisionResult = false;
        m_ctlInfo.flash.m_flashTorchMode = false;
        m_ctlInfo.flash.m_precaptureState = 0;
        m_ctlInfo.flash.m_precaptureTriggerId = 0;
        // ae
        m_ctlInfo.ae.aeStateNoti = AE_STATE_INACTIVE;
        // af
        m_ctlInfo.af.m_afTriggerTimeOut = 0;
        // scene
        m_ctlInfo.scene.prevSceneMode = AA_SCENE_MODE_MAX;
    }
    ALOGD("(%s): EXIT", __FUNCTION__);
}
1070 | ||
1071 | ExynosCameraHWInterface2::~ExynosCameraHWInterface2() | |
1072 | { | |
ed4ad5fe | 1073 | ALOGD("(%s): ENTER", __FUNCTION__); |
c15a6b00 | 1074 | this->release(); |
ed4ad5fe | 1075 | ALOGD("(%s): EXIT", __FUNCTION__); |
c15a6b00 JS |
1076 | } |
1077 | ||
/* Tears down the HAL instance.
 * The ordering is deliberate and must be preserved:
 *   1. ask the stream, sensor, and main threads to release and terminate;
 *   2. destroy the CSC contexts;
 *   3. busy-wait (SIG_WAITING_TICK) until each thread reports terminated,
 *      stream threads first, then sensor, then main;
 *   4. delete the managers, free sensor/SCC driver buffers, close all four
 *      V4L2 nodes, and drop the ION client.
 */
void ExynosCameraHWInterface2::release()
{
    int i, res;
    ALOGD("(HAL2::release): ENTER");

    // Phase 1: request termination (non-blocking) from every worker thread.
    if (m_streamThreads[1] != NULL) {
        m_streamThreads[1]->release();
        m_streamThreads[1]->SetSignal(SIGNAL_THREAD_TERMINATE);
    }

    if (m_streamThreads[0] != NULL) {
        m_streamThreads[0]->release();
        m_streamThreads[0]->SetSignal(SIGNAL_THREAD_TERMINATE);
    }

    if (m_sensorThread != NULL) {
        m_sensorThread->release();
    }

    if (m_mainThread != NULL) {
        m_mainThread->release();
    }

    // Phase 2: CSC contexts are no longer needed once streams are stopping.
    if (m_exynosPictureCSC)
        csc_deinit(m_exynosPictureCSC);
    m_exynosPictureCSC = NULL;

    if (m_exynosVideoCSC)
        csc_deinit(m_exynosVideoCSC);
    m_exynosVideoCSC = NULL;

    // Phase 3: wait for each thread to actually finish before freeing the
    // buffers and nodes they may still be touching.
    if (m_streamThreads[1] != NULL) {
        ALOGD("(HAL2::release): START Waiting for (indirect) stream thread 1 termination");
        while (!m_streamThreads[1]->IsTerminated())
            usleep(SIG_WAITING_TICK);
        ALOGD("(HAL2::release): END Waiting for (indirect) stream thread 1 termination");
        m_streamThreads[1] = NULL;
    }

    if (m_streamThreads[0] != NULL) {
        ALOGD("(HAL2::release): START Waiting for (indirect) stream thread 0 termination");
        while (!m_streamThreads[0]->IsTerminated())
            usleep(SIG_WAITING_TICK);
        ALOGD("(HAL2::release): END Waiting for (indirect) stream thread 0 termination");
        m_streamThreads[0] = NULL;
    }

    if (m_sensorThread != NULL) {
        ALOGD("(HAL2::release): START Waiting for (indirect) sensor thread termination");
        while (!m_sensorThread->IsTerminated())
            usleep(SIG_WAITING_TICK);
        ALOGD("(HAL2::release): END Waiting for (indirect) sensor thread termination");
        m_sensorThread = NULL;
    }

    if (m_mainThread != NULL) {
        ALOGD("(HAL2::release): START Waiting for (indirect) main thread termination");
        while (!m_mainThread->IsTerminated())
            usleep(SIG_WAITING_TICK);
        ALOGD("(HAL2::release): END Waiting for (indirect) main thread termination");
        m_mainThread = NULL;
    }

    // Phase 4: managers, buffers, device nodes, ION client.
    if (m_requestManager != NULL) {
        delete m_requestManager;
        m_requestManager = NULL;
    }

    if (m_BayerManager != NULL) {
        delete m_BayerManager;
        m_BayerManager = NULL;
    }
    for (i = 0; i < NUM_BAYER_BUFFERS; i++)
        freeCameraMemory(&m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes);

    // SCC buffers live either in m_sccLocalBuffer (copied out by
    // StartSCCThread) or still in m_camera_info.capture -- free whichever
    // set owns them.
    if (m_sccLocalBufferValid) {
        for (i = 0; i < NUM_SCC_BUFFERS; i++)
#ifdef ENABLE_FRAME_SYNC
            freeCameraMemory(&m_sccLocalBuffer[i], 2);
#else
            freeCameraMemory(&m_sccLocalBuffer[i], 1);
#endif
    }
    else {
        for (i = 0; i < NUM_SCC_BUFFERS; i++)
            freeCameraMemory(&m_camera_info.capture.buffer[i], m_camera_info.capture.planes);
    }

    ALOGV("DEBUG(%s): calling exynos_v4l2_close - sensor", __FUNCTION__);
    res = exynos_v4l2_close(m_camera_info.sensor.fd);
    if (res != NO_ERROR ) {
        ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
    }

    ALOGV("DEBUG(%s): calling exynos_v4l2_close - isp", __FUNCTION__);
    res = exynos_v4l2_close(m_camera_info.isp.fd);
    if (res != NO_ERROR ) {
        ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
    }

    ALOGV("DEBUG(%s): calling exynos_v4l2_close - capture", __FUNCTION__);
    res = exynos_v4l2_close(m_camera_info.capture.fd);
    if (res != NO_ERROR ) {
        ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
    }

    ALOGV("DEBUG(%s): calling exynos_v4l2_close - scp", __FUNCTION__);
    res = exynos_v4l2_close(m_camera_info.scp.fd);
    if (res != NO_ERROR ) {
        ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
    }
    ALOGV("DEBUG(%s): calling deleteIonClient", __FUNCTION__);
    deleteIonClient(m_ionCameraClient);

    ALOGD("(HAL2::release): EXIT");
}
1194 | ||
6044e509 | 1195 | int ExynosCameraHWInterface2::InitializeISPChain() |
ad37861e SK |
1196 | { |
1197 | char node_name[30]; | |
1198 | int fd = 0; | |
1199 | int i; | |
6044e509 | 1200 | int ret = 0; |
ad37861e SK |
1201 | |
1202 | /* Open Sensor */ | |
1203 | memset(&node_name, 0x00, sizeof(char[30])); | |
1204 | sprintf(node_name, "%s%d", NODE_PREFIX, 40); | |
1205 | fd = exynos_v4l2_open(node_name, O_RDWR, 0); | |
1206 | ||
1207 | if (fd < 0) { | |
1208 | ALOGE("ERR(%s): failed to open sensor video node (%s) fd (%d)", __FUNCTION__,node_name, fd); | |
1209 | } | |
1210 | else { | |
1211 | ALOGV("DEBUG(%s): sensor video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd); | |
1212 | } | |
1213 | m_camera_info.sensor.fd = fd; | |
1214 | ||
1215 | /* Open ISP */ | |
1216 | memset(&node_name, 0x00, sizeof(char[30])); | |
1217 | sprintf(node_name, "%s%d", NODE_PREFIX, 41); | |
1218 | fd = exynos_v4l2_open(node_name, O_RDWR, 0); | |
1219 | ||
1220 | if (fd < 0) { | |
1221 | ALOGE("ERR(%s): failed to open isp video node (%s) fd (%d)", __FUNCTION__,node_name, fd); | |
1222 | } | |
1223 | else { | |
1224 | ALOGV("DEBUG(%s): isp video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd); | |
1225 | } | |
1226 | m_camera_info.isp.fd = fd; | |
1227 | ||
1228 | /* Open ScalerC */ | |
1229 | memset(&node_name, 0x00, sizeof(char[30])); | |
1230 | sprintf(node_name, "%s%d", NODE_PREFIX, 42); | |
1231 | fd = exynos_v4l2_open(node_name, O_RDWR, 0); | |
1232 | ||
1233 | if (fd < 0) { | |
1234 | ALOGE("ERR(%s): failed to open capture video node (%s) fd (%d)", __FUNCTION__,node_name, fd); | |
1235 | } | |
1236 | else { | |
1237 | ALOGV("DEBUG(%s): capture video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd); | |
1238 | } | |
1239 | m_camera_info.capture.fd = fd; | |
1240 | ||
1241 | /* Open ScalerP */ | |
1242 | memset(&node_name, 0x00, sizeof(char[30])); | |
1243 | sprintf(node_name, "%s%d", NODE_PREFIX, 44); | |
1244 | fd = exynos_v4l2_open(node_name, O_RDWR, 0); | |
1245 | if (fd < 0) { | |
1246 | ALOGE("DEBUG(%s): failed to open preview video node (%s) fd (%d)", __FUNCTION__,node_name, fd); | |
1247 | } | |
1248 | else { | |
1249 | ALOGV("DEBUG(%s): preview video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd); | |
1250 | } | |
5506cebf | 1251 | m_camera_info.scp.fd = fd; |
ad37861e SK |
1252 | |
1253 | if(m_cameraId == 0) | |
1254 | m_camera_info.sensor_id = SENSOR_NAME_S5K4E5; | |
1255 | else | |
1256 | m_camera_info.sensor_id = SENSOR_NAME_S5K6A3; | |
1257 | ||
1258 | memset(&m_camera_info.dummy_shot, 0x00, sizeof(struct camera2_shot_ext)); | |
1259 | m_camera_info.dummy_shot.shot.ctl.request.metadataMode = METADATA_MODE_FULL; | |
1260 | m_camera_info.dummy_shot.shot.magicNumber = 0x23456789; | |
1261 | ||
1262 | m_camera_info.dummy_shot.dis_bypass = 1; | |
1263 | m_camera_info.dummy_shot.dnr_bypass = 1; | |
1264 | m_camera_info.dummy_shot.fd_bypass = 1; | |
1265 | ||
1266 | /*sensor setting*/ | |
1267 | m_camera_info.dummy_shot.shot.ctl.sensor.exposureTime = 0; | |
1268 | m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 0; | |
1269 | m_camera_info.dummy_shot.shot.ctl.sensor.sensitivity = 0; | |
1270 | ||
1271 | m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[0] = 0; | |
1272 | m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[1] = 0; | |
1273 | ||
1274 | /*request setting*/ | |
1275 | m_camera_info.dummy_shot.request_sensor = 1; | |
1276 | m_camera_info.dummy_shot.request_scc = 0; | |
1277 | m_camera_info.dummy_shot.request_scp = 0; | |
1278 | m_camera_info.dummy_shot.shot.ctl.request.outputStreams[0] = 0; | |
ad37861e SK |
1279 | |
1280 | m_camera_info.sensor.width = m_camera2->getSensorRawW(); | |
1281 | m_camera_info.sensor.height = m_camera2->getSensorRawH(); | |
1282 | ||
1283 | m_camera_info.sensor.format = V4L2_PIX_FMT_SBGGR16; | |
1284 | m_camera_info.sensor.planes = 2; | |
1285 | m_camera_info.sensor.buffers = NUM_BAYER_BUFFERS; | |
1286 | m_camera_info.sensor.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; | |
1287 | m_camera_info.sensor.memory = V4L2_MEMORY_DMABUF; | |
ad37861e SK |
1288 | |
1289 | for(i = 0; i < m_camera_info.sensor.buffers; i++){ | |
5c664f4c | 1290 | int res; |
ad37861e SK |
1291 | initCameraMemory(&m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes); |
1292 | m_camera_info.sensor.buffer[i].size.extS[0] = m_camera_info.sensor.width*m_camera_info.sensor.height*2; | |
1293 | m_camera_info.sensor.buffer[i].size.extS[1] = 8*1024; // HACK, driver use 8*1024, should be use predefined value | |
5c664f4c AR |
1294 | res = allocCameraMemory(m_ionCameraClient, &m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes, 1<<1); |
1295 | if (res) { | |
1296 | ALOGE("ERROR(%s): failed to allocateCameraMemory for sensor buffer %d", __FUNCTION__, i); | |
1297 | // Free allocated sensor buffers | |
1298 | for (int j = 0; j < i; j++) { | |
1299 | freeCameraMemory(&m_camera_info.sensor.buffer[j], m_camera_info.sensor.planes); | |
1300 | } | |
1301 | return false; | |
1302 | } | |
ad37861e SK |
1303 | } |
1304 | ||
1305 | m_camera_info.isp.width = m_camera_info.sensor.width; | |
1306 | m_camera_info.isp.height = m_camera_info.sensor.height; | |
1307 | m_camera_info.isp.format = m_camera_info.sensor.format; | |
1308 | m_camera_info.isp.planes = m_camera_info.sensor.planes; | |
1309 | m_camera_info.isp.buffers = m_camera_info.sensor.buffers; | |
1310 | m_camera_info.isp.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; | |
1311 | m_camera_info.isp.memory = V4L2_MEMORY_DMABUF; | |
ad37861e SK |
1312 | |
1313 | for(i = 0; i < m_camera_info.isp.buffers; i++){ | |
1314 | initCameraMemory(&m_camera_info.isp.buffer[i], m_camera_info.isp.planes); | |
1315 | m_camera_info.isp.buffer[i].size.extS[0] = m_camera_info.sensor.buffer[i].size.extS[0]; | |
1316 | m_camera_info.isp.buffer[i].size.extS[1] = m_camera_info.sensor.buffer[i].size.extS[1]; | |
1317 | m_camera_info.isp.buffer[i].fd.extFd[0] = m_camera_info.sensor.buffer[i].fd.extFd[0]; | |
1318 | m_camera_info.isp.buffer[i].fd.extFd[1] = m_camera_info.sensor.buffer[i].fd.extFd[1]; | |
1319 | m_camera_info.isp.buffer[i].virt.extP[0] = m_camera_info.sensor.buffer[i].virt.extP[0]; | |
1320 | m_camera_info.isp.buffer[i].virt.extP[1] = m_camera_info.sensor.buffer[i].virt.extP[1]; | |
1321 | }; | |
1322 | ||
1323 | /* init ISP */ | |
6044e509 SK |
1324 | ret = cam_int_s_input(&(m_camera_info.isp), m_camera_info.sensor_id); |
1325 | if (ret < 0) { | |
1326 | ALOGE("ERR(%s): cam_int_s_input(%d) failed!!!! ", __FUNCTION__, m_camera_info.sensor_id); | |
1327 | return false; | |
1328 | } | |
ad37861e SK |
1329 | cam_int_s_fmt(&(m_camera_info.isp)); |
1330 | ALOGV("DEBUG(%s): isp calling reqbuf", __FUNCTION__); | |
1331 | cam_int_reqbufs(&(m_camera_info.isp)); | |
1332 | ALOGV("DEBUG(%s): isp calling querybuf", __FUNCTION__); | |
1333 | ALOGV("DEBUG(%s): isp mem alloc done", __FUNCTION__); | |
1334 | ||
1335 | /* init Sensor */ | |
1336 | cam_int_s_input(&(m_camera_info.sensor), m_camera_info.sensor_id); | |
1337 | ALOGV("DEBUG(%s): sensor s_input done", __FUNCTION__); | |
1338 | if (cam_int_s_fmt(&(m_camera_info.sensor))< 0) { | |
1339 | ALOGE("ERR(%s): sensor s_fmt fail", __FUNCTION__); | |
1340 | } | |
1341 | ALOGV("DEBUG(%s): sensor s_fmt done", __FUNCTION__); | |
1342 | cam_int_reqbufs(&(m_camera_info.sensor)); | |
1343 | ALOGV("DEBUG(%s): sensor reqbuf done", __FUNCTION__); | |
1344 | for (i = 0; i < m_camera_info.sensor.buffers; i++) { | |
1345 | ALOGV("DEBUG(%s): sensor initial QBUF [%d]", __FUNCTION__, i); | |
ad37861e SK |
1346 | m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 33*1000*1000; // apply from frame #1 |
1347 | m_camera_info.dummy_shot.shot.ctl.request.frameCount = -1; | |
52f54308 SK |
1348 | memcpy( m_camera_info.sensor.buffer[i].virt.extP[1], &(m_camera_info.dummy_shot), |
1349 | sizeof(struct camera2_shot_ext)); | |
ad37861e | 1350 | } |
52f54308 SK |
1351 | |
1352 | for (i = 0; i < NUM_MIN_SENSOR_QBUF; i++) | |
1353 | cam_int_qbuf(&(m_camera_info.sensor), i); | |
1354 | ||
1355 | for (i = NUM_MIN_SENSOR_QBUF; i < m_camera_info.sensor.buffers; i++) | |
1356 | m_requestManager->pushSensorQ(i); | |
1357 | ||
5506cebf | 1358 | ALOGV("== stream_on :: sensor"); |
ad37861e | 1359 | cam_int_streamon(&(m_camera_info.sensor)); |
5506cebf | 1360 | m_camera_info.sensor.status = true; |
ad37861e SK |
1361 | |
1362 | /* init Capture */ | |
1363 | m_camera_info.capture.width = m_camera2->getSensorW(); | |
1364 | m_camera_info.capture.height = m_camera2->getSensorH(); | |
1365 | m_camera_info.capture.format = V4L2_PIX_FMT_YUYV; | |
feb7df4c SK |
1366 | #ifdef ENABLE_FRAME_SYNC |
1367 | m_camera_info.capture.planes = 2; | |
1368 | #else | |
ad37861e | 1369 | m_camera_info.capture.planes = 1; |
feb7df4c | 1370 | #endif |
ac8c2060 | 1371 | m_camera_info.capture.buffers = NUM_SCC_BUFFERS; |
ad37861e SK |
1372 | m_camera_info.capture.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; |
1373 | m_camera_info.capture.memory = V4L2_MEMORY_DMABUF; | |
ad37861e | 1374 | |
5506cebf SK |
1375 | m_camera_info.capture.status = false; |
1376 | ||
1377 | return true; | |
1378 | } | |
1379 | ||
/* Brings up the SCC (capture) stream thread and its V4L2 node.
 * threadExists: false on first start (creates the thread, allocates the SCC
 * buffers and caches them in m_sccLocalBuffer); true on restart (reuses the
 * existing thread and restores the cached buffers into the node).
 * In both cases the node is (re)configured, all buffers queued, and the
 * stream turned on before the thread parameters are published. */
void ExynosCameraHWInterface2::StartSCCThread(bool threadExists)
{
    ALOGV("(%s)", __FUNCTION__);
    StreamThread *AllocatedStream;
    stream_parameters_t newParameters;
    uint32_t format_actual;


    if (!threadExists) {
        m_streamThreads[1] = new StreamThread(this, 1);
    }
    AllocatedStream = (StreamThread*)(m_streamThreads[1].get());
    if (!threadExists) {
        AllocatedStream->Start("StreamThread", PRIORITY_DEFAULT, 0);
        m_streamThreadInitialize((SignalDrivenThread*)AllocatedStream);
        AllocatedStream->m_numRegisteredStream = 1;
    }
    AllocatedStream->m_index = 1;

    format_actual = HAL_PIXEL_FORMAT_YCbCr_422_I; // YUYV

    // Stream parameters for the indirect (driver-fed) SCC stream; no service
    // buffer queue is attached (streamOps == NULL).
    newParameters.width = m_camera2->getSensorW();
    newParameters.height = m_camera2->getSensorH();
    newParameters.format = format_actual;
    newParameters.streamOps = NULL;
    newParameters.numHwBuffers = NUM_SCC_BUFFERS;
#ifdef ENABLE_FRAME_SYNC
    newParameters.planes = 2;
#else
    newParameters.planes = 1;
#endif

    newParameters.numSvcBufsInHal = 0;

    newParameters.node = &m_camera_info.capture;

    AllocatedStream->streamType = STREAM_TYPE_INDIRECT;
    ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, AllocatedStream->m_numRegisteredStream);

    if (!threadExists) {
        // First start: allocate SCC buffers once and cache them so a later
        // restart can reuse them without re-allocating.
        if (!m_sccLocalBufferValid) {
            for (int i = 0; i < m_camera_info.capture.buffers; i++){
                initCameraMemory(&m_camera_info.capture.buffer[i], newParameters.node->planes);
                m_camera_info.capture.buffer[i].size.extS[0] = m_camera_info.capture.width*m_camera_info.capture.height*2;
#ifdef ENABLE_FRAME_SYNC
                m_camera_info.capture.buffer[i].size.extS[1] = 4*1024; // HACK, driver use 4*1024, should be use predefined value
                allocCameraMemory(m_ionCameraClient, &m_camera_info.capture.buffer[i], m_camera_info.capture.planes, 1<<1);
#else
                allocCameraMemory(m_ionCameraClient, &m_camera_info.capture.buffer[i], m_camera_info.capture.planes);
#endif
                m_sccLocalBuffer[i] = m_camera_info.capture.buffer[i];
            }
            m_sccLocalBufferValid = true;
        }
    } else {
        // Restart: restore the cached buffers into the capture node.
        if (m_sccLocalBufferValid) {
            for (int i = 0; i < m_camera_info.capture.buffers; i++)
                m_camera_info.capture.buffer[i] = m_sccLocalBuffer[i];
        } else {
            ALOGE("(%s): SCC Thread starting with no buffer", __FUNCTION__);
        }
    }
    // (Re)configure the node and hand every buffer to the driver.
    cam_int_s_input(newParameters.node, m_camera_info.sensor_id);
    m_camera_info.capture.buffers = NUM_SCC_BUFFERS;
    cam_int_s_fmt(newParameters.node);
    ALOGV("DEBUG(%s): capture calling reqbuf", __FUNCTION__);
    cam_int_reqbufs(newParameters.node);
    ALOGV("DEBUG(%s): capture calling querybuf", __FUNCTION__);

    for (int i = 0; i < newParameters.node->buffers; i++) {
        ALOGV("DEBUG(%s): capture initial QBUF [%d]", __FUNCTION__, i);
        cam_int_qbuf(newParameters.node, i);
        newParameters.svcBufStatus[i] = ON_DRIVER;
    }

    ALOGV("== stream_on :: capture");
    if (cam_int_streamon(newParameters.node) < 0) {
        ALOGE("ERR(%s): capture stream on fail", __FUNCTION__);
    } else {
        m_camera_info.capture.status = true;
    }

    AllocatedStream->setParameter(&newParameters);
    AllocatedStream->m_activated = true;
    AllocatedStream->m_isBufferInit = true;
}
1466 | ||
/*
 * Starts the ISP processing pipeline.
 *
 * Order matters here: the ISP node is streamed on first, then the sensor
 * subdev is told (via V4L2 control) to begin streaming. Both calls talk
 * directly to the kernel driver; their return values are intentionally
 * ignored here — NOTE(review): callers appear to assume success, confirm
 * whether failure handling is needed.
 */
void ExynosCameraHWInterface2::StartISP()
{
    ALOGV("== stream_on :: isp");
    // VIDIOC_STREAMON on the ISP video node.
    cam_int_streamon(&(m_camera_info.isp));
    // Tell the sensor driver to start pushing frames into the ISP.
    exynos_v4l2_s_ctrl(m_camera_info.sensor.fd, V4L2_CID_IS_S_STREAM, IS_ENABLE_STREAM);
}
1473 | ||
c15a6b00 JS |
/*
 * Returns the numeric ID of the camera this HAL instance was opened for.
 */
int ExynosCameraHWInterface2::getCameraId() const
{
    return m_cameraId;
}
c15a6b00 JS |
1478 | |
1479 | int ExynosCameraHWInterface2::setRequestQueueSrcOps(const camera2_request_queue_src_ops_t *request_src_ops) | |
1480 | { | |
13d8c7b4 | 1481 | ALOGV("DEBUG(%s):", __FUNCTION__); |
c15a6b00 JS |
1482 | if ((NULL != request_src_ops) && (NULL != request_src_ops->dequeue_request) |
1483 | && (NULL != request_src_ops->free_request) && (NULL != request_src_ops->request_count)) { | |
1484 | m_requestQueueOps = (camera2_request_queue_src_ops_t*)request_src_ops; | |
1485 | return 0; | |
1486 | } | |
1487 | else { | |
13d8c7b4 | 1488 | ALOGE("DEBUG(%s):setRequestQueueSrcOps : NULL arguments", __FUNCTION__); |
c15a6b00 JS |
1489 | return 1; |
1490 | } | |
1491 | } | |
1492 | ||
/*
 * Framework callback: the request queue has transitioned to non-empty.
 *
 * Besides signalling the main thread, this performs lazy first-time
 * bring-up of the streaming pipeline: sensor/ISP buffer setup, the SCC
 * (capture) stream thread, and finally the ISP + sensor thread start.
 * The V4L2 call ordering (s_fmt -> reqbufs -> qbuf -> streamon) is
 * mandated by the driver and must not be rearranged.
 *
 * Returns 0 always (errors are logged, not propagated).
 */
int ExynosCameraHWInterface2::notifyRequestQueueNotEmpty()
{
    int i = 0;

    ALOGV("DEBUG(%s):setting [SIGNAL_MAIN_REQ_Q_NOT_EMPTY] current(%d)", __FUNCTION__, m_requestManager->GetNumEntries());
    // Both ops tables must have been installed by the framework first.
    if ((NULL==m_frameQueueOps)|| (NULL==m_requestQueueOps)) {
        ALOGE("DEBUG(%s):queue ops NULL. ignoring request", __FUNCTION__);
        return 0;
    }
    m_isRequestQueueNull = false;
    // Queue was drained: reset the initial-frame skip counter.
    if (m_requestManager->GetNumEntries() == 0)
        m_requestManager->SetInitialSkip(0);

    if (m_isIspStarted == false) {
        /* isp */
        // ISP shares the bayer buffer count with the sensor node.
        m_camera_info.sensor.buffers = NUM_BAYER_BUFFERS;
        m_camera_info.isp.buffers = m_camera_info.sensor.buffers;
        cam_int_s_fmt(&(m_camera_info.isp));
        cam_int_reqbufs(&(m_camera_info.isp));

        /* sensor */
        if (m_camera_info.sensor.status == false) {
            cam_int_s_fmt(&(m_camera_info.sensor));
            cam_int_reqbufs(&(m_camera_info.sensor));

            // Seed every sensor buffer's metadata plane (extP[1]) with the
            // dummy shot so the driver has valid per-frame control data.
            for (i = 0; i < m_camera_info.sensor.buffers; i++) {
                ALOGV("DEBUG(%s): sensor initial QBUF [%d]", __FUNCTION__, i);
                m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 33*1000*1000; // apply from frame #1
                m_camera_info.dummy_shot.shot.ctl.request.frameCount = -1;
                memcpy( m_camera_info.sensor.buffer[i].virt.extP[1], &(m_camera_info.dummy_shot),
                        sizeof(struct camera2_shot_ext));
            }
            // Queue only the minimum number of buffers to the driver...
            for (i = 0; i < NUM_MIN_SENSOR_QBUF; i++)
                cam_int_qbuf(&(m_camera_info.sensor), i);

            // ...and park the rest on the request manager's sensor queue.
            for (i = NUM_MIN_SENSOR_QBUF; i < m_camera_info.sensor.buffers; i++)
                m_requestManager->pushSensorQ(i);
            ALOGV("DEBUG(%s): calling sensor streamon", __FUNCTION__);
            cam_int_streamon(&(m_camera_info.sensor));
            m_camera_info.sensor.status = true;
        }
    }
    // Bring up (or resume) stream thread 1, which serves the SCC/capture node.
    if (!(m_streamThreads[1].get())) {
        ALOGV("DEBUG(%s): stream thread 1 not exist. starting without stream", __FUNCTION__);
        StartSCCThread(false);
    } else {
        if (m_streamThreads[1]->m_activated == false) {
            ALOGV("DEBUG(%s): stream thread 1 suspended. restarting", __FUNCTION__);
            StartSCCThread(true);
        } else {
            // Thread exists and is active, but the capture node itself may
            // still be stopped — re-initialize and restart it if so.
            if (m_camera_info.capture.status == false) {
                m_camera_info.capture.buffers = NUM_SCC_BUFFERS;
                cam_int_s_fmt(&(m_camera_info.capture));
                ALOGV("DEBUG(%s): capture calling reqbuf", __FUNCTION__);
                cam_int_reqbufs(&(m_camera_info.capture));
                ALOGV("DEBUG(%s): capture calling querybuf", __FUNCTION__);

                if (m_streamThreads[1]->streamType == STREAM_TYPE_DIRECT) {
                    // Direct (ZSL) mode: re-queue the service-owned gralloc
                    // buffers to the driver by raw fd.
                    StreamThread * targetStream = m_streamThreads[1].get();
                    stream_parameters_t *targetStreamParms = &(targetStream->m_parameters);
                    node_info_t *currentNode = targetStreamParms->node;

                    struct v4l2_buffer v4l2_buf;
                    struct v4l2_plane planes[VIDEO_MAX_PLANES];

                    for (i = 0 ; i < targetStreamParms->numSvcBuffers ; i++) {
                        v4l2_buf.m.planes = planes;
                        v4l2_buf.type = currentNode->type;
                        v4l2_buf.memory = currentNode->memory;

                        v4l2_buf.length = currentNode->planes;
                        v4l2_buf.index = i;
                        ExynosBuffer metaBuf = targetStreamParms->metaBuffers[i];

                        // Only the first `buffers` entries go to the driver;
                        // the remainder stay with the service.
                        if (i < currentNode->buffers) {
#ifdef ENABLE_FRAME_SYNC
                            // NOTE(review): planes 1 and 2 are deliberately
                            // swapped relative to extFd order — matches the
                            // registration code later in this file; confirm
                            // against the driver's plane layout.
                            v4l2_buf.m.planes[0].m.fd = targetStreamParms->svcBuffers[i].fd.extFd[0];
                            v4l2_buf.m.planes[2].m.fd = targetStreamParms->svcBuffers[i].fd.extFd[1];
                            v4l2_buf.m.planes[1].m.fd = targetStreamParms->svcBuffers[i].fd.extFd[2];
                            // Append the metadata plane as the last plane.
                            v4l2_buf.length += targetStreamParms->metaPlanes;
                            v4l2_buf.m.planes[v4l2_buf.length-1].m.fd = metaBuf.fd.extFd[0];
                            v4l2_buf.m.planes[v4l2_buf.length-1].length = metaBuf.size.extS[0];

                            ALOGV("Qbuf metaBuf: fd(%d), length(%d) plane(%d)", metaBuf.fd.extFd[0], metaBuf.size.extS[0], v4l2_buf.length);
#endif
                            if (exynos_v4l2_qbuf(currentNode->fd, &v4l2_buf) < 0) {
                                ALOGE("ERR(%s): exynos_v4l2_qbuf() fail fd(%d)", __FUNCTION__, currentNode->fd);
                            }
                            ALOGV("DEBUG(%s): exynos_v4l2_qbuf() success fd(%d)", __FUNCTION__, currentNode->fd);
                            targetStreamParms->svcBufStatus[i] = REQUIRES_DQ_FROM_SVC;
                        }
                        else {
                            targetStreamParms->svcBufStatus[i] = ON_SERVICE;
                        }

                    }

                } else {
                    // Non-direct mode: the HAL owns the buffers — queue them all.
                    for (int i = 0; i < m_camera_info.capture.buffers; i++) {
                        ALOGV("DEBUG(%s): capture initial QBUF [%d]", __FUNCTION__, i);
                        cam_int_qbuf(&(m_camera_info.capture), i);
                    }
                }
                ALOGV("== stream_on :: capture");
                if (cam_int_streamon(&(m_camera_info.capture)) < 0) {
                    ALOGE("ERR(%s): capture stream on fail", __FUNCTION__);
                } else {
                    m_camera_info.capture.status = true;
                }
            }
            // Clear a pending forced-suspend of the preview (SCP) path.
            if (m_scpForceSuspended) {
                m_scpForceSuspended = false;
            }
        }
    }
    if (m_isIspStarted == false) {
        StartISP();
        ALOGV("DEBUG(%s):starting sensor thread", __FUNCTION__);
        // Skip the first 6 frames while the pipeline settles.
        m_requestManager->SetInitialSkip(6);
        m_sensorThread->Start("SensorThread", PRIORITY_DEFAULT, 0);
        m_isIspStarted = true;
    }
    // Wake the main thread to start draining the request queue.
    m_mainThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY);
    return 0;
}
1618 | ||
1619 | int ExynosCameraHWInterface2::setFrameQueueDstOps(const camera2_frame_queue_dst_ops_t *frame_dst_ops) | |
1620 | { | |
13d8c7b4 | 1621 | ALOGV("DEBUG(%s):", __FUNCTION__); |
c15a6b00 JS |
1622 | if ((NULL != frame_dst_ops) && (NULL != frame_dst_ops->dequeue_frame) |
1623 | && (NULL != frame_dst_ops->cancel_frame) && (NULL !=frame_dst_ops->enqueue_frame)) { | |
1624 | m_frameQueueOps = (camera2_frame_queue_dst_ops_t *)frame_dst_ops; | |
1625 | return 0; | |
1626 | } | |
1627 | else { | |
13d8c7b4 | 1628 | ALOGE("DEBUG(%s):setFrameQueueDstOps : NULL arguments", __FUNCTION__); |
c15a6b00 JS |
1629 | return 1; |
1630 | } | |
1631 | } | |
1632 | ||
1633 | int ExynosCameraHWInterface2::getInProgressCount() | |
1634 | { | |
1264ab16 AR |
1635 | int inProgressJpeg; |
1636 | int inProgressCount; | |
1637 | ||
1638 | { | |
1639 | Mutex::Autolock lock(m_jpegEncoderLock); | |
1640 | inProgressJpeg = m_jpegEncodingCount; | |
1641 | inProgressCount = m_requestManager->GetNumEntries(); | |
1642 | } | |
32cf9401 | 1643 | ALOGV("DEBUG(%s): # of dequeued req (%d) jpeg(%d) = (%d)", __FUNCTION__, |
1264ab16 AR |
1644 | inProgressCount, inProgressJpeg, (inProgressCount + inProgressJpeg)); |
1645 | return (inProgressCount + inProgressJpeg); | |
c15a6b00 JS |
1646 | } |
1647 | ||
/*
 * HAL2 flush hook. Intentionally a no-op stub in this HAL: in-flight
 * captures are simply allowed to complete. Always returns 0 (success).
 */
int ExynosCameraHWInterface2::flushCapturesInProgress()
{
    return 0;
}
1652 | ||
c15a6b00 JS |
1653 | int ExynosCameraHWInterface2::constructDefaultRequest(int request_template, camera_metadata_t **request) |
1654 | { | |
13d8c7b4 | 1655 | ALOGV("DEBUG(%s): making template (%d) ", __FUNCTION__, request_template); |
c15a6b00 JS |
1656 | |
1657 | if (request == NULL) return BAD_VALUE; | |
1658 | if (request_template < 0 || request_template >= CAMERA2_TEMPLATE_COUNT) { | |
1659 | return BAD_VALUE; | |
1660 | } | |
1661 | status_t res; | |
1662 | // Pass 1, calculate size and allocate | |
daa1fcd6 | 1663 | res = m_camera2->constructDefaultRequest(request_template, |
c15a6b00 JS |
1664 | request, |
1665 | true); | |
1666 | if (res != OK) { | |
1667 | return res; | |
1668 | } | |
1669 | // Pass 2, build request | |
daa1fcd6 | 1670 | res = m_camera2->constructDefaultRequest(request_template, |
c15a6b00 JS |
1671 | request, |
1672 | false); | |
1673 | if (res != OK) { | |
1674 | ALOGE("Unable to populate new request for template %d", | |
1675 | request_template); | |
1676 | } | |
1677 | ||
1678 | return res; | |
1679 | } | |
1680 | ||
1681 | int ExynosCameraHWInterface2::allocateStream(uint32_t width, uint32_t height, int format, const camera2_stream_ops_t *stream_ops, | |
1682 | uint32_t *stream_id, uint32_t *format_actual, uint32_t *usage, uint32_t *max_buffers) | |
1683 | { | |
ed4ad5fe | 1684 | ALOGD("(%s): stream width(%d) height(%d) format(%x)", __FUNCTION__, width, height, format); |
5506cebf | 1685 | bool useDirectOutput = false; |
13d8c7b4 SK |
1686 | StreamThread *AllocatedStream; |
1687 | stream_parameters_t newParameters; | |
5506cebf SK |
1688 | substream_parameters_t *subParameters; |
1689 | StreamThread *parentStream; | |
1690 | status_t res; | |
1691 | int allocCase = 0; | |
c15a6b00 | 1692 | |
5506cebf SK |
1693 | if ((format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED || format == CAMERA2_HAL_PIXEL_FORMAT_OPAQUE) && |
1694 | m_camera2->isSupportedResolution(width, height)) { | |
9dd63e1f SK |
1695 | if (!(m_streamThreads[0].get())) { |
1696 | ALOGV("DEBUG(%s): stream 0 not exist", __FUNCTION__); | |
1697 | allocCase = 0; | |
13d8c7b4 SK |
1698 | } |
1699 | else { | |
6bbb593a | 1700 | if ((m_streamThreads[0].get())->m_activated == true) { |
9dd63e1f SK |
1701 | ALOGV("DEBUG(%s): stream 0 exists and activated.", __FUNCTION__); |
1702 | allocCase = 1; | |
1703 | } | |
1704 | else { | |
1705 | ALOGV("DEBUG(%s): stream 0 exists and deactivated.", __FUNCTION__); | |
1706 | allocCase = 2; | |
1707 | } | |
13d8c7b4 | 1708 | } |
5506cebf SK |
1709 | |
1710 | // TODO : instead of that, use calculate aspect ratio and selection with calculated ratio. | |
1711 | if ((width == 1920 && height == 1080) || (width == 1280 && height == 720) | |
1712 | || (width == 720 && height == 480) || (width == 1440 && height == 960) | |
1713 | || (width == 1344 && height == 896)) { | |
6d8e5b08 | 1714 | m_wideAspect = true; |
5506cebf | 1715 | } else { |
6d8e5b08 SK |
1716 | m_wideAspect = false; |
1717 | } | |
1718 | ALOGV("DEBUG(%s): m_wideAspect (%d)", __FUNCTION__, m_wideAspect); | |
1719 | ||
9dd63e1f | 1720 | if (allocCase == 0 || allocCase == 2) { |
5506cebf | 1721 | *stream_id = STREAM_ID_PREVIEW; |
9dd63e1f | 1722 | |
5506cebf | 1723 | m_streamThreads[0] = new StreamThread(this, *stream_id); |
9dd63e1f | 1724 | |
5506cebf | 1725 | AllocatedStream = (StreamThread*)(m_streamThreads[0].get()); |
053d38cf | 1726 | AllocatedStream->Start("StreamThread", PRIORITY_DEFAULT, 0); |
5506cebf SK |
1727 | m_streamThreadInitialize((SignalDrivenThread*)AllocatedStream); |
1728 | ||
1729 | *format_actual = HAL_PIXEL_FORMAT_EXYNOS_YV12; | |
1730 | *usage = GRALLOC_USAGE_SW_WRITE_OFTEN; | |
6568c0f1 AR |
1731 | if (m_wideAspect) |
1732 | *usage |= GRALLOC_USAGE_PRIVATE_CHROMA; | |
a0ea191c | 1733 | *max_buffers = 7; |
5506cebf SK |
1734 | |
1735 | newParameters.width = width; | |
1736 | newParameters.height = height; | |
1737 | newParameters.format = *format_actual; | |
1738 | newParameters.streamOps = stream_ops; | |
1739 | newParameters.usage = *usage; | |
ac8c2060 | 1740 | newParameters.numHwBuffers = NUM_SCP_BUFFERS; |
5506cebf SK |
1741 | newParameters.numOwnSvcBuffers = *max_buffers; |
1742 | newParameters.planes = NUM_PLANES(*format_actual); | |
1743 | newParameters.metaPlanes = 1; | |
1744 | newParameters.numSvcBufsInHal = 0; | |
a85ec381 | 1745 | newParameters.minUndequedBuffer = 3; |
bf96172c | 1746 | newParameters.needsIonMap = true; |
5506cebf SK |
1747 | |
1748 | newParameters.node = &m_camera_info.scp; | |
1749 | newParameters.node->type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; | |
1750 | newParameters.node->memory = V4L2_MEMORY_DMABUF; | |
1751 | ||
1752 | AllocatedStream->streamType = STREAM_TYPE_DIRECT; | |
1753 | AllocatedStream->m_index = 0; | |
9dd63e1f SK |
1754 | AllocatedStream->setParameter(&newParameters); |
1755 | AllocatedStream->m_activated = true; | |
5506cebf SK |
1756 | AllocatedStream->m_numRegisteredStream = 1; |
1757 | ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, AllocatedStream->m_numRegisteredStream); | |
ad37861e SK |
1758 | m_requestManager->SetDefaultParameters(m_camera2->getSensorW()); |
1759 | m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[2] = m_camera2->getSensorW(); | |
5506cebf SK |
1760 | if (m_subStreams[STREAM_ID_RECORD].type != SUBSTREAM_TYPE_NONE) |
1761 | AllocatedStream->attachSubStream(STREAM_ID_RECORD, 10); | |
1762 | if (m_subStreams[STREAM_ID_PRVCB].type != SUBSTREAM_TYPE_NONE) | |
1763 | AllocatedStream->attachSubStream(STREAM_ID_PRVCB, 70); | |
a3bcc37b AR |
1764 | |
1765 | // set video stabilization killswitch | |
1766 | m_requestManager->m_vdisEnable = width > 352 && height > 288; | |
1767 | ||
9dd63e1f | 1768 | return 0; |
5506cebf SK |
1769 | } else if (allocCase == 1) { |
1770 | *stream_id = STREAM_ID_RECORD; | |
1771 | ||
1772 | subParameters = &m_subStreams[STREAM_ID_RECORD]; | |
1773 | memset(subParameters, 0, sizeof(substream_parameters_t)); | |
1774 | ||
9dd63e1f SK |
1775 | parentStream = (StreamThread*)(m_streamThreads[0].get()); |
1776 | if (!parentStream) { | |
1777 | return 1; | |
9dd63e1f | 1778 | } |
9dd63e1f | 1779 | |
804236a7 | 1780 | *format_actual = HAL_PIXEL_FORMAT_YCbCr_420_SP; // NV12M |
6bbb593a | 1781 | *usage = GRALLOC_USAGE_SW_WRITE_OFTEN; |
6568c0f1 AR |
1782 | if (m_wideAspect) |
1783 | *usage |= GRALLOC_USAGE_PRIVATE_CHROMA; | |
a0ea191c | 1784 | *max_buffers = 7; |
9dd63e1f | 1785 | |
5506cebf SK |
1786 | subParameters->type = SUBSTREAM_TYPE_RECORD; |
1787 | subParameters->width = width; | |
1788 | subParameters->height = height; | |
1789 | subParameters->format = *format_actual; | |
1790 | subParameters->svcPlanes = NUM_PLANES(*format_actual); | |
1791 | subParameters->streamOps = stream_ops; | |
1792 | subParameters->usage = *usage; | |
1793 | subParameters->numOwnSvcBuffers = *max_buffers; | |
1794 | subParameters->numSvcBufsInHal = 0; | |
1795 | subParameters->needBufferInit = false; | |
1796 | subParameters->minUndequedBuffer = 2; | |
1797 | ||
1798 | res = parentStream->attachSubStream(STREAM_ID_RECORD, 20); | |
1799 | if (res != NO_ERROR) { | |
1800 | ALOGE("(%s): substream attach failed. res(%d)", __FUNCTION__, res); | |
1801 | return 1; | |
1802 | } | |
1803 | ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, parentStream->m_numRegisteredStream); | |
1804 | ALOGV("(%s): Enabling Record", __FUNCTION__); | |
9dd63e1f SK |
1805 | return 0; |
1806 | } | |
13d8c7b4 | 1807 | } |
b8d41ae2 | 1808 | else if ((format == CAMERA2_HAL_PIXEL_FORMAT_ZSL) |
1422aff9 | 1809 | && ((int32_t)width == m_camera2->getSensorW()) && ((int32_t)height == m_camera2->getSensorH())) { |
13d8c7b4 | 1810 | |
5506cebf SK |
1811 | if (!(m_streamThreads[1].get())) { |
1812 | ALOGV("DEBUG(%s): stream thread 1 not exist", __FUNCTION__); | |
1813 | useDirectOutput = true; | |
6d8e5b08 SK |
1814 | } |
1815 | else { | |
5506cebf | 1816 | ALOGV("DEBUG(%s): stream thread 1 exists and deactivated.", __FUNCTION__); |
90e439c1 | 1817 | useDirectOutput = false; |
5506cebf SK |
1818 | } |
1819 | if (useDirectOutput) { | |
1820 | *stream_id = STREAM_ID_ZSL; | |
1821 | ||
053d38cf | 1822 | m_streamThreads[1] = new StreamThread(this, *stream_id); |
5506cebf | 1823 | AllocatedStream = (StreamThread*)(m_streamThreads[1].get()); |
053d38cf | 1824 | AllocatedStream->Start("StreamThread", PRIORITY_DEFAULT, 0); |
5506cebf SK |
1825 | m_streamThreadInitialize((SignalDrivenThread*)AllocatedStream); |
1826 | ||
1827 | *format_actual = HAL_PIXEL_FORMAT_EXYNOS_YV12; | |
5506cebf SK |
1828 | |
1829 | *format_actual = HAL_PIXEL_FORMAT_YCbCr_422_I; // YUYV | |
1830 | *usage = GRALLOC_USAGE_SW_WRITE_OFTEN; | |
6568c0f1 AR |
1831 | if (m_wideAspect) |
1832 | *usage |= GRALLOC_USAGE_PRIVATE_CHROMA; | |
a0ea191c | 1833 | *max_buffers = 7; |
5506cebf SK |
1834 | |
1835 | newParameters.width = width; | |
1836 | newParameters.height = height; | |
1837 | newParameters.format = *format_actual; | |
1838 | newParameters.streamOps = stream_ops; | |
1839 | newParameters.usage = *usage; | |
ac8c2060 | 1840 | newParameters.numHwBuffers = NUM_SCC_BUFFERS; |
5506cebf SK |
1841 | newParameters.numOwnSvcBuffers = *max_buffers; |
1842 | newParameters.planes = NUM_PLANES(*format_actual); | |
1843 | newParameters.metaPlanes = 1; | |
1844 | ||
1845 | newParameters.numSvcBufsInHal = 0; | |
a85ec381 | 1846 | newParameters.minUndequedBuffer = 2; |
bf96172c | 1847 | newParameters.needsIonMap = false; |
5506cebf SK |
1848 | |
1849 | newParameters.node = &m_camera_info.capture; | |
1850 | newParameters.node->type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; | |
1851 | newParameters.node->memory = V4L2_MEMORY_DMABUF; | |
1852 | ||
1853 | AllocatedStream->streamType = STREAM_TYPE_DIRECT; | |
1854 | AllocatedStream->m_index = 1; | |
1855 | AllocatedStream->setParameter(&newParameters); | |
1856 | AllocatedStream->m_activated = true; | |
b8d41ae2 | 1857 | AllocatedStream->m_numRegisteredStream = 1; |
5506cebf SK |
1858 | ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, AllocatedStream->m_numRegisteredStream); |
1859 | return 0; | |
90e439c1 SK |
1860 | } else { |
1861 | bool bJpegExists = false; | |
1862 | AllocatedStream = (StreamThread*)(m_streamThreads[1].get()); | |
1863 | subParameters = &m_subStreams[STREAM_ID_JPEG]; | |
1864 | if (subParameters->type == SUBSTREAM_TYPE_JPEG) { | |
1865 | ALOGD("(%s): jpeg stream exists", __FUNCTION__); | |
1866 | bJpegExists = true; | |
1867 | AllocatedStream->detachSubStream(STREAM_ID_JPEG); | |
1868 | } | |
1869 | AllocatedStream->m_releasing = true; | |
1870 | ALOGD("START stream thread 1 release %d", __LINE__); | |
1871 | do { | |
1872 | AllocatedStream->release(); | |
041f38de | 1873 | usleep(SIG_WAITING_TICK); |
90e439c1 SK |
1874 | } while (AllocatedStream->m_releasing); |
1875 | ALOGD("END stream thread 1 release %d", __LINE__); | |
1876 | ||
1877 | *stream_id = STREAM_ID_ZSL; | |
1878 | ||
1879 | m_streamThreadInitialize((SignalDrivenThread*)AllocatedStream); | |
1880 | ||
1881 | *format_actual = HAL_PIXEL_FORMAT_EXYNOS_YV12; | |
90e439c1 SK |
1882 | |
1883 | *format_actual = HAL_PIXEL_FORMAT_YCbCr_422_I; // YUYV | |
1884 | *usage = GRALLOC_USAGE_SW_WRITE_OFTEN; | |
6568c0f1 AR |
1885 | if (m_wideAspect) |
1886 | *usage |= GRALLOC_USAGE_PRIVATE_CHROMA; | |
a0ea191c | 1887 | *max_buffers = 7; |
90e439c1 SK |
1888 | |
1889 | newParameters.width = width; | |
1890 | newParameters.height = height; | |
1891 | newParameters.format = *format_actual; | |
1892 | newParameters.streamOps = stream_ops; | |
1893 | newParameters.usage = *usage; | |
1894 | newParameters.numHwBuffers = NUM_SCC_BUFFERS; | |
1895 | newParameters.numOwnSvcBuffers = *max_buffers; | |
1896 | newParameters.planes = NUM_PLANES(*format_actual); | |
1897 | newParameters.metaPlanes = 1; | |
1898 | ||
1899 | newParameters.numSvcBufsInHal = 0; | |
bf96172c SK |
1900 | newParameters.minUndequedBuffer = 2; |
1901 | newParameters.needsIonMap = false; | |
90e439c1 SK |
1902 | |
1903 | newParameters.node = &m_camera_info.capture; | |
1904 | newParameters.node->type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; | |
1905 | newParameters.node->memory = V4L2_MEMORY_DMABUF; | |
1906 | ||
1907 | AllocatedStream->streamType = STREAM_TYPE_DIRECT; | |
1908 | AllocatedStream->m_index = 1; | |
1909 | AllocatedStream->setParameter(&newParameters); | |
1910 | AllocatedStream->m_activated = true; | |
1911 | AllocatedStream->m_numRegisteredStream = 1; | |
1912 | if (bJpegExists) { | |
1913 | AllocatedStream->attachSubStream(STREAM_ID_JPEG, 10); | |
1914 | } | |
1915 | ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, AllocatedStream->m_numRegisteredStream); | |
1916 | return 0; | |
1917 | ||
6d8e5b08 | 1918 | } |
5506cebf SK |
1919 | } |
1920 | else if (format == HAL_PIXEL_FORMAT_BLOB | |
1921 | && m_camera2->isSupportedJpegResolution(width, height)) { | |
1922 | *stream_id = STREAM_ID_JPEG; | |
6d8e5b08 | 1923 | |
5506cebf SK |
1924 | subParameters = &m_subStreams[*stream_id]; |
1925 | memset(subParameters, 0, sizeof(substream_parameters_t)); | |
9dd63e1f | 1926 | |
5506cebf SK |
1927 | if (!(m_streamThreads[1].get())) { |
1928 | ALOGV("DEBUG(%s): stream thread 1 not exist", __FUNCTION__); | |
1929 | StartSCCThread(false); | |
1930 | } | |
1931 | else if (m_streamThreads[1]->m_activated == false) { | |
1932 | ALOGV("DEBUG(%s): stream thread 1 suspended. restarting", __FUNCTION__); | |
1933 | StartSCCThread(true); | |
1934 | } | |
1935 | parentStream = (StreamThread*)(m_streamThreads[1].get()); | |
13d8c7b4 SK |
1936 | |
1937 | *format_actual = HAL_PIXEL_FORMAT_BLOB; | |
13d8c7b4 | 1938 | *usage = GRALLOC_USAGE_SW_WRITE_OFTEN; |
6568c0f1 AR |
1939 | if (m_wideAspect) |
1940 | *usage |= GRALLOC_USAGE_PRIVATE_CHROMA; | |
a0ea191c | 1941 | *max_buffers = 5; |
13d8c7b4 | 1942 | |
5506cebf SK |
1943 | subParameters->type = SUBSTREAM_TYPE_JPEG; |
1944 | subParameters->width = width; | |
1945 | subParameters->height = height; | |
1946 | subParameters->format = *format_actual; | |
1947 | subParameters->svcPlanes = 1; | |
1948 | subParameters->streamOps = stream_ops; | |
1949 | subParameters->usage = *usage; | |
1950 | subParameters->numOwnSvcBuffers = *max_buffers; | |
1951 | subParameters->numSvcBufsInHal = 0; | |
1952 | subParameters->needBufferInit = false; | |
1953 | subParameters->minUndequedBuffer = 2; | |
1954 | ||
1955 | res = parentStream->attachSubStream(STREAM_ID_JPEG, 10); | |
1956 | if (res != NO_ERROR) { | |
1957 | ALOGE("(%s): substream attach failed. res(%d)", __FUNCTION__, res); | |
1958 | return 1; | |
1959 | } | |
1960 | ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, parentStream->m_numRegisteredStream); | |
1961 | ALOGV("(%s): Enabling Jpeg", __FUNCTION__); | |
13d8c7b4 SK |
1962 | return 0; |
1963 | } | |
74d78ebe | 1964 | else if (format == HAL_PIXEL_FORMAT_YCrCb_420_SP || format == HAL_PIXEL_FORMAT_YV12) { |
5506cebf SK |
1965 | *stream_id = STREAM_ID_PRVCB; |
1966 | ||
1967 | subParameters = &m_subStreams[STREAM_ID_PRVCB]; | |
1968 | memset(subParameters, 0, sizeof(substream_parameters_t)); | |
1969 | ||
74d78ebe SK |
1970 | parentStream = (StreamThread*)(m_streamThreads[0].get()); |
1971 | if (!parentStream) { | |
74d78ebe SK |
1972 | return 1; |
1973 | } | |
74d78ebe SK |
1974 | |
1975 | *format_actual = format; | |
1976 | *usage = GRALLOC_USAGE_SW_WRITE_OFTEN; | |
6568c0f1 AR |
1977 | if (m_wideAspect) |
1978 | *usage |= GRALLOC_USAGE_PRIVATE_CHROMA; | |
a0ea191c | 1979 | *max_buffers = 7; |
5506cebf SK |
1980 | |
1981 | subParameters->type = SUBSTREAM_TYPE_PRVCB; | |
1982 | subParameters->width = width; | |
1983 | subParameters->height = height; | |
1984 | subParameters->format = *format_actual; | |
1985 | subParameters->svcPlanes = NUM_PLANES(*format_actual); | |
1986 | subParameters->streamOps = stream_ops; | |
1987 | subParameters->usage = *usage; | |
1988 | subParameters->numOwnSvcBuffers = *max_buffers; | |
1989 | subParameters->numSvcBufsInHal = 0; | |
1990 | subParameters->needBufferInit = false; | |
1991 | subParameters->minUndequedBuffer = 2; | |
1992 | ||
1993 | if (format == HAL_PIXEL_FORMAT_YCrCb_420_SP) { | |
1994 | subParameters->internalFormat = HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP; | |
1995 | subParameters->internalPlanes = NUM_PLANES(HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP); | |
1996 | } | |
1997 | else { | |
1998 | subParameters->internalFormat = HAL_PIXEL_FORMAT_EXYNOS_YV12; | |
1999 | subParameters->internalPlanes = NUM_PLANES(HAL_PIXEL_FORMAT_EXYNOS_YV12); | |
2000 | } | |
74d78ebe | 2001 | |
5506cebf SK |
2002 | res = parentStream->attachSubStream(STREAM_ID_PRVCB, 20); |
2003 | if (res != NO_ERROR) { | |
2004 | ALOGE("(%s): substream attach failed. res(%d)", __FUNCTION__, res); | |
2005 | return 1; | |
74d78ebe | 2006 | } |
5506cebf SK |
2007 | ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, parentStream->m_numRegisteredStream); |
2008 | ALOGV("(%s): Enabling previewcb", __FUNCTION__); | |
74d78ebe SK |
2009 | return 0; |
2010 | } | |
ed4ad5fe | 2011 | ALOGE("(%s): Unsupported Pixel Format", __FUNCTION__); |
5506cebf | 2012 | return 1; |
c15a6b00 JS |
2013 | } |
2014 | ||
13d8c7b4 SK |
2015 | int ExynosCameraHWInterface2::registerStreamBuffers(uint32_t stream_id, |
2016 | int num_buffers, buffer_handle_t *registeringBuffers) | |
c15a6b00 | 2017 | { |
13d8c7b4 SK |
2018 | int i,j; |
2019 | void *virtAddr[3]; | |
5506cebf SK |
2020 | int plane_index = 0; |
2021 | StreamThread * targetStream; | |
13d8c7b4 SK |
2022 | stream_parameters_t *targetStreamParms; |
2023 | node_info_t *currentNode; | |
2024 | ||
c15a6b00 JS |
2025 | struct v4l2_buffer v4l2_buf; |
2026 | struct v4l2_plane planes[VIDEO_MAX_PLANES]; | |
13d8c7b4 | 2027 | |
ed4ad5fe | 2028 | ALOGD("(%s): stream_id(%d), num_buff(%d), handle(%x) ", __FUNCTION__, |
13d8c7b4 SK |
2029 | stream_id, num_buffers, (uint32_t)registeringBuffers); |
2030 | ||
5506cebf SK |
2031 | if (stream_id == STREAM_ID_PREVIEW && m_streamThreads[0].get()) { |
2032 | targetStream = m_streamThreads[0].get(); | |
13d8c7b4 | 2033 | targetStreamParms = &(m_streamThreads[0]->m_parameters); |
5c88d1f2 | 2034 | |
13d8c7b4 | 2035 | } |
5506cebf SK |
2036 | else if (stream_id == STREAM_ID_JPEG || stream_id == STREAM_ID_RECORD || stream_id == STREAM_ID_PRVCB) { |
2037 | substream_parameters_t *targetParms; | |
2038 | targetParms = &m_subStreams[stream_id]; | |
9dd63e1f | 2039 | |
5506cebf | 2040 | targetParms->numSvcBuffers = num_buffers; |
9dd63e1f | 2041 | |
5506cebf SK |
2042 | for (i = 0 ; i < targetParms->numSvcBuffers ; i++) { |
2043 | ALOGV("(%s): registering substream(%d) Buffers[%d] (%x) ", __FUNCTION__, | |
2044 | i, stream_id, (uint32_t)(registeringBuffers[i])); | |
9dd63e1f SK |
2045 | if (m_grallocHal) { |
2046 | if (m_grallocHal->lock(m_grallocHal, registeringBuffers[i], | |
5506cebf SK |
2047 | targetParms->usage, 0, 0, |
2048 | targetParms->width, targetParms->height, virtAddr) != 0) { | |
9dd63e1f SK |
2049 | ALOGE("ERR(%s): could not obtain gralloc buffer", __FUNCTION__); |
2050 | } | |
2051 | else { | |
2052 | ExynosBuffer currentBuf; | |
2053 | const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(registeringBuffers[i]); | |
5506cebf SK |
2054 | if (targetParms->svcPlanes == 1) { |
2055 | currentBuf.fd.extFd[0] = priv_handle->fd; | |
2056 | currentBuf.size.extS[0] = priv_handle->size; | |
2057 | currentBuf.size.extS[1] = 0; | |
2058 | currentBuf.size.extS[2] = 0; | |
2059 | } else if (targetParms->svcPlanes == 2) { | |
2060 | currentBuf.fd.extFd[0] = priv_handle->fd; | |
2061 | currentBuf.fd.extFd[1] = priv_handle->fd1; | |
2062 | ||
2063 | } else if (targetParms->svcPlanes == 3) { | |
2064 | currentBuf.fd.extFd[0] = priv_handle->fd; | |
2065 | currentBuf.fd.extFd[1] = priv_handle->fd1; | |
2066 | currentBuf.fd.extFd[2] = priv_handle->fd2; | |
2067 | } | |
2068 | for (plane_index = 0 ; plane_index < targetParms->svcPlanes ; plane_index++) { | |
9dd63e1f | 2069 | currentBuf.virt.extP[plane_index] = (char *)virtAddr[plane_index]; |
0d220b42 | 2070 | CAM_LOGV("DEBUG(%s): plane(%d): fd(%d) addr(%x) size(%d)", |
804236a7 | 2071 | __FUNCTION__, plane_index, currentBuf.fd.extFd[plane_index], |
5506cebf | 2072 | (unsigned int)currentBuf.virt.extP[plane_index], currentBuf.size.extS[plane_index]); |
9dd63e1f | 2073 | } |
5506cebf SK |
2074 | targetParms->svcBufStatus[i] = ON_SERVICE; |
2075 | targetParms->svcBuffers[i] = currentBuf; | |
2076 | targetParms->svcBufHandle[i] = registeringBuffers[i]; | |
9dd63e1f SK |
2077 | } |
2078 | } | |
2079 | } | |
5506cebf | 2080 | targetParms->needBufferInit = true; |
9dd63e1f SK |
2081 | return 0; |
2082 | } | |
5506cebf SK |
2083 | else if (stream_id == STREAM_ID_ZSL && m_streamThreads[1].get()) { |
2084 | targetStream = m_streamThreads[1].get(); | |
2085 | targetStreamParms = &(m_streamThreads[1]->m_parameters); | |
74d78ebe | 2086 | } |
13d8c7b4 | 2087 | else { |
ed4ad5fe | 2088 | ALOGE("(%s): unregistered stream id (%d)", __FUNCTION__, stream_id); |
be494d19 | 2089 | return 1; |
13d8c7b4 | 2090 | } |
c15a6b00 | 2091 | |
5506cebf | 2092 | if (targetStream->streamType == STREAM_TYPE_DIRECT) { |
13d8c7b4 SK |
2093 | if (num_buffers < targetStreamParms->numHwBuffers) { |
2094 | ALOGE("ERR(%s) registering insufficient num of buffers (%d) < (%d)", | |
2095 | __FUNCTION__, num_buffers, targetStreamParms->numHwBuffers); | |
be494d19 | 2096 | return 1; |
13d8c7b4 SK |
2097 | } |
2098 | } | |
0d220b42 | 2099 | CAM_LOGV("DEBUG(%s): format(%x) width(%d), height(%d) svcPlanes(%d)", |
5506cebf SK |
2100 | __FUNCTION__, targetStreamParms->format, targetStreamParms->width, |
2101 | targetStreamParms->height, targetStreamParms->planes); | |
13d8c7b4 | 2102 | targetStreamParms->numSvcBuffers = num_buffers; |
5506cebf SK |
2103 | currentNode = targetStreamParms->node; |
2104 | currentNode->width = targetStreamParms->width; | |
2105 | currentNode->height = targetStreamParms->height; | |
2106 | currentNode->format = HAL_PIXEL_FORMAT_2_V4L2_PIX(targetStreamParms->format); | |
2107 | currentNode->planes = targetStreamParms->planes; | |
13d8c7b4 | 2108 | currentNode->buffers = targetStreamParms->numHwBuffers; |
5506cebf SK |
2109 | cam_int_s_input(currentNode, m_camera_info.sensor_id); |
2110 | cam_int_s_fmt(currentNode); | |
2111 | cam_int_reqbufs(currentNode); | |
2112 | for (i = 0 ; i < targetStreamParms->numSvcBuffers ; i++) { | |
13d8c7b4 SK |
2113 | ALOGV("DEBUG(%s): registering Stream Buffers[%d] (%x) ", __FUNCTION__, |
2114 | i, (uint32_t)(registeringBuffers[i])); | |
13d8c7b4 SK |
2115 | v4l2_buf.m.planes = planes; |
2116 | v4l2_buf.type = currentNode->type; | |
2117 | v4l2_buf.memory = currentNode->memory; | |
2118 | v4l2_buf.index = i; | |
2119 | v4l2_buf.length = currentNode->planes; | |
c15a6b00 | 2120 | |
13d8c7b4 | 2121 | ExynosBuffer currentBuf; |
feb7df4c | 2122 | ExynosBuffer metaBuf; |
13d8c7b4 SK |
2123 | const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(registeringBuffers[i]); |
2124 | ||
2125 | m_getAlignedYUVSize(currentNode->format, | |
2126 | currentNode->width, currentNode->height, ¤tBuf); | |
24231221 | 2127 | |
37e122d5 SK |
2128 | ALOGV("DEBUG(%s): ion_size(%d), stride(%d), ", __FUNCTION__, priv_handle->size, priv_handle->stride); |
2129 | if (currentNode->planes == 1) { | |
74d78ebe SK |
2130 | v4l2_buf.m.planes[0].m.fd = priv_handle->fd; |
2131 | currentBuf.fd.extFd[0] = priv_handle->fd; | |
37e122d5 SK |
2132 | currentBuf.size.extS[0] = priv_handle->size; |
2133 | currentBuf.size.extS[1] = 0; | |
2134 | currentBuf.size.extS[2] = 0; | |
74d78ebe SK |
2135 | } else if (currentNode->planes == 2) { |
2136 | v4l2_buf.m.planes[0].m.fd = priv_handle->fd; | |
2137 | v4l2_buf.m.planes[1].m.fd = priv_handle->fd1; | |
2138 | currentBuf.fd.extFd[0] = priv_handle->fd; | |
2139 | currentBuf.fd.extFd[1] = priv_handle->fd1; | |
2140 | ||
2141 | } else if (currentNode->planes == 3) { | |
2142 | v4l2_buf.m.planes[0].m.fd = priv_handle->fd; | |
2143 | v4l2_buf.m.planes[2].m.fd = priv_handle->fd1; | |
2144 | v4l2_buf.m.planes[1].m.fd = priv_handle->fd2; | |
2145 | currentBuf.fd.extFd[0] = priv_handle->fd; | |
2146 | currentBuf.fd.extFd[2] = priv_handle->fd1; | |
2147 | currentBuf.fd.extFd[1] = priv_handle->fd2; | |
37e122d5 | 2148 | } |
0d220b42 | 2149 | |
5506cebf | 2150 | for (plane_index = 0 ; plane_index < (int)v4l2_buf.length ; plane_index++) { |
bf96172c SK |
2151 | if (targetStreamParms->needsIonMap) |
2152 | currentBuf.virt.extP[plane_index] = (char *)ion_map(currentBuf.fd.extFd[plane_index], currentBuf.size.extS[plane_index], 0); | |
13d8c7b4 | 2153 | v4l2_buf.m.planes[plane_index].length = currentBuf.size.extS[plane_index]; |
bf96172c | 2154 | ALOGV("(%s): MAPPING plane(%d): fd(%d) addr(%x), length(%d)", |
13d8c7b4 SK |
2155 | __FUNCTION__, plane_index, v4l2_buf.m.planes[plane_index].m.fd, |
2156 | (unsigned int)currentBuf.virt.extP[plane_index], | |
2157 | v4l2_buf.m.planes[plane_index].length); | |
2158 | } | |
c15a6b00 | 2159 | |
5506cebf | 2160 | if (i < currentNode->buffers) { |
feb7df4c SK |
2161 | |
2162 | ||
2163 | #ifdef ENABLE_FRAME_SYNC | |
5506cebf SK |
2164 | /* add plane for metadata*/ |
2165 | metaBuf.size.extS[0] = 4*1024; | |
2166 | allocCameraMemory(m_ionCameraClient , &metaBuf, 1, 1<<0); | |
feb7df4c | 2167 | |
5506cebf SK |
2168 | v4l2_buf.length += targetStreamParms->metaPlanes; |
2169 | v4l2_buf.m.planes[v4l2_buf.length-1].m.fd = metaBuf.fd.extFd[0]; | |
2170 | v4l2_buf.m.planes[v4l2_buf.length-1].length = metaBuf.size.extS[0]; | |
feb7df4c | 2171 | |
5506cebf | 2172 | ALOGV("Qbuf metaBuf: fd(%d), length(%d) plane(%d)", metaBuf.fd.extFd[0], metaBuf.size.extS[0], v4l2_buf.length); |
feb7df4c | 2173 | #endif |
5506cebf SK |
2174 | if (exynos_v4l2_qbuf(currentNode->fd, &v4l2_buf) < 0) { |
2175 | ALOGE("ERR(%s): stream id(%d) exynos_v4l2_qbuf() fail fd(%d)", | |
2176 | __FUNCTION__, stream_id, currentNode->fd); | |
13d8c7b4 | 2177 | } |
5506cebf SK |
2178 | ALOGV("DEBUG(%s): stream id(%d) exynos_v4l2_qbuf() success fd(%d)", |
2179 | __FUNCTION__, stream_id, currentNode->fd); | |
2180 | targetStreamParms->svcBufStatus[i] = REQUIRES_DQ_FROM_SVC; | |
13d8c7b4 | 2181 | } |
5506cebf | 2182 | else { |
13d8c7b4 | 2183 | targetStreamParms->svcBufStatus[i] = ON_SERVICE; |
c15a6b00 | 2184 | } |
5506cebf | 2185 | |
13d8c7b4 | 2186 | targetStreamParms->svcBuffers[i] = currentBuf; |
feb7df4c | 2187 | targetStreamParms->metaBuffers[i] = metaBuf; |
13d8c7b4 SK |
2188 | targetStreamParms->svcBufHandle[i] = registeringBuffers[i]; |
2189 | } | |
6d8e5b08 | 2190 | |
5506cebf SK |
2191 | ALOGV("DEBUG(%s): calling streamon stream id = %d", __FUNCTION__, stream_id); |
2192 | cam_int_streamon(targetStreamParms->node); | |
ad37861e | 2193 | ALOGV("DEBUG(%s): calling streamon END", __FUNCTION__); |
5506cebf | 2194 | currentNode->status = true; |
13d8c7b4 | 2195 | ALOGV("DEBUG(%s): END registerStreamBuffers", __FUNCTION__); |
ad37861e | 2196 | |
c15a6b00 JS |
2197 | return 0; |
2198 | } | |
2199 | ||
/*
 * Releases the service-side stream identified by stream_id and, when the
 * last registered stream on a stream thread goes away, tears the thread down.
 *
 * Per-id behavior visible below:
 *  - PREVIEW: decrements stream thread 0's registration count, unmaps any
 *    ion-mapped service buffers, and flags releasingScpMain so the sensor
 *    thread and stream thread 0 are shut down at the bottom of the function.
 *  - JPEG: frees the resize scratch buffer, detaches the JPEG substream from
 *    stream thread 1, and returns early (no thread teardown here).
 *  - RECORD / PRVCB: detach the substream from stream thread 0; if other
 *    streams remain registered, return early.
 *  - ZSL: decrements stream thread 1's registration count and unmaps buffers.
 *
 * Returns 0 on success, 1 on detach failure or unknown id; a missing stream
 * thread is treated as already-released (NO_ERROR).
 */
int ExynosCameraHWInterface2::releaseStream(uint32_t stream_id)
{
    StreamThread *targetStream;
    status_t res = NO_ERROR;
    ALOGD("(%s): stream_id(%d)", __FUNCTION__, stream_id);
    bool releasingScpMain = false;  // true only for the main preview (SCP) stream

    if (stream_id == STREAM_ID_PREVIEW) {
        targetStream = (StreamThread*)(m_streamThreads[0].get());
        if (!targetStream) {
            ALOGW("(%s): Stream Not Exists", __FUNCTION__);
            return NO_ERROR;
        }
        targetStream->m_numRegisteredStream--;
        ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, targetStream->m_numRegisteredStream);
        releasingScpMain = true;
        // Undo the ion_map() done at buffer registration time, plane by plane.
        if (targetStream->m_parameters.needsIonMap) {
            for (int i = 0; i < targetStream->m_parameters.numSvcBuffers; i++) {
                for (int j = 0; j < targetStream->m_parameters.planes; j++) {
                    ion_unmap(targetStream->m_parameters.svcBuffers[i].virt.extP[j],
                                    targetStream->m_parameters.svcBuffers[i].size.extS[j]);
                    ALOGV("(%s) ummap stream buffer[%d], plane(%d), fd %d vaddr %x", __FUNCTION__, i, j,
                          targetStream->m_parameters.svcBuffers[i].fd.extFd[j], (unsigned int)(targetStream->m_parameters.svcBuffers[i].virt.extP[j]));
                }
            }
        }
    } else if (stream_id == STREAM_ID_JPEG) {
        // Release the intermediate resize buffer used for JPEG capture, if allocated.
        if (m_resizeBuf.size.s != 0) {
            freeCameraMemory(&m_resizeBuf, 1);
        }
        memset(&m_subStreams[stream_id], 0, sizeof(substream_parameters_t));

        targetStream = (StreamThread*)(m_streamThreads[1].get());
        if (!targetStream) {
            ALOGW("(%s): Stream Not Exists", __FUNCTION__);
            return NO_ERROR;
        }

        if (targetStream->detachSubStream(stream_id) != NO_ERROR) {
            ALOGE("(%s): substream detach failed. res(%d)", __FUNCTION__, res);
            return 1;
        }
        ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, targetStream->m_numRegisteredStream);
        // JPEG substream release never tears down threads; done here.
        return 0;
    } else if (stream_id == STREAM_ID_RECORD) {
        memset(&m_subStreams[stream_id], 0, sizeof(substream_parameters_t));

        targetStream = (StreamThread*)(m_streamThreads[0].get());
        if (!targetStream) {
            ALOGW("(%s): Stream Not Exists", __FUNCTION__);
            return NO_ERROR;
        }

        if (targetStream->detachSubStream(stream_id) != NO_ERROR) {
            ALOGE("(%s): substream detach failed. res(%d)", __FUNCTION__, res);
            return 1;
        }

        // Other streams still attached to thread 0 -> keep it running.
        if (targetStream->m_numRegisteredStream != 0)
            return 0;
    } else if (stream_id == STREAM_ID_PRVCB) {
        // Release the preview-callback conversion buffer, if allocated.
        if (m_previewCbBuf.size.s != 0) {
            freeCameraMemory(&m_previewCbBuf, m_subStreams[stream_id].internalPlanes);
        }
        memset(&m_subStreams[stream_id], 0, sizeof(substream_parameters_t));

        targetStream = (StreamThread*)(m_streamThreads[0].get());
        if (!targetStream) {
            ALOGW("(%s): Stream Not Exists", __FUNCTION__);
            return NO_ERROR;
        }

        if (targetStream->detachSubStream(stream_id) != NO_ERROR) {
            ALOGE("(%s): substream detach failed. res(%d)", __FUNCTION__, res);
            return 1;
        }

        if (targetStream->m_numRegisteredStream != 0)
            return 0;
    } else if (stream_id == STREAM_ID_ZSL) {
        targetStream = (StreamThread*)(m_streamThreads[1].get());
        if (!targetStream) {
            ALOGW("(%s): Stream Not Exists", __FUNCTION__);
            return NO_ERROR;
        }

        targetStream->m_numRegisteredStream--;
        ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, targetStream->m_numRegisteredStream);
        if (targetStream->m_parameters.needsIonMap) {
            for (int i = 0; i < targetStream->m_parameters.numSvcBuffers; i++) {
                for (int j = 0; j < targetStream->m_parameters.planes; j++) {
                    ion_unmap(targetStream->m_parameters.svcBuffers[i].virt.extP[j],
                                    targetStream->m_parameters.svcBuffers[i].size.extS[j]);
                    ALOGV("(%s) ummap stream buffer[%d], plane(%d), fd %d vaddr %x", __FUNCTION__, i, j,
                          targetStream->m_parameters.svcBuffers[i].fd.extFd[j], (unsigned int)(targetStream->m_parameters.svcBuffers[i].virt.extP[j]));
                }
            }
        }
    } else {
        ALOGE("ERR:(%s): wrong stream id (%d)", __FUNCTION__, stream_id);
        return 1;
    }

    // Releasing the main SCP stream: stop the sensor thread first and wait
    // (busy-poll) until it has fully terminated before touching stream threads.
    if (m_sensorThread != NULL && releasingScpMain) {
        m_sensorThread->release();
        ALOGD("(%s): START Waiting for (indirect) sensor thread termination", __FUNCTION__);
        while (!m_sensorThread->IsTerminated())
            usleep(SIG_WAITING_TICK);
        ALOGD("(%s): END Waiting for (indirect) sensor thread termination", __FUNCTION__);
    }

    // Deactivate stream thread 1 (capture/ZSL) once nothing is registered on it.
    // release() is signalled repeatedly until the thread clears m_releasing.
    if (m_streamThreads[1]->m_numRegisteredStream == 0 && m_streamThreads[1]->m_activated) {
        ALOGV("(%s): deactivating stream thread 1 ", __FUNCTION__);
        targetStream = (StreamThread*)(m_streamThreads[1].get());
        targetStream->m_releasing = true;
        ALOGD("START stream thread release %d", __LINE__);
        do {
            targetStream->release();
            usleep(SIG_WAITING_TICK);
        } while (targetStream->m_releasing);
        m_camera_info.capture.status = false;
        ALOGD("END stream thread release %d", __LINE__);
    }

    // Deactivate and fully terminate stream thread 0 (preview) when it is the
    // SCP main being released or it has no registered streams left.
    if (releasingScpMain || (m_streamThreads[0].get() != NULL && m_streamThreads[0]->m_numRegisteredStream == 0 && m_streamThreads[0]->m_activated)) {
        ALOGV("(%s): deactivating stream thread 0", __FUNCTION__);
        targetStream = (StreamThread*)(m_streamThreads[0].get());
        targetStream->m_releasing = true;
        ALOGD("(%s): START Waiting for (indirect) stream thread release - line(%d)", __FUNCTION__, __LINE__);
        do {
            targetStream->release();
            usleep(SIG_WAITING_TICK);
        } while (targetStream->m_releasing);
        ALOGD("(%s): END Waiting for (indirect) stream thread release - line(%d)", __FUNCTION__, __LINE__);
        targetStream->SetSignal(SIGNAL_THREAD_TERMINATE);

        if (targetStream != NULL) {
            ALOGD("(%s): START Waiting for (indirect) stream thread termination", __FUNCTION__);
            while (!targetStream->IsTerminated())
                usleep(SIG_WAITING_TICK);
            ALOGD("(%s): END Waiting for (indirect) stream thread termination", __FUNCTION__);
            m_streamThreads[0] = NULL;
        }
        // Capture pipeline still live -> remember that SCP was force-suspended
        // so it can be resumed when a new stream is configured.
        if (m_camera_info.capture.status == true) {
            m_scpForceSuspended = true;
        }
        m_isIspStarted = false;
    }
    ALOGV("(%s): END", __FUNCTION__);
    return 0;
}
2351 | ||
2352 | int ExynosCameraHWInterface2::allocateReprocessStream( | |
1422aff9 MS |
2353 | uint32_t /*width*/, uint32_t /*height*/, uint32_t /*format*/, |
2354 | const camera2_stream_in_ops_t* /*reprocess_stream_ops*/, | |
2355 | uint32_t* /*stream_id*/, uint32_t* /*consumer_usage*/, uint32_t* /*max_buffers*/) | |
c15a6b00 | 2356 | { |
13d8c7b4 | 2357 | ALOGV("DEBUG(%s):", __FUNCTION__); |
c15a6b00 JS |
2358 | return 0; |
2359 | } | |
2360 | ||
5506cebf SK |
2361 | int ExynosCameraHWInterface2::allocateReprocessStreamFromStream( |
2362 | uint32_t output_stream_id, | |
2363 | const camera2_stream_in_ops_t *reprocess_stream_ops, | |
2364 | // outputs | |
2365 | uint32_t *stream_id) | |
2366 | { | |
ed4ad5fe | 2367 | ALOGD("(%s): output_stream_id(%d)", __FUNCTION__, output_stream_id); |
5506cebf SK |
2368 | *stream_id = STREAM_ID_JPEG_REPROCESS; |
2369 | ||
2370 | m_reprocessStreamId = *stream_id; | |
2371 | m_reprocessOps = reprocess_stream_ops; | |
2372 | m_reprocessOutputStreamId = output_stream_id; | |
2373 | return 0; | |
2374 | } | |
2375 | ||
c15a6b00 JS |
2376 | int ExynosCameraHWInterface2::releaseReprocessStream(uint32_t stream_id) |
2377 | { | |
ed4ad5fe | 2378 | ALOGD("(%s): stream_id(%d)", __FUNCTION__, stream_id); |
5506cebf SK |
2379 | if (stream_id == STREAM_ID_JPEG_REPROCESS) { |
2380 | m_reprocessStreamId = 0; | |
2381 | m_reprocessOps = NULL; | |
2382 | m_reprocessOutputStreamId = 0; | |
2383 | return 0; | |
2384 | } | |
2385 | return 1; | |
c15a6b00 JS |
2386 | } |
2387 | ||
2388 | int ExynosCameraHWInterface2::triggerAction(uint32_t trigger_id, int ext1, int ext2) | |
2389 | { | |
ca714238 | 2390 | Mutex::Autolock lock(m_afModeTriggerLock); |
0f26b20f SK |
2391 | ALOGV("DEBUG(%s): id(%x), %d, %d", __FUNCTION__, trigger_id, ext1, ext2); |
2392 | ||
2393 | switch (trigger_id) { | |
2394 | case CAMERA2_TRIGGER_AUTOFOCUS: | |
2395 | ALOGV("DEBUG(%s):TRIGGER_AUTOFOCUS id(%d)", __FUNCTION__, ext1); | |
ca714238 | 2396 | OnAfTrigger(ext1); |
0f26b20f SK |
2397 | break; |
2398 | ||
2399 | case CAMERA2_TRIGGER_CANCEL_AUTOFOCUS: | |
2400 | ALOGV("DEBUG(%s):CANCEL_AUTOFOCUS id(%d)", __FUNCTION__, ext1); | |
2401 | OnAfCancel(ext1); | |
2402 | break; | |
e117f756 YJ |
2403 | case CAMERA2_TRIGGER_PRECAPTURE_METERING: |
2404 | ALOGV("DEBUG(%s):CAMERA2_TRIGGER_PRECAPTURE_METERING id(%d)", __FUNCTION__, ext1); | |
2405 | OnPrecaptureMeteringTriggerStart(ext1); | |
2406 | break; | |
0f26b20f SK |
2407 | default: |
2408 | break; | |
2409 | } | |
c15a6b00 JS |
2410 | return 0; |
2411 | } | |
2412 | ||
2413 | int ExynosCameraHWInterface2::setNotifyCallback(camera2_notify_callback notify_cb, void *user) | |
2414 | { | |
0f26b20f | 2415 | ALOGV("DEBUG(%s): cb_addr(%x)", __FUNCTION__, (unsigned int)notify_cb); |
c15a6b00 JS |
2416 | m_notifyCb = notify_cb; |
2417 | m_callbackCookie = user; | |
2418 | return 0; | |
2419 | } | |
2420 | ||
2421 | int ExynosCameraHWInterface2::getMetadataVendorTagOps(vendor_tag_query_ops_t **ops) | |
2422 | { | |
13d8c7b4 | 2423 | ALOGV("DEBUG(%s):", __FUNCTION__); |
cf95ef60 | 2424 | *ops = NULL; |
c15a6b00 JS |
2425 | return 0; |
2426 | } | |
2427 | ||
1422aff9 | 2428 | int ExynosCameraHWInterface2::dump(int /*fd*/) |
c15a6b00 | 2429 | { |
13d8c7b4 | 2430 | ALOGV("DEBUG(%s):", __FUNCTION__); |
c15a6b00 JS |
2431 | return 0; |
2432 | } | |
2433 | ||
13d8c7b4 SK |
2434 | void ExynosCameraHWInterface2::m_getAlignedYUVSize(int colorFormat, int w, int h, ExynosBuffer *buf) |
2435 | { | |
2436 | switch (colorFormat) { | |
2437 | // 1p | |
2438 | case V4L2_PIX_FMT_RGB565 : | |
2439 | case V4L2_PIX_FMT_YUYV : | |
2440 | case V4L2_PIX_FMT_UYVY : | |
2441 | case V4L2_PIX_FMT_VYUY : | |
2442 | case V4L2_PIX_FMT_YVYU : | |
2443 | buf->size.extS[0] = FRAME_SIZE(V4L2_PIX_2_HAL_PIXEL_FORMAT(colorFormat), w, h); | |
2444 | buf->size.extS[1] = 0; | |
2445 | buf->size.extS[2] = 0; | |
2446 | break; | |
2447 | // 2p | |
2448 | case V4L2_PIX_FMT_NV12 : | |
2449 | case V4L2_PIX_FMT_NV12T : | |
2450 | case V4L2_PIX_FMT_NV21 : | |
2451 | buf->size.extS[0] = ALIGN(w, 16) * ALIGN(h, 16); | |
2452 | buf->size.extS[1] = ALIGN(w/2, 16) * ALIGN(h/2, 16); | |
2453 | buf->size.extS[2] = 0; | |
2454 | break; | |
2455 | case V4L2_PIX_FMT_NV12M : | |
2456 | case V4L2_PIX_FMT_NV12MT_16X16 : | |
9dd63e1f | 2457 | case V4L2_PIX_FMT_NV21M: |
13d8c7b4 SK |
2458 | buf->size.extS[0] = ALIGN(w, 16) * ALIGN(h, 16); |
2459 | buf->size.extS[1] = ALIGN(buf->size.extS[0] / 2, 256); | |
2460 | buf->size.extS[2] = 0; | |
2461 | break; | |
2462 | case V4L2_PIX_FMT_NV16 : | |
2463 | case V4L2_PIX_FMT_NV61 : | |
2464 | buf->size.extS[0] = ALIGN(w, 16) * ALIGN(h, 16); | |
2465 | buf->size.extS[1] = ALIGN(w, 16) * ALIGN(h, 16); | |
2466 | buf->size.extS[2] = 0; | |
2467 | break; | |
2468 | // 3p | |
2469 | case V4L2_PIX_FMT_YUV420 : | |
2470 | case V4L2_PIX_FMT_YVU420 : | |
2471 | buf->size.extS[0] = (w * h); | |
2472 | buf->size.extS[1] = (w * h) >> 2; | |
2473 | buf->size.extS[2] = (w * h) >> 2; | |
2474 | break; | |
2475 | case V4L2_PIX_FMT_YUV420M: | |
2476 | case V4L2_PIX_FMT_YVU420M : | |
4a3f1820 SK |
2477 | buf->size.extS[0] = ALIGN(w, 32) * ALIGN(h, 16); |
2478 | buf->size.extS[1] = ALIGN(w/2, 16) * ALIGN(h/2, 8); | |
2479 | buf->size.extS[2] = ALIGN(w/2, 16) * ALIGN(h/2, 8); | |
2480 | break; | |
13d8c7b4 | 2481 | case V4L2_PIX_FMT_YUV422P : |
0d220b42 | 2482 | buf->size.extS[0] = ALIGN(w, 16) * ALIGN(h, 16); |
13d8c7b4 SK |
2483 | buf->size.extS[1] = ALIGN(w/2, 16) * ALIGN(h/2, 8); |
2484 | buf->size.extS[2] = ALIGN(w/2, 16) * ALIGN(h/2, 8); | |
2485 | break; | |
2486 | default: | |
2487 | ALOGE("ERR(%s):unmatched colorFormat(%d)", __FUNCTION__, colorFormat); | |
2488 | return; | |
2489 | break; | |
2490 | } | |
2491 | } | |
c15a6b00 | 2492 | |
13d8c7b4 SK |
2493 | bool ExynosCameraHWInterface2::m_getRatioSize(int src_w, int src_h, |
2494 | int dst_w, int dst_h, | |
2495 | int *crop_x, int *crop_y, | |
2496 | int *crop_w, int *crop_h, | |
2497 | int zoom) | |
c15a6b00 | 2498 | { |
13d8c7b4 SK |
2499 | *crop_w = src_w; |
2500 | *crop_h = src_h; | |
2501 | ||
2502 | if ( src_w != dst_w | |
2503 | || src_h != dst_h) { | |
2504 | float src_ratio = 1.0f; | |
2505 | float dst_ratio = 1.0f; | |
2506 | ||
2507 | // ex : 1024 / 768 | |
2508 | src_ratio = (float)src_w / (float)src_h; | |
2509 | ||
2510 | // ex : 352 / 288 | |
2511 | dst_ratio = (float)dst_w / (float)dst_h; | |
2512 | ||
2513 | if (dst_w * dst_h < src_w * src_h) { | |
2514 | if (dst_ratio <= src_ratio) { | |
2515 | // shrink w | |
2516 | *crop_w = src_h * dst_ratio; | |
2517 | *crop_h = src_h; | |
2518 | } else { | |
2519 | // shrink h | |
2520 | *crop_w = src_w; | |
2521 | *crop_h = src_w / dst_ratio; | |
c15a6b00 | 2522 | } |
13d8c7b4 SK |
2523 | } else { |
2524 | if (dst_ratio <= src_ratio) { | |
2525 | // shrink w | |
2526 | *crop_w = src_h * dst_ratio; | |
2527 | *crop_h = src_h; | |
2528 | } else { | |
2529 | // shrink h | |
2530 | *crop_w = src_w; | |
2531 | *crop_h = src_w / dst_ratio; | |
c15a6b00 JS |
2532 | } |
2533 | } | |
c15a6b00 JS |
2534 | } |
2535 | ||
13d8c7b4 SK |
2536 | if (zoom != 0) { |
2537 | float zoomLevel = ((float)zoom + 10.0) / 10.0; | |
2538 | *crop_w = (int)((float)*crop_w / zoomLevel); | |
2539 | *crop_h = (int)((float)*crop_h / zoomLevel); | |
2540 | } | |
2541 | ||
2542 | #define CAMERA_CROP_WIDTH_RESTRAIN_NUM (0x2) | |
2543 | unsigned int w_align = (*crop_w & (CAMERA_CROP_WIDTH_RESTRAIN_NUM - 1)); | |
2544 | if (w_align != 0) { | |
2545 | if ( (CAMERA_CROP_WIDTH_RESTRAIN_NUM >> 1) <= w_align | |
1422aff9 | 2546 | && (int)(*crop_w + (CAMERA_CROP_WIDTH_RESTRAIN_NUM - w_align)) <= dst_w) { |
13d8c7b4 SK |
2547 | *crop_w += (CAMERA_CROP_WIDTH_RESTRAIN_NUM - w_align); |
2548 | } | |
2549 | else | |
2550 | *crop_w -= w_align; | |
2551 | } | |
2552 | ||
2553 | #define CAMERA_CROP_HEIGHT_RESTRAIN_NUM (0x2) | |
2554 | unsigned int h_align = (*crop_h & (CAMERA_CROP_HEIGHT_RESTRAIN_NUM - 1)); | |
2555 | if (h_align != 0) { | |
2556 | if ( (CAMERA_CROP_HEIGHT_RESTRAIN_NUM >> 1) <= h_align | |
1422aff9 | 2557 | && (int)(*crop_h + (CAMERA_CROP_HEIGHT_RESTRAIN_NUM - h_align)) <= dst_h) { |
13d8c7b4 SK |
2558 | *crop_h += (CAMERA_CROP_HEIGHT_RESTRAIN_NUM - h_align); |
2559 | } | |
2560 | else | |
2561 | *crop_h -= h_align; | |
2562 | } | |
2563 | ||
2564 | *crop_x = (src_w - *crop_w) >> 1; | |
2565 | *crop_y = (src_h - *crop_h) >> 1; | |
2566 | ||
2567 | if (*crop_x & (CAMERA_CROP_WIDTH_RESTRAIN_NUM >> 1)) | |
2568 | *crop_x -= 1; | |
2569 | ||
2570 | if (*crop_y & (CAMERA_CROP_HEIGHT_RESTRAIN_NUM >> 1)) | |
2571 | *crop_y -= 1; | |
2572 | ||
2573 | return true; | |
2574 | } | |
2575 | ||
9dd63e1f | 2576 | BayerBufManager::BayerBufManager() |
13d8c7b4 | 2577 | { |
9dd63e1f SK |
2578 | ALOGV("DEBUG(%s): ", __FUNCTION__); |
2579 | for (int i = 0; i < NUM_BAYER_BUFFERS ; i++) { | |
2580 | entries[i].status = BAYER_ON_HAL_EMPTY; | |
2581 | entries[i].reqFrameCnt = 0; | |
13d8c7b4 | 2582 | } |
9dd63e1f SK |
2583 | sensorEnqueueHead = 0; |
2584 | sensorDequeueHead = 0; | |
2585 | ispEnqueueHead = 0; | |
2586 | ispDequeueHead = 0; | |
2587 | numOnSensor = 0; | |
2588 | numOnIsp = 0; | |
2589 | numOnHalFilled = 0; | |
2590 | numOnHalEmpty = NUM_BAYER_BUFFERS; | |
13d8c7b4 SK |
2591 | } |
2592 | ||
6d8e5b08 SK |
2593 | BayerBufManager::~BayerBufManager() |
2594 | { | |
2595 | ALOGV("%s", __FUNCTION__); | |
2596 | } | |
2597 | ||
9dd63e1f | 2598 | int BayerBufManager::GetIndexForSensorEnqueue() |
13d8c7b4 | 2599 | { |
9dd63e1f SK |
2600 | int ret = 0; |
2601 | if (numOnHalEmpty == 0) | |
2602 | ret = -1; | |
2603 | else | |
2604 | ret = sensorEnqueueHead; | |
2605 | ALOGV("DEBUG(%s): returning (%d)", __FUNCTION__, ret); | |
2606 | return ret; | |
13d8c7b4 SK |
2607 | } |
2608 | ||
/*
 * Transitions buffer [index] from BAYER_ON_HAL_EMPTY to BAYER_ON_SENSOR.
 * Enqueues must follow ring order (index must equal sensorEnqueueHead).
 * Updates the HAL-empty/on-sensor counters and advances the enqueue head.
 * Returns 0 on success, -1 on an out-of-order index or wrong slot state.
 */
int BayerBufManager::MarkSensorEnqueue(int index)
{
    ALOGV("DEBUG(%s) : BayerIndex[%d] ", __FUNCTION__, index);

    // sanity check: enqueues are only legal at the current ring head
    if (index != sensorEnqueueHead) {
        ALOGV("DEBUG(%s) : Abnormal BayerIndex[%d] - expected[%d]", __FUNCTION__, index, sensorEnqueueHead);
        return -1;
    }
    // sanity check: slot must currently be free on the HAL side
    if (entries[index].status != BAYER_ON_HAL_EMPTY) {
        ALOGV("DEBUG(%s) : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
            index, entries[index].status, BAYER_ON_HAL_EMPTY);
        return -1;
    }

    entries[index].status = BAYER_ON_SENSOR;
    entries[index].reqFrameCnt = 0;
    numOnHalEmpty--;
    numOnSensor++;
    sensorEnqueueHead = GetNextIndex(index);    // advance ring head (wraps)
    ALOGV("DEBUG(%s) END: HAL-e(%d) HAL-f(%d) Sensor(%d) ISP(%d) ",
        __FUNCTION__, numOnHalEmpty, numOnHalFilled, numOnSensor, numOnIsp);
    return 0;
}
13d8c7b4 | 2633 | |
/*
 * Transitions buffer [index] from BAYER_ON_SENSOR to BAYER_ON_HAL_FILLED
 * after the sensor driver returns it. Note: unlike MarkSensorEnqueue this
 * does NOT enforce ring order, and reqFrameCnt is only logged — it is not
 * stored into the entry here; timeStamp is unused.
 * Returns 0 on success, -1 when the slot was not on the sensor.
 */
int BayerBufManager::MarkSensorDequeue(int index, int reqFrameCnt, nsecs_t* /*timeStamp*/)
{
    ALOGV("DEBUG(%s) : BayerIndex[%d] reqFrameCnt(%d)", __FUNCTION__, index, reqFrameCnt);

    if (entries[index].status != BAYER_ON_SENSOR) {
        ALOGE("DEBUG(%s) : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
            index, entries[index].status, BAYER_ON_SENSOR);
        return -1;
    }

    entries[index].status = BAYER_ON_HAL_FILLED;
    numOnHalFilled++;
    numOnSensor--;

    return 0;
}
2650 | ||
2651 | int BayerBufManager::GetIndexForIspEnqueue(int *reqFrameCnt) | |
2652 | { | |
2653 | int ret = 0; | |
2654 | if (numOnHalFilled == 0) | |
2655 | ret = -1; | |
2656 | else { | |
2657 | *reqFrameCnt = entries[ispEnqueueHead].reqFrameCnt; | |
2658 | ret = ispEnqueueHead; | |
13d8c7b4 | 2659 | } |
9dd63e1f | 2660 | ALOGV("DEBUG(%s): returning BayerIndex[%d]", __FUNCTION__, ret); |
13d8c7b4 SK |
2661 | return ret; |
2662 | } | |
2663 | ||
9dd63e1f SK |
2664 | int BayerBufManager::GetIndexForIspDequeue(int *reqFrameCnt) |
2665 | { | |
2666 | int ret = 0; | |
2667 | if (numOnIsp == 0) | |
2668 | ret = -1; | |
2669 | else { | |
2670 | *reqFrameCnt = entries[ispDequeueHead].reqFrameCnt; | |
2671 | ret = ispDequeueHead; | |
2672 | } | |
2673 | ALOGV("DEBUG(%s): returning BayerIndex[%d]", __FUNCTION__, ret); | |
2674 | return ret; | |
2675 | } | |
13d8c7b4 | 2676 | |
/*
 * Transitions buffer [index] from BAYER_ON_HAL_FILLED to BAYER_ON_ISP.
 * Enqueues must follow ring order (index must equal ispEnqueueHead).
 * Updates the HAL-filled/on-ISP counters and advances the ISP enqueue head.
 * Returns 0 on success, -1 on an out-of-order index or wrong slot state.
 */
int BayerBufManager::MarkIspEnqueue(int index)
{
    ALOGV("DEBUG(%s) : BayerIndex[%d] ", __FUNCTION__, index);

    // sanity check: enqueues are only legal at the current ring head
    if (index != ispEnqueueHead) {
        ALOGV("DEBUG(%s) : Abnormal BayerIndex[%d] - expected[%d]", __FUNCTION__, index, ispEnqueueHead);
        return -1;
    }
    // sanity check: slot must hold sensor data awaiting ISP processing
    if (entries[index].status != BAYER_ON_HAL_FILLED) {
        ALOGV("DEBUG(%s) : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
            index, entries[index].status, BAYER_ON_HAL_FILLED);
        return -1;
    }

    entries[index].status = BAYER_ON_ISP;
    numOnHalFilled--;
    numOnIsp++;
    ispEnqueueHead = GetNextIndex(index);   // advance ring head (wraps)
    ALOGV("DEBUG(%s) END: HAL-e(%d) HAL-f(%d) Sensor(%d) ISP(%d) ",
        __FUNCTION__, numOnHalEmpty, numOnHalFilled, numOnSensor, numOnIsp);
    return 0;
}
2700 | ||
/*
 * Transitions buffer [index] from BAYER_ON_ISP back to BAYER_ON_HAL_EMPTY,
 * completing the buffer's round trip. Dequeues must follow ring order
 * (index must equal ispDequeueHead). Clears the slot's request frame count,
 * updates the counters, and advances the ISP dequeue head.
 * Returns 0 on success, -1 on an out-of-order index or wrong slot state.
 */
int BayerBufManager::MarkIspDequeue(int index)
{
    ALOGV("DEBUG(%s) : BayerIndex[%d]", __FUNCTION__, index);

    // sanity check: dequeues are only legal at the current ring head
    if (index != ispDequeueHead) {
        ALOGV("DEBUG(%s) : Abnormal BayerIndex[%d] - expected[%d]", __FUNCTION__, index, ispDequeueHead);
        return -1;
    }
    // sanity check: slot must currently be held by the ISP
    if (entries[index].status != BAYER_ON_ISP) {
        ALOGV("DEBUG(%s) : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
            index, entries[index].status, BAYER_ON_ISP);
        return -1;
    }

    entries[index].status = BAYER_ON_HAL_EMPTY;
    entries[index].reqFrameCnt = 0;
    numOnHalEmpty++;
    numOnIsp--;
    ispDequeueHead = GetNextIndex(index);   // advance ring head (wraps)
    ALOGV("DEBUG(%s) END: HAL-e(%d) HAL-f(%d) Sensor(%d) ISP(%d) ",
        __FUNCTION__, numOnHalEmpty, numOnHalFilled, numOnSensor, numOnIsp);
    return 0;
}
13d8c7b4 | 2725 | |
9dd63e1f SK |
2726 | int BayerBufManager::GetNumOnSensor() |
2727 | { | |
2728 | return numOnSensor; | |
13d8c7b4 SK |
2729 | } |
2730 | ||
9dd63e1f | 2731 | int BayerBufManager::GetNumOnHalFilled() |
13d8c7b4 | 2732 | { |
9dd63e1f SK |
2733 | return numOnHalFilled; |
2734 | } | |
2735 | ||
2736 | int BayerBufManager::GetNumOnIsp() | |
2737 | { | |
2738 | return numOnIsp; | |
2739 | } | |
2740 | ||
2741 | int BayerBufManager::GetNextIndex(int index) | |
2742 | { | |
2743 | index++; | |
2744 | if (index >= NUM_BAYER_BUFFERS) | |
2745 | index = 0; | |
2746 | ||
2747 | return index; | |
2748 | } | |
2749 | ||
/*
 * Body of the HAL main thread (runs once per delivered signal).
 * Signals handled, in order:
 *  - SIGNAL_THREAD_RELEASE: arms SIGNAL_THREAD_TERMINATE and returns.
 *  - SIGNAL_MAIN_REQ_Q_NOT_EMPTY: pulls one request from the service queue
 *    into the request manager (under m_afModeTriggerLock, since request
 *    registration also updates the AF mode/region), then re-signals itself
 *    while the internal queue still has room, and kicks the sensor thread.
 *  - SIGNAL_MAIN_STREAM_OUTPUT_DONE: finalizes the oldest in-flight
 *    request — prepares its result metadata, frees the original request,
 *    dequeues a frame buffer from the service, appends the metadata and
 *    enqueues the completed frame; then restarts request dequeueing and/or
 *    sensor processing as needed.
 */
void ExynosCameraHWInterface2::m_mainThreadFunc(SignalDrivenThread * self)
{
    camera_metadata_t *currentRequest = NULL;
    camera_metadata_t *currentFrame = NULL;
    size_t numEntries = 0;
    size_t frameSize = 0;
    camera_metadata_t * preparedFrame = NULL;
    camera_metadata_t *deregisteredRequest = NULL;
    uint32_t currentSignal = self->GetProcessingSignal();
    MainThread * selfThread = ((MainThread*)self);
    int res = 0;

    int ret;
    int afMode;             // AF mode extracted from the registered request
    uint32_t afRegion[4];   // AF region extracted from the registered request

    ALOGV("DEBUG(%s): m_mainThreadFunc (%x)", __FUNCTION__, currentSignal);

    if (currentSignal & SIGNAL_THREAD_RELEASE) {
        ALOGV("DEBUG(%s): processing SIGNAL_THREAD_RELEASE", __FUNCTION__);

        ALOGV("DEBUG(%s): processing SIGNAL_THREAD_RELEASE DONE", __FUNCTION__);
        selfThread->SetSignal(SIGNAL_THREAD_TERMINATE);
        return;
    }

    if (currentSignal & SIGNAL_MAIN_REQ_Q_NOT_EMPTY) {
        ALOGV("DEBUG(%s): MainThread processing SIGNAL_MAIN_REQ_Q_NOT_EMPTY", __FUNCTION__);
        if (m_requestManager->IsRequestQueueFull()==false) {
            // Registration updates AF state; serialize with triggerAction().
            Mutex::Autolock lock(m_afModeTriggerLock);
            m_requestQueueOps->dequeue_request(m_requestQueueOps, &currentRequest);
            if (NULL == currentRequest) {
                ALOGD("DEBUG(%s)(0x%x): No more service requests left in the queue ", __FUNCTION__, currentSignal);
                m_isRequestQueueNull = true;
                // With VDIS active, insert a bubble frame while requests are dry.
                if (m_requestManager->IsVdisEnable())
                    m_vdisBubbleCnt = 1;
            }
            else {
                m_requestManager->RegisterRequest(currentRequest, &afMode, afRegion);

                // Apply the AF settings carried by this request.
                SetAfMode((enum aa_afmode)afMode);
                SetAfRegion(afRegion);

                m_numOfRemainingReqInSvc = m_requestQueueOps->request_count(m_requestQueueOps);
                ALOGV("DEBUG(%s): remaining req cnt (%d)", __FUNCTION__, m_numOfRemainingReqInSvc);
                if (m_requestManager->IsRequestQueueFull()==false)
                    selfThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY); // dequeue repeatedly

                m_sensorThread->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING);
            }
        }
        else {
            // Internal queue full: remember that the service has more pending.
            m_isRequestQueuePending = true;
        }
    }

    if (currentSignal & SIGNAL_MAIN_STREAM_OUTPUT_DONE) {
        ALOGV("DEBUG(%s): MainThread processing SIGNAL_MAIN_STREAM_OUTPUT_DONE", __FUNCTION__);
        /*while (1)*/ {
            // Build the result metadata for the completed request.
            ret = m_requestManager->PrepareFrame(&numEntries, &frameSize, &preparedFrame, GetAfStateForService());
            if (ret == false)
                CAM_LOGE("ERR(%s): PrepareFrame ret = %d", __FUNCTION__, ret);

            m_requestManager->DeregisterRequest(&deregisteredRequest);

            // Hand the original request metadata back to the service.
            ret = m_requestQueueOps->free_request(m_requestQueueOps, deregisteredRequest);
            if (ret < 0)
                CAM_LOGE("ERR(%s): free_request ret = %d", __FUNCTION__, ret);

            // Obtain a service frame buffer sized for the prepared metadata.
            ret = m_frameQueueOps->dequeue_frame(m_frameQueueOps, numEntries, frameSize, &currentFrame);
            if (ret < 0)
                CAM_LOGE("ERR(%s): dequeue_frame ret = %d", __FUNCTION__, ret);

            if (currentFrame==NULL) {
                ALOGV("DBG(%s): frame dequeue returned NULL",__FUNCTION__ );
            }
            else {
                ALOGV("DEBUG(%s): frame dequeue done. numEntries(%d) frameSize(%d)",__FUNCTION__ , numEntries, frameSize);
            }
            res = append_camera_metadata(currentFrame, preparedFrame);
            if (res==0) {
                ALOGV("DEBUG(%s): frame metadata append success",__FUNCTION__);
                m_frameQueueOps->enqueue_frame(m_frameQueueOps, currentFrame);
            }
            else {
                ALOGE("ERR(%s): frame metadata append fail (%d)",__FUNCTION__, res);
            }
        }
        // Resume pulling service requests unless the queue was drained.
        if (!m_isRequestQueueNull) {
            selfThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY);
        }

        // Keep the sensor pipeline moving while requests are still in flight.
        if (getInProgressCount()>0) {
            ALOGV("DEBUG(%s): STREAM_OUTPUT_DONE and signalling REQ_PROCESSING",__FUNCTION__);
            m_sensorThread->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING);
        }
    }
    ALOGV("DEBUG(%s): MainThread Exit", __FUNCTION__);
    return;
}
c15a6b00 | 2850 | |
/*
 * Debug helper: dumps the interesting fields of a camera2 shot to the log —
 * the magic number, the control (ctl) section (requested exposure/ISO/AWB),
 * the per-target output request flags, and the dynamic metadata (dm) section
 * (actual exposure/ISO/timestamp/frame count). Read-only; log output only.
 */
void ExynosCameraHWInterface2::DumpInfoWithShot(struct camera2_shot_ext * shot_ext)
{
    ALOGD("#### common Section");
    ALOGD("#### magic(%x) ",
        shot_ext->shot.magicNumber);
    // Control section: what the framework asked for.
    ALOGD("#### ctl Section");
    ALOGD("#### meta(%d) aper(%f) exp(%lld) duration(%lld) ISO(%d) AWB(%d)",
        shot_ext->shot.ctl.request.metadataMode,
        shot_ext->shot.ctl.lens.aperture,
        shot_ext->shot.ctl.sensor.exposureTime,
        shot_ext->shot.ctl.sensor.frameDuration,
        shot_ext->shot.ctl.sensor.sensitivity,
        shot_ext->shot.ctl.aa.awbMode);

    // Which pipeline outputs (sensor/SCP/SCC) this shot requests.
    ALOGD("#### OutputStream Sensor(%d) SCP(%d) SCC(%d) streams(%x)",
        shot_ext->request_sensor, shot_ext->request_scp, shot_ext->request_scc,
        shot_ext->shot.ctl.request.outputStreams[0]);

    // Dynamic metadata section: what the hardware actually produced.
    ALOGD("#### DM Section");
    ALOGD("#### meta(%d) aper(%f) exp(%lld) duration(%lld) ISO(%d) timestamp(%lld) AWB(%d) cnt(%d)",
        shot_ext->shot.dm.request.metadataMode,
        shot_ext->shot.dm.lens.aperture,
        shot_ext->shot.dm.sensor.exposureTime,
        shot_ext->shot.dm.sensor.frameDuration,
        shot_ext->shot.dm.sensor.sensitivity,
        shot_ext->shot.dm.sensor.timeStamp,
        shot_ext->shot.dm.aa.awbMode,
        shot_ext->shot.dm.request.frameCount );
}
2880 | ||
/*
 * Pre-capture flash state machine: SETTER side.
 *
 * Called once per frame (while flash is enabled) on the shot about to be
 * queued to the ISP. Writes the aeflashMode / awbMode / request_scc /
 * request_scp controls appropriate for the current state in
 * m_ctlInfo.flash.m_flashCnt, and performs the "auto" transitions that do
 * not need hardware feedback (the *_WAIT states are advanced by the
 * listener functions instead).
 */
void ExynosCameraHWInterface2::m_preCaptureSetter(struct camera2_shot_ext * shot_ext)
{
    // Flash
    switch (m_ctlInfo.flash.m_flashCnt) {
    case IS_FLASH_STATE_ON:
        ALOGV("(%s): [Flash] Flash ON for Capture (%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
        // check AF locked
        if (m_ctlInfo.flash.m_precaptureTriggerId > 0) {
            // Pre-capture was explicitly triggered: hold this state until the
            // timeout budget is spent before turning the flash on.
            if (m_ctlInfo.flash.m_flashTimeOut == 0) {
                if (m_ctlInfo.flash.i_flashMode == AA_AEMODE_ON_ALWAYS_FLASH) {
                    shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_ON_ALWAYS;
                    // m_flashTimeOut reused here as a wait budget for ON_WAIT
                    m_ctlInfo.flash.m_flashTimeOut = 5;
                } else
                    shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_ON;
                m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON_WAIT;
            } else {
                m_ctlInfo.flash.m_flashTimeOut--;
            }
        } else {
            // No pre-capture trigger pending: turn the flash on immediately.
            if (m_ctlInfo.flash.i_flashMode == AA_AEMODE_ON_ALWAYS_FLASH) {
                shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_ON_ALWAYS;
                m_ctlInfo.flash.m_flashTimeOut = 5;
            } else
                shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_ON;
            m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON_WAIT;
        }
        break;
    case IS_FLASH_STATE_ON_WAIT:
        // Waiting on ISP feedback (m_preCaptureListenerISP advances this state).
        break;
    case IS_FLASH_STATE_ON_DONE:
        if (!m_ctlInfo.flash.m_afFlashDoneFlg)
            // auto transition at pre-capture trigger
            m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_AE_AWB_LOCK;
        break;
    case IS_FLASH_STATE_AUTO_AE_AWB_LOCK:
        ALOGV("(%s): [Flash] IS_FLASH_AF_AUTO_AE_AWB_LOCK (%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
        shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_AUTO;
        //shot_ext->shot.ctl.aa.aeMode = AA_AEMODE_LOCKED;
        shot_ext->shot.ctl.aa.awbMode = AA_AWBMODE_LOCKED;
        m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AE_AWB_LOCK_WAIT;
        break;
    case IS_FLASH_STATE_AE_AWB_LOCK_WAIT:
    case IS_FLASH_STATE_AUTO_WAIT:
        // 0 means "no change" for these controls while waiting on the lock.
        shot_ext->shot.ctl.aa.aeMode =(enum aa_aemode)0;
        shot_ext->shot.ctl.aa.awbMode = (enum aa_awbmode)0;
        break;
    case IS_FLASH_STATE_AUTO_DONE:
        ALOGV("(%s): [Flash] IS_FLASH_AF_AUTO DONE (%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
        shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_OFF;
        break;
    case IS_FLASH_STATE_AUTO_OFF:
        ALOGV("(%s): [Flash] IS_FLASH_AF_AUTO Clear (%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
        shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_OFF;
        m_ctlInfo.flash.m_flashEnableFlg = false;
        break;
    case IS_FLASH_STATE_CAPTURE:
        ALOGV("(%s): [Flash] IS_FLASH_CAPTURE (%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
        m_ctlInfo.flash.m_flashTimeOut = FLASH_STABLE_WAIT_TIMEOUT;
        shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_CAPTURE;
        // Suppress both output streams until the main flash is stable.
        shot_ext->request_scc = 0;
        shot_ext->request_scp = 0;
        m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE_WAIT; // auto transition
        break;
    case IS_FLASH_STATE_CAPTURE_WAIT:
        shot_ext->request_scc = 0;
        shot_ext->request_scp = 0;
        break;
    case IS_FLASH_STATE_CAPTURE_JPEG:
        ALOGV("(%s): [Flash] Flash Capture (%d)!!!!!", __FUNCTION__, (FLASH_STABLE_WAIT_TIMEOUT -m_ctlInfo.flash.m_flashTimeOut));
        // Flash is stable: release both streams so the JPEG frame goes out.
        shot_ext->request_scc = 1;
        shot_ext->request_scp = 1;
        m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE_END;  // auto transition
        break;
    case IS_FLASH_STATE_CAPTURE_END:
        ALOGV("(%s): [Flash] Flash Capture END (%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
        // Capture sequence complete: turn the flash off and reset the machine.
        shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_OFF;
        shot_ext->request_scc = 0;
        shot_ext->request_scp = 0;
        m_ctlInfo.flash.m_flashEnableFlg = false;
        m_ctlInfo.flash.m_flashCnt = 0;
        m_ctlInfo.flash.m_afFlashDoneFlg= false;
        break;
    case IS_FLASH_STATE_NONE:
        break;
    default:
        ALOGE("(%s): [Flash] flash state error!! (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt);
    }
}
2969 | ||
e117f756 | 2970 | void ExynosCameraHWInterface2::m_preCaptureListenerSensor(struct camera2_shot_ext * shot_ext) |
6f19b6cf | 2971 | { |
e117f756 YJ |
2972 | // Flash |
2973 | switch (m_ctlInfo.flash.m_flashCnt) { | |
2974 | case IS_FLASH_STATE_AUTO_WAIT: | |
2975 | if (m_ctlInfo.flash.m_flashDecisionResult) { | |
2976 | if (shot_ext->shot.dm.flash.flashMode == CAM2_FLASH_MODE_OFF) { | |
2977 | m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_DONE; | |
73f5ad60 | 2978 | ALOGV("(%s): [Flash] Lis : AUTO -> OFF (%d)", __FUNCTION__, shot_ext->shot.dm.flash.flashMode); |
9257e29e | 2979 | } else { |
73f5ad60 | 2980 | ALOGV("(%s): [Flash] Waiting : AUTO -> OFF", __FUNCTION__); |
9257e29e | 2981 | } |
e117f756 YJ |
2982 | } else { |
2983 | //If flash isn't activated at flash auto mode, skip flash auto control | |
2984 | m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_DONE; | |
73f5ad60 | 2985 | ALOGV("(%s): [Flash] Skip : AUTO -> OFF", __FUNCTION__); |
9257e29e | 2986 | } |
e117f756 | 2987 | break; |
9257e29e | 2988 | } |
9257e29e YJ |
2989 | } |
2990 | ||
e117f756 | 2991 | void ExynosCameraHWInterface2::m_preCaptureListenerISP(struct camera2_shot_ext * shot_ext) |
9257e29e | 2992 | { |
e117f756 YJ |
2993 | // Flash |
2994 | switch (m_ctlInfo.flash.m_flashCnt) { | |
2995 | case IS_FLASH_STATE_ON_WAIT: | |
2996 | if (shot_ext->shot.dm.flash.decision > 0) { | |
2997 | // store decision result to skip capture sequenece | |
73f5ad60 | 2998 | ALOGV("(%s): [Flash] IS_FLASH_ON, decision - %d", __FUNCTION__, shot_ext->shot.dm.flash.decision); |
e117f756 YJ |
2999 | if (shot_ext->shot.dm.flash.decision == 2) |
3000 | m_ctlInfo.flash.m_flashDecisionResult = false; | |
3001 | else | |
3002 | m_ctlInfo.flash.m_flashDecisionResult = true; | |
3003 | m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON_DONE; | |
3004 | } else { | |
3005 | if (m_ctlInfo.flash.m_flashTimeOut == 0) { | |
73f5ad60 | 3006 | ALOGV("(%s): [Flash] Timeout IS_FLASH_ON, decision is false setting", __FUNCTION__); |
e117f756 YJ |
3007 | m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON_DONE; |
3008 | m_ctlInfo.flash.m_flashDecisionResult = false; | |
caea49e6 | 3009 | } else { |
e117f756 | 3010 | m_ctlInfo.flash.m_flashTimeOut--; |
6f19b6cf | 3011 | } |
6f19b6cf | 3012 | } |
e117f756 YJ |
3013 | break; |
3014 | case IS_FLASH_STATE_AE_AWB_LOCK_WAIT: | |
3015 | if (shot_ext->shot.dm.aa.awbMode == AA_AWBMODE_LOCKED) { | |
73f5ad60 | 3016 | ALOGV("(%s): [Flash] FLASH_AUTO_AE_AWB_LOCK_WAIT - %d", __FUNCTION__, shot_ext->shot.dm.aa.awbMode); |
e117f756 YJ |
3017 | m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_WAIT; |
3018 | } else { | |
73f5ad60 | 3019 | ALOGV("(%s): [Flash] Waiting : AA_AWBMODE_LOCKED", __FUNCTION__); |
e117f756 YJ |
3020 | } |
3021 | break; | |
3022 | case IS_FLASH_STATE_CAPTURE_WAIT: | |
3023 | if (m_ctlInfo.flash.m_flashDecisionResult) { | |
3024 | if (shot_ext->shot.dm.flash.firingStable) { | |
3025 | m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE_JPEG; | |
6f19b6cf | 3026 | } else { |
9257e29e | 3027 | if (m_ctlInfo.flash.m_flashTimeOut == 0) { |
e117f756 YJ |
3028 | ALOGE("(%s): [Flash] Wait firingStable time-out!!", __FUNCTION__); |
3029 | m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE_JPEG; | |
9257e29e | 3030 | } else { |
e117f756 | 3031 | ALOGV("(%s): [Flash] Wait firingStable - %d", __FUNCTION__, m_ctlInfo.flash.m_flashTimeOut); |
9257e29e YJ |
3032 | m_ctlInfo.flash.m_flashTimeOut--; |
3033 | } | |
6f19b6cf | 3034 | } |
e117f756 YJ |
3035 | } else { |
3036 | m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE_JPEG; | |
6f19b6cf | 3037 | } |
e117f756 | 3038 | break; |
6f19b6cf | 3039 | } |
6f19b6cf YJ |
3040 | } |
3041 | ||
cdd53a9f YJ |
3042 | void ExynosCameraHWInterface2::m_preCaptureAeState(struct camera2_shot_ext * shot_ext) |
3043 | { | |
3044 | switch (m_ctlInfo.flash.i_flashMode) { | |
3045 | case AA_AEMODE_ON: | |
3046 | // At flash off mode, capture can be done as zsl capture | |
3047 | shot_ext->shot.dm.aa.aeState = AE_STATE_CONVERGED; | |
3048 | break; | |
3049 | case AA_AEMODE_ON_AUTO_FLASH: | |
3050 | // At flash auto mode, main flash have to be done if pre-flash was done. | |
3051 | if (m_ctlInfo.flash.m_flashDecisionResult && m_ctlInfo.flash.m_afFlashDoneFlg) | |
3052 | shot_ext->shot.dm.aa.aeState = AE_STATE_FLASH_REQUIRED; | |
1422aff9 MS |
3053 | // FALLTHRU |
3054 | default: | |
cdd53a9f YJ |
3055 | break; |
3056 | } | |
3057 | } | |
3058 | ||
4a9565ae YJ |
3059 | void ExynosCameraHWInterface2::m_updateAfRegion(struct camera2_shot_ext * shot_ext) |
3060 | { | |
ca714238 SK |
3061 | shot_ext->shot.ctl.aa.afRegions[0] = currentAfRegion[0]; |
3062 | shot_ext->shot.ctl.aa.afRegions[1] = currentAfRegion[1]; | |
3063 | shot_ext->shot.ctl.aa.afRegions[2] = currentAfRegion[2]; | |
3064 | shot_ext->shot.ctl.aa.afRegions[3] = currentAfRegion[3]; | |
3065 | } | |
3066 | ||
3067 | void ExynosCameraHWInterface2::SetAfRegion(uint32_t * afRegion) | |
3068 | { | |
3069 | currentAfRegion[0] = afRegion[0]; | |
3070 | currentAfRegion[1] = afRegion[1]; | |
3071 | currentAfRegion[2] = afRegion[2]; | |
3072 | currentAfRegion[3] = afRegion[3]; | |
4a9565ae YJ |
3073 | } |
3074 | ||
8a3fc5dd | 3075 | void ExynosCameraHWInterface2::m_afTrigger(struct camera2_shot_ext * shot_ext, int mode) |
4a9565ae YJ |
3076 | { |
3077 | if (m_afState == HAL_AFSTATE_SCANNING) { | |
3078 | ALOGD("(%s): restarting trigger ", __FUNCTION__); | |
8a3fc5dd | 3079 | } else if (!mode) { |
4a9565ae YJ |
3080 | if (m_afState != HAL_AFSTATE_NEEDS_COMMAND) |
3081 | ALOGD("(%s): wrong trigger state %d", __FUNCTION__, m_afState); | |
3082 | else | |
40acdcc8 | 3083 | m_afState = HAL_AFSTATE_STARTED; |
4a9565ae | 3084 | } |
40acdcc8 | 3085 | ALOGD("### AF Triggering with mode (%d) (%d)", m_afMode, m_afState); |
4a9565ae YJ |
3086 | shot_ext->shot.ctl.aa.afTrigger = 1; |
3087 | shot_ext->shot.ctl.aa.afMode = m_afMode; | |
3088 | m_IsAfTriggerRequired = false; | |
3089 | } | |
3090 | ||
13d8c7b4 SK |
3091 | void ExynosCameraHWInterface2::m_sensorThreadFunc(SignalDrivenThread * self) |
3092 | { | |
3093 | uint32_t currentSignal = self->GetProcessingSignal(); | |
3094 | SensorThread * selfThread = ((SensorThread*)self); | |
3095 | int index; | |
ad37861e | 3096 | int index_isp; |
13d8c7b4 SK |
3097 | status_t res; |
3098 | nsecs_t frameTime; | |
3099 | int bayersOnSensor = 0, bayersOnIsp = 0; | |
ad37861e SK |
3100 | int j = 0; |
3101 | bool isCapture = false; | |
13d8c7b4 SK |
3102 | ALOGV("DEBUG(%s): m_sensorThreadFunc (%x)", __FUNCTION__, currentSignal); |
3103 | ||
3104 | if (currentSignal & SIGNAL_THREAD_RELEASE) { | |
2c872806 | 3105 | CAM_LOGD("(%s): ENTER processing SIGNAL_THREAD_RELEASE", __FUNCTION__); |
13d8c7b4 | 3106 | |
9dd63e1f | 3107 | ALOGV("(%s): calling sensor streamoff", __FUNCTION__); |
13d8c7b4 | 3108 | cam_int_streamoff(&(m_camera_info.sensor)); |
9dd63e1f | 3109 | ALOGV("(%s): calling sensor streamoff done", __FUNCTION__); |
b5237e6b SK |
3110 | |
3111 | m_camera_info.sensor.buffers = 0; | |
3112 | ALOGV("DEBUG(%s): sensor calling reqbuf 0 ", __FUNCTION__); | |
3113 | cam_int_reqbufs(&(m_camera_info.sensor)); | |
3114 | ALOGV("DEBUG(%s): sensor calling reqbuf 0 done", __FUNCTION__); | |
5506cebf | 3115 | m_camera_info.sensor.status = false; |
ad37861e | 3116 | |
9dd63e1f SK |
3117 | ALOGV("(%s): calling ISP streamoff", __FUNCTION__); |
3118 | isp_int_streamoff(&(m_camera_info.isp)); | |
3119 | ALOGV("(%s): calling ISP streamoff done", __FUNCTION__); | |
ad37861e | 3120 | |
b5237e6b SK |
3121 | m_camera_info.isp.buffers = 0; |
3122 | ALOGV("DEBUG(%s): isp calling reqbuf 0 ", __FUNCTION__); | |
3123 | cam_int_reqbufs(&(m_camera_info.isp)); | |
3124 | ALOGV("DEBUG(%s): isp calling reqbuf 0 done", __FUNCTION__); | |
3125 | ||
13d8c7b4 | 3126 | exynos_v4l2_s_ctrl(m_camera_info.sensor.fd, V4L2_CID_IS_S_STREAM, IS_DISABLE_STREAM); |
13d8c7b4 | 3127 | |
52f54308 | 3128 | m_requestManager->releaseSensorQ(); |
2adfa429 | 3129 | m_requestManager->ResetEntry(); |
ad37861e | 3130 | ALOGV("(%s): EXIT processing SIGNAL_THREAD_RELEASE", __FUNCTION__); |
13d8c7b4 SK |
3131 | selfThread->SetSignal(SIGNAL_THREAD_TERMINATE); |
3132 | return; | |
3133 | } | |
3134 | ||
3135 | if (currentSignal & SIGNAL_SENSOR_START_REQ_PROCESSING) | |
3136 | { | |
3137 | ALOGV("DEBUG(%s): SensorThread processing SIGNAL_SENSOR_START_REQ_PROCESSING", __FUNCTION__); | |
9dd63e1f | 3138 | int targetStreamIndex = 0, i=0; |
ad37861e | 3139 | int matchedFrameCnt = -1, processingReqIndex; |
13d8c7b4 | 3140 | struct camera2_shot_ext *shot_ext; |
ad37861e | 3141 | struct camera2_shot_ext *shot_ext_capture; |
0f26b20f | 3142 | bool triggered = false; |
9dd63e1f | 3143 | |
ad37861e | 3144 | /* dqbuf from sensor */ |
5506cebf | 3145 | ALOGV("Sensor DQbuf start"); |
13d8c7b4 | 3146 | index = cam_int_dqbuf(&(m_camera_info.sensor)); |
52f54308 SK |
3147 | m_requestManager->pushSensorQ(index); |
3148 | ALOGV("Sensor DQbuf done(%d)", index); | |
9dd63e1f | 3149 | shot_ext = (struct camera2_shot_ext *)(m_camera_info.sensor.buffer[index].virt.extP[1]); |
ad37861e | 3150 | |
a15b4e3f SK |
3151 | if (m_nightCaptureCnt != 0) { |
3152 | matchedFrameCnt = m_nightCaptureFrameCnt; | |
e117f756 | 3153 | } else if (m_ctlInfo.flash.m_flashCnt >= IS_FLASH_STATE_CAPTURE) { |
9a710a45 | 3154 | matchedFrameCnt = m_ctlInfo.flash.m_flashFrameCount; |
caea49e6 | 3155 | ALOGV("Skip frame, request is fixed at %d", matchedFrameCnt); |
6f19b6cf | 3156 | } else { |
a15b4e3f SK |
3157 | matchedFrameCnt = m_requestManager->FindFrameCnt(shot_ext); |
3158 | } | |
ad37861e | 3159 | |
5c88d1f2 C |
3160 | if (matchedFrameCnt == -1 && m_vdisBubbleCnt > 0) { |
3161 | matchedFrameCnt = m_vdisDupFrame; | |
3162 | } | |
5c88d1f2 | 3163 | |
a07cbd98 | 3164 | if (matchedFrameCnt != -1) { |
ce77365b | 3165 | if (m_vdisBubbleCnt == 0 || m_vdisDupFrame != matchedFrameCnt) { |
5c88d1f2 C |
3166 | frameTime = systemTime(); |
3167 | m_requestManager->RegisterTimestamp(matchedFrameCnt, &frameTime); | |
3168 | m_requestManager->UpdateIspParameters(shot_ext, matchedFrameCnt, &m_ctlInfo); | |
ce77365b HC |
3169 | } else { |
3170 | ALOGV("bubble for vids: m_vdisBubbleCnt %d, matchedFrameCnt %d", m_vdisDupFrame, matchedFrameCnt); | |
5c88d1f2 | 3171 | } |
9a710a45 | 3172 | |
40acdcc8 YJ |
3173 | // face af mode setting in case of face priority scene mode |
3174 | if (m_ctlInfo.scene.prevSceneMode != shot_ext->shot.ctl.aa.sceneMode) { | |
3175 | ALOGV("(%s): Scene mode changed (%d)", __FUNCTION__, shot_ext->shot.ctl.aa.sceneMode); | |
3176 | m_ctlInfo.scene.prevSceneMode = shot_ext->shot.ctl.aa.sceneMode; | |
3177 | } | |
3178 | ||
308291de | 3179 | m_zoomRatio = (float)m_camera2->getSensorW() / (float)shot_ext->shot.ctl.scaler.cropRegion[2]; |
e4657e32 SK |
3180 | float zoomLeft, zoomTop, zoomWidth, zoomHeight; |
3181 | int crop_x = 0, crop_y = 0, crop_w = 0, crop_h = 0; | |
3182 | ||
3183 | m_getRatioSize(m_camera2->getSensorW(), m_camera2->getSensorH(), | |
5506cebf | 3184 | m_streamThreads[0]->m_parameters.width, m_streamThreads[0]->m_parameters.height, |
e4657e32 SK |
3185 | &crop_x, &crop_y, |
3186 | &crop_w, &crop_h, | |
3187 | 0); | |
3188 | ||
5506cebf | 3189 | if (m_streamThreads[0]->m_parameters.width >= m_streamThreads[0]->m_parameters.height) { |
308291de | 3190 | zoomWidth = m_camera2->getSensorW() / m_zoomRatio; |
e4657e32 | 3191 | zoomHeight = zoomWidth * |
5506cebf | 3192 | m_streamThreads[0]->m_parameters.height / m_streamThreads[0]->m_parameters.width; |
e4657e32 | 3193 | } else { |
308291de | 3194 | zoomHeight = m_camera2->getSensorH() / m_zoomRatio; |
e4657e32 | 3195 | zoomWidth = zoomHeight * |
5506cebf | 3196 | m_streamThreads[0]->m_parameters.width / m_streamThreads[0]->m_parameters.height; |
e4657e32 SK |
3197 | } |
3198 | zoomLeft = (crop_w - zoomWidth) / 2; | |
3199 | zoomTop = (crop_h - zoomHeight) / 2; | |
3200 | ||
1422aff9 | 3201 | int32_t new_cropRegion[3] = { (int32_t)zoomLeft, (int32_t)zoomTop, (int32_t)zoomWidth }; |
e4657e32 | 3202 | |
408f6161 HC |
3203 | int cropCompensation = (new_cropRegion[0] * 2 + new_cropRegion[2]) - ALIGN(crop_w, 4); |
3204 | if (cropCompensation) | |
3205 | new_cropRegion[2] -= cropCompensation; | |
308291de | 3206 | |
e4657e32 SK |
3207 | shot_ext->shot.ctl.scaler.cropRegion[0] = new_cropRegion[0]; |
3208 | shot_ext->shot.ctl.scaler.cropRegion[1] = new_cropRegion[1]; | |
3209 | shot_ext->shot.ctl.scaler.cropRegion[2] = new_cropRegion[2]; | |
8a3fc5dd | 3210 | if (m_IsAfModeUpdateRequired && (m_ctlInfo.flash.m_precaptureTriggerId == 0)) { |
ca714238 | 3211 | ALOGD("### Applying AF Mode change(Mode %d) ", m_afMode); |
0f26b20f SK |
3212 | shot_ext->shot.ctl.aa.afMode = m_afMode; |
3213 | if (m_afMode == AA_AFMODE_CONTINUOUS_VIDEO || m_afMode == AA_AFMODE_CONTINUOUS_PICTURE) { | |
ed4ad5fe | 3214 | ALOGD("### With Automatic triger for continuous modes"); |
0f26b20f SK |
3215 | m_afState = HAL_AFSTATE_STARTED; |
3216 | shot_ext->shot.ctl.aa.afTrigger = 1; | |
3217 | triggered = true; | |
40acdcc8 YJ |
3218 | if ((m_ctlInfo.scene.prevSceneMode == AA_SCENE_MODE_UNSUPPORTED) || |
3219 | (m_ctlInfo.scene.prevSceneMode == AA_SCENE_MODE_FACE_PRIORITY)) { | |
3220 | switch (m_afMode) { | |
3221 | case AA_AFMODE_CONTINUOUS_PICTURE: | |
6bd0cd3b | 3222 | shot_ext->shot.ctl.aa.afMode = AA_AFMODE_CONTINUOUS_PICTURE; |
40acdcc8 | 3223 | ALOGD("### Face AF Mode change (Mode %d) ", shot_ext->shot.ctl.aa.afMode); |
1422aff9 MS |
3224 | // FALLTHRU |
3225 | default: | |
40acdcc8 | 3226 | break; |
40acdcc8 YJ |
3227 | } |
3228 | } | |
cdd53a9f YJ |
3229 | // reset flash result |
3230 | if (m_ctlInfo.flash.m_afFlashDoneFlg) { | |
3231 | m_ctlInfo.flash.m_flashEnableFlg = false; | |
3232 | m_ctlInfo.flash.m_afFlashDoneFlg = false; | |
3233 | m_ctlInfo.flash.m_flashDecisionResult = false; | |
3234 | m_ctlInfo.flash.m_flashCnt = 0; | |
3235 | } | |
40acdcc8 | 3236 | m_ctlInfo.af.m_afTriggerTimeOut = 1; |
0f26b20f | 3237 | } |
40acdcc8 | 3238 | |
0f26b20f | 3239 | m_IsAfModeUpdateRequired = false; |
311d52eb | 3240 | // support inifinity focus mode |
fdbaf5d2 | 3241 | if ((m_afMode == AA_AFMODE_MANUAL) && ( shot_ext->shot.ctl.lens.focusDistance == 0)) { |
311d52eb YJ |
3242 | shot_ext->shot.ctl.aa.afMode = AA_AFMODE_INFINITY; |
3243 | shot_ext->shot.ctl.aa.afTrigger = 1; | |
3244 | triggered = true; | |
3245 | } | |
0f26b20f SK |
3246 | if (m_afMode2 != NO_CHANGE) { |
3247 | enum aa_afmode tempAfMode = m_afMode2; | |
3248 | m_afMode2 = NO_CHANGE; | |
3249 | SetAfMode(tempAfMode); | |
3250 | } | |
3251 | } | |
3252 | else { | |
3253 | shot_ext->shot.ctl.aa.afMode = NO_CHANGE; | |
3254 | } | |
3255 | if (m_IsAfTriggerRequired) { | |
e117f756 | 3256 | if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) { |
4a9565ae | 3257 | // flash case |
e117f756 | 3258 | if (m_ctlInfo.flash.m_flashCnt == IS_FLASH_STATE_ON_DONE) { |
4a9565ae YJ |
3259 | if ((m_afMode != AA_AFMODE_AUTO) && (m_afMode != AA_AFMODE_MACRO)) { |
3260 | // Flash is enabled and start AF | |
8a3fc5dd | 3261 | m_afTrigger(shot_ext, 1); |
6f19b6cf | 3262 | } else { |
ca714238 | 3263 | m_afTrigger(shot_ext, 0); |
6f19b6cf | 3264 | } |
6f19b6cf YJ |
3265 | } |
3266 | } else { | |
4a9565ae | 3267 | // non-flash case |
ca714238 | 3268 | m_afTrigger(shot_ext, 0); |
9900d0c4 | 3269 | } |
4a9565ae | 3270 | } else { |
8e2c2fdb | 3271 | shot_ext->shot.ctl.aa.afTrigger = 0; |
0f26b20f | 3272 | } |
5506cebf SK |
3273 | |
3274 | if (m_wideAspect) { | |
3275 | shot_ext->setfile = ISS_SUB_SCENARIO_VIDEO; | |
5506cebf SK |
3276 | } else { |
3277 | shot_ext->setfile = ISS_SUB_SCENARIO_STILL; | |
0f26b20f SK |
3278 | } |
3279 | if (triggered) | |
3280 | shot_ext->shot.ctl.aa.afTrigger = 1; | |
3281 | ||
3282 | // TODO : check collision with AFMode Update | |
3283 | if (m_IsAfLockRequired) { | |
3284 | shot_ext->shot.ctl.aa.afMode = AA_AFMODE_OFF; | |
3285 | m_IsAfLockRequired = false; | |
3286 | } | |
4ed2f103 | 3287 | ALOGV("### Isp Qbuf start(%d) count (%d), SCP(%d) SCC(%d) DIS(%d) shot_size(%d)", |
ad37861e SK |
3288 | index, |
3289 | shot_ext->shot.ctl.request.frameCount, | |
3290 | shot_ext->request_scp, | |
3291 | shot_ext->request_scc, | |
3292 | shot_ext->dis_bypass, sizeof(camera2_shot)); | |
4a9565ae YJ |
3293 | |
3294 | // update AF region | |
3295 | m_updateAfRegion(shot_ext); | |
3296 | ||
572470e2 | 3297 | m_lastSceneMode = shot_ext->shot.ctl.aa.sceneMode; |
6bf36b60 SK |
3298 | if (shot_ext->shot.ctl.aa.sceneMode == AA_SCENE_MODE_NIGHT |
3299 | && shot_ext->shot.ctl.aa.aeMode == AA_AEMODE_LOCKED) | |
3300 | shot_ext->shot.ctl.aa.aeMode = AA_AEMODE_ON; | |
a15b4e3f | 3301 | if (m_nightCaptureCnt == 0) { |
5506cebf | 3302 | if (shot_ext->shot.ctl.aa.captureIntent == AA_CAPTURE_INTENT_STILL_CAPTURE |
a15b4e3f SK |
3303 | && shot_ext->shot.ctl.aa.sceneMode == AA_SCENE_MODE_NIGHT) { |
3304 | shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_NIGHT_CAPTURE; | |
1c5e692d | 3305 | shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 2; |
a15b4e3f SK |
3306 | shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30; |
3307 | m_nightCaptureCnt = 4; | |
3308 | m_nightCaptureFrameCnt = matchedFrameCnt; | |
3309 | shot_ext->request_scc = 0; | |
3310 | } | |
3311 | } | |
3312 | else if (m_nightCaptureCnt == 1) { | |
3313 | shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_NIGHT_CAPTURE; | |
1c5e692d SK |
3314 | shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 30; |
3315 | shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30; | |
a15b4e3f | 3316 | m_nightCaptureCnt--; |
2f4d175d | 3317 | m_nightCaptureFrameCnt = 0; |
a15b4e3f SK |
3318 | shot_ext->request_scc = 1; |
3319 | } | |
6bf36b60 SK |
3320 | else if (m_nightCaptureCnt == 2) { |
3321 | shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_NIGHT_CAPTURE; | |
1c5e692d SK |
3322 | shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 2; |
3323 | shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30; | |
6bf36b60 SK |
3324 | m_nightCaptureCnt--; |
3325 | shot_ext->request_scc = 0; | |
3326 | } | |
1c5e692d | 3327 | else if (m_nightCaptureCnt == 3) { |
a15b4e3f | 3328 | shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_NIGHT_CAPTURE; |
1c5e692d SK |
3329 | shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 2; |
3330 | shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30; | |
3331 | m_nightCaptureCnt--; | |
3332 | shot_ext->request_scc = 0; | |
3333 | } | |
3334 | else if (m_nightCaptureCnt == 4) { | |
3335 | shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_NIGHT_CAPTURE; | |
3336 | shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 2; | |
3337 | shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30; | |
a15b4e3f SK |
3338 | m_nightCaptureCnt--; |
3339 | shot_ext->request_scc = 0; | |
3340 | } | |
6f19b6cf | 3341 | |
3c17a3f7 SK |
3342 | switch (shot_ext->shot.ctl.aa.aeTargetFpsRange[1]) { |
3343 | case 15: | |
3344 | shot_ext->shot.ctl.sensor.frameDuration = (66666 * 1000); | |
3345 | break; | |
3346 | ||
3347 | case 24: | |
3348 | shot_ext->shot.ctl.sensor.frameDuration = (41666 * 1000); | |
3349 | break; | |
3350 | ||
3351 | case 25: | |
3352 | shot_ext->shot.ctl.sensor.frameDuration = (40000 * 1000); | |
3353 | break; | |
3354 | ||
3355 | case 30: | |
3356 | default: | |
3357 | shot_ext->shot.ctl.sensor.frameDuration = (33333 * 1000); | |
3358 | break; | |
3359 | } | |
3360 | shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30; | |
3361 | ||
6f19b6cf YJ |
3362 | // Flash mode |
3363 | // Keep and Skip request_scc = 1 at flash enable mode to operate flash sequence | |
e117f756 YJ |
3364 | if ((m_ctlInfo.flash.i_flashMode >= AA_AEMODE_ON_AUTO_FLASH) |
3365 | && (shot_ext->shot.ctl.aa.captureIntent == AA_CAPTURE_INTENT_STILL_CAPTURE) | |
6f19b6cf | 3366 | && (m_cameraId == 0)) { |
e117f756 YJ |
3367 | if (!m_ctlInfo.flash.m_flashDecisionResult) { |
3368 | m_ctlInfo.flash.m_flashEnableFlg = false; | |
3369 | m_ctlInfo.flash.m_afFlashDoneFlg = false; | |
3370 | m_ctlInfo.flash.m_flashCnt = 0; | |
cdd53a9f YJ |
3371 | } else if ((m_ctlInfo.flash.m_flashCnt == IS_FLASH_STATE_AUTO_DONE) || |
3372 | (m_ctlInfo.flash.m_flashCnt == IS_FLASH_STATE_AUTO_OFF)) { | |
40acdcc8 | 3373 | ALOGD("(%s): [Flash] Flash capture start : skip request scc 1#####", __FUNCTION__); |
6f19b6cf | 3374 | shot_ext->request_scc = 0; |
9a710a45 YJ |
3375 | m_ctlInfo.flash.m_flashFrameCount = matchedFrameCnt; |
3376 | m_ctlInfo.flash.m_flashEnableFlg = true; | |
e117f756 YJ |
3377 | m_ctlInfo.flash.m_afFlashDoneFlg = false; |
3378 | m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE; | |
73f5ad60 | 3379 | } else if (m_ctlInfo.flash.m_flashCnt < IS_FLASH_STATE_AUTO_DONE) { |
40acdcc8 YJ |
3380 | ALOGE("(%s): [Flash] Flash capture Error- wrong state !!!!!! (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt); |
3381 | shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_OFF; | |
3382 | m_ctlInfo.flash.m_flashEnableFlg = false; | |
40acdcc8 | 3383 | m_ctlInfo.flash.m_afFlashDoneFlg= false; |
cdd53a9f | 3384 | m_ctlInfo.flash.m_flashCnt = 0; |
caea49e6 | 3385 | } |
4a9565ae YJ |
3386 | } else if (shot_ext->shot.ctl.aa.captureIntent == AA_CAPTURE_INTENT_STILL_CAPTURE) { |
3387 | m_ctlInfo.flash.m_flashDecisionResult = false; | |
caea49e6 YJ |
3388 | } |
3389 | ||
34d2b94a SK |
3390 | if (shot_ext->shot.ctl.flash.flashMode == CAM2_FLASH_MODE_TORCH) { |
3391 | if (m_ctlInfo.flash.m_flashTorchMode == false) { | |
3392 | m_ctlInfo.flash.m_flashTorchMode = true; | |
3393 | } | |
caea49e6 | 3394 | } else { |
34d2b94a | 3395 | if (m_ctlInfo.flash.m_flashTorchMode == true) { |
caea49e6 YJ |
3396 | shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_OFF; |
3397 | shot_ext->shot.ctl.flash.firingPower = 0; | |
3398 | m_ctlInfo.flash.m_flashTorchMode = false; | |
3399 | } else { | |
3400 | shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_NOP; | |
6f19b6cf YJ |
3401 | } |
3402 | } | |
3403 | ||
5506cebf | 3404 | if (shot_ext->isReprocessing) { |
69d1e6e9 | 3405 | ALOGV("(%s): Sending signal for Reprocess request", __FUNCTION__); |
5506cebf SK |
3406 | m_currentReprocessOutStreams = shot_ext->shot.ctl.request.outputStreams[0]; |
3407 | shot_ext->request_scp = 0; | |
3408 | shot_ext->request_scc = 0; | |
3409 | m_reprocessingFrameCnt = shot_ext->shot.ctl.request.frameCount; | |
69d1e6e9 SK |
3410 | m_ctlInfo.flash.m_flashDecisionResult = false; |
3411 | memcpy(&m_jpegMetadata, (void*)(m_requestManager->GetInternalShotExtByFrameCnt(m_reprocessingFrameCnt)), | |
3412 | sizeof(struct camera2_shot_ext)); | |
5506cebf | 3413 | m_streamThreads[1]->SetSignal(SIGNAL_STREAM_REPROCESSING_START); |
cdd53a9f YJ |
3414 | m_ctlInfo.flash.m_flashEnableFlg = false; |
3415 | } | |
3416 | ||
3417 | if (m_ctlInfo.flash.m_flashEnableFlg) { | |
3418 | m_preCaptureListenerSensor(shot_ext); | |
3419 | m_preCaptureSetter(shot_ext); | |
5506cebf | 3420 | } |
fdbaf5d2 | 3421 | |
8e2c2fdb SK |
3422 | ALOGV("(%s): queued aa(%d) aemode(%d) awb(%d) afmode(%d) trigger(%d)", __FUNCTION__, |
3423 | (int)(shot_ext->shot.ctl.aa.mode), (int)(shot_ext->shot.ctl.aa.aeMode), | |
3424 | (int)(shot_ext->shot.ctl.aa.awbMode), (int)(shot_ext->shot.ctl.aa.afMode), | |
3425 | (int)(shot_ext->shot.ctl.aa.afTrigger)); | |
7d0efb59 | 3426 | |
5c88d1f2 C |
3427 | if (m_vdisBubbleCnt > 0 && m_vdisDupFrame == matchedFrameCnt) { |
3428 | shot_ext->dis_bypass = 1; | |
9c046e3a | 3429 | shot_ext->dnr_bypass = 1; |
5c88d1f2 C |
3430 | shot_ext->request_scp = 0; |
3431 | shot_ext->request_scc = 0; | |
3432 | m_vdisBubbleCnt--; | |
3433 | matchedFrameCnt = -1; | |
3434 | } else { | |
3435 | m_vdisDupFrame = matchedFrameCnt; | |
3436 | } | |
f9a06609 SK |
3437 | if (m_scpForceSuspended) |
3438 | shot_ext->request_scc = 0; | |
5c88d1f2 | 3439 | |
5506cebf | 3440 | uint32_t current_scp = shot_ext->request_scp; |
a85ec381 | 3441 | uint32_t current_scc = shot_ext->request_scc; |
7d0efb59 | 3442 | |
c0b6e17e | 3443 | if (shot_ext->shot.dm.request.frameCount == 0) { |
4aa4d739 | 3444 | CAM_LOGE("ERR(%s): dm.request.frameCount = %d", __FUNCTION__, shot_ext->shot.dm.request.frameCount); |
c0b6e17e C |
3445 | } |
3446 | ||
ad37861e | 3447 | cam_int_qbuf(&(m_camera_info.isp), index); |
13d8c7b4 | 3448 | |
ad37861e SK |
3449 | ALOGV("### isp DQBUF start"); |
3450 | index_isp = cam_int_dqbuf(&(m_camera_info.isp)); | |
ef6f83ca | 3451 | |
ad37861e | 3452 | shot_ext = (struct camera2_shot_ext *)(m_camera_info.isp.buffer[index_isp].virt.extP[1]); |
13d8c7b4 | 3453 | |
e117f756 YJ |
3454 | if (m_ctlInfo.flash.m_flashEnableFlg) |
3455 | m_preCaptureListenerISP(shot_ext); | |
9a710a45 | 3456 | |
7ef20f42 | 3457 | ALOGV("### Isp DQbuf done(%d) count (%d), SCP(%d) SCC(%d) dis_bypass(%d) dnr_bypass(%d) shot_size(%d)", |
ad37861e SK |
3458 | index, |
3459 | shot_ext->shot.ctl.request.frameCount, | |
3460 | shot_ext->request_scp, | |
3461 | shot_ext->request_scc, | |
7ef20f42 HC |
3462 | shot_ext->dis_bypass, |
3463 | shot_ext->dnr_bypass, sizeof(camera2_shot)); | |
fdbaf5d2 | 3464 | |
ef6f83ca | 3465 | ALOGV("(%s): DM aa(%d) aemode(%d) awb(%d) afmode(%d)", __FUNCTION__, |
8e2c2fdb SK |
3466 | (int)(shot_ext->shot.dm.aa.mode), (int)(shot_ext->shot.dm.aa.aeMode), |
3467 | (int)(shot_ext->shot.dm.aa.awbMode), | |
3468 | (int)(shot_ext->shot.dm.aa.afMode)); | |
13d8c7b4 | 3469 | |
2f4d175d | 3470 | #ifndef ENABLE_FRAME_SYNC |
5506cebf | 3471 | m_currentOutputStreams = shot_ext->shot.ctl.request.outputStreams[0]; |
2f4d175d | 3472 | #endif |
2adfa429 | 3473 | |
fd2d78a2 SK |
3474 | if (!shot_ext->fd_bypass) { |
3475 | /* FD orientation axis transformation */ | |
3476 | for (int i=0; i < CAMERA2_MAX_FACES; i++) { | |
3477 | if (shot_ext->shot.dm.stats.faceRectangles[i][0] > 0) | |
3478 | shot_ext->shot.dm.stats.faceRectangles[i][0] = (m_camera2->m_curCameraInfo->sensorW | |
ef6f83ca | 3479 | * shot_ext->shot.dm.stats.faceRectangles[i][0]) |
5506cebf | 3480 | / m_streamThreads[0].get()->m_parameters.width; |
fd2d78a2 SK |
3481 | if (shot_ext->shot.dm.stats.faceRectangles[i][1] > 0) |
3482 | shot_ext->shot.dm.stats.faceRectangles[i][1] = (m_camera2->m_curCameraInfo->sensorH | |
ef6f83ca | 3483 | * shot_ext->shot.dm.stats.faceRectangles[i][1]) |
5506cebf | 3484 | / m_streamThreads[0].get()->m_parameters.height; |
fd2d78a2 SK |
3485 | if (shot_ext->shot.dm.stats.faceRectangles[i][2] > 0) |
3486 | shot_ext->shot.dm.stats.faceRectangles[i][2] = (m_camera2->m_curCameraInfo->sensorW | |
ef6f83ca | 3487 | * shot_ext->shot.dm.stats.faceRectangles[i][2]) |
5506cebf | 3488 | / m_streamThreads[0].get()->m_parameters.width; |
fd2d78a2 SK |
3489 | if (shot_ext->shot.dm.stats.faceRectangles[i][3] > 0) |
3490 | shot_ext->shot.dm.stats.faceRectangles[i][3] = (m_camera2->m_curCameraInfo->sensorH | |
ef6f83ca | 3491 | * shot_ext->shot.dm.stats.faceRectangles[i][3]) |
5506cebf | 3492 | / m_streamThreads[0].get()->m_parameters.height; |
fd2d78a2 SK |
3493 | } |
3494 | } | |
cdd53a9f YJ |
3495 | // aeState control |
3496 | if (shot_ext->shot.ctl.aa.sceneMode != AA_SCENE_MODE_NIGHT) | |
3497 | m_preCaptureAeState(shot_ext); | |
47d3a1ea | 3498 | |
275c9744 | 3499 | // At scene mode face priority |
40acdcc8 YJ |
3500 | if (shot_ext->shot.dm.aa.afMode == AA_AFMODE_CONTINUOUS_PICTURE_FACE) |
3501 | shot_ext->shot.dm.aa.afMode = AA_AFMODE_CONTINUOUS_PICTURE; | |
275c9744 | 3502 | |
48728d49 | 3503 | if (matchedFrameCnt != -1 && m_nightCaptureCnt == 0 && (m_ctlInfo.flash.m_flashCnt < IS_FLASH_STATE_CAPTURE)) { |
a15b4e3f SK |
3504 | m_requestManager->ApplyDynamicMetadata(shot_ext); |
3505 | } | |
69d1e6e9 SK |
3506 | |
3507 | if (current_scc != shot_ext->request_scc) { | |
3508 | ALOGD("(%s): scc frame drop1 request_scc(%d to %d)", | |
3509 | __FUNCTION__, current_scc, shot_ext->request_scc); | |
3510 | m_requestManager->NotifyStreamOutput(shot_ext->shot.ctl.request.frameCount); | |
3511 | } | |
3512 | if (shot_ext->request_scc) { | |
3513 | ALOGV("send SIGNAL_STREAM_DATA_COMING (SCC)"); | |
3514 | if (shot_ext->shot.ctl.request.outputStreams[0] & STREAM_MASK_JPEG) { | |
3515 | if (m_ctlInfo.flash.m_flashCnt < IS_FLASH_STATE_CAPTURE) | |
3516 | memcpy(&m_jpegMetadata, (void*)(m_requestManager->GetInternalShotExtByFrameCnt(shot_ext->shot.ctl.request.frameCount)), | |
3517 | sizeof(struct camera2_shot_ext)); | |
3518 | else | |
3519 | memcpy(&m_jpegMetadata, (void*)shot_ext, sizeof(struct camera2_shot_ext)); | |
3520 | } | |
3521 | m_streamThreads[1]->SetSignal(SIGNAL_STREAM_DATA_COMING); | |
3522 | } | |
3523 | if (current_scp != shot_ext->request_scp) { | |
3524 | ALOGD("(%s): scp frame drop1 request_scp(%d to %d)", | |
3525 | __FUNCTION__, current_scp, shot_ext->request_scp); | |
3526 | m_requestManager->NotifyStreamOutput(shot_ext->shot.ctl.request.frameCount); | |
3527 | } | |
3528 | if (shot_ext->request_scp) { | |
3529 | ALOGV("send SIGNAL_STREAM_DATA_COMING (SCP)"); | |
3530 | m_streamThreads[0]->SetSignal(SIGNAL_STREAM_DATA_COMING); | |
3531 | } | |
3532 | ||
3533 | ALOGV("(%s): SCP_CLOSING check sensor(%d) scc(%d) scp(%d) ", __FUNCTION__, | |
3534 | shot_ext->request_sensor, shot_ext->request_scc, shot_ext->request_scp); | |
3535 | if (shot_ext->request_scc + shot_ext->request_scp + shot_ext->request_sensor == 0) { | |
3536 | ALOGV("(%s): SCP_CLOSING check OK ", __FUNCTION__); | |
3537 | m_scp_closed = true; | |
3538 | } | |
3539 | else | |
3540 | m_scp_closed = false; | |
3541 | ||
0f26b20f | 3542 | OnAfNotification(shot_ext->shot.dm.aa.afState); |
10e122bd SK |
3543 | OnPrecaptureMeteringNotificationISP(); |
3544 | } else { | |
8b5b8078 | 3545 | memcpy(&shot_ext->shot.ctl, &m_camera_info.dummy_shot.shot.ctl, sizeof(struct camera2_ctl)); |
10e122bd SK |
3546 | shot_ext->shot.ctl.request.frameCount = 0xfffffffe; |
3547 | shot_ext->request_sensor = 1; | |
3548 | shot_ext->dis_bypass = 1; | |
3549 | shot_ext->dnr_bypass = 1; | |
3550 | shot_ext->fd_bypass = 1; | |
3551 | shot_ext->drc_bypass = 1; | |
3552 | shot_ext->request_scc = 0; | |
3553 | shot_ext->request_scp = 0; | |
3554 | if (m_wideAspect) { | |
3555 | shot_ext->setfile = ISS_SUB_SCENARIO_VIDEO; | |
10e122bd SK |
3556 | } else { |
3557 | shot_ext->setfile = ISS_SUB_SCENARIO_STILL; | |
3558 | } | |
572470e2 SK |
3559 | shot_ext->shot.ctl.aa.sceneMode = (enum aa_scene_mode)m_lastSceneMode; |
3560 | if (shot_ext->shot.ctl.aa.sceneMode == AA_SCENE_MODE_NIGHT_CAPTURE || shot_ext->shot.ctl.aa.sceneMode == AA_SCENE_MODE_NIGHT) { | |
3561 | shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 8; | |
3562 | shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30; | |
3563 | } | |
10e122bd | 3564 | shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_OFF; |
cdd53a9f | 3565 | shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_OFF; |
10e122bd | 3566 | ALOGV("### isp QBUF start (bubble)"); |
8b5b8078 HC |
3567 | ALOGV("bubble: queued aa(%d) aemode(%d) awb(%d) afmode(%d) trigger(%d)", |
3568 | (int)(shot_ext->shot.ctl.aa.mode), (int)(shot_ext->shot.ctl.aa.aeMode), | |
3569 | (int)(shot_ext->shot.ctl.aa.awbMode), (int)(shot_ext->shot.ctl.aa.afMode), | |
3570 | (int)(shot_ext->shot.ctl.aa.afTrigger)); | |
3571 | ||
10e122bd SK |
3572 | cam_int_qbuf(&(m_camera_info.isp), index); |
3573 | ALOGV("### isp DQBUF start (bubble)"); | |
3574 | index_isp = cam_int_dqbuf(&(m_camera_info.isp)); | |
3575 | shot_ext = (struct camera2_shot_ext *)(m_camera_info.isp.buffer[index_isp].virt.extP[1]); | |
8b5b8078 HC |
3576 | ALOGV("bubble: DM aa(%d) aemode(%d) awb(%d) afmode(%d)", |
3577 | (int)(shot_ext->shot.dm.aa.mode), (int)(shot_ext->shot.dm.aa.aeMode), | |
3578 | (int)(shot_ext->shot.dm.aa.awbMode), | |
3579 | (int)(shot_ext->shot.dm.aa.afMode)); | |
3580 | ||
10e122bd | 3581 | OnAfNotification(shot_ext->shot.dm.aa.afState); |
ad37861e | 3582 | } |
13d8c7b4 | 3583 | |
52f54308 SK |
3584 | index = m_requestManager->popSensorQ(); |
3585 | if(index < 0){ | |
3586 | ALOGE("sensorQ is empty"); | |
3587 | return; | |
3588 | } | |
3589 | ||
ca714238 | 3590 | processingReqIndex = m_requestManager->MarkProcessingRequest(&(m_camera_info.sensor.buffer[index])); |
ad37861e SK |
3591 | shot_ext = (struct camera2_shot_ext *)(m_camera_info.sensor.buffer[index].virt.extP[1]); |
3592 | if (m_scp_closing || m_scp_closed) { | |
3593 | ALOGD("(%s): SCP_CLOSING(%d) SCP_CLOSED(%d)", __FUNCTION__, m_scp_closing, m_scp_closed); | |
3594 | shot_ext->request_scc = 0; | |
3595 | shot_ext->request_scp = 0; | |
3596 | shot_ext->request_sensor = 0; | |
3597 | } | |
ad37861e | 3598 | cam_int_qbuf(&(m_camera_info.sensor), index); |
52f54308 | 3599 | ALOGV("Sensor Qbuf done(%d)", index); |
c15a6b00 | 3600 | |
ef6f83ca SK |
3601 | if (!m_scp_closing |
3602 | && ((matchedFrameCnt == -1) || (processingReqIndex == -1))){ | |
da7ca692 | 3603 | ALOGV("make bubble shot: matchedFramcnt(%d) processingReqIndex(%d)", |
ef6f83ca | 3604 | matchedFrameCnt, processingReqIndex); |
ad37861e SK |
3605 | selfThread->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING); |
3606 | } | |
c15a6b00 | 3607 | } |
ad37861e SK |
3608 | return; |
3609 | } | |
9dd63e1f | 3610 | |
86646da4 SK |
3611 | void ExynosCameraHWInterface2::m_streamBufferInit(SignalDrivenThread *self) |
3612 | { | |
3613 | uint32_t currentSignal = self->GetProcessingSignal(); | |
3614 | StreamThread * selfThread = ((StreamThread*)self); | |
3615 | stream_parameters_t *selfStreamParms = &(selfThread->m_parameters); | |
5506cebf SK |
3616 | node_info_t *currentNode = selfStreamParms->node; |
3617 | substream_parameters_t *subParms; | |
86646da4 SK |
3618 | buffer_handle_t * buf = NULL; |
3619 | status_t res; | |
3620 | void *virtAddr[3]; | |
3621 | int i, j; | |
3622 | int index; | |
3623 | nsecs_t timestamp; | |
3624 | ||
3625 | if (!(selfThread->m_isBufferInit)) | |
3626 | { | |
3627 | for ( i=0 ; i < selfStreamParms->numSvcBuffers; i++) { | |
3628 | res = selfStreamParms->streamOps->dequeue_buffer(selfStreamParms->streamOps, &buf); | |
3629 | if (res != NO_ERROR || buf == NULL) { | |
3630 | ALOGE("ERR(%s): Init: unable to dequeue buffer : %d",__FUNCTION__ , res); | |
3631 | return; | |
3632 | } | |
3633 | ALOGV("DEBUG(%s): got buf(%x) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf), | |
3634 | ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts); | |
3635 | ||
5506cebf | 3636 | index = selfThread->findBufferIndex(buf); |
86646da4 SK |
3637 | if (index == -1) { |
3638 | ALOGE("ERR(%s): could not find buffer index", __FUNCTION__); | |
3639 | } | |
3640 | else { | |
3641 | ALOGV("DEBUG(%s): found buffer index[%d] - status(%d)", | |
3642 | __FUNCTION__, index, selfStreamParms->svcBufStatus[index]); | |
3643 | if (selfStreamParms->svcBufStatus[index]== REQUIRES_DQ_FROM_SVC) | |
3644 | selfStreamParms->svcBufStatus[index] = ON_DRIVER; | |
3645 | else if (selfStreamParms->svcBufStatus[index]== ON_SERVICE) | |
3646 | selfStreamParms->svcBufStatus[index] = ON_HAL; | |
3647 | else { | |
3648 | ALOGV("DBG(%s): buffer status abnormal (%d) " | |
3649 | , __FUNCTION__, selfStreamParms->svcBufStatus[index]); | |
3650 | } | |
3651 | selfStreamParms->numSvcBufsInHal++; | |
86646da4 | 3652 | } |
5506cebf | 3653 | selfStreamParms->bufIndex = 0; |
86646da4 SK |
3654 | } |
3655 | selfThread->m_isBufferInit = true; | |
3656 | } | |
5506cebf SK |
3657 | for (int i = 0 ; i < NUM_MAX_SUBSTREAM ; i++) { |
3658 | if (selfThread->m_attachedSubStreams[i].streamId == -1) | |
3659 | continue; | |
86646da4 | 3660 | |
5506cebf SK |
3661 | subParms = &m_subStreams[selfThread->m_attachedSubStreams[i].streamId]; |
3662 | if (subParms->type && subParms->needBufferInit) { | |
3663 | ALOGV("(%s): [subStream] (id:%d) Buffer Initialization numsvcbuf(%d)", | |
3664 | __FUNCTION__, selfThread->m_attachedSubStreams[i].streamId, subParms->numSvcBuffers); | |
74d78ebe SK |
3665 | int checkingIndex = 0; |
3666 | bool found = false; | |
5506cebf SK |
3667 | for ( i = 0 ; i < subParms->numSvcBuffers; i++) { |
3668 | res = subParms->streamOps->dequeue_buffer(subParms->streamOps, &buf); | |
74d78ebe SK |
3669 | if (res != NO_ERROR || buf == NULL) { |
3670 | ALOGE("ERR(%s): Init: unable to dequeue buffer : %d",__FUNCTION__ , res); | |
3671 | return; | |
3672 | } | |
5506cebf SK |
3673 | subParms->numSvcBufsInHal++; |
3674 | ALOGV("DEBUG(%s): [subStream] got buf(%x) bufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf), | |
3675 | subParms->numSvcBufsInHal, ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts); | |
74d78ebe SK |
3676 | |
3677 | if (m_grallocHal->lock(m_grallocHal, *buf, | |
5506cebf SK |
3678 | subParms->usage, 0, 0, |
3679 | subParms->width, subParms->height, virtAddr) != 0) { | |
74d78ebe SK |
3680 | ALOGE("ERR(%s): could not obtain gralloc buffer", __FUNCTION__); |
3681 | } | |
3682 | else { | |
5506cebf | 3683 | ALOGV("DEBUG(%s): [subStream] locked img buf plane0(%x) plane1(%x) plane2(%x)", |
74d78ebe SK |
3684 | __FUNCTION__, (unsigned int)virtAddr[0], (unsigned int)virtAddr[1], (unsigned int)virtAddr[2]); |
3685 | } | |
3686 | found = false; | |
5506cebf SK |
3687 | for (checkingIndex = 0; checkingIndex < subParms->numSvcBuffers ; checkingIndex++) { |
3688 | if (subParms->svcBufHandle[checkingIndex] == *buf ) { | |
74d78ebe SK |
3689 | found = true; |
3690 | break; | |
3691 | } | |
3692 | } | |
5506cebf | 3693 | ALOGV("DEBUG(%s): [subStream] found(%d) - index[%d]", __FUNCTION__, found, checkingIndex); |
74d78ebe SK |
3694 | if (!found) break; |
3695 | ||
3696 | index = checkingIndex; | |
3697 | ||
3698 | if (index == -1) { | |
3699 | ALOGV("ERR(%s): could not find buffer index", __FUNCTION__); | |
3700 | } | |
3701 | else { | |
3702 | ALOGV("DEBUG(%s): found buffer index[%d] - status(%d)", | |
5506cebf SK |
3703 | __FUNCTION__, index, subParms->svcBufStatus[index]); |
3704 | if (subParms->svcBufStatus[index]== ON_SERVICE) | |
3705 | subParms->svcBufStatus[index] = ON_HAL; | |
74d78ebe SK |
3706 | else { |
3707 | ALOGV("DBG(%s): buffer status abnormal (%d) " | |
5506cebf | 3708 | , __FUNCTION__, subParms->svcBufStatus[index]); |
74d78ebe | 3709 | } |
5506cebf | 3710 | if (*buf != subParms->svcBufHandle[index]) |
74d78ebe SK |
3711 | ALOGV("DBG(%s): different buf_handle index ", __FUNCTION__); |
3712 | else | |
3713 | ALOGV("DEBUG(%s): same buf_handle index", __FUNCTION__); | |
3714 | } | |
5506cebf | 3715 | subParms->svcBufIndex = 0; |
74d78ebe | 3716 | } |
5506cebf SK |
3717 | if (subParms->type == SUBSTREAM_TYPE_JPEG) { |
3718 | m_resizeBuf.size.extS[0] = ALIGN(subParms->width, 16) * ALIGN(subParms->height, 16) * 2; | |
3719 | m_resizeBuf.size.extS[1] = 0; | |
3720 | m_resizeBuf.size.extS[2] = 0; | |
3721 | ||
3722 | if (allocCameraMemory(m_ionCameraClient, &m_resizeBuf, 1) == -1) { | |
3723 | ALOGE("ERR(%s): Failed to allocate resize buf", __FUNCTION__); | |
3724 | } | |
3725 | } | |
3726 | if (subParms->type == SUBSTREAM_TYPE_PRVCB) { | |
3727 | m_getAlignedYUVSize(HAL_PIXEL_FORMAT_2_V4L2_PIX(subParms->internalFormat), subParms->width, | |
3728 | subParms->height, &m_previewCbBuf); | |
86646da4 | 3729 | |
5506cebf SK |
3730 | if (allocCameraMemory(m_ionCameraClient, &m_previewCbBuf, subParms->internalPlanes) == -1) { |
3731 | ALOGE("ERR(%s): Failed to allocate prvcb buf", __FUNCTION__); | |
3732 | } | |
3733 | } | |
3734 | subParms->needBufferInit= false; | |
3735 | } | |
3736 | } | |
86646da4 SK |
3737 | } |
3738 | ||
c15a6b00 JS |
3739 | void ExynosCameraHWInterface2::m_streamThreadInitialize(SignalDrivenThread * self) |
3740 | { | |
13d8c7b4 SK |
3741 | StreamThread * selfThread = ((StreamThread*)self); |
3742 | ALOGV("DEBUG(%s): ", __FUNCTION__ ); | |
3743 | memset(&(selfThread->m_parameters), 0, sizeof(stream_parameters_t)); | |
3744 | selfThread->m_isBufferInit = false; | |
5506cebf SK |
3745 | for (int i = 0 ; i < NUM_MAX_SUBSTREAM ; i++) { |
3746 | selfThread->m_attachedSubStreams[i].streamId = -1; | |
3747 | selfThread->m_attachedSubStreams[i].priority = 0; | |
3748 | } | |
c15a6b00 JS |
3749 | return; |
3750 | } | |
3751 | ||
5506cebf SK |
3752 | int ExynosCameraHWInterface2::m_runSubStreamFunc(StreamThread *selfThread, ExynosBuffer *srcImageBuf, |
3753 | int stream_id, nsecs_t frameTimeStamp) | |
c15a6b00 | 3754 | { |
5506cebf SK |
3755 | substream_parameters_t *subParms = &m_subStreams[stream_id]; |
3756 | ||
3757 | switch (stream_id) { | |
c15a6b00 | 3758 | |
5506cebf SK |
3759 | case STREAM_ID_JPEG: |
3760 | return m_jpegCreator(selfThread, srcImageBuf, frameTimeStamp); | |
c15a6b00 | 3761 | |
5506cebf SK |
3762 | case STREAM_ID_RECORD: |
3763 | return m_recordCreator(selfThread, srcImageBuf, frameTimeStamp); | |
c15a6b00 | 3764 | |
5506cebf SK |
3765 | case STREAM_ID_PRVCB: |
3766 | return m_prvcbCreator(selfThread, srcImageBuf, frameTimeStamp); | |
3767 | ||
3768 | default: | |
3769 | return 0; | |
c15a6b00 | 3770 | } |
5506cebf SK |
3771 | } |
3772 | void ExynosCameraHWInterface2::m_streamFunc_direct(SignalDrivenThread *self) | |
3773 | { | |
3774 | uint32_t currentSignal = self->GetProcessingSignal(); | |
3775 | StreamThread * selfThread = ((StreamThread*)self); | |
3776 | stream_parameters_t *selfStreamParms = &(selfThread->m_parameters); | |
3777 | node_info_t *currentNode = selfStreamParms->node; | |
3778 | int i = 0; | |
3779 | nsecs_t frameTimeStamp; | |
13d8c7b4 | 3780 | |
b55ed664 | 3781 | if (currentSignal & SIGNAL_THREAD_RELEASE) { |
5506cebf | 3782 | CAM_LOGD("(%s): [%d] START SIGNAL_THREAD_RELEASE", __FUNCTION__, selfThread->m_index); |
b55ed664 SK |
3783 | |
3784 | if (selfThread->m_isBufferInit) { | |
a8be0011 SK |
3785 | if (!(currentNode->fd == m_camera_info.capture.fd && m_camera_info.capture.status == false)) { |
3786 | ALOGV("(%s): [%d] calling streamoff (fd:%d)", __FUNCTION__, | |
5506cebf | 3787 | selfThread->m_index, currentNode->fd); |
a8be0011 SK |
3788 | if (cam_int_streamoff(currentNode) < 0 ) { |
3789 | ALOGE("ERR(%s): stream off fail", __FUNCTION__); | |
3790 | } | |
3791 | ALOGV("(%s): [%d] streamoff done and calling reqbuf 0 (fd:%d)", __FUNCTION__, | |
3792 | selfThread->m_index, currentNode->fd); | |
3793 | currentNode->buffers = 0; | |
3794 | cam_int_reqbufs(currentNode); | |
3795 | ALOGV("(%s): [%d] reqbuf 0 DONE (fd:%d)", __FUNCTION__, | |
3796 | selfThread->m_index, currentNode->fd); | |
3797 | } | |
b55ed664 SK |
3798 | } |
3799 | #ifdef ENABLE_FRAME_SYNC | |
3800 | // free metabuffers | |
5506cebf SK |
3801 | for (i = 0; i < NUM_MAX_CAMERA_BUFFERS; i++) |
3802 | if (selfStreamParms->metaBuffers[i].fd.extFd[0] != 0) { | |
b55ed664 SK |
3803 | freeCameraMemory(&(selfStreamParms->metaBuffers[i]), 1); |
3804 | selfStreamParms->metaBuffers[i].fd.extFd[0] = 0; | |
3805 | selfStreamParms->metaBuffers[i].size.extS[0] = 0; | |
3806 | } | |
3807 | #endif | |
3808 | selfThread->m_isBufferInit = false; | |
b55ed664 | 3809 | selfThread->m_releasing = false; |
5506cebf SK |
3810 | selfThread->m_activated = false; |
3811 | ALOGV("(%s): [%d] END SIGNAL_THREAD_RELEASE", __FUNCTION__, selfThread->m_index); | |
3812 | return; | |
3813 | } | |
3814 | if (currentSignal & SIGNAL_STREAM_REPROCESSING_START) { | |
3815 | status_t res; | |
3816 | buffer_handle_t * buf = NULL; | |
3817 | bool found = false; | |
3818 | ALOGV("(%s): streamthread[%d] START SIGNAL_STREAM_REPROCESSING_START", | |
3819 | __FUNCTION__, selfThread->m_index); | |
3820 | res = m_reprocessOps->acquire_buffer(m_reprocessOps, &buf); | |
3821 | if (res != NO_ERROR || buf == NULL) { | |
3822 | ALOGE("ERR(%s): [reprocess] unable to acquire_buffer : %d",__FUNCTION__ , res); | |
3823 | return; | |
3824 | } | |
3825 | const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf); | |
3826 | int checkingIndex = 0; | |
3827 | for (checkingIndex = 0; checkingIndex < selfStreamParms->numSvcBuffers ; checkingIndex++) { | |
3828 | if (priv_handle->fd == selfStreamParms->svcBuffers[checkingIndex].fd.extFd[0] ) { | |
3829 | found = true; | |
3830 | break; | |
3831 | } | |
3832 | } | |
3833 | ALOGV("DEBUG(%s): dequeued buf %x => found(%d) index(%d) ", | |
3834 | __FUNCTION__, (unsigned int)buf, found, checkingIndex); | |
b55ed664 | 3835 | |
5506cebf SK |
3836 | if (!found) return; |
3837 | ||
3838 | for (int i = 0 ; i < NUM_MAX_SUBSTREAM ; i++) { | |
3839 | if (selfThread->m_attachedSubStreams[i].streamId == -1) | |
3840 | continue; | |
3841 | ||
3842 | #ifdef ENABLE_FRAME_SYNC | |
a8be0011 | 3843 | frameTimeStamp = m_requestManager->GetTimestampByFrameCnt(m_reprocessingFrameCnt); |
5506cebf SK |
3844 | m_requestManager->NotifyStreamOutput(m_reprocessingFrameCnt); |
3845 | #else | |
3846 | frameTimeStamp = m_requestManager->GetTimestamp(m_requestManager->GetFrameIndex()); | |
3847 | #endif | |
3848 | if (m_currentReprocessOutStreams & (1<<selfThread->m_attachedSubStreams[i].streamId)) | |
3849 | m_runSubStreamFunc(selfThread, &(selfStreamParms->svcBuffers[checkingIndex]), | |
3850 | selfThread->m_attachedSubStreams[i].streamId, frameTimeStamp); | |
3851 | } | |
3852 | ||
3853 | res = m_reprocessOps->release_buffer(m_reprocessOps, buf); | |
3854 | if (res != NO_ERROR) { | |
3855 | ALOGE("ERR(%s): [reprocess] unable to release_buffer : %d",__FUNCTION__ , res); | |
3856 | return; | |
3857 | } | |
3858 | ALOGV("(%s): streamthread[%d] END SIGNAL_STREAM_REPROCESSING_START", | |
3859 | __FUNCTION__,selfThread->m_index); | |
b55ed664 SK |
3860 | |
3861 | return; | |
3862 | } | |
13d8c7b4 | 3863 | if (currentSignal & SIGNAL_STREAM_DATA_COMING) { |
c15a6b00 | 3864 | buffer_handle_t * buf = NULL; |
5506cebf | 3865 | status_t res = 0; |
c15a6b00 JS |
3866 | int i, j; |
3867 | int index; | |
ad37861e | 3868 | nsecs_t timestamp; |
5506cebf | 3869 | #ifdef ENABLE_FRAME_SYNC |
feb7df4c | 3870 | camera2_stream *frame; |
2f4d175d | 3871 | uint8_t currentOutputStreams; |
a85ec381 | 3872 | bool directOutputEnabled = false; |
5506cebf | 3873 | #endif |
c0b6e17e | 3874 | int numOfUndqbuf = 0; |
c0b6e17e | 3875 | |
5506cebf | 3876 | ALOGV("(%s): streamthread[%d] START SIGNAL_STREAM_DATA_COMING", __FUNCTION__,selfThread->m_index); |
ad37861e | 3877 | |
86646da4 | 3878 | m_streamBufferInit(self); |
c15a6b00 | 3879 | |
b5237e6b | 3880 | do { |
5506cebf SK |
3881 | ALOGV("DEBUG(%s): streamthread[%d] type(%d) DQBUF START ",__FUNCTION__, |
3882 | selfThread->m_index, selfThread->streamType); | |
b5237e6b | 3883 | |
feb7df4c | 3884 | #ifdef ENABLE_FRAME_SYNC |
5506cebf SK |
3885 | selfStreamParms->bufIndex = cam_int_dqbuf(currentNode, selfStreamParms->planes + selfStreamParms->metaPlanes); |
3886 | frame = (struct camera2_stream *)(selfStreamParms->metaBuffers[selfStreamParms->bufIndex].virt.extP[0]); | |
5506cebf | 3887 | frameTimeStamp = m_requestManager->GetTimestampByFrameCnt(frame->rcount); |
2f4d175d SK |
3888 | currentOutputStreams = m_requestManager->GetOutputStreamByFrameCnt(frame->rcount); |
3889 | ALOGV("frame count streamthread[%d] : %d, outputStream(%x)", selfThread->m_index, frame->rcount, currentOutputStreams); | |
a85ec381 SK |
3890 | if (((currentOutputStreams & STREAM_MASK_PREVIEW) && selfThread->m_index == 0)|| |
3891 | ((currentOutputStreams & STREAM_MASK_ZSL) && selfThread->m_index == 1)) { | |
3892 | directOutputEnabled = true; | |
3893 | } | |
3894 | if (!directOutputEnabled) { | |
3895 | if (!m_nightCaptureFrameCnt) | |
3896 | m_requestManager->NotifyStreamOutput(frame->rcount); | |
3897 | } | |
feb7df4c | 3898 | #else |
5506cebf SK |
3899 | selfStreamParms->bufIndex = cam_int_dqbuf(currentNode); |
3900 | frameTimeStamp = m_requestManager->GetTimestamp(m_requestManager->GetFrameIndex()) | |
feb7df4c | 3901 | #endif |
5506cebf SK |
3902 | ALOGV("DEBUG(%s): streamthread[%d] DQBUF done index(%d) sigcnt(%d)",__FUNCTION__, |
3903 | selfThread->m_index, selfStreamParms->bufIndex, m_scpOutputSignalCnt); | |
804236a7 | 3904 | |
5506cebf | 3905 | if (selfStreamParms->svcBufStatus[selfStreamParms->bufIndex] != ON_DRIVER) |
86646da4 | 3906 | ALOGV("DBG(%s): DQed buffer status abnormal (%d) ", |
5506cebf SK |
3907 | __FUNCTION__, selfStreamParms->svcBufStatus[selfStreamParms->bufIndex]); |
3908 | selfStreamParms->svcBufStatus[selfStreamParms->bufIndex] = ON_HAL; | |
b5237e6b | 3909 | |
5506cebf SK |
3910 | for (int i = 0 ; i < NUM_MAX_SUBSTREAM ; i++) { |
3911 | if (selfThread->m_attachedSubStreams[i].streamId == -1) | |
3912 | continue; | |
5506cebf | 3913 | #ifdef ENABLE_FRAME_SYNC |
2f4d175d | 3914 | if (currentOutputStreams & (1<<selfThread->m_attachedSubStreams[i].streamId)) { |
5506cebf SK |
3915 | m_runSubStreamFunc(selfThread, &(selfStreamParms->svcBuffers[selfStreamParms->bufIndex]), |
3916 | selfThread->m_attachedSubStreams[i].streamId, frameTimeStamp); | |
b5237e6b | 3917 | } |
2f4d175d SK |
3918 | #else |
3919 | if (m_currentOutputStreams & (1<<selfThread->m_attachedSubStreams[i].streamId)) { | |
3920 | m_runSubStreamFunc(selfThread, &(selfStreamParms->svcBuffers[selfStreamParms->bufIndex]), | |
3921 | selfThread->m_attachedSubStreams[i].streamId, frameTimeStamp); | |
3922 | } | |
3923 | #endif | |
86646da4 | 3924 | } |
c0b6e17e | 3925 | |
5506cebf | 3926 | if (m_requestManager->GetSkipCnt() <= 0) { |
5506cebf | 3927 | #ifdef ENABLE_FRAME_SYNC |
2f4d175d | 3928 | if ((currentOutputStreams & STREAM_MASK_PREVIEW) && selfThread->m_index == 0) { |
5506cebf | 3929 | ALOGV("** Display Preview(frameCnt:%d)", frame->rcount); |
2f4d175d SK |
3930 | res = selfStreamParms->streamOps->enqueue_buffer(selfStreamParms->streamOps, |
3931 | frameTimeStamp, | |
3932 | &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex])); | |
3933 | } | |
3934 | else if ((currentOutputStreams & STREAM_MASK_ZSL) && selfThread->m_index == 1) { | |
c48f0170 | 3935 | ALOGV("** SCC output (frameCnt:%d)", frame->rcount); |
2f4d175d SK |
3936 | res = selfStreamParms->streamOps->enqueue_buffer(selfStreamParms->streamOps, |
3937 | frameTimeStamp, | |
3938 | &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex])); | |
3939 | } | |
a85ec381 SK |
3940 | else { |
3941 | res = selfStreamParms->streamOps->cancel_buffer(selfStreamParms->streamOps, | |
3942 | &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex])); | |
3943 | ALOGV("DEBUG(%s): streamthread[%d] cancel_buffer to svc done res(%d)", __FUNCTION__, selfThread->m_index, res); | |
3944 | } | |
5506cebf | 3945 | #else |
2f4d175d | 3946 | if ((m_currentOutputStreams & STREAM_MASK_PREVIEW) && selfThread->m_index == 0) { |
5506cebf | 3947 | ALOGV("** Display Preview(frameCnt:%d)", m_requestManager->GetFrameIndex()); |
5506cebf SK |
3948 | res = selfStreamParms->streamOps->enqueue_buffer(selfStreamParms->streamOps, |
3949 | frameTimeStamp, | |
3950 | &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex])); | |
74d78ebe | 3951 | } |
5506cebf | 3952 | else if ((m_currentOutputStreams & STREAM_MASK_ZSL) && selfThread->m_index == 1) { |
5506cebf | 3953 | ALOGV("** SCC output (frameCnt:%d), last(%d)", m_requestManager->GetFrameIndex()); |
5506cebf SK |
3954 | res = selfStreamParms->streamOps->enqueue_buffer(selfStreamParms->streamOps, |
3955 | frameTimeStamp, | |
3956 | &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex])); | |
74d78ebe | 3957 | } |
2f4d175d | 3958 | #endif |
5506cebf | 3959 | ALOGV("DEBUG(%s): streamthread[%d] enqueue_buffer to svc done res(%d)", __FUNCTION__, selfThread->m_index, res); |
86646da4 SK |
3960 | } |
3961 | else { | |
3962 | res = selfStreamParms->streamOps->cancel_buffer(selfStreamParms->streamOps, | |
5506cebf SK |
3963 | &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex])); |
3964 | ALOGV("DEBUG(%s): streamthread[%d] cancel_buffer to svc done res(%d)", __FUNCTION__, selfThread->m_index, res); | |
86646da4 | 3965 | } |
2f4d175d | 3966 | #ifdef ENABLE_FRAME_SYNC |
a85ec381 SK |
3967 | if (directOutputEnabled) { |
3968 | if (!m_nightCaptureFrameCnt) | |
3969 | m_requestManager->NotifyStreamOutput(frame->rcount); | |
3970 | } | |
2f4d175d | 3971 | #endif |
86646da4 | 3972 | if (res == 0) { |
5506cebf | 3973 | selfStreamParms->svcBufStatus[selfStreamParms->bufIndex] = ON_SERVICE; |
86646da4 SK |
3974 | selfStreamParms->numSvcBufsInHal--; |
3975 | } | |
3976 | else { | |
5506cebf | 3977 | selfStreamParms->svcBufStatus[selfStreamParms->bufIndex] = ON_HAL; |
b5237e6b | 3978 | } |
86646da4 | 3979 | |
c15a6b00 | 3980 | } |
ce8e830a | 3981 | while(0); |
b5237e6b | 3982 | |
1422aff9 | 3983 | while ((selfStreamParms->numSvcBufsInHal - (selfStreamParms->numSvcBuffers - NUM_SCP_BUFFERS)) |
a85ec381 | 3984 | < selfStreamParms->minUndequedBuffer) { |
86646da4 SK |
3985 | res = selfStreamParms->streamOps->dequeue_buffer(selfStreamParms->streamOps, &buf); |
3986 | if (res != NO_ERROR || buf == NULL) { | |
a85ec381 | 3987 | ALOGV("DEBUG(%s): streamthread[%d] dequeue_buffer fail res(%d) numInHal(%d)",__FUNCTION__ , selfThread->m_index, res, selfStreamParms->numSvcBufsInHal); |
86646da4 SK |
3988 | break; |
3989 | } | |
3990 | selfStreamParms->numSvcBufsInHal++; | |
5506cebf | 3991 | ALOGV("DEBUG(%s): streamthread[%d] got buf(%x) numInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, |
86646da4 SK |
3992 | selfThread->m_index, (uint32_t)(*buf), selfStreamParms->numSvcBufsInHal, |
3993 | ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts); | |
3994 | const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf); | |
3995 | ||
3996 | bool found = false; | |
3997 | int checkingIndex = 0; | |
3998 | for (checkingIndex = 0; checkingIndex < selfStreamParms->numSvcBuffers ; checkingIndex++) { | |
3999 | if (priv_handle->fd == selfStreamParms->svcBuffers[checkingIndex].fd.extFd[0] ) { | |
4000 | found = true; | |
9dd63e1f SK |
4001 | break; |
4002 | } | |
86646da4 | 4003 | } |
86646da4 | 4004 | if (!found) break; |
5506cebf SK |
4005 | selfStreamParms->bufIndex = checkingIndex; |
4006 | if (selfStreamParms->bufIndex < selfStreamParms->numHwBuffers) { | |
86646da4 | 4007 | uint32_t plane_index = 0; |
5506cebf | 4008 | ExynosBuffer* currentBuf = &(selfStreamParms->svcBuffers[selfStreamParms->bufIndex]); |
86646da4 SK |
4009 | struct v4l2_buffer v4l2_buf; |
4010 | struct v4l2_plane planes[VIDEO_MAX_PLANES]; | |
13d8c7b4 | 4011 | |
86646da4 SK |
4012 | v4l2_buf.m.planes = planes; |
4013 | v4l2_buf.type = currentNode->type; | |
4014 | v4l2_buf.memory = currentNode->memory; | |
5506cebf | 4015 | v4l2_buf.index = selfStreamParms->bufIndex; |
86646da4 SK |
4016 | v4l2_buf.length = currentNode->planes; |
4017 | ||
5506cebf SK |
4018 | v4l2_buf.m.planes[0].m.fd = priv_handle->fd; |
4019 | v4l2_buf.m.planes[2].m.fd = priv_handle->fd1; | |
4020 | v4l2_buf.m.planes[1].m.fd = priv_handle->fd2; | |
4021 | for (plane_index=0 ; plane_index < v4l2_buf.length ; plane_index++) { | |
4022 | v4l2_buf.m.planes[plane_index].length = currentBuf->size.extS[plane_index]; | |
4023 | } | |
4024 | #ifdef ENABLE_FRAME_SYNC | |
4025 | /* add plane for metadata*/ | |
4026 | v4l2_buf.length += selfStreamParms->metaPlanes; | |
4027 | v4l2_buf.m.planes[v4l2_buf.length-1].m.fd = selfStreamParms->metaBuffers[selfStreamParms->bufIndex].fd.extFd[0]; | |
4028 | v4l2_buf.m.planes[v4l2_buf.length-1].length = selfStreamParms->metaBuffers[selfStreamParms->bufIndex].size.extS[0]; | |
4029 | #endif | |
4030 | if (exynos_v4l2_qbuf(currentNode->fd, &v4l2_buf) < 0) { | |
4031 | ALOGE("ERR(%s): streamthread[%d] exynos_v4l2_qbuf() fail", | |
4032 | __FUNCTION__, selfThread->m_index); | |
4033 | return; | |
4034 | } | |
4035 | selfStreamParms->svcBufStatus[selfStreamParms->bufIndex] = ON_DRIVER; | |
4036 | ALOGV("DEBUG(%s): streamthread[%d] QBUF done index(%d)", | |
4037 | __FUNCTION__, selfThread->m_index, selfStreamParms->bufIndex); | |
4038 | } | |
4039 | } | |
4040 | ||
4041 | ALOGV("(%s): streamthread[%d] END SIGNAL_STREAM_DATA_COMING", __FUNCTION__,selfThread->m_index); | |
4042 | } | |
4043 | return; | |
4044 | } | |
4045 | ||
4046 | void ExynosCameraHWInterface2::m_streamFunc_indirect(SignalDrivenThread *self) | |
4047 | { | |
4048 | uint32_t currentSignal = self->GetProcessingSignal(); | |
4049 | StreamThread * selfThread = ((StreamThread*)self); | |
4050 | stream_parameters_t *selfStreamParms = &(selfThread->m_parameters); | |
4051 | node_info_t *currentNode = selfStreamParms->node; | |
4052 | ||
4053 | ||
4054 | if (currentSignal & SIGNAL_THREAD_RELEASE) { | |
4055 | CAM_LOGV("(%s): [%d] START SIGNAL_THREAD_RELEASE", __FUNCTION__, selfThread->m_index); | |
4056 | ||
4057 | if (selfThread->m_isBufferInit) { | |
4058 | if (currentNode->fd == m_camera_info.capture.fd) { | |
4059 | if (m_camera_info.capture.status == true) { | |
4060 | ALOGV("DEBUG(%s): calling streamthread[%d] streamoff (fd:%d)", __FUNCTION__, | |
4061 | selfThread->m_index, currentNode->fd); | |
4062 | if (cam_int_streamoff(currentNode) < 0 ){ | |
4063 | ALOGE("ERR(%s): stream off fail", __FUNCTION__); | |
4064 | } else { | |
4065 | m_camera_info.capture.status = false; | |
4066 | } | |
4067 | } | |
4068 | } else { | |
4069 | ALOGV("DEBUG(%s): calling streamthread[%d] streamoff (fd:%d)", __FUNCTION__, | |
4070 | selfThread->m_index, currentNode->fd); | |
4071 | if (cam_int_streamoff(currentNode) < 0 ){ | |
4072 | ALOGE("ERR(%s): stream off fail", __FUNCTION__); | |
4073 | } | |
4074 | } | |
4075 | ALOGV("DEBUG(%s): calling streamthread[%d] streamoff done", __FUNCTION__, selfThread->m_index); | |
4076 | ALOGV("DEBUG(%s): calling streamthread[%d] reqbuf 0 (fd:%d)", __FUNCTION__, | |
4077 | selfThread->m_index, currentNode->fd); | |
4078 | currentNode->buffers = 0; | |
4079 | cam_int_reqbufs(currentNode); | |
4080 | ALOGV("DEBUG(%s): calling streamthread[%d] reqbuf 0 DONE(fd:%d)", __FUNCTION__, | |
4081 | selfThread->m_index, currentNode->fd); | |
4082 | } | |
4083 | ||
4084 | selfThread->m_isBufferInit = false; | |
4085 | selfThread->m_releasing = false; | |
4086 | selfThread->m_activated = false; | |
4087 | ALOGV("(%s): [%d] END SIGNAL_THREAD_RELEASE", __FUNCTION__, selfThread->m_index); | |
4088 | return; | |
4089 | } | |
4090 | ||
4091 | if (currentSignal & SIGNAL_STREAM_DATA_COMING) { | |
4092 | #ifdef ENABLE_FRAME_SYNC | |
4093 | camera2_stream *frame; | |
2f4d175d | 4094 | uint8_t currentOutputStreams; |
5506cebf SK |
4095 | #endif |
4096 | nsecs_t frameTimeStamp; | |
4097 | ||
4098 | ALOGV("DEBUG(%s): streamthread[%d] processing SIGNAL_STREAM_DATA_COMING", | |
4099 | __FUNCTION__,selfThread->m_index); | |
4100 | ||
4101 | m_streamBufferInit(self); | |
4102 | ||
ed4ad5fe | 4103 | ALOGV("DEBUG(%s): streamthread[%d] DQBUF START", __FUNCTION__, selfThread->m_index); |
5506cebf | 4104 | selfStreamParms->bufIndex = cam_int_dqbuf(currentNode); |
ed4ad5fe | 4105 | ALOGV("DEBUG(%s): streamthread[%d] DQBUF done index(%d)",__FUNCTION__, |
5506cebf SK |
4106 | selfThread->m_index, selfStreamParms->bufIndex); |
4107 | ||
4108 | #ifdef ENABLE_FRAME_SYNC | |
4109 | frame = (struct camera2_stream *)(currentNode->buffer[selfStreamParms->bufIndex].virt.extP[selfStreamParms->planes -1]); | |
5506cebf | 4110 | frameTimeStamp = m_requestManager->GetTimestampByFrameCnt(frame->rcount); |
2f4d175d SK |
4111 | currentOutputStreams = m_requestManager->GetOutputStreamByFrameCnt(frame->rcount); |
4112 | ALOGV("frame count(SCC) : %d outputStream(%x)", frame->rcount, currentOutputStreams); | |
5506cebf SK |
4113 | #else |
4114 | frameTimeStamp = m_requestManager->GetTimestamp(m_requestManager->GetFrameIndex()); | |
4115 | #endif | |
4116 | ||
4117 | for (int i = 0 ; i < NUM_MAX_SUBSTREAM ; i++) { | |
4118 | if (selfThread->m_attachedSubStreams[i].streamId == -1) | |
4119 | continue; | |
feb7df4c | 4120 | #ifdef ENABLE_FRAME_SYNC |
2f4d175d | 4121 | if (currentOutputStreams & (1<<selfThread->m_attachedSubStreams[i].streamId)) { |
5506cebf | 4122 | m_requestManager->NotifyStreamOutput(frame->rcount); |
5506cebf SK |
4123 | m_runSubStreamFunc(selfThread, &(currentNode->buffer[selfStreamParms->bufIndex]), |
4124 | selfThread->m_attachedSubStreams[i].streamId, frameTimeStamp); | |
be494d19 | 4125 | } |
2f4d175d SK |
4126 | #else |
4127 | if (m_currentOutputStreams & (1<<selfThread->m_attachedSubStreams[i].streamId)) { | |
4128 | m_runSubStreamFunc(selfThread, &(currentNode->buffer[selfStreamParms->bufIndex]), | |
4129 | selfThread->m_attachedSubStreams[i].streamId, frameTimeStamp); | |
4130 | } | |
4131 | #endif | |
be494d19 | 4132 | } |
5506cebf SK |
4133 | cam_int_qbuf(currentNode, selfStreamParms->bufIndex); |
4134 | ALOGV("DEBUG(%s): streamthread[%d] QBUF DONE", __FUNCTION__, selfThread->m_index); | |
86646da4 | 4135 | |
5506cebf SK |
4136 | |
4137 | ||
4138 | ALOGV("DEBUG(%s): streamthread[%d] processing SIGNAL_STREAM_DATA_COMING DONE", | |
4139 | __FUNCTION__, selfThread->m_index); | |
86646da4 | 4140 | } |
5506cebf SK |
4141 | |
4142 | ||
86646da4 SK |
4143 | return; |
4144 | } | |
4145 | ||
5506cebf | 4146 | void ExynosCameraHWInterface2::m_streamThreadFunc(SignalDrivenThread * self) |
86646da4 SK |
4147 | { |
4148 | uint32_t currentSignal = self->GetProcessingSignal(); | |
4149 | StreamThread * selfThread = ((StreamThread*)self); | |
4150 | stream_parameters_t *selfStreamParms = &(selfThread->m_parameters); | |
5506cebf SK |
4151 | node_info_t *currentNode = selfStreamParms->node; |
4152 | ||
4153 | ALOGV("DEBUG(%s): m_streamThreadFunc[%d] (%x)", __FUNCTION__, selfThread->m_index, currentSignal); | |
86646da4 | 4154 | |
5506cebf SK |
4155 | // Do something in Child thread handler |
4156 | // Should change function to class that inherited StreamThread class to support dynamic stream allocation | |
4157 | if (selfThread->streamType == STREAM_TYPE_DIRECT) { | |
4158 | m_streamFunc_direct(self); | |
4159 | } else if (selfThread->streamType == STREAM_TYPE_INDIRECT) { | |
4160 | m_streamFunc_indirect(self); | |
4161 | } | |
86646da4 | 4162 | |
5506cebf SK |
4163 | return; |
4164 | } | |
4165 | int ExynosCameraHWInterface2::m_jpegCreator(StreamThread *selfThread, ExynosBuffer *srcImageBuf, nsecs_t frameTimeStamp) | |
4166 | { | |
4167 | stream_parameters_t *selfStreamParms = &(selfThread->m_parameters); | |
4168 | substream_parameters_t *subParms = &m_subStreams[STREAM_ID_JPEG]; | |
4169 | status_t res; | |
4170 | ExynosRect jpegRect; | |
4171 | bool found = false; | |
de48e362 | 4172 | int srcW, srcH, srcCropX, srcCropY; |
5506cebf SK |
4173 | int pictureW, pictureH, pictureFramesize = 0; |
4174 | int pictureFormat; | |
4175 | int cropX, cropY, cropW, cropH = 0; | |
4176 | ExynosBuffer resizeBufInfo; | |
4177 | ExynosRect m_jpegPictureRect; | |
4178 | buffer_handle_t * buf = NULL; | |
c06b3290 SK |
4179 | camera2_jpeg_blob * jpegBlob = NULL; |
4180 | int jpegBufSize = 0; | |
86646da4 | 4181 | |
5506cebf SK |
4182 | ALOGV("DEBUG(%s): index(%d)",__FUNCTION__, subParms->svcBufIndex); |
4183 | for (int i = 0 ; subParms->numSvcBuffers ; i++) { | |
4184 | if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_HAL) { | |
4185 | found = true; | |
4186 | break; | |
86646da4 | 4187 | } |
5506cebf SK |
4188 | subParms->svcBufIndex++; |
4189 | if (subParms->svcBufIndex >= subParms->numSvcBuffers) | |
4190 | subParms->svcBufIndex = 0; | |
4191 | } | |
4192 | if (!found) { | |
4193 | ALOGE("(%s): cannot find free svc buffer", __FUNCTION__); | |
4194 | subParms->svcBufIndex++; | |
4195 | return 1; | |
86646da4 SK |
4196 | } |
4197 | ||
1264ab16 AR |
4198 | { |
4199 | Mutex::Autolock lock(m_jpegEncoderLock); | |
4200 | m_jpegEncodingCount++; | |
4201 | } | |
32cf9401 | 4202 | |
de48e362 SK |
4203 | m_getRatioSize(selfStreamParms->width, selfStreamParms->height, |
4204 | m_streamThreads[0]->m_parameters.width, m_streamThreads[0]->m_parameters.height, | |
4205 | &srcCropX, &srcCropY, | |
4206 | &srcW, &srcH, | |
4207 | 0); | |
4208 | ||
5506cebf SK |
4209 | m_jpegPictureRect.w = subParms->width; |
4210 | m_jpegPictureRect.h = subParms->height; | |
7d0efb59 | 4211 | |
5506cebf SK |
4212 | ALOGV("DEBUG(%s):w = %d, h = %d, w = %d, h = %d", |
4213 | __FUNCTION__, selfStreamParms->width, selfStreamParms->height, | |
4214 | m_jpegPictureRect.w, m_jpegPictureRect.h); | |
7d0efb59 | 4215 | |
de48e362 | 4216 | m_getRatioSize(srcW, srcH, |
5506cebf SK |
4217 | m_jpegPictureRect.w, m_jpegPictureRect.h, |
4218 | &cropX, &cropY, | |
4219 | &pictureW, &pictureH, | |
4220 | 0); | |
4221 | pictureFormat = V4L2_PIX_FMT_YUYV; | |
4222 | pictureFramesize = FRAME_SIZE(V4L2_PIX_2_HAL_PIXEL_FORMAT(pictureFormat), pictureW, pictureH); | |
4223 | ||
4224 | if (m_exynosPictureCSC) { | |
4225 | float zoom_w = 0, zoom_h = 0; | |
4226 | if (m_zoomRatio == 0) | |
4227 | m_zoomRatio = 1; | |
4228 | ||
4229 | if (m_jpegPictureRect.w >= m_jpegPictureRect.h) { | |
4230 | zoom_w = pictureW / m_zoomRatio; | |
4231 | zoom_h = zoom_w * m_jpegPictureRect.h / m_jpegPictureRect.w; | |
4232 | } else { | |
4233 | zoom_h = pictureH / m_zoomRatio; | |
4234 | zoom_w = zoom_h * m_jpegPictureRect.w / m_jpegPictureRect.h; | |
7d0efb59 | 4235 | } |
de48e362 SK |
4236 | cropX = (srcW - zoom_w) / 2; |
4237 | cropY = (srcH - zoom_h) / 2; | |
5506cebf SK |
4238 | cropW = zoom_w; |
4239 | cropH = zoom_h; | |
4240 | ||
4241 | ALOGV("DEBUG(%s):cropX = %d, cropY = %d, cropW = %d, cropH = %d", | |
4242 | __FUNCTION__, cropX, cropY, cropW, cropH); | |
4243 | ||
4244 | csc_set_src_format(m_exynosPictureCSC, | |
de48e362 | 4245 | ALIGN(srcW, 16), ALIGN(srcH, 16), |
5506cebf SK |
4246 | cropX, cropY, cropW, cropH, |
4247 | V4L2_PIX_2_HAL_PIXEL_FORMAT(pictureFormat), | |
4248 | 0); | |
7d0efb59 | 4249 | |
5506cebf SK |
4250 | csc_set_dst_format(m_exynosPictureCSC, |
4251 | m_jpegPictureRect.w, m_jpegPictureRect.h, | |
4252 | 0, 0, m_jpegPictureRect.w, m_jpegPictureRect.h, | |
4253 | V4L2_PIX_2_HAL_PIXEL_FORMAT(V4L2_PIX_FMT_NV16), | |
4254 | 0); | |
4255 | for (int i = 0 ; i < 3 ; i++) | |
4256 | ALOGV("DEBUG(%s): m_pictureBuf.fd.extFd[%d]=%d ", | |
4257 | __FUNCTION__, i, srcImageBuf->fd.extFd[i]); | |
4258 | csc_set_src_buffer(m_exynosPictureCSC, | |
4259 | (void **)&srcImageBuf->fd.fd); | |
7d0efb59 | 4260 | |
5506cebf SK |
4261 | csc_set_dst_buffer(m_exynosPictureCSC, |
4262 | (void **)&m_resizeBuf.fd.fd); | |
4263 | for (int i = 0 ; i < 3 ; i++) | |
4264 | ALOGV("DEBUG(%s): m_resizeBuf.virt.extP[%d]=%d m_resizeBuf.size.extS[%d]=%d", | |
4265 | __FUNCTION__, i, m_resizeBuf.fd.extFd[i], i, m_resizeBuf.size.extS[i]); | |
7d0efb59 | 4266 | |
5506cebf SK |
4267 | if (csc_convert(m_exynosPictureCSC) != 0) |
4268 | ALOGE("ERR(%s): csc_convert() fail", __FUNCTION__); | |
4269 | ||
4270 | } | |
4271 | else { | |
4272 | ALOGE("ERR(%s): m_exynosPictureCSC == NULL", __FUNCTION__); | |
7d0efb59 C |
4273 | } |
4274 | ||
5506cebf | 4275 | resizeBufInfo = m_resizeBuf; |
86646da4 | 4276 | |
5506cebf | 4277 | m_getAlignedYUVSize(V4L2_PIX_FMT_NV16, m_jpegPictureRect.w, m_jpegPictureRect.h, &m_resizeBuf); |
86646da4 | 4278 | |
5506cebf SK |
4279 | for (int i = 1; i < 3; i++) { |
4280 | if (m_resizeBuf.size.extS[i] != 0) | |
4281 | m_resizeBuf.fd.extFd[i] = m_resizeBuf.fd.extFd[i-1] + m_resizeBuf.size.extS[i-1]; | |
86646da4 | 4282 | |
5506cebf SK |
4283 | ALOGV("(%s): m_resizeBuf.size.extS[%d] = %d", __FUNCTION__, i, m_resizeBuf.size.extS[i]); |
4284 | } | |
2adfa429 | 4285 | |
5506cebf SK |
4286 | jpegRect.w = m_jpegPictureRect.w; |
4287 | jpegRect.h = m_jpegPictureRect.h; | |
4288 | jpegRect.colorFormat = V4L2_PIX_FMT_NV16; | |
86646da4 | 4289 | |
5506cebf SK |
4290 | for (int j = 0 ; j < 3 ; j++) |
4291 | ALOGV("DEBUG(%s): dest buf node fd.extFd[%d]=%d size=%d virt=%x ", | |
4292 | __FUNCTION__, j, subParms->svcBuffers[subParms->svcBufIndex].fd.extFd[j], | |
4293 | (unsigned int)subParms->svcBuffers[subParms->svcBufIndex].size.extS[j], | |
4294 | (unsigned int)subParms->svcBuffers[subParms->svcBufIndex].virt.extP[j]); | |
86646da4 | 4295 | |
c06b3290 SK |
4296 | jpegBufSize = subParms->svcBuffers[subParms->svcBufIndex].size.extS[0]; |
4297 | if (yuv2Jpeg(&m_resizeBuf, &subParms->svcBuffers[subParms->svcBufIndex], &jpegRect) == false) { | |
5506cebf | 4298 | ALOGE("ERR(%s):yuv2Jpeg() fail", __FUNCTION__); |
c06b3290 SK |
4299 | } else { |
4300 | m_resizeBuf = resizeBufInfo; | |
2adfa429 | 4301 | |
c06b3290 SK |
4302 | int jpegSize = subParms->svcBuffers[subParms->svcBufIndex].size.s; |
4303 | ALOGD("(%s): (%d x %d) jpegbuf size(%d) encoded size(%d)", __FUNCTION__, | |
4304 | m_jpegPictureRect.w, m_jpegPictureRect.h, jpegBufSize, jpegSize); | |
4305 | char * jpegBuffer = (char*)(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0]); | |
4306 | jpegBlob = (camera2_jpeg_blob*)(&jpegBuffer[jpegBufSize - sizeof(camera2_jpeg_blob)]); | |
2adfa429 | 4307 | |
c06b3290 SK |
4308 | if (jpegBuffer[jpegSize-1] == 0) |
4309 | jpegSize--; | |
4310 | jpegBlob->jpeg_size = jpegSize; | |
4311 | jpegBlob->jpeg_blob_id = CAMERA2_JPEG_BLOB_ID; | |
4312 | } | |
cf593314 | 4313 | subParms->svcBuffers[subParms->svcBufIndex].size.extS[0] = jpegBufSize; |
5506cebf | 4314 | res = subParms->streamOps->enqueue_buffer(subParms->streamOps, frameTimeStamp, &(subParms->svcBufHandle[subParms->svcBufIndex])); |
be494d19 | 4315 | |
5506cebf SK |
4316 | ALOGV("DEBUG(%s): streamthread[%d] enqueue_buffer index(%d) to svc done res(%d)", |
4317 | __FUNCTION__, selfThread->m_index, subParms->svcBufIndex, res); | |
4318 | if (res == 0) { | |
4319 | subParms->svcBufStatus[subParms->svcBufIndex] = ON_SERVICE; | |
4320 | subParms->numSvcBufsInHal--; | |
4321 | } | |
4322 | else { | |
4323 | subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL; | |
4324 | } | |
86646da4 | 4325 | |
5506cebf SK |
4326 | while (subParms->numSvcBufsInHal <= subParms->minUndequedBuffer) |
4327 | { | |
4328 | bool found = false; | |
4329 | int checkingIndex = 0; | |
86646da4 | 4330 | |
5506cebf SK |
4331 | ALOGV("DEBUG(%s): jpeg currentBuf#(%d)", __FUNCTION__ , subParms->numSvcBufsInHal); |
4332 | ||
4333 | res = subParms->streamOps->dequeue_buffer(subParms->streamOps, &buf); | |
4334 | if (res != NO_ERROR || buf == NULL) { | |
4335 | ALOGV("DEBUG(%s): jpeg stream(%d) dequeue_buffer fail res(%d)",__FUNCTION__ , selfThread->m_index, res); | |
4336 | break; | |
4337 | } | |
4338 | const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf); | |
4339 | subParms->numSvcBufsInHal ++; | |
4340 | ALOGV("DEBUG(%s): jpeg got buf(%x) numBufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf), | |
4341 | subParms->numSvcBufsInHal, ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts); | |
4342 | ||
4343 | ||
4344 | for (checkingIndex = 0; checkingIndex < subParms->numSvcBuffers ; checkingIndex++) { | |
4345 | if (priv_handle->fd == subParms->svcBuffers[checkingIndex].fd.extFd[0] ) { | |
4346 | found = true; | |
4347 | break; | |
86646da4 | 4348 | } |
5506cebf SK |
4349 | } |
4350 | ALOGV("DEBUG(%s): jpeg dequeueed_buffer found index(%d)", __FUNCTION__, found); | |
be494d19 | 4351 | |
5506cebf SK |
4352 | if (!found) { |
4353 | break; | |
4354 | } | |
4355 | ||
4356 | subParms->svcBufIndex = checkingIndex; | |
4357 | if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_SERVICE) { | |
4358 | subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL; | |
4359 | } | |
4360 | else { | |
4361 | ALOGV("DEBUG(%s): jpeg bufstatus abnormal [%d] status = %d", __FUNCTION__, | |
4362 | subParms->svcBufIndex, subParms->svcBufStatus[subParms->svcBufIndex]); | |
4363 | } | |
4364 | } | |
1264ab16 AR |
4365 | { |
4366 | Mutex::Autolock lock(m_jpegEncoderLock); | |
4367 | m_jpegEncodingCount--; | |
4368 | } | |
5506cebf SK |
4369 | return 0; |
4370 | } | |
86646da4 | 4371 | |
5506cebf SK |
4372 | int ExynosCameraHWInterface2::m_recordCreator(StreamThread *selfThread, ExynosBuffer *srcImageBuf, nsecs_t frameTimeStamp) |
4373 | { | |
4374 | stream_parameters_t *selfStreamParms = &(selfThread->m_parameters); | |
4375 | substream_parameters_t *subParms = &m_subStreams[STREAM_ID_RECORD]; | |
4376 | status_t res; | |
4377 | ExynosRect jpegRect; | |
4378 | bool found = false; | |
4379 | int cropX, cropY, cropW, cropH = 0; | |
4380 | buffer_handle_t * buf = NULL; | |
86646da4 | 4381 | |
5506cebf SK |
4382 | ALOGV("DEBUG(%s): index(%d)",__FUNCTION__, subParms->svcBufIndex); |
4383 | for (int i = 0 ; subParms->numSvcBuffers ; i++) { | |
4384 | if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_HAL) { | |
4385 | found = true; | |
4386 | break; | |
4387 | } | |
4388 | subParms->svcBufIndex++; | |
4389 | if (subParms->svcBufIndex >= subParms->numSvcBuffers) | |
4390 | subParms->svcBufIndex = 0; | |
4391 | } | |
4392 | if (!found) { | |
4393 | ALOGE("(%s): cannot find free svc buffer", __FUNCTION__); | |
4394 | subParms->svcBufIndex++; | |
4395 | return 1; | |
4396 | } | |
86646da4 | 4397 | |
5506cebf SK |
4398 | if (m_exynosVideoCSC) { |
4399 | int videoW = subParms->width, videoH = subParms->height; | |
4400 | int cropX, cropY, cropW, cropH = 0; | |
4401 | int previewW = selfStreamParms->width, previewH = selfStreamParms->height; | |
4402 | m_getRatioSize(previewW, previewH, | |
4403 | videoW, videoH, | |
4404 | &cropX, &cropY, | |
4405 | &cropW, &cropH, | |
4406 | 0); | |
86646da4 | 4407 | |
5506cebf SK |
4408 | ALOGV("DEBUG(%s):cropX = %d, cropY = %d, cropW = %d, cropH = %d", |
4409 | __FUNCTION__, cropX, cropY, cropW, cropH); | |
86646da4 | 4410 | |
5506cebf | 4411 | csc_set_src_format(m_exynosVideoCSC, |
4a3f1820 | 4412 | ALIGN(previewW, 32), previewH, |
5506cebf SK |
4413 | cropX, cropY, cropW, cropH, |
4414 | selfStreamParms->format, | |
4415 | 0); | |
86646da4 | 4416 | |
5506cebf SK |
4417 | csc_set_dst_format(m_exynosVideoCSC, |
4418 | videoW, videoH, | |
4419 | 0, 0, videoW, videoH, | |
4420 | subParms->format, | |
4421 | 1); | |
86646da4 | 4422 | |
5506cebf SK |
4423 | csc_set_src_buffer(m_exynosVideoCSC, |
4424 | (void **)&srcImageBuf->fd.fd); | |
86646da4 | 4425 | |
5506cebf SK |
4426 | csc_set_dst_buffer(m_exynosVideoCSC, |
4427 | (void **)(&(subParms->svcBuffers[subParms->svcBufIndex].fd.fd))); | |
4428 | ||
4429 | if (csc_convert(m_exynosVideoCSC) != 0) { | |
4430 | ALOGE("ERR(%s):csc_convert() fail", __FUNCTION__); | |
4431 | } | |
4432 | else { | |
4433 | ALOGV("(%s):csc_convert() SUCCESS", __FUNCTION__); | |
86646da4 | 4434 | } |
5506cebf SK |
4435 | } |
4436 | else { | |
4437 | ALOGE("ERR(%s):m_exynosVideoCSC == NULL", __FUNCTION__); | |
4438 | } | |
86646da4 | 4439 | |
5506cebf | 4440 | res = subParms->streamOps->enqueue_buffer(subParms->streamOps, frameTimeStamp, &(subParms->svcBufHandle[subParms->svcBufIndex])); |
86646da4 | 4441 | |
5506cebf SK |
4442 | ALOGV("DEBUG(%s): streamthread[%d] enqueue_buffer index(%d) to svc done res(%d)", |
4443 | __FUNCTION__, selfThread->m_index, subParms->svcBufIndex, res); | |
4444 | if (res == 0) { | |
4445 | subParms->svcBufStatus[subParms->svcBufIndex] = ON_SERVICE; | |
4446 | subParms->numSvcBufsInHal--; | |
4447 | } | |
4448 | else { | |
4449 | subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL; | |
4450 | } | |
86646da4 | 4451 | |
5506cebf SK |
4452 | while (subParms->numSvcBufsInHal <= subParms->minUndequedBuffer) |
4453 | { | |
4454 | bool found = false; | |
4455 | int checkingIndex = 0; | |
86646da4 | 4456 | |
5506cebf SK |
4457 | ALOGV("DEBUG(%s): record currentBuf#(%d)", __FUNCTION__ , subParms->numSvcBufsInHal); |
4458 | ||
4459 | res = subParms->streamOps->dequeue_buffer(subParms->streamOps, &buf); | |
4460 | if (res != NO_ERROR || buf == NULL) { | |
4461 | ALOGV("DEBUG(%s): record stream(%d) dequeue_buffer fail res(%d)",__FUNCTION__ , selfThread->m_index, res); | |
4462 | break; | |
4463 | } | |
4464 | const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf); | |
4465 | subParms->numSvcBufsInHal ++; | |
4466 | ALOGV("DEBUG(%s): record got buf(%x) numBufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf), | |
4467 | subParms->numSvcBufsInHal, ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts); | |
4468 | ||
4469 | for (checkingIndex = 0; checkingIndex < subParms->numSvcBuffers ; checkingIndex++) { | |
4470 | if (priv_handle->fd == subParms->svcBuffers[checkingIndex].fd.extFd[0] ) { | |
4471 | found = true; | |
4472 | break; | |
86646da4 | 4473 | } |
13d8c7b4 | 4474 | } |
5506cebf | 4475 | ALOGV("DEBUG(%s): record dequeueed_buffer found(%d) index = %d", __FUNCTION__, found, checkingIndex); |
86646da4 | 4476 | |
5506cebf SK |
4477 | if (!found) { |
4478 | break; | |
4479 | } | |
86646da4 | 4480 | |
5506cebf SK |
4481 | subParms->svcBufIndex = checkingIndex; |
4482 | if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_SERVICE) { | |
4483 | subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL; | |
4484 | } | |
4485 | else { | |
4486 | ALOGV("DEBUG(%s): record bufstatus abnormal [%d] status = %d", __FUNCTION__, | |
4487 | subParms->svcBufIndex, subParms->svcBufStatus[subParms->svcBufIndex]); | |
4488 | } | |
4489 | } | |
4490 | return 0; | |
86646da4 SK |
4491 | } |
4492 | ||
5506cebf | 4493 | int ExynosCameraHWInterface2::m_prvcbCreator(StreamThread *selfThread, ExynosBuffer *srcImageBuf, nsecs_t frameTimeStamp) |
86646da4 | 4494 | { |
5506cebf SK |
4495 | stream_parameters_t *selfStreamParms = &(selfThread->m_parameters); |
4496 | substream_parameters_t *subParms = &m_subStreams[STREAM_ID_PRVCB]; | |
4497 | status_t res; | |
4498 | bool found = false; | |
4499 | int cropX, cropY, cropW, cropH = 0; | |
4500 | buffer_handle_t * buf = NULL; | |
86646da4 | 4501 | |
5506cebf SK |
4502 | ALOGV("DEBUG(%s): index(%d)",__FUNCTION__, subParms->svcBufIndex); |
4503 | for (int i = 0 ; subParms->numSvcBuffers ; i++) { | |
4504 | if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_HAL) { | |
4505 | found = true; | |
4506 | break; | |
4507 | } | |
4508 | subParms->svcBufIndex++; | |
4509 | if (subParms->svcBufIndex >= subParms->numSvcBuffers) | |
4510 | subParms->svcBufIndex = 0; | |
4511 | } | |
4512 | if (!found) { | |
4513 | ALOGE("(%s): cannot find free svc buffer", __FUNCTION__); | |
4514 | subParms->svcBufIndex++; | |
4515 | return 1; | |
4516 | } | |
86646da4 | 4517 | |
5506cebf SK |
4518 | if (subParms->format == HAL_PIXEL_FORMAT_YCrCb_420_SP) { |
4519 | if (m_exynosVideoCSC) { | |
4520 | int previewCbW = subParms->width, previewCbH = subParms->height; | |
4521 | int cropX, cropY, cropW, cropH = 0; | |
4522 | int previewW = selfStreamParms->width, previewH = selfStreamParms->height; | |
4523 | m_getRatioSize(previewW, previewH, | |
4524 | previewCbW, previewCbH, | |
4525 | &cropX, &cropY, | |
4526 | &cropW, &cropH, | |
4527 | 0); | |
86646da4 | 4528 | |
5506cebf SK |
4529 | ALOGV("DEBUG(%s):cropX = %d, cropY = %d, cropW = %d, cropH = %d", |
4530 | __FUNCTION__, cropX, cropY, cropW, cropH); | |
4531 | csc_set_src_format(m_exynosVideoCSC, | |
4a3f1820 | 4532 | ALIGN(previewW, 32), previewH, |
5506cebf SK |
4533 | cropX, cropY, cropW, cropH, |
4534 | selfStreamParms->format, | |
4535 | 0); | |
86646da4 | 4536 | |
5506cebf SK |
4537 | csc_set_dst_format(m_exynosVideoCSC, |
4538 | previewCbW, previewCbH, | |
4539 | 0, 0, previewCbW, previewCbH, | |
4540 | subParms->internalFormat, | |
4541 | 1); | |
86646da4 | 4542 | |
5506cebf SK |
4543 | csc_set_src_buffer(m_exynosVideoCSC, |
4544 | (void **)&srcImageBuf->fd.fd); | |
4545 | ||
4546 | csc_set_dst_buffer(m_exynosVideoCSC, | |
4547 | (void **)(&(m_previewCbBuf.fd.fd))); | |
4548 | ||
4549 | if (csc_convert(m_exynosVideoCSC) != 0) { | |
4550 | ALOGE("ERR(%s):previewcb csc_convert() fail", __FUNCTION__); | |
4551 | } | |
4552 | else { | |
4553 | ALOGV("(%s):previewcb csc_convert() SUCCESS", __FUNCTION__); | |
4554 | } | |
4555 | if (previewCbW == ALIGN(previewCbW, 16)) { | |
4556 | memcpy(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0], | |
4557 | m_previewCbBuf.virt.extP[0], previewCbW * previewCbH); | |
4558 | memcpy(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0] + previewCbW * previewCbH, | |
4559 | m_previewCbBuf.virt.extP[1], previewCbW * previewCbH / 2 ); | |
4560 | } | |
4561 | else { | |
4562 | // TODO : copy line by line ? | |
4563 | } | |
4564 | } | |
4565 | else { | |
4566 | ALOGE("ERR(%s):m_exynosVideoCSC == NULL", __FUNCTION__); | |
4567 | } | |
86646da4 | 4568 | } |
5506cebf SK |
4569 | else if (subParms->format == HAL_PIXEL_FORMAT_YV12) { |
4570 | int previewCbW = subParms->width, previewCbH = subParms->height; | |
4571 | int stride = ALIGN(previewCbW, 16); | |
4a3f1820 | 4572 | int uv_stride = ALIGN(previewCbW/2, 16); |
0d220b42 | 4573 | int c_stride = ALIGN(stride / 2, 16); |
4a3f1820 SK |
4574 | |
4575 | if (previewCbW == ALIGN(previewCbW, 32)) { | |
4576 | memcpy(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0], | |
4577 | srcImageBuf->virt.extP[0], stride * previewCbH); | |
4578 | memcpy(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0] + stride * previewCbH, | |
4579 | srcImageBuf->virt.extP[1], c_stride * previewCbH / 2 ); | |
4580 | memcpy(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0] + (stride * previewCbH) + (c_stride * previewCbH / 2), | |
4581 | srcImageBuf->virt.extP[2], c_stride * previewCbH / 2 ); | |
4582 | } else { | |
4583 | char * dstAddr = (char *)(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0]); | |
4584 | char * srcAddr = (char *)(srcImageBuf->virt.extP[0]); | |
4585 | for (int i = 0 ; i < previewCbH ; i++) { | |
4586 | memcpy(dstAddr, srcAddr, previewCbW); | |
4587 | dstAddr += stride; | |
4588 | srcAddr += ALIGN(stride, 32); | |
4589 | } | |
4590 | dstAddr = (char *)(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0] + stride * previewCbH); | |
4591 | srcAddr = (char *)(srcImageBuf->virt.extP[1]); | |
4592 | for (int i = 0 ; i < previewCbH/2 ; i++) { | |
4593 | memcpy(dstAddr, srcAddr, previewCbW/2); | |
4594 | dstAddr += c_stride; | |
4595 | srcAddr += uv_stride; | |
4596 | } | |
4597 | srcAddr = (char *)(srcImageBuf->virt.extP[2]); | |
4598 | for (int i = 0 ; i < previewCbH/2 ; i++) { | |
4599 | memcpy(dstAddr, srcAddr, previewCbW/2); | |
4600 | dstAddr += c_stride; | |
4601 | srcAddr += uv_stride; | |
4602 | } | |
4603 | } | |
5506cebf SK |
4604 | } |
4605 | res = subParms->streamOps->enqueue_buffer(subParms->streamOps, frameTimeStamp, &(subParms->svcBufHandle[subParms->svcBufIndex])); | |
4606 | ||
4607 | ALOGV("DEBUG(%s): streamthread[%d] enqueue_buffer index(%d) to svc done res(%d)", | |
4608 | __FUNCTION__, selfThread->m_index, subParms->svcBufIndex, res); | |
4609 | if (res == 0) { | |
4610 | subParms->svcBufStatus[subParms->svcBufIndex] = ON_SERVICE; | |
4611 | subParms->numSvcBufsInHal--; | |
4612 | } | |
4613 | else { | |
4614 | subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL; | |
86646da4 SK |
4615 | } |
4616 | ||
5506cebf SK |
4617 | while (subParms->numSvcBufsInHal <= subParms->minUndequedBuffer) |
4618 | { | |
4619 | bool found = false; | |
4620 | int checkingIndex = 0; | |
86646da4 | 4621 | |
5506cebf | 4622 | ALOGV("DEBUG(%s): prvcb currentBuf#(%d)", __FUNCTION__ , subParms->numSvcBufsInHal); |
86646da4 | 4623 | |
5506cebf SK |
4624 | res = subParms->streamOps->dequeue_buffer(subParms->streamOps, &buf); |
4625 | if (res != NO_ERROR || buf == NULL) { | |
4626 | ALOGV("DEBUG(%s): prvcb stream(%d) dequeue_buffer fail res(%d)",__FUNCTION__ , selfThread->m_index, res); | |
4627 | break; | |
4628 | } | |
4629 | const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf); | |
4630 | subParms->numSvcBufsInHal ++; | |
4631 | ALOGV("DEBUG(%s): prvcb got buf(%x) numBufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf), | |
4632 | subParms->numSvcBufsInHal, ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts); | |
86646da4 | 4633 | |
86646da4 | 4634 | |
5506cebf SK |
4635 | for (checkingIndex = 0; checkingIndex < subParms->numSvcBuffers ; checkingIndex++) { |
4636 | if (priv_handle->fd == subParms->svcBuffers[checkingIndex].fd.extFd[0] ) { | |
4637 | found = true; | |
4638 | break; | |
4639 | } | |
4640 | } | |
4641 | ALOGV("DEBUG(%s): prvcb dequeueed_buffer found(%d) index = %d", __FUNCTION__, found, checkingIndex); | |
4642 | ||
4643 | if (!found) { | |
4644 | break; | |
4645 | } | |
4646 | ||
4647 | subParms->svcBufIndex = checkingIndex; | |
4648 | if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_SERVICE) { | |
4649 | subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL; | |
4650 | } | |
4651 | else { | |
4652 | ALOGV("DEBUG(%s): prvcb bufstatus abnormal [%d] status = %d", __FUNCTION__, | |
4653 | subParms->svcBufIndex, subParms->svcBufStatus[subParms->svcBufIndex]); | |
4654 | } | |
4655 | } | |
4656 | return 0; | |
c15a6b00 JS |
4657 | } |
4658 | ||
2d5e6ec2 SK |
4659 | bool ExynosCameraHWInterface2::m_checkThumbnailSize(int w, int h) |
4660 | { | |
4661 | int sizeOfSupportList; | |
4662 | ||
4663 | //REAR Camera | |
4664 | if(this->getCameraId() == 0) { | |
17071e43 | 4665 | sizeOfSupportList = sizeof(SUPPORT_THUMBNAIL_REAR_SIZE) / (sizeof(int32_t)*2); |
2d5e6ec2 SK |
4666 | |
4667 | for(int i = 0; i < sizeOfSupportList; i++) { | |
4668 | if((SUPPORT_THUMBNAIL_REAR_SIZE[i][0] == w) &&(SUPPORT_THUMBNAIL_REAR_SIZE[i][1] == h)) | |
4669 | return true; | |
4670 | } | |
4671 | ||
4672 | } | |
4673 | else { | |
17071e43 | 4674 | sizeOfSupportList = sizeof(SUPPORT_THUMBNAIL_FRONT_SIZE) / (sizeof(int32_t)*2); |
2d5e6ec2 SK |
4675 | |
4676 | for(int i = 0; i < sizeOfSupportList; i++) { | |
4677 | if((SUPPORT_THUMBNAIL_FRONT_SIZE[i][0] == w) &&(SUPPORT_THUMBNAIL_FRONT_SIZE[i][1] == h)) | |
4678 | return true; | |
4679 | } | |
4680 | } | |
4681 | ||
4682 | return false; | |
4683 | } | |
13d8c7b4 SK |
4684 | bool ExynosCameraHWInterface2::yuv2Jpeg(ExynosBuffer *yuvBuf, |
4685 | ExynosBuffer *jpegBuf, | |
4686 | ExynosRect *rect) | |
4687 | { | |
4688 | unsigned char *addr; | |
4689 | ||
4690 | ExynosJpegEncoderForCamera jpegEnc; | |
4691 | bool ret = false; | |
4692 | int res = 0; | |
4693 | ||
4694 | unsigned int *yuvSize = yuvBuf->size.extS; | |
4695 | ||
4696 | if (jpegEnc.create()) { | |
9dd63e1f | 4697 | ALOGE("ERR(%s):jpegEnc.create() fail", __FUNCTION__); |
13d8c7b4 SK |
4698 | goto jpeg_encode_done; |
4699 | } | |
4700 | ||
87423e56 | 4701 | if (jpegEnc.setQuality(m_jpegMetadata.shot.ctl.jpeg.quality)) { |
9dd63e1f | 4702 | ALOGE("ERR(%s):jpegEnc.setQuality() fail", __FUNCTION__); |
13d8c7b4 SK |
4703 | goto jpeg_encode_done; |
4704 | } | |
4705 | ||
4706 | if (jpegEnc.setSize(rect->w, rect->h)) { | |
9dd63e1f | 4707 | ALOGE("ERR(%s):jpegEnc.setSize() fail", __FUNCTION__); |
13d8c7b4 SK |
4708 | goto jpeg_encode_done; |
4709 | } | |
4710 | ALOGV("%s : width = %d , height = %d\n", __FUNCTION__, rect->w, rect->h); | |
4711 | ||
4712 | if (jpegEnc.setColorFormat(rect->colorFormat)) { | |
9dd63e1f | 4713 | ALOGE("ERR(%s):jpegEnc.setColorFormat() fail", __FUNCTION__); |
13d8c7b4 SK |
4714 | goto jpeg_encode_done; |
4715 | } | |
13d8c7b4 SK |
4716 | |
4717 | if (jpegEnc.setJpegFormat(V4L2_PIX_FMT_JPEG_422)) { | |
9dd63e1f | 4718 | ALOGE("ERR(%s):jpegEnc.setJpegFormat() fail", __FUNCTION__); |
13d8c7b4 SK |
4719 | goto jpeg_encode_done; |
4720 | } | |
13d8c7b4 | 4721 | |
48728d49 | 4722 | if((m_jpegMetadata.shot.ctl.jpeg.thumbnailSize[0] != 0) && (m_jpegMetadata.shot.ctl.jpeg.thumbnailSize[1] != 0)) { |
2d5e6ec2 | 4723 | mExifInfo.enableThumb = true; |
48728d49 | 4724 | if(!m_checkThumbnailSize(m_jpegMetadata.shot.ctl.jpeg.thumbnailSize[0], m_jpegMetadata.shot.ctl.jpeg.thumbnailSize[1])) { |
4017b08c SK |
4725 | // in the case of unsupported parameter, disable thumbnail |
4726 | mExifInfo.enableThumb = false; | |
2d5e6ec2 | 4727 | } else { |
48728d49 SK |
4728 | m_thumbNailW = m_jpegMetadata.shot.ctl.jpeg.thumbnailSize[0]; |
4729 | m_thumbNailH = m_jpegMetadata.shot.ctl.jpeg.thumbnailSize[1]; | |
2d5e6ec2 SK |
4730 | } |
4731 | ||
4732 | ALOGV("(%s) m_thumbNailW = %d, m_thumbNailH = %d", __FUNCTION__, m_thumbNailW, m_thumbNailH); | |
4733 | ||
4734 | } else { | |
4735 | mExifInfo.enableThumb = false; | |
4736 | } | |
54f4971e | 4737 | |
2d5e6ec2 SK |
4738 | if (jpegEnc.setThumbnailSize(m_thumbNailW, m_thumbNailH)) { |
4739 | ALOGE("ERR(%s):jpegEnc.setThumbnailSize(%d, %d) fail", __FUNCTION__, m_thumbNailH, m_thumbNailH); | |
54f4971e SK |
4740 | goto jpeg_encode_done; |
4741 | } | |
4742 | ||
2d5e6ec2 | 4743 | ALOGV("(%s):jpegEnc.setThumbnailSize(%d, %d) ", __FUNCTION__, m_thumbNailW, m_thumbNailW); |
87423e56 | 4744 | if (jpegEnc.setThumbnailQuality(m_jpegMetadata.shot.ctl.jpeg.thumbnailQuality)) { |
54f4971e SK |
4745 | ALOGE("ERR(%s):jpegEnc.setThumbnailQuality fail", __FUNCTION__); |
4746 | goto jpeg_encode_done; | |
4747 | } | |
13d8c7b4 | 4748 | |
54f4971e | 4749 | m_setExifChangedAttribute(&mExifInfo, rect, &m_jpegMetadata); |
9dd63e1f | 4750 | ALOGV("DEBUG(%s):calling jpegEnc.setInBuf() yuvSize(%d)", __FUNCTION__, *yuvSize); |
54f4971e | 4751 | if (jpegEnc.setInBuf((int *)&(yuvBuf->fd.fd), &(yuvBuf->virt.p), (int *)yuvSize)) { |
9dd63e1f | 4752 | ALOGE("ERR(%s):jpegEnc.setInBuf() fail", __FUNCTION__); |
13d8c7b4 SK |
4753 | goto jpeg_encode_done; |
4754 | } | |
54f4971e | 4755 | if (jpegEnc.setOutBuf(jpegBuf->fd.fd, jpegBuf->virt.p, jpegBuf->size.extS[0] + jpegBuf->size.extS[1] + jpegBuf->size.extS[2])) { |
9dd63e1f | 4756 | ALOGE("ERR(%s):jpegEnc.setOutBuf() fail", __FUNCTION__); |
13d8c7b4 SK |
4757 | goto jpeg_encode_done; |
4758 | } | |
13d8c7b4 SK |
4759 | |
4760 | if (jpegEnc.updateConfig()) { | |
9dd63e1f | 4761 | ALOGE("ERR(%s):jpegEnc.updateConfig() fail", __FUNCTION__); |
13d8c7b4 SK |
4762 | goto jpeg_encode_done; |
4763 | } | |
4764 | ||
1422aff9 | 4765 | if ((res = jpegEnc.encode((int *)&jpegBuf->size.s, &mExifInfo))) { |
9dd63e1f | 4766 | ALOGE("ERR(%s):jpegEnc.encode() fail ret(%d)", __FUNCTION__, res); |
13d8c7b4 SK |
4767 | goto jpeg_encode_done; |
4768 | } | |
4769 | ||
4770 | ret = true; | |
4771 | ||
4772 | jpeg_encode_done: | |
4773 | ||
4774 | if (jpegEnc.flagCreate() == true) | |
4775 | jpegEnc.destroy(); | |
4776 | ||
4777 | return ret; | |
4778 | } | |
4779 | ||
e117f756 YJ |
/*
 * Handles an ANDROID_CONTROL_AE_PRECAPTURE_TRIGGER from the framework.
 *
 * Records the trigger id, resets the notified AE state, and decides whether
 * a flash pre-capture sequence is needed:
 *  - Flash path: taken only when the requested AE mode includes auto-flash
 *    (i_flashMode >= AA_AEMODE_ON_AUTO_FLASH) AND this is the rear camera
 *    (m_cameraId == 0). If the flash state machine is already at
 *    AUTO_DONE/AUTO_OFF (AF flash already ran), nothing is restarted;
 *    otherwise a full flash sequence is armed (state ON, enable flag set,
 *    timeout counter cleared).
 *  - Non-flash path: flash is disabled and the state machine reset to NONE.
 * In both cases the AE precapture notification is kicked off at the end.
 *
 * id : trigger id to echo back in the precapture notification.
 */
void ExynosCameraHWInterface2::OnPrecaptureMeteringTriggerStart(int id)
{
    m_ctlInfo.flash.m_precaptureTriggerId = id;
    // Restart AE-state notification tracking for this precapture sequence.
    m_ctlInfo.ae.aeStateNoti = AE_STATE_INACTIVE;
    if ((m_ctlInfo.flash.i_flashMode >= AA_AEMODE_ON_AUTO_FLASH) && (m_cameraId == 0)) {
        // flash is required
        switch (m_ctlInfo.flash.m_flashCnt) {
        case IS_FLASH_STATE_AUTO_DONE:
        case IS_FLASH_STATE_AUTO_OFF:
            // Flash capture sequence, AF flash was executed before
            break;
        default:
            // Full flash sequence
            m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON;
            m_ctlInfo.flash.m_flashEnableFlg = true;
            m_ctlInfo.flash.m_flashTimeOut = 0;
        }
    } else {
        // Skip pre-capture in case of non-flash.
        ALOGV("[PreCap] Flash OFF mode ");
        m_ctlInfo.flash.m_flashEnableFlg = false;
        m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_NONE;
    }
    ALOGV("[PreCap] OnPrecaptureMeteringTriggerStart (ID %d) (flag : %d) (cnt : %d)", id, m_ctlInfo.flash.m_flashEnableFlg, m_ctlInfo.flash.m_flashCnt);
    OnPrecaptureMeteringNotificationSensor();
}
13d8c7b4 | 4806 | |
0f26b20f SK |
4807 | void ExynosCameraHWInterface2::OnAfTrigger(int id) |
4808 | { | |
8e2c2fdb | 4809 | m_afTriggerId = id; |
6f19b6cf | 4810 | |
0f26b20f SK |
4811 | switch (m_afMode) { |
4812 | case AA_AFMODE_AUTO: | |
4813 | case AA_AFMODE_MACRO: | |
34d2b94a | 4814 | case AA_AFMODE_MANUAL: |
73f5ad60 | 4815 | ALOGV("[AF] OnAfTrigger - AUTO,MACRO,OFF (Mode %d) ", m_afMode); |
caea49e6 YJ |
4816 | // If flash is enable, Flash operation is executed before triggering AF |
4817 | if ((m_ctlInfo.flash.i_flashMode >= AA_AEMODE_ON_AUTO_FLASH) | |
e117f756 | 4818 | && (m_ctlInfo.flash.m_flashEnableFlg == false) |
caea49e6 | 4819 | && (m_cameraId == 0)) { |
73f5ad60 | 4820 | ALOGV("[Flash] AF Flash start with Mode (%d)", m_afMode); |
e117f756 YJ |
4821 | m_ctlInfo.flash.m_flashEnableFlg = true; |
4822 | m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON; | |
caea49e6 | 4823 | m_ctlInfo.flash.m_flashDecisionResult = false; |
e117f756 | 4824 | m_ctlInfo.flash.m_afFlashDoneFlg = true; |
caea49e6 | 4825 | } |
0f26b20f SK |
4826 | OnAfTriggerAutoMacro(id); |
4827 | break; | |
4828 | case AA_AFMODE_CONTINUOUS_VIDEO: | |
73f5ad60 | 4829 | ALOGV("[AF] OnAfTrigger - AA_AFMODE_CONTINUOUS_VIDEO (Mode %d) ", m_afMode); |
0f26b20f SK |
4830 | OnAfTriggerCAFVideo(id); |
4831 | break; | |
4832 | case AA_AFMODE_CONTINUOUS_PICTURE: | |
73f5ad60 | 4833 | ALOGV("[AF] OnAfTrigger - AA_AFMODE_CONTINUOUS_PICTURE (Mode %d) ", m_afMode); |
0f26b20f SK |
4834 | OnAfTriggerCAFPicture(id); |
4835 | break; | |
8e2c2fdb | 4836 | |
34d2b94a | 4837 | case AA_AFMODE_OFF: |
0f26b20f SK |
4838 | default: |
4839 | break; | |
4840 | } | |
4841 | } | |
4842 | ||
1422aff9 | 4843 | void ExynosCameraHWInterface2::OnAfTriggerAutoMacro(int /*id*/) |
0f26b20f SK |
4844 | { |
4845 | int nextState = NO_TRANSITION; | |
0f26b20f SK |
4846 | |
4847 | switch (m_afState) { | |
4848 | case HAL_AFSTATE_INACTIVE: | |
6caa0c80 SK |
4849 | case HAL_AFSTATE_PASSIVE_FOCUSED: |
4850 | case HAL_AFSTATE_SCANNING: | |
0f26b20f SK |
4851 | nextState = HAL_AFSTATE_NEEDS_COMMAND; |
4852 | m_IsAfTriggerRequired = true; | |
4853 | break; | |
4854 | case HAL_AFSTATE_NEEDS_COMMAND: | |
4855 | nextState = NO_TRANSITION; | |
4856 | break; | |
4857 | case HAL_AFSTATE_STARTED: | |
4858 | nextState = NO_TRANSITION; | |
4859 | break; | |
0f26b20f SK |
4860 | case HAL_AFSTATE_LOCKED: |
4861 | nextState = HAL_AFSTATE_NEEDS_COMMAND; | |
4862 | m_IsAfTriggerRequired = true; | |
4863 | break; | |
4864 | case HAL_AFSTATE_FAILED: | |
4865 | nextState = HAL_AFSTATE_NEEDS_COMMAND; | |
4866 | m_IsAfTriggerRequired = true; | |
4867 | break; | |
4868 | default: | |
4869 | break; | |
4870 | } | |
4871 | ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState); | |
4872 | if (nextState != NO_TRANSITION) | |
4873 | m_afState = nextState; | |
4874 | } | |
4875 | ||
4876 | void ExynosCameraHWInterface2::OnAfTriggerCAFPicture(int id) | |
4877 | { | |
4878 | int nextState = NO_TRANSITION; | |
0f26b20f SK |
4879 | |
4880 | switch (m_afState) { | |
4881 | case HAL_AFSTATE_INACTIVE: | |
4882 | nextState = HAL_AFSTATE_FAILED; | |
4883 | SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED); | |
4884 | break; | |
4885 | case HAL_AFSTATE_NEEDS_COMMAND: | |
4886 | // not used | |
4887 | break; | |
4888 | case HAL_AFSTATE_STARTED: | |
4889 | nextState = HAL_AFSTATE_NEEDS_DETERMINATION; | |
9900d0c4 | 4890 | m_AfHwStateFailed = false; |
0f26b20f SK |
4891 | break; |
4892 | case HAL_AFSTATE_SCANNING: | |
4893 | nextState = HAL_AFSTATE_NEEDS_DETERMINATION; | |
9900d0c4 | 4894 | m_AfHwStateFailed = false; |
caea49e6 YJ |
4895 | // If flash is enable, Flash operation is executed before triggering AF |
4896 | if ((m_ctlInfo.flash.i_flashMode >= AA_AEMODE_ON_AUTO_FLASH) | |
e117f756 | 4897 | && (m_ctlInfo.flash.m_flashEnableFlg == false) |
caea49e6 | 4898 | && (m_cameraId == 0)) { |
73f5ad60 | 4899 | ALOGV("[AF Flash] AF Flash start with Mode (%d) state (%d) id (%d)", m_afMode, m_afState, id); |
e117f756 YJ |
4900 | m_ctlInfo.flash.m_flashEnableFlg = true; |
4901 | m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON; | |
caea49e6 | 4902 | m_ctlInfo.flash.m_flashDecisionResult = false; |
e117f756 | 4903 | m_ctlInfo.flash.m_afFlashDoneFlg = true; |
caea49e6 | 4904 | } |
0f26b20f SK |
4905 | break; |
4906 | case HAL_AFSTATE_NEEDS_DETERMINATION: | |
4907 | nextState = NO_TRANSITION; | |
4908 | break; | |
4909 | case HAL_AFSTATE_PASSIVE_FOCUSED: | |
4910 | m_IsAfLockRequired = true; | |
9900d0c4 | 4911 | if (m_AfHwStateFailed) { |
caea49e6 | 4912 | ALOGE("(%s): [CAF] LAST : fail", __FUNCTION__); |
9900d0c4 SK |
4913 | SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED); |
4914 | nextState = HAL_AFSTATE_FAILED; | |
4915 | } | |
4916 | else { | |
caea49e6 | 4917 | ALOGV("(%s): [CAF] LAST : success", __FUNCTION__); |
9900d0c4 SK |
4918 | SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED); |
4919 | nextState = HAL_AFSTATE_LOCKED; | |
4920 | } | |
4921 | m_AfHwStateFailed = false; | |
0f26b20f SK |
4922 | break; |
4923 | case HAL_AFSTATE_LOCKED: | |
4924 | nextState = NO_TRANSITION; | |
4925 | break; | |
4926 | case HAL_AFSTATE_FAILED: | |
4927 | nextState = NO_TRANSITION; | |
4928 | break; | |
4929 | default: | |
4930 | break; | |
4931 | } | |
4932 | ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState); | |
4933 | if (nextState != NO_TRANSITION) | |
4934 | m_afState = nextState; | |
4935 | } | |
4936 | ||
4937 | ||
1422aff9 | 4938 | void ExynosCameraHWInterface2::OnAfTriggerCAFVideo(int /*id*/) |
0f26b20f SK |
4939 | { |
4940 | int nextState = NO_TRANSITION; | |
0f26b20f SK |
4941 | |
4942 | switch (m_afState) { | |
4943 | case HAL_AFSTATE_INACTIVE: | |
4944 | nextState = HAL_AFSTATE_FAILED; | |
4945 | SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED); | |
4946 | break; | |
4947 | case HAL_AFSTATE_NEEDS_COMMAND: | |
4948 | // not used | |
4949 | break; | |
4950 | case HAL_AFSTATE_STARTED: | |
4951 | m_IsAfLockRequired = true; | |
4952 | nextState = HAL_AFSTATE_FAILED; | |
4953 | SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED); | |
4954 | break; | |
4955 | case HAL_AFSTATE_SCANNING: | |
4956 | m_IsAfLockRequired = true; | |
4957 | nextState = HAL_AFSTATE_FAILED; | |
4958 | SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED); | |
4959 | break; | |
4960 | case HAL_AFSTATE_NEEDS_DETERMINATION: | |
4961 | // not used | |
4962 | break; | |
4963 | case HAL_AFSTATE_PASSIVE_FOCUSED: | |
4964 | m_IsAfLockRequired = true; | |
4965 | SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED); | |
4966 | nextState = HAL_AFSTATE_LOCKED; | |
4967 | break; | |
4968 | case HAL_AFSTATE_LOCKED: | |
4969 | nextState = NO_TRANSITION; | |
4970 | break; | |
4971 | case HAL_AFSTATE_FAILED: | |
4972 | nextState = NO_TRANSITION; | |
4973 | break; | |
4974 | default: | |
4975 | break; | |
4976 | } | |
4977 | ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState); | |
4978 | if (nextState != NO_TRANSITION) | |
4979 | m_afState = nextState; | |
4980 | } | |
4981 | ||
73f5ad60 YJ |
4982 | void ExynosCameraHWInterface2::OnPrecaptureMeteringNotificationSensor() |
4983 | { | |
4984 | if (m_ctlInfo.flash.m_precaptureTriggerId > 0) { | |
4985 | // Just noti of pre-capture start | |
4986 | if (m_ctlInfo.ae.aeStateNoti != AE_STATE_PRECAPTURE) { | |
4987 | m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE, | |
4988 | ANDROID_CONTROL_AE_STATE_PRECAPTURE, | |
4989 | m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie); | |
4990 | ALOGV("(%s) ANDROID_CONTROL_AE_STATE_PRECAPTURE (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt); | |
4991 | m_notifyCb(CAMERA2_MSG_AUTOWB, | |
4992 | ANDROID_CONTROL_AWB_STATE_CONVERGED, | |
4993 | m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie); | |
4994 | m_ctlInfo.ae.aeStateNoti = AE_STATE_PRECAPTURE; | |
4995 | } | |
4996 | } | |
4997 | } | |
4998 | ||
/*
 * ISP-side pre-capture metering progress notification.
 * While a pre-capture trigger is outstanding (m_precaptureTriggerId > 0),
 * drives the AE/AWB state callbacks toward the framework and clears the
 * trigger id once the end (CONVERGED) notification has been delivered.
 * The exact callback ordering (AE first, then AWB) is the contract with
 * the camera service - do not reorder.
 */
void ExynosCameraHWInterface2::OnPrecaptureMeteringNotificationISP()
{
    if (m_ctlInfo.flash.m_precaptureTriggerId > 0) {
        if (m_ctlInfo.flash.m_flashEnableFlg) {
            // flash case
            switch (m_ctlInfo.flash.m_flashCnt) {
            case IS_FLASH_STATE_AUTO_DONE:
            case IS_FLASH_STATE_AUTO_OFF:
                if (m_ctlInfo.ae.aeStateNoti == AE_STATE_PRECAPTURE) {
                    // End notification: PRECAPTURE start was announced
                    // earlier, so report convergence and close the trigger.
                    m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE,
                            ANDROID_CONTROL_AE_STATE_CONVERGED,
                            m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
                    ALOGV("(%s) ANDROID_CONTROL_AE_STATE_CONVERGED (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt);
                    m_notifyCb(CAMERA2_MSG_AUTOWB,
                            ANDROID_CONTROL_AWB_STATE_CONVERGED,
                            m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
                    m_ctlInfo.flash.m_precaptureTriggerId = 0;
                } else {
                    // Start was never announced: send PRECAPTURE now and
                    // latch it so the end notification can follow later.
                    m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE,
                            ANDROID_CONTROL_AE_STATE_PRECAPTURE,
                            m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
                    ALOGV("(%s) ANDROID_CONTROL_AE_STATE_PRECAPTURE (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt);
                    m_notifyCb(CAMERA2_MSG_AUTOWB,
                            ANDROID_CONTROL_AWB_STATE_CONVERGED,
                            m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
                    m_ctlInfo.ae.aeStateNoti = AE_STATE_PRECAPTURE;
                }
                break;
            case IS_FLASH_STATE_CAPTURE:
            case IS_FLASH_STATE_CAPTURE_WAIT:
            case IS_FLASH_STATE_CAPTURE_JPEG:
            case IS_FLASH_STATE_CAPTURE_END:
                // Capture states are unexpected while pre-capture is still
                // pending: force the flash sequence back to AUTO_DONE and
                // close the trigger with CONVERGED.
                ALOGV("(%s) INVALID flash state count. (%d)", __FUNCTION__, (int)m_ctlInfo.flash.m_flashCnt);
                m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_DONE;
                m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE,
                        ANDROID_CONTROL_AE_STATE_CONVERGED,
                        m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
                m_notifyCb(CAMERA2_MSG_AUTOWB,
                        ANDROID_CONTROL_AWB_STATE_CONVERGED,
                        m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
                m_ctlInfo.flash.m_precaptureTriggerId = 0;
                break;
            // all other flash states: wait for the sequence to advance
            }
        } else {
            // non-flash case
            if (m_ctlInfo.ae.aeStateNoti == AE_STATE_PRECAPTURE) {
                // Start went out via the sensor path; finish the trigger here.
                m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE,
                        ANDROID_CONTROL_AE_STATE_CONVERGED,
                        m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
                ALOGV("(%s) ANDROID_CONTROL_AE_STATE_CONVERGED (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt);
                m_notifyCb(CAMERA2_MSG_AUTOWB,
                        ANDROID_CONTROL_AWB_STATE_CONVERGED,
                        m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
                m_ctlInfo.flash.m_precaptureTriggerId = 0;
            }
        }
    }
}
5058 | ||
0f26b20f SK |
5059 | void ExynosCameraHWInterface2::OnAfNotification(enum aa_afstate noti) |
5060 | { | |
5061 | switch (m_afMode) { | |
5062 | case AA_AFMODE_AUTO: | |
5063 | case AA_AFMODE_MACRO: | |
5064 | OnAfNotificationAutoMacro(noti); | |
5065 | break; | |
5066 | case AA_AFMODE_CONTINUOUS_VIDEO: | |
5067 | OnAfNotificationCAFVideo(noti); | |
5068 | break; | |
5069 | case AA_AFMODE_CONTINUOUS_PICTURE: | |
5070 | OnAfNotificationCAFPicture(noti); | |
5071 | break; | |
5072 | case AA_AFMODE_OFF: | |
5073 | default: | |
5074 | break; | |
5075 | } | |
5076 | } | |
5077 | ||
/*
 * ISP AF notification handler for the single-shot modes (AUTO / MACRO).
 * Maps the hardware AF state (noti) onto the HAL AF state machine and
 * reports user-visible transitions via SetAfStateForService().
 * An impossible (state, noti) pair sets bWrongTransition and leaves the
 * state untouched. nextState == NO_TRANSITION means "stay".
 */
void ExynosCameraHWInterface2::OnAfNotificationAutoMacro(enum aa_afstate noti)
{
    int nextState = NO_TRANSITION;
    bool bWrongTransition = false;

    if (m_afState == HAL_AFSTATE_INACTIVE || m_afState == HAL_AFSTATE_NEEDS_COMMAND) {
        // Before the AF command is issued, all HW notifications are ignored.
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
        case AA_AFSTATE_ACTIVE_SCAN:
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
        case AA_AFSTATE_AF_FAILED_FOCUS:
        default:
            nextState = NO_TRANSITION;
            break;
        }
    }
    else if (m_afState == HAL_AFSTATE_STARTED) {
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
            nextState = NO_TRANSITION;
            break;
        case AA_AFSTATE_ACTIVE_SCAN:
            // HW began the sweep: surface ACTIVE_SCAN to the service.
            nextState = HAL_AFSTATE_SCANNING;
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN);
            break;
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
            nextState = NO_TRANSITION;
            break;
        case AA_AFSTATE_AF_FAILED_FOCUS:
            nextState = NO_TRANSITION;
            break;
        default:
            bWrongTransition = true;
            break;
        }
    }
    else if (m_afState == HAL_AFSTATE_SCANNING) {
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
            bWrongTransition = true;
            break;
        case AA_AFSTATE_ACTIVE_SCAN:
            nextState = NO_TRANSITION;
            break;
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
            // If Flash mode is enable, after AF execute pre-capture metering
            if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
                switch (m_ctlInfo.flash.m_flashCnt) {
                case IS_FLASH_STATE_ON_DONE:
                    // AF done while flash still on: run AE/AWB lock phase,
                    // hold the AF state until the flash sequence completes.
                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_AE_AWB_LOCK;
                    nextState = NO_TRANSITION;
                    break;
                case IS_FLASH_STATE_AUTO_DONE:
                    // Flash sequence finished: turn it off and lock focus.
                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF;
                    nextState = HAL_AFSTATE_LOCKED;
                    SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
                    break;
                default:
                    nextState = NO_TRANSITION;
                }
            } else {
                nextState = HAL_AFSTATE_LOCKED;
                SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
            }
            break;
        case AA_AFSTATE_AF_FAILED_FOCUS:
            // If Flash mode is enable, after AF execute pre-capture metering
            if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
                switch (m_ctlInfo.flash.m_flashCnt) {
                case IS_FLASH_STATE_ON_DONE:
                    // Same flash handshake as the success path above.
                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_AE_AWB_LOCK;
                    nextState = NO_TRANSITION;
                    break;
                case IS_FLASH_STATE_AUTO_DONE:
                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF;
                    nextState = HAL_AFSTATE_FAILED;
                    SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
                    break;
                default:
                    nextState = NO_TRANSITION;
                }
            } else {
                nextState = HAL_AFSTATE_FAILED;
                SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
            }
            break;
        default:
            bWrongTransition = true;
            break;
        }
    }
    else if (m_afState == HAL_AFSTATE_LOCKED) {
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
        case AA_AFSTATE_ACTIVE_SCAN:
            bWrongTransition = true;
            break;
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
            nextState = NO_TRANSITION;
            break;
        case AA_AFSTATE_AF_FAILED_FOCUS:
        default:
            bWrongTransition = true;
            break;
        }
    }
    else if (m_afState == HAL_AFSTATE_FAILED) {
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
        case AA_AFSTATE_ACTIVE_SCAN:
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
            bWrongTransition = true;
            break;
        case AA_AFSTATE_AF_FAILED_FOCUS:
            nextState = NO_TRANSITION;
            break;
        default:
            bWrongTransition = true;
            break;
        }
    }
    if (bWrongTransition) {
        ALOGV("(%s): Wrong Transition state(%d) noti(%d)", __FUNCTION__, m_afState, noti);
        return;
    }
    ALOGV("(%s): State (%d) -> (%d) by (%d)", __FUNCTION__, m_afState, nextState, noti);
    if (nextState != NO_TRANSITION)
        m_afState = nextState;
}
5207 | ||
5208 | void ExynosCameraHWInterface2::OnAfNotificationCAFPicture(enum aa_afstate noti) | |
5209 | { | |
5210 | int nextState = NO_TRANSITION; | |
5211 | bool bWrongTransition = false; | |
5212 | ||
5213 | if (m_afState == HAL_AFSTATE_INACTIVE) { | |
5214 | switch (noti) { | |
5215 | case AA_AFSTATE_INACTIVE: | |
5216 | case AA_AFSTATE_ACTIVE_SCAN: | |
5217 | case AA_AFSTATE_AF_ACQUIRED_FOCUS: | |
5218 | case AA_AFSTATE_AF_FAILED_FOCUS: | |
5219 | default: | |
5220 | nextState = NO_TRANSITION; | |
5221 | break; | |
5222 | } | |
40acdcc8 YJ |
5223 | // Check AF notification after triggering |
5224 | if (m_ctlInfo.af.m_afTriggerTimeOut > 0) { | |
5225 | if (m_ctlInfo.af.m_afTriggerTimeOut > 5) { | |
5226 | ALOGE("(%s) AF notification error - try to re-trigger mode (%)", __FUNCTION__, m_afMode); | |
5227 | SetAfMode(AA_AFMODE_OFF); | |
5228 | SetAfMode(m_afMode); | |
5229 | m_ctlInfo.af.m_afTriggerTimeOut = 0; | |
5230 | } else { | |
5231 | m_ctlInfo.af.m_afTriggerTimeOut++; | |
5232 | } | |
5233 | } | |
0f26b20f SK |
5234 | } |
5235 | else if (m_afState == HAL_AFSTATE_STARTED) { | |
5236 | switch (noti) { | |
5237 | case AA_AFSTATE_INACTIVE: | |
5238 | nextState = NO_TRANSITION; | |
5239 | break; | |
5240 | case AA_AFSTATE_ACTIVE_SCAN: | |
5241 | nextState = HAL_AFSTATE_SCANNING; | |
5242 | SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN); | |
40acdcc8 | 5243 | m_ctlInfo.af.m_afTriggerTimeOut = 0; |
0f26b20f SK |
5244 | break; |
5245 | case AA_AFSTATE_AF_ACQUIRED_FOCUS: | |
5246 | nextState = HAL_AFSTATE_PASSIVE_FOCUSED; | |
5247 | SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED); | |
40acdcc8 | 5248 | m_ctlInfo.af.m_afTriggerTimeOut = 0; |
0f26b20f SK |
5249 | break; |
5250 | case AA_AFSTATE_AF_FAILED_FOCUS: | |
cd13bb78 SK |
5251 | //nextState = HAL_AFSTATE_FAILED; |
5252 | //SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED); | |
5253 | nextState = NO_TRANSITION; | |
0f26b20f SK |
5254 | break; |
5255 | default: | |
5256 | bWrongTransition = true; | |
5257 | break; | |
5258 | } | |
5259 | } | |
5260 | else if (m_afState == HAL_AFSTATE_SCANNING) { | |
5261 | switch (noti) { | |
5262 | case AA_AFSTATE_INACTIVE: | |
9900d0c4 | 5263 | nextState = NO_TRANSITION; |
0f26b20f SK |
5264 | break; |
5265 | case AA_AFSTATE_ACTIVE_SCAN: | |
5266 | nextState = NO_TRANSITION; | |
9900d0c4 | 5267 | m_AfHwStateFailed = false; |
0f26b20f SK |
5268 | break; |
5269 | case AA_AFSTATE_AF_ACQUIRED_FOCUS: | |
5270 | nextState = HAL_AFSTATE_PASSIVE_FOCUSED; | |
9900d0c4 | 5271 | m_AfHwStateFailed = false; |
0f26b20f SK |
5272 | SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED); |
5273 | break; | |
5274 | case AA_AFSTATE_AF_FAILED_FOCUS: | |
9900d0c4 SK |
5275 | nextState = HAL_AFSTATE_PASSIVE_FOCUSED; |
5276 | m_AfHwStateFailed = true; | |
5277 | SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED); | |
0f26b20f SK |
5278 | break; |
5279 | default: | |
5280 | bWrongTransition = true; | |
5281 | break; | |
5282 | } | |
5283 | } | |
5284 | else if (m_afState == HAL_AFSTATE_PASSIVE_FOCUSED) { | |
5285 | switch (noti) { | |
5286 | case AA_AFSTATE_INACTIVE: | |
9900d0c4 | 5287 | nextState = NO_TRANSITION; |
0f26b20f SK |
5288 | break; |
5289 | case AA_AFSTATE_ACTIVE_SCAN: | |
5290 | nextState = HAL_AFSTATE_SCANNING; | |
9900d0c4 | 5291 | m_AfHwStateFailed = false; |
0f26b20f SK |
5292 | SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN); |
5293 | break; | |
5294 | case AA_AFSTATE_AF_ACQUIRED_FOCUS: | |
5295 | nextState = NO_TRANSITION; | |
9900d0c4 | 5296 | m_AfHwStateFailed = false; |
0f26b20f SK |
5297 | break; |
5298 | case AA_AFSTATE_AF_FAILED_FOCUS: | |
9900d0c4 SK |
5299 | nextState = NO_TRANSITION; |
5300 | m_AfHwStateFailed = true; | |
0f26b20f SK |
5301 | break; |
5302 | default: | |
5303 | bWrongTransition = true; | |
5304 | break; | |
5305 | } | |
5306 | } | |
5307 | else if (m_afState == HAL_AFSTATE_NEEDS_DETERMINATION) { | |
f7f8d321 YJ |
5308 | //Skip notification in case of flash, wait the end of flash on |
5309 | if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) { | |
5310 | if (m_ctlInfo.flash.m_flashCnt < IS_FLASH_STATE_ON_DONE) | |
5311 | return; | |
5312 | } | |
0f26b20f SK |
5313 | switch (noti) { |
5314 | case AA_AFSTATE_INACTIVE: | |
9900d0c4 | 5315 | nextState = NO_TRANSITION; |
0f26b20f SK |
5316 | break; |
5317 | case AA_AFSTATE_ACTIVE_SCAN: | |
5318 | nextState = NO_TRANSITION; | |
5319 | break; | |
5320 | case AA_AFSTATE_AF_ACQUIRED_FOCUS: | |
caea49e6 | 5321 | // If Flash mode is enable, after AF execute pre-capture metering |
e117f756 | 5322 | if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) { |
e117f756 YJ |
5323 | switch (m_ctlInfo.flash.m_flashCnt) { |
5324 | case IS_FLASH_STATE_ON_DONE: | |
f7f8d321 | 5325 | ALOGV("[AF Flash] AUTO start with Mode (%d) state (%d) noti (%d)", m_afMode, m_afState, (int)noti); |
e117f756 | 5326 | m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_AE_AWB_LOCK; |
4a9565ae | 5327 | nextState = NO_TRANSITION; |
e117f756 YJ |
5328 | break; |
5329 | case IS_FLASH_STATE_AUTO_DONE: | |
f7f8d321 | 5330 | ALOGV("[AF Flash] AUTO end with Mode (%d) state (%d) noti (%d)", m_afMode, m_afState, (int)noti); |
4a9565ae | 5331 | m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF; |
caea49e6 YJ |
5332 | m_IsAfLockRequired = true; |
5333 | nextState = HAL_AFSTATE_LOCKED; | |
5334 | SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED); | |
e117f756 YJ |
5335 | break; |
5336 | default: | |
caea49e6 YJ |
5337 | nextState = NO_TRANSITION; |
5338 | } | |
5339 | } else { | |
5340 | m_IsAfLockRequired = true; | |
5341 | nextState = HAL_AFSTATE_LOCKED; | |
5342 | SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED); | |
5343 | } | |
0f26b20f SK |
5344 | break; |
5345 | case AA_AFSTATE_AF_FAILED_FOCUS: | |
caea49e6 | 5346 | // If Flash mode is enable, after AF execute pre-capture metering |
e117f756 | 5347 | if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) { |
e117f756 YJ |
5348 | switch (m_ctlInfo.flash.m_flashCnt) { |
5349 | case IS_FLASH_STATE_ON_DONE: | |
f7f8d321 | 5350 | ALOGV("[AF Flash] AUTO start with Mode (%d) state (%d) noti (%d)", m_afMode, m_afState, (int)noti); |
e117f756 | 5351 | m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_AE_AWB_LOCK; |
4a9565ae | 5352 | nextState = NO_TRANSITION; |
e117f756 YJ |
5353 | break; |
5354 | case IS_FLASH_STATE_AUTO_DONE: | |
f7f8d321 | 5355 | ALOGV("[AF Flash] AUTO end with Mode (%d) state (%d) noti (%d)", m_afMode, m_afState, (int)noti); |
4a9565ae | 5356 | m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF; |
caea49e6 YJ |
5357 | m_IsAfLockRequired = true; |
5358 | nextState = HAL_AFSTATE_FAILED; | |
5359 | SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED); | |
e117f756 YJ |
5360 | break; |
5361 | default: | |
caea49e6 YJ |
5362 | nextState = NO_TRANSITION; |
5363 | } | |
5364 | } else { | |
5365 | m_IsAfLockRequired = true; | |
5366 | nextState = HAL_AFSTATE_FAILED; | |
5367 | SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED); | |
5368 | } | |
0f26b20f SK |
5369 | break; |
5370 | default: | |
5371 | bWrongTransition = true; | |
5372 | break; | |
5373 | } | |
5374 | } | |
5375 | else if (m_afState == HAL_AFSTATE_LOCKED) { | |
5376 | switch (noti) { | |
5377 | case AA_AFSTATE_INACTIVE: | |
5378 | nextState = NO_TRANSITION; | |
5379 | break; | |
5380 | case AA_AFSTATE_ACTIVE_SCAN: | |
5381 | bWrongTransition = true; | |
5382 | break; | |
5383 | case AA_AFSTATE_AF_ACQUIRED_FOCUS: | |
5384 | nextState = NO_TRANSITION; | |
5385 | break; | |
5386 | case AA_AFSTATE_AF_FAILED_FOCUS: | |
5387 | default: | |
5388 | bWrongTransition = true; | |
5389 | break; | |
5390 | } | |
5391 | } | |
5392 | else if (m_afState == HAL_AFSTATE_FAILED) { | |
5393 | switch (noti) { | |
5394 | case AA_AFSTATE_INACTIVE: | |
9900d0c4 SK |
5395 | bWrongTransition = true; |
5396 | break; | |
0f26b20f | 5397 | case AA_AFSTATE_ACTIVE_SCAN: |
9900d0c4 SK |
5398 | nextState = HAL_AFSTATE_SCANNING; |
5399 | break; | |
0f26b20f SK |
5400 | case AA_AFSTATE_AF_ACQUIRED_FOCUS: |
5401 | bWrongTransition = true; | |
5402 | break; | |
5403 | case AA_AFSTATE_AF_FAILED_FOCUS: | |
5404 | nextState = NO_TRANSITION; | |
5405 | break; | |
5406 | default: | |
5407 | bWrongTransition = true; | |
5408 | break; | |
5409 | } | |
5410 | } | |
5411 | if (bWrongTransition) { | |
5412 | ALOGV("(%s): Wrong Transition state(%d) noti(%d)", __FUNCTION__, m_afState, noti); | |
5413 | return; | |
5414 | } | |
5415 | ALOGV("(%s): State (%d) -> (%d) by (%d)", __FUNCTION__, m_afState, nextState, noti); | |
5416 | if (nextState != NO_TRANSITION) | |
5417 | m_afState = nextState; | |
5418 | } | |
5419 | ||
/*
 * ISP AF notification handler for AA_AFMODE_CONTINUOUS_VIDEO.
 * Maps the hardware AF state (noti) onto the HAL AF state machine and
 * reports user-visible transitions via SetAfStateForService().
 * Impossible (state, noti) pairs set bWrongTransition and leave the
 * state untouched. nextState == NO_TRANSITION means "stay".
 */
void ExynosCameraHWInterface2::OnAfNotificationCAFVideo(enum aa_afstate noti)
{
    int nextState = NO_TRANSITION;
    bool bWrongTransition = false;

    if (m_afState == HAL_AFSTATE_INACTIVE) {
        // Notifications do not move the machine out of INACTIVE.
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
        case AA_AFSTATE_ACTIVE_SCAN:
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
        case AA_AFSTATE_AF_FAILED_FOCUS:
        default:
            nextState = NO_TRANSITION;
            break;
        }
    }
    else if (m_afState == HAL_AFSTATE_STARTED) {
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
            nextState = NO_TRANSITION;
            break;
        case AA_AFSTATE_ACTIVE_SCAN:
            // Sweep confirmed by hardware.
            nextState = HAL_AFSTATE_SCANNING;
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN);
            break;
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
            nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
            break;
        case AA_AFSTATE_AF_FAILED_FOCUS:
            nextState = HAL_AFSTATE_FAILED;
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
            break;
        default:
            bWrongTransition = true;
            break;
        }
    }
    else if (m_afState == HAL_AFSTATE_SCANNING) {
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
            bWrongTransition = true;
            break;
        case AA_AFSTATE_ACTIVE_SCAN:
            nextState = NO_TRANSITION;
            break;
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
            nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
            break;
        case AA_AFSTATE_AF_FAILED_FOCUS:
            // Keep scanning; video CAF does not fail mid-sweep.
            nextState = NO_TRANSITION;
            break;
        default:
            bWrongTransition = true;
            break;
        }
    }
    else if (m_afState == HAL_AFSTATE_PASSIVE_FOCUSED) {
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
            bWrongTransition = true;
            break;
        case AA_AFSTATE_ACTIVE_SCAN:
            // CAF restarted a sweep on its own.
            nextState = HAL_AFSTATE_SCANNING;
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN);
            break;
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
            nextState = NO_TRANSITION;
            break;
        case AA_AFSTATE_AF_FAILED_FOCUS:
            nextState = HAL_AFSTATE_FAILED;
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
            // TODO : needs NO_TRANSITION ?
            break;
        default:
            bWrongTransition = true;
            break;
        }
    }
    else if (m_afState == HAL_AFSTATE_NEEDS_DETERMINATION) {
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
            bWrongTransition = true;
            break;
        case AA_AFSTATE_ACTIVE_SCAN:
            nextState = NO_TRANSITION;
            break;
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
            // Trigger resolved with focus: lock the lens and report success.
            m_IsAfLockRequired = true;
            nextState = HAL_AFSTATE_LOCKED;
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
            break;
        case AA_AFSTATE_AF_FAILED_FOCUS:
            nextState = HAL_AFSTATE_FAILED;
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
            break;
        default:
            bWrongTransition = true;
            break;
        }
    }
    else if (m_afState == HAL_AFSTATE_LOCKED) {
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
            nextState = NO_TRANSITION;
            break;
        case AA_AFSTATE_ACTIVE_SCAN:
            bWrongTransition = true;
            break;
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
            nextState = NO_TRANSITION;
            break;
        case AA_AFSTATE_AF_FAILED_FOCUS:
        default:
            bWrongTransition = true;
            break;
        }
    }
    else if (m_afState == HAL_AFSTATE_FAILED) {
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
        case AA_AFSTATE_ACTIVE_SCAN:
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
            bWrongTransition = true;
            break;
        case AA_AFSTATE_AF_FAILED_FOCUS:
            nextState = NO_TRANSITION;
            break;
        default:
            bWrongTransition = true;
            break;
        }
    }
    if (bWrongTransition) {
        ALOGV("(%s): Wrong Transition state(%d) noti(%d)", __FUNCTION__, m_afState, noti);
        return;
    }
    ALOGV("(%s): State (%d) -> (%d) by (%d)", __FUNCTION__, m_afState, nextState, noti);
    if (nextState != NO_TRANSITION)
        m_afState = nextState;
}
5562 | ||
5563 | void ExynosCameraHWInterface2::OnAfCancel(int id) | |
5564 | { | |
8e2c2fdb SK |
5565 | m_afTriggerId = id; |
5566 | ||
0f26b20f SK |
5567 | switch (m_afMode) { |
5568 | case AA_AFMODE_AUTO: | |
5569 | case AA_AFMODE_MACRO: | |
8e2c2fdb | 5570 | case AA_AFMODE_OFF: |
c48f0170 | 5571 | case AA_AFMODE_MANUAL: |
0f26b20f SK |
5572 | OnAfCancelAutoMacro(id); |
5573 | break; | |
5574 | case AA_AFMODE_CONTINUOUS_VIDEO: | |
5575 | OnAfCancelCAFVideo(id); | |
5576 | break; | |
5577 | case AA_AFMODE_CONTINUOUS_PICTURE: | |
5578 | OnAfCancelCAFPicture(id); | |
5579 | break; | |
0f26b20f SK |
5580 | default: |
5581 | break; | |
5582 | } | |
5583 | } | |
5584 | ||
/*
 * Cancels a one-shot (auto/macro) AF sequence.
 * Turns the focus hardware off, reports INACTIVE to the service, and
 * resets the HAL AF state machine to INACTIVE.
 */
void ExynosCameraHWInterface2::OnAfCancelAutoMacro(int /*id*/)
{
    int nextState = NO_TRANSITION;

    // If an AF-assist flash sequence already completed, switch the flash
    // state machine to its auto-off path before cancelling.
    if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
        m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF;
    }
    switch (m_afState) {
    case HAL_AFSTATE_INACTIVE:
        // Nothing in flight; just (re-)announce INACTIVE to the service.
        nextState = NO_TRANSITION;
        SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE);
        break;
    case HAL_AFSTATE_NEEDS_COMMAND:
    case HAL_AFSTATE_STARTED:
    case HAL_AFSTATE_SCANNING:
    case HAL_AFSTATE_LOCKED:
    case HAL_AFSTATE_FAILED:
        // Abort whatever scan/lock is in progress.
        SetAfMode(AA_AFMODE_OFF);
        SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE);
        nextState = HAL_AFSTATE_INACTIVE;
        break;
    default:
        break;
    }
    ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
    if (nextState != NO_TRANSITION)
        m_afState = nextState;
}
5613 | ||
/*
 * Cancels continuous-picture AF.  Unlike the auto/macro cancel, the
 * continuous mode is restarted immediately after the OFF toggle so the
 * camera keeps focusing passively.
 */
void ExynosCameraHWInterface2::OnAfCancelCAFPicture(int /*id*/)
{
    int nextState = NO_TRANSITION;

    switch (m_afState) {
    case HAL_AFSTATE_INACTIVE:
        nextState = NO_TRANSITION;
        break;
    case HAL_AFSTATE_NEEDS_COMMAND:
    case HAL_AFSTATE_STARTED:
    case HAL_AFSTATE_SCANNING:
    case HAL_AFSTATE_LOCKED:
    case HAL_AFSTATE_FAILED:
    case HAL_AFSTATE_NEEDS_DETERMINATION:
    case HAL_AFSTATE_PASSIVE_FOCUSED:
        // OFF/ON toggle: abort the current sequence, tell the service we
        // are INACTIVE, then re-arm continuous-picture mode.
        SetAfMode(AA_AFMODE_OFF);
        SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE);
        SetAfMode(AA_AFMODE_CONTINUOUS_PICTURE);
        nextState = HAL_AFSTATE_INACTIVE;
        break;
    default:
        break;
    }
    ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
    if (nextState != NO_TRANSITION)
        m_afState = nextState;
}
5641 | ||
/*
 * Cancels continuous-video AF.  Mirrors OnAfCancelCAFPicture(): the
 * mode is toggled OFF and then re-armed as CONTINUOUS_VIDEO so passive
 * focusing resumes.
 */
void ExynosCameraHWInterface2::OnAfCancelCAFVideo(int /*id*/)
{
    int nextState = NO_TRANSITION;

    switch (m_afState) {
    case HAL_AFSTATE_INACTIVE:
        nextState = NO_TRANSITION;
        break;
    case HAL_AFSTATE_NEEDS_COMMAND:
    case HAL_AFSTATE_STARTED:
    case HAL_AFSTATE_SCANNING:
    case HAL_AFSTATE_LOCKED:
    case HAL_AFSTATE_FAILED:
    case HAL_AFSTATE_NEEDS_DETERMINATION:
    case HAL_AFSTATE_PASSIVE_FOCUSED:
        // Abort current sequence, report INACTIVE, re-arm continuous mode.
        SetAfMode(AA_AFMODE_OFF);
        SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE);
        SetAfMode(AA_AFMODE_CONTINUOUS_VIDEO);
        nextState = HAL_AFSTATE_INACTIVE;
        break;
    default:
        break;
    }
    ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
    if (nextState != NO_TRANSITION)
        m_afState = nextState;
}
5669 | ||
/*
 * Publishes an AF state change to the camera service via the notify
 * callback.  Duplicate states are suppressed, EXCEPT state 0
 * (AF_STATE_INACTIVE), which is always re-sent so a cancel is reported
 * even when the service already believes AF is inactive.
 */
void ExynosCameraHWInterface2::SetAfStateForService(int newState)
{
    if (m_serviceAfState != newState || newState == 0)
        m_notifyCb(CAMERA2_MSG_AUTOFOCUS, newState, m_afTriggerId, 0, m_callbackCookie);
    m_serviceAfState = newState;
}
5676 | ||
// Returns the AF state last published to the camera service.
int ExynosCameraHWInterface2::GetAfStateForService()
{
    return m_serviceAfState;
}
5681 | ||
/*
 * Requests a new AF mode.  If a previous mode change is still pending
 * delivery to the ISP (m_IsAfModeUpdateRequired) and the current mode is
 * not OFF, the new mode is parked in m_afMode2 to be applied later.
 * Otherwise the mode is applied immediately and the AF state machine is
 * reset to INACTIVE (both HAL-side and service-side).
 */
void ExynosCameraHWInterface2::SetAfMode(enum aa_afmode afMode)
{
    if (m_afMode != afMode) {
        if (m_IsAfModeUpdateRequired && m_afMode != AA_AFMODE_OFF) {
            m_afMode2 = afMode;
            ALOGV("(%s): pending(%d) and new(%d)", __FUNCTION__, m_afMode, afMode);
        }
        else {
            ALOGV("(%s): current(%d) new(%d)", __FUNCTION__, m_afMode, afMode);
            m_IsAfModeUpdateRequired = true;
            m_afMode = afMode;
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE);
            m_afState = HAL_AFSTATE_INACTIVE;
        }
    }
}
5698 | ||
54f4971e SK |
5699 | void ExynosCameraHWInterface2::m_setExifFixedAttribute(void) |
5700 | { | |
5701 | char property[PROPERTY_VALUE_MAX]; | |
5702 | ||
5703 | //2 0th IFD TIFF Tags | |
5704 | //3 Maker | |
5705 | property_get("ro.product.brand", property, EXIF_DEF_MAKER); | |
5706 | strncpy((char *)mExifInfo.maker, property, | |
5707 | sizeof(mExifInfo.maker) - 1); | |
5708 | mExifInfo.maker[sizeof(mExifInfo.maker) - 1] = '\0'; | |
5709 | //3 Model | |
5710 | property_get("ro.product.model", property, EXIF_DEF_MODEL); | |
5711 | strncpy((char *)mExifInfo.model, property, | |
5712 | sizeof(mExifInfo.model) - 1); | |
5713 | mExifInfo.model[sizeof(mExifInfo.model) - 1] = '\0'; | |
5714 | //3 Software | |
5715 | property_get("ro.build.id", property, EXIF_DEF_SOFTWARE); | |
5716 | strncpy((char *)mExifInfo.software, property, | |
5717 | sizeof(mExifInfo.software) - 1); | |
5718 | mExifInfo.software[sizeof(mExifInfo.software) - 1] = '\0'; | |
5719 | ||
5720 | //3 YCbCr Positioning | |
5721 | mExifInfo.ycbcr_positioning = EXIF_DEF_YCBCR_POSITIONING; | |
5722 | ||
5723 | //2 0th IFD Exif Private Tags | |
5724 | //3 F Number | |
e00f6591 | 5725 | mExifInfo.fnumber.num = (uint32_t)(m_camera2->m_curCameraInfo->fnumber * EXIF_DEF_FNUMBER_DEN); |
54f4971e SK |
5726 | mExifInfo.fnumber.den = EXIF_DEF_FNUMBER_DEN; |
5727 | //3 Exposure Program | |
5728 | mExifInfo.exposure_program = EXIF_DEF_EXPOSURE_PROGRAM; | |
5729 | //3 Exif Version | |
5730 | memcpy(mExifInfo.exif_version, EXIF_DEF_EXIF_VERSION, sizeof(mExifInfo.exif_version)); | |
5731 | //3 Aperture | |
e00f6591 SK |
5732 | double av = APEX_FNUM_TO_APERTURE((double)mExifInfo.fnumber.num/mExifInfo.fnumber.den); |
5733 | mExifInfo.aperture.num = (uint32_t)(av*EXIF_DEF_APEX_DEN); | |
54f4971e SK |
5734 | mExifInfo.aperture.den = EXIF_DEF_APEX_DEN; |
5735 | //3 Maximum lens aperture | |
5736 | mExifInfo.max_aperture.num = mExifInfo.aperture.num; | |
5737 | mExifInfo.max_aperture.den = mExifInfo.aperture.den; | |
5738 | //3 Lens Focal Length | |
e00f6591 SK |
5739 | mExifInfo.focal_length.num = (uint32_t)(m_camera2->m_curCameraInfo->focalLength * 100); |
5740 | ||
54f4971e SK |
5741 | mExifInfo.focal_length.den = EXIF_DEF_FOCAL_LEN_DEN; |
5742 | //3 User Comments | |
5743 | strcpy((char *)mExifInfo.user_comment, EXIF_DEF_USERCOMMENTS); | |
5744 | //3 Color Space information | |
5745 | mExifInfo.color_space = EXIF_DEF_COLOR_SPACE; | |
5746 | //3 Exposure Mode | |
5747 | mExifInfo.exposure_mode = EXIF_DEF_EXPOSURE_MODE; | |
5748 | ||
5749 | //2 0th IFD GPS Info Tags | |
5750 | unsigned char gps_version[4] = { 0x02, 0x02, 0x00, 0x00 }; | |
5751 | memcpy(mExifInfo.gps_version_id, gps_version, sizeof(gps_version)); | |
5752 | ||
5753 | //2 1th IFD TIFF Tags | |
5754 | mExifInfo.compression_scheme = EXIF_DEF_COMPRESSION; | |
5755 | mExifInfo.x_resolution.num = EXIF_DEF_RESOLUTION_NUM; | |
5756 | mExifInfo.x_resolution.den = EXIF_DEF_RESOLUTION_DEN; | |
5757 | mExifInfo.y_resolution.num = EXIF_DEF_RESOLUTION_NUM; | |
5758 | mExifInfo.y_resolution.den = EXIF_DEF_RESOLUTION_DEN; | |
5759 | mExifInfo.resolution_unit = EXIF_DEF_RESOLUTION_UNIT; | |
5760 | } | |
5761 | ||
5762 | void ExynosCameraHWInterface2::m_setExifChangedAttribute(exif_attribute_t *exifInfo, ExynosRect *rect, | |
1422aff9 | 5763 | camera2_shot_ext *currentEntry) |
54f4971e | 5764 | { |
48728d49 SK |
5765 | camera2_dm *dm = &(currentEntry->shot.dm); |
5766 | camera2_ctl *ctl = &(currentEntry->shot.ctl); | |
54f4971e SK |
5767 | |
5768 | ALOGV("(%s): framecnt(%d) exp(%lld) iso(%d)", __FUNCTION__, ctl->request.frameCount, dm->sensor.exposureTime,dm->aa.isoValue ); | |
5769 | if (!ctl->request.frameCount) | |
5770 | return; | |
5771 | //2 0th IFD TIFF Tags | |
5772 | //3 Width | |
5773 | exifInfo->width = rect->w; | |
5774 | //3 Height | |
5775 | exifInfo->height = rect->h; | |
5776 | //3 Orientation | |
5777 | switch (ctl->jpeg.orientation) { | |
5778 | case 90: | |
5779 | exifInfo->orientation = EXIF_ORIENTATION_90; | |
5780 | break; | |
5781 | case 180: | |
5782 | exifInfo->orientation = EXIF_ORIENTATION_180; | |
5783 | break; | |
5784 | case 270: | |
5785 | exifInfo->orientation = EXIF_ORIENTATION_270; | |
5786 | break; | |
5787 | case 0: | |
5788 | default: | |
5789 | exifInfo->orientation = EXIF_ORIENTATION_UP; | |
5790 | break; | |
5791 | } | |
5792 | ||
5793 | //3 Date time | |
be8daa96 | 5794 | struct timeval rawtime; |
54f4971e | 5795 | struct tm *timeinfo; |
be8daa96 MS |
5796 | gettimeofday(&rawtime, NULL); |
5797 | timeinfo = localtime(&rawtime.tv_sec); | |
54f4971e | 5798 | strftime((char *)exifInfo->date_time, 20, "%Y:%m:%d %H:%M:%S", timeinfo); |
be8daa96 MS |
5799 | snprintf((char *)exifInfo->sub_sec, sizeof(exifInfo->sub_sec), "%03lu", |
5800 | (unsigned long)rawtime.tv_usec / 1000UL); | |
54f4971e SK |
5801 | |
5802 | //2 0th IFD Exif Private Tags | |
5803 | //3 Exposure Time | |
5804 | int shutterSpeed = (dm->sensor.exposureTime/1000); | |
5805 | ||
9a77d67e SK |
5806 | // To display exposure time just above 500ms as 1/2sec, not 1 sec. |
5807 | if (shutterSpeed > 500000) | |
5808 | shutterSpeed -= 100000; | |
5809 | ||
54f4971e SK |
5810 | if (shutterSpeed < 0) { |
5811 | shutterSpeed = 100; | |
5812 | } | |
5813 | ||
5814 | exifInfo->exposure_time.num = 1; | |
5815 | // x us -> 1/x s */ | |
5816 | //exifInfo->exposure_time.den = (uint32_t)(1000000 / shutterSpeed); | |
5817 | exifInfo->exposure_time.den = (uint32_t)((double)1000000 / shutterSpeed); | |
5818 | ||
5819 | //3 ISO Speed Rating | |
5820 | exifInfo->iso_speed_rating = dm->aa.isoValue; | |
5821 | ||
5822 | uint32_t av, tv, bv, sv, ev; | |
5823 | av = APEX_FNUM_TO_APERTURE((double)exifInfo->fnumber.num / exifInfo->fnumber.den); | |
5824 | tv = APEX_EXPOSURE_TO_SHUTTER((double)exifInfo->exposure_time.num / exifInfo->exposure_time.den); | |
5825 | sv = APEX_ISO_TO_FILMSENSITIVITY(exifInfo->iso_speed_rating); | |
5826 | bv = av + tv - sv; | |
5827 | ev = av + tv; | |
5828 | //ALOGD("Shutter speed=%d us, iso=%d", shutterSpeed, exifInfo->iso_speed_rating); | |
c06b3290 | 5829 | ALOGV("AV=%d, TV=%d, SV=%d", av, tv, sv); |
54f4971e SK |
5830 | |
5831 | //3 Shutter Speed | |
5832 | exifInfo->shutter_speed.num = tv * EXIF_DEF_APEX_DEN; | |
5833 | exifInfo->shutter_speed.den = EXIF_DEF_APEX_DEN; | |
5834 | //3 Brightness | |
5835 | exifInfo->brightness.num = bv*EXIF_DEF_APEX_DEN; | |
5836 | exifInfo->brightness.den = EXIF_DEF_APEX_DEN; | |
5837 | //3 Exposure Bias | |
5838 | if (ctl->aa.sceneMode== AA_SCENE_MODE_BEACH|| | |
5839 | ctl->aa.sceneMode== AA_SCENE_MODE_SNOW) { | |
5840 | exifInfo->exposure_bias.num = EXIF_DEF_APEX_DEN; | |
5841 | exifInfo->exposure_bias.den = EXIF_DEF_APEX_DEN; | |
5842 | } else { | |
5843 | exifInfo->exposure_bias.num = 0; | |
5844 | exifInfo->exposure_bias.den = 0; | |
5845 | } | |
5846 | //3 Metering Mode | |
5847 | /*switch (m_curCameraInfo->metering) { | |
5848 | case METERING_MODE_CENTER: | |
5849 | exifInfo->metering_mode = EXIF_METERING_CENTER; | |
5850 | break; | |
5851 | case METERING_MODE_MATRIX: | |
5852 | exifInfo->metering_mode = EXIF_METERING_MULTISPOT; | |
5853 | break; | |
5854 | case METERING_MODE_SPOT: | |
5855 | exifInfo->metering_mode = EXIF_METERING_SPOT; | |
5856 | break; | |
5857 | case METERING_MODE_AVERAGE: | |
5858 | default: | |
5859 | exifInfo->metering_mode = EXIF_METERING_AVERAGE; | |
5860 | break; | |
5861 | }*/ | |
5862 | exifInfo->metering_mode = EXIF_METERING_CENTER; | |
5863 | ||
5864 | //3 Flash | |
9257e29e YJ |
5865 | if (m_ctlInfo.flash.m_flashDecisionResult) |
5866 | exifInfo->flash = 1; | |
5867 | else | |
5868 | exifInfo->flash = EXIF_DEF_FLASH; | |
54f4971e SK |
5869 | |
5870 | //3 White Balance | |
48728d49 | 5871 | if (currentEntry->awb_mode_dm == AA_AWBMODE_WB_AUTO) |
54f4971e SK |
5872 | exifInfo->white_balance = EXIF_WB_AUTO; |
5873 | else | |
5874 | exifInfo->white_balance = EXIF_WB_MANUAL; | |
5875 | ||
5876 | //3 Scene Capture Type | |
5877 | switch (ctl->aa.sceneMode) { | |
5878 | case AA_SCENE_MODE_PORTRAIT: | |
5879 | exifInfo->scene_capture_type = EXIF_SCENE_PORTRAIT; | |
5880 | break; | |
5881 | case AA_SCENE_MODE_LANDSCAPE: | |
5882 | exifInfo->scene_capture_type = EXIF_SCENE_LANDSCAPE; | |
5883 | break; | |
5884 | case AA_SCENE_MODE_NIGHT_PORTRAIT: | |
5885 | exifInfo->scene_capture_type = EXIF_SCENE_NIGHT; | |
5886 | break; | |
5887 | default: | |
5888 | exifInfo->scene_capture_type = EXIF_SCENE_STANDARD; | |
5889 | break; | |
5890 | } | |
5891 | ||
5892 | //2 0th IFD GPS Info Tags | |
5893 | if (ctl->jpeg.gpsCoordinates[0] != 0 && ctl->jpeg.gpsCoordinates[1] != 0) { | |
5894 | ||
5895 | if (ctl->jpeg.gpsCoordinates[0] > 0) | |
5896 | strcpy((char *)exifInfo->gps_latitude_ref, "N"); | |
5897 | else | |
5898 | strcpy((char *)exifInfo->gps_latitude_ref, "S"); | |
5899 | ||
5900 | if (ctl->jpeg.gpsCoordinates[1] > 0) | |
5901 | strcpy((char *)exifInfo->gps_longitude_ref, "E"); | |
5902 | else | |
5903 | strcpy((char *)exifInfo->gps_longitude_ref, "W"); | |
5904 | ||
5905 | if (ctl->jpeg.gpsCoordinates[2] > 0) | |
5906 | exifInfo->gps_altitude_ref = 0; | |
5907 | else | |
5908 | exifInfo->gps_altitude_ref = 1; | |
5909 | ||
0066b2cb SK |
5910 | double latitude = fabs(ctl->jpeg.gpsCoordinates[0]); |
5911 | double longitude = fabs(ctl->jpeg.gpsCoordinates[1]); | |
5912 | double altitude = fabs(ctl->jpeg.gpsCoordinates[2]); | |
54f4971e SK |
5913 | |
5914 | exifInfo->gps_latitude[0].num = (uint32_t)latitude; | |
5915 | exifInfo->gps_latitude[0].den = 1; | |
5916 | exifInfo->gps_latitude[1].num = (uint32_t)((latitude - exifInfo->gps_latitude[0].num) * 60); | |
5917 | exifInfo->gps_latitude[1].den = 1; | |
3db6fe61 | 5918 | exifInfo->gps_latitude[2].num = (uint32_t)round((((latitude - exifInfo->gps_latitude[0].num) * 60) |
54f4971e SK |
5919 | - exifInfo->gps_latitude[1].num) * 60); |
5920 | exifInfo->gps_latitude[2].den = 1; | |
5921 | ||
5922 | exifInfo->gps_longitude[0].num = (uint32_t)longitude; | |
5923 | exifInfo->gps_longitude[0].den = 1; | |
5924 | exifInfo->gps_longitude[1].num = (uint32_t)((longitude - exifInfo->gps_longitude[0].num) * 60); | |
5925 | exifInfo->gps_longitude[1].den = 1; | |
3db6fe61 | 5926 | exifInfo->gps_longitude[2].num = (uint32_t)round((((longitude - exifInfo->gps_longitude[0].num) * 60) |
54f4971e SK |
5927 | - exifInfo->gps_longitude[1].num) * 60); |
5928 | exifInfo->gps_longitude[2].den = 1; | |
5929 | ||
3db6fe61 | 5930 | exifInfo->gps_altitude.num = (uint32_t)round(altitude); |
54f4971e SK |
5931 | exifInfo->gps_altitude.den = 1; |
5932 | ||
5933 | struct tm tm_data; | |
5934 | long timestamp; | |
5935 | timestamp = (long)ctl->jpeg.gpsTimestamp; | |
5936 | gmtime_r(×tamp, &tm_data); | |
5937 | exifInfo->gps_timestamp[0].num = tm_data.tm_hour; | |
5938 | exifInfo->gps_timestamp[0].den = 1; | |
5939 | exifInfo->gps_timestamp[1].num = tm_data.tm_min; | |
5940 | exifInfo->gps_timestamp[1].den = 1; | |
5941 | exifInfo->gps_timestamp[2].num = tm_data.tm_sec; | |
5942 | exifInfo->gps_timestamp[2].den = 1; | |
5943 | snprintf((char*)exifInfo->gps_datestamp, sizeof(exifInfo->gps_datestamp), | |
5944 | "%04d:%02d:%02d", tm_data.tm_year + 1900, tm_data.tm_mon + 1, tm_data.tm_mday); | |
5945 | ||
07ed0359 | 5946 | memset(exifInfo->gps_processing_method, 0, 100); |
0066b2cb | 5947 | memcpy(exifInfo->gps_processing_method, currentEntry->gpsProcessingMethod, 32); |
54f4971e SK |
5948 | exifInfo->enableGps = true; |
5949 | } else { | |
5950 | exifInfo->enableGps = false; | |
5951 | } | |
5952 | ||
5953 | //2 1th IFD TIFF Tags | |
5954 | exifInfo->widthThumb = ctl->jpeg.thumbnailSize[0]; | |
5955 | exifInfo->heightThumb = ctl->jpeg.thumbnailSize[1]; | |
5956 | } | |
5957 | ||
13d8c7b4 SK |
5958 | ExynosCameraHWInterface2::MainThread::~MainThread() |
5959 | { | |
ad37861e | 5960 | ALOGV("(%s):", __FUNCTION__); |
13d8c7b4 SK |
5961 | } |
5962 | ||
// Asks the main thread's loop to exit by posting the release signal.
void ExynosCameraHWInterface2::MainThread::release()
{
    ALOGV("(%s):", __func__);
    SetSignal(SIGNAL_THREAD_RELEASE);
}
5968 | ||
// Destructor: trace-only; thread teardown is driven by release().
ExynosCameraHWInterface2::SensorThread::~SensorThread()
{
    ALOGV("(%s):", __FUNCTION__);
}
5973 | ||
// Asks the sensor thread's loop to exit by posting the release signal.
void ExynosCameraHWInterface2::SensorThread::release()
{
    ALOGV("(%s):", __func__);
    SetSignal(SIGNAL_THREAD_RELEASE);
}
5979 | ||
13d8c7b4 SK |
5980 | ExynosCameraHWInterface2::StreamThread::~StreamThread() |
5981 | { | |
ad37861e | 5982 | ALOGV("(%s):", __FUNCTION__); |
13d8c7b4 SK |
5983 | } |
5984 | ||
// Copies the caller's stream configuration into this thread's own
// m_parameters (by value; the caller's struct is not retained).
void ExynosCameraHWInterface2::StreamThread::setParameter(stream_parameters_t * new_parameters)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    memcpy(&m_parameters, new_parameters, sizeof(stream_parameters_t));
}
5990 | ||
13d8c7b4 | 5991 | void ExynosCameraHWInterface2::StreamThread::release() |
c15a6b00 | 5992 | { |
9dd63e1f | 5993 | ALOGV("(%s):", __func__); |
13d8c7b4 | 5994 | SetSignal(SIGNAL_THREAD_RELEASE); |
13d8c7b4 SK |
5995 | } |
5996 | ||
5997 | int ExynosCameraHWInterface2::StreamThread::findBufferIndex(void * bufAddr) | |
5998 | { | |
5999 | int index; | |
6000 | for (index = 0 ; index < m_parameters.numSvcBuffers ; index++) { | |
6001 | if (m_parameters.svcBuffers[index].virt.extP[0] == bufAddr) | |
6002 | return index; | |
6003 | } | |
6004 | return -1; | |
c15a6b00 JS |
6005 | } |
6006 | ||
5506cebf SK |
6007 | int ExynosCameraHWInterface2::StreamThread::findBufferIndex(buffer_handle_t * bufHandle) |
6008 | { | |
6009 | int index; | |
6010 | for (index = 0 ; index < m_parameters.numSvcBuffers ; index++) { | |
6011 | if (m_parameters.svcBufHandle[index] == *bufHandle) | |
6012 | return index; | |
6013 | } | |
6014 | return -1; | |
6015 | } | |
6016 | ||
6017 | status_t ExynosCameraHWInterface2::StreamThread::attachSubStream(int stream_id, int priority) | |
9dd63e1f | 6018 | { |
5506cebf SK |
6019 | ALOGV("(%s): substream_id(%d)", __FUNCTION__, stream_id); |
6020 | int index, vacantIndex; | |
6021 | bool vacancy = false; | |
6022 | ||
6023 | for (index = 0 ; index < NUM_MAX_SUBSTREAM ; index++) { | |
6024 | if (!vacancy && m_attachedSubStreams[index].streamId == -1) { | |
6025 | vacancy = true; | |
6026 | vacantIndex = index; | |
6027 | } else if (m_attachedSubStreams[index].streamId == stream_id) { | |
6028 | return BAD_VALUE; | |
6029 | } | |
6030 | } | |
6031 | if (!vacancy) | |
6032 | return NO_MEMORY; | |
6033 | m_attachedSubStreams[vacantIndex].streamId = stream_id; | |
6034 | m_attachedSubStreams[vacantIndex].priority = priority; | |
6035 | m_numRegisteredStream++; | |
6036 | return NO_ERROR; | |
9dd63e1f SK |
6037 | } |
6038 | ||
5506cebf | 6039 | status_t ExynosCameraHWInterface2::StreamThread::detachSubStream(int stream_id) |
74d78ebe | 6040 | { |
5506cebf SK |
6041 | ALOGV("(%s): substream_id(%d)", __FUNCTION__, stream_id); |
6042 | int index; | |
6043 | bool found = false; | |
6044 | ||
6045 | for (index = 0 ; index < NUM_MAX_SUBSTREAM ; index++) { | |
6046 | if (m_attachedSubStreams[index].streamId == stream_id) { | |
6047 | found = true; | |
6048 | break; | |
6049 | } | |
6050 | } | |
6051 | if (!found) | |
6052 | return BAD_VALUE; | |
6053 | m_attachedSubStreams[index].streamId = -1; | |
6054 | m_attachedSubStreams[index].priority = 0; | |
6055 | m_numRegisteredStream--; | |
6056 | return NO_ERROR; | |
74d78ebe SK |
6057 | } |
6058 | ||
c15a6b00 JS |
6059 | int ExynosCameraHWInterface2::createIonClient(ion_client ionClient) |
6060 | { | |
6061 | if (ionClient == 0) { | |
6062 | ionClient = ion_client_create(); | |
6063 | if (ionClient < 0) { | |
13d8c7b4 | 6064 | ALOGE("[%s]src ion client create failed, value = %d\n", __FUNCTION__, ionClient); |
c15a6b00 JS |
6065 | return 0; |
6066 | } | |
6067 | } | |
c15a6b00 JS |
6068 | return ionClient; |
6069 | } | |
6070 | ||
6071 | int ExynosCameraHWInterface2::deleteIonClient(ion_client ionClient) | |
6072 | { | |
6073 | if (ionClient != 0) { | |
6074 | if (ionClient > 0) { | |
6075 | ion_client_destroy(ionClient); | |
6076 | } | |
6077 | ionClient = 0; | |
6078 | } | |
c15a6b00 JS |
6079 | return ionClient; |
6080 | } | |
6081 | ||
// Convenience overload: allocate uncached (cacheFlag = 0) ION memory
// for up to iMemoryNum planes of *buf.
int ExynosCameraHWInterface2::allocCameraMemory(ion_client ionClient, ExynosBuffer *buf, int iMemoryNum)
{
    return allocCameraMemory(ionClient, buf, iMemoryNum, 0);
}
6086 | ||
/*
 * Allocates and maps ION memory for each plane of *buf whose size is
 * non-zero (stops at the first zero-sized plane).
 *
 * @param ionClient  valid ION client handle (must be non-zero)
 * @param buf        buffer whose size.extS[] entries are pre-filled;
 *                   fd.extFd[] and virt.extP[] are filled on success
 * @param iMemoryNum maximum number of planes to allocate
 * @param cacheFlag  bitmask: bit i set => plane i is allocated cached
 * @return 0 on success, -1 on failure (all planes freed via
 *         freeCameraMemory before returning)
 */
int ExynosCameraHWInterface2::allocCameraMemory(ion_client ionClient, ExynosBuffer *buf, int iMemoryNum, int cacheFlag)
{
    int ret = 0;
    int i = 0;
    int flag = 0;

    if (ionClient == 0) {
        ALOGE("[%s] ionClient is zero (%d)\n", __FUNCTION__, ionClient);
        return -1;
    }

    for (i = 0 ; i < iMemoryNum ; i++) {
        if (buf->size.extS[i] == 0) {
            // Planes are packed from index 0; first empty plane ends the loop.
            break;
        }
        if (1 << i & cacheFlag)
            flag = ION_FLAG_CACHED | ION_FLAG_CACHED_NEEDS_SYNC;
        else
            flag = 0;
        buf->fd.extFd[i] = ion_alloc(ionClient, \
                buf->size.extS[i], 0, ION_HEAP_SYSTEM_MASK, flag);
        if ((buf->fd.extFd[i] == -1) ||(buf->fd.extFd[i] == 0)) {
            ALOGE("[%s]ion_alloc(%d) failed\n", __FUNCTION__, buf->size.extS[i]);
            buf->fd.extFd[i] = -1;
            // Roll back every plane allocated so far.
            freeCameraMemory(buf, iMemoryNum);
            return -1;
        }

        buf->virt.extP[i] = (char *)ion_map(buf->fd.extFd[i], \
                        buf->size.extS[i], 0);
        if ((buf->virt.extP[i] == (char *)MAP_FAILED) || (buf->virt.extP[i] == NULL)) {
            ALOGE("[%s]src ion map failed(%d)\n", __FUNCTION__, buf->size.extS[i]);
            buf->virt.extP[i] = (char *)MAP_FAILED;
            freeCameraMemory(buf, iMemoryNum);
            return -1;
        }
        ALOGV("allocCameraMem : [%d][0x%08x] size(%d) flag(%d)", i, (unsigned int)(buf->virt.extP[i]), buf->size.extS[i], flag);
    }

    return ret;
}
6128 | ||
/*
 * Unmaps and frees every allocated plane of *buf, then resets each
 * plane's fd/virt/size to their "unallocated" sentinels so the buffer
 * can be safely passed to freeCameraMemory again (idempotent).
 */
void ExynosCameraHWInterface2::freeCameraMemory(ExynosBuffer *buf, int iMemoryNum)
{

    int i = 0 ;
    int ret = 0;

    for (i=0;i<iMemoryNum;i++) {
        if (buf->fd.extFd[i] != -1) {
            // Unmap first (if mapped), then release the ION fd.
            if (buf->virt.extP[i] != (char *)MAP_FAILED) {
                ret = ion_unmap(buf->virt.extP[i], buf->size.extS[i]);
                if (ret < 0)
                    ALOGE("ERR(%s)", __FUNCTION__);
            }
            ion_free(buf->fd.extFd[i]);
        ALOGV("freeCameraMemory : [%d][0x%08x] size(%d)", i, (unsigned int)(buf->virt.extP[i]), buf->size.extS[i]);
        }
        buf->fd.extFd[i] = -1;
        buf->virt.extP[i] = (char *)MAP_FAILED;
        buf->size.extS[i] = 0;
    }
}
6150 | ||
13d8c7b4 | 6151 | void ExynosCameraHWInterface2::initCameraMemory(ExynosBuffer *buf, int iMemoryNum) |
c15a6b00 JS |
6152 | { |
6153 | int i =0 ; | |
6154 | for (i=0;i<iMemoryNum;i++) { | |
13d8c7b4 SK |
6155 | buf->virt.extP[i] = (char *)MAP_FAILED; |
6156 | buf->fd.extFd[i] = -1; | |
6157 | buf->size.extS[i] = 0; | |
c15a6b00 JS |
6158 | } |
6159 | } | |
6160 | ||
6161 | ||
13d8c7b4 SK |
6162 | |
6163 | ||
9dd63e1f | 6164 | static camera2_device_t *g_cam2_device = NULL; |
b5237e6b | 6165 | static bool g_camera_vaild = false; |
e43660b0 | 6166 | static Mutex g_camera_mutex; |
daa1fcd6 | 6167 | ExynosCamera2 * g_camera2[2] = { NULL, NULL }; |
c15a6b00 JS |
6168 | |
/*
 * hw_device_t close hook.  Serialized by g_camera_mutex against
 * concurrent open/close.  Deletes the HAL object stored in priv before
 * freeing the device struct, and clears the global open-device state.
 */
static int HAL2_camera_device_close(struct hw_device_t* device)
{
    Mutex::Autolock lock(g_camera_mutex);
    ALOGD("(%s): ENTER", __FUNCTION__);
    if (device) {

        camera2_device_t *cam_device = (camera2_device_t *)device;
        ALOGV("cam_device(0x%08x):", (unsigned int)cam_device);
        ALOGV("g_cam2_device(0x%08x):", (unsigned int)g_cam2_device);
        // Destroy the HAL implementation first; it may still reference
        // cam_device during teardown.
        delete static_cast<ExynosCameraHWInterface2 *>(cam_device->priv);
        free(cam_device);
        g_camera_vaild = false;
        g_cam2_device = NULL;
    }

    ALOGD("(%s): EXIT", __FUNCTION__);
    return 0;
}
6187 | ||
6188 | static inline ExynosCameraHWInterface2 *obj(const struct camera2_device *dev) | |
6189 | { | |
6190 | return reinterpret_cast<ExynosCameraHWInterface2 *>(dev->priv); | |
6191 | } | |
6192 | ||
// HAL shim: forwards the framework's request-queue source ops to the HAL object.
static int HAL2_device_set_request_queue_src_ops(const struct camera2_device *dev,
            const camera2_request_queue_src_ops_t *request_src_ops)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->setRequestQueueSrcOps(request_src_ops);
}
6199 | ||
// HAL shim: framework signals that new capture requests are queued.
static int HAL2_device_notify_request_queue_not_empty(const struct camera2_device *dev)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->notifyRequestQueueNotEmpty();
}
6205 | ||
// HAL shim: forwards the framework's frame-queue destination ops.
static int HAL2_device_set_frame_queue_dst_ops(const struct camera2_device *dev,
            const camera2_frame_queue_dst_ops_t *frame_dst_ops)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->setFrameQueueDstOps(frame_dst_ops);
}
6212 | ||
// HAL shim: number of requests currently being processed by the HAL.
static int HAL2_device_get_in_progress_count(const struct camera2_device *dev)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->getInProgressCount();
}
6218 | ||
// HAL shim: request that in-flight captures be flushed.
static int HAL2_device_flush_captures_in_progress(const struct camera2_device *dev)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->flushCapturesInProgress();
}
6224 | ||
// HAL shim: builds a default request metadata buffer for the given template.
static int HAL2_device_construct_default_request(const struct camera2_device *dev,
            int request_template, camera_metadata_t **request)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->constructDefaultRequest(request_template, request);
}
6231 | ||
// HAL shim: allocates an output stream; the HAL returns the chosen
// stream id, actual pixel format, gralloc usage, and max buffer count.
static int HAL2_device_allocate_stream(
        const struct camera2_device *dev,
        // inputs
        uint32_t width,
        uint32_t height,
        int      format,
        const camera2_stream_ops_t *stream_ops,
        // outputs
        uint32_t *stream_id,
        uint32_t *format_actual,
        uint32_t *usage,
        uint32_t *max_buffers)
{
    ALOGV("(%s): ", __FUNCTION__);
    return obj(dev)->allocateStream(width, height, format, stream_ops,
                                    stream_id, format_actual, usage, max_buffers);
}
6249 | ||
c15a6b00 JS |
// HAL shim: registers the gralloc buffers backing a previously
// allocated stream.
static int HAL2_device_register_stream_buffers(const struct camera2_device *dev,
        uint32_t stream_id,
        int num_buffers,
        buffer_handle_t *buffers)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->registerStreamBuffers(stream_id, num_buffers, buffers);
}
6258 | ||
// HAL shim: releases a stream.  Short-circuits to success when the
// device has already been closed (g_camera_vaild cleared).
static int HAL2_device_release_stream(
        const struct camera2_device *dev,
            uint32_t stream_id)
{
    ALOGV("DEBUG(%s)(id: %d):", __FUNCTION__, stream_id);
    if (!g_camera_vaild)
        return 0;
    return obj(dev)->releaseStream(stream_id);
}
6268 | ||
// HAL shim: allocates a reprocess (input) stream.
static int HAL2_device_allocate_reprocess_stream(
           const struct camera2_device *dev,
            uint32_t width,
            uint32_t height,
            uint32_t format,
            const camera2_stream_in_ops_t *reprocess_stream_ops,
            // outputs
            uint32_t *stream_id,
            uint32_t *consumer_usage,
            uint32_t *max_buffers)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->allocateReprocessStream(width, height, format, reprocess_stream_ops,
            stream_id, consumer_usage, max_buffers);
}
6284 | ||
2b0421d1 EVT |
6285 | static int HAL2_device_allocate_reprocess_stream_from_stream( |
6286 | const struct camera2_device *dev, | |
6287 | uint32_t output_stream_id, | |
6288 | const camera2_stream_in_ops_t *reprocess_stream_ops, | |
6289 | // outputs | |
6290 | uint32_t *stream_id) | |
6291 | { | |
6292 | ALOGV("DEBUG(%s):", __FUNCTION__); | |
5506cebf SK |
6293 | return obj(dev)->allocateReprocessStreamFromStream(output_stream_id, |
6294 | reprocess_stream_ops, stream_id); | |
2b0421d1 EVT |
6295 | } |
6296 | ||
c15a6b00 JS |
// HAL shim: releases a reprocess stream.
static int HAL2_device_release_reprocess_stream(
        const struct camera2_device *dev,
            uint32_t stream_id)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->releaseReprocessStream(stream_id);
}
6304 | ||
// HAL shim: async trigger (AF, precapture, ...).  Ignored when the
// device has already been closed (g_camera_vaild cleared).
static int HAL2_device_trigger_action(const struct camera2_device *dev,
           uint32_t trigger_id,
            int ext1,
            int ext2)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    if (!g_camera_vaild)
        return 0;
    return obj(dev)->triggerAction(trigger_id, ext1, ext2);
}
6315 | ||
// HAL shim: installs the framework's notify callback and cookie.
static int HAL2_device_set_notify_callback(const struct camera2_device *dev,
            camera2_notify_callback notify_cb,
            void *user)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->setNotifyCallback(notify_cb, user);
}
6323 | ||
// HAL shim: exposes vendor-tag query ops for metadata parsing.
static int HAL2_device_get_metadata_vendor_tag_ops(const struct camera2_device*dev,
            vendor_tag_query_ops_t **ops)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->getMetadataVendorTagOps(ops);
}
6330 | ||
// HAL shim: dumps HAL state to the given fd (for dumpsys).
static int HAL2_device_dump(const struct camera2_device *dev, int fd)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->dump(fd);
}
6336 | ||
6337 | ||
6338 | ||
6339 | ||
6340 | ||
// Module hook: this HAL always exposes two cameras (0 = back, 1 = front),
// matching the g_camera2[2] table.
static int HAL2_getNumberOfCameras()
{
    ALOGV("(%s): returning 2", __FUNCTION__);
    return 2;
}
6346 | ||
6347 | ||
c15a6b00 JS |
6348 | static int HAL2_getCameraInfo(int cameraId, struct camera_info *info) |
6349 | { | |
ad37861e | 6350 | ALOGV("DEBUG(%s): cameraID: %d", __FUNCTION__, cameraId); |
9dd63e1f | 6351 | static camera_metadata_t * mCameraInfo[2] = {NULL, NULL}; |
ad37861e | 6352 | |
c15a6b00 | 6353 | status_t res; |
13d8c7b4 | 6354 | |
daa1fcd6 | 6355 | if (cameraId == 0) { |
9dd63e1f | 6356 | info->facing = CAMERA_FACING_BACK; |
daa1fcd6 SK |
6357 | if (!g_camera2[0]) |
6358 | g_camera2[0] = new ExynosCamera2(0); | |
6359 | } | |
6360 | else if (cameraId == 1) { | |
9dd63e1f | 6361 | info->facing = CAMERA_FACING_FRONT; |
daa1fcd6 SK |
6362 | if (!g_camera2[1]) |
6363 | g_camera2[1] = new ExynosCamera2(1); | |
6364 | } | |
6365 | else | |
6366 | return BAD_VALUE; | |
6367 | ||
c15a6b00 JS |
6368 | info->orientation = 0; |
6369 | info->device_version = HARDWARE_DEVICE_API_VERSION(2, 0); | |
9dd63e1f | 6370 | if (mCameraInfo[cameraId] == NULL) { |
daa1fcd6 | 6371 | res = g_camera2[cameraId]->constructStaticInfo(&(mCameraInfo[cameraId]), cameraId, true); |
c15a6b00 JS |
6372 | if (res != OK) { |
6373 | ALOGE("%s: Unable to allocate static info: %s (%d)", | |
13d8c7b4 | 6374 | __FUNCTION__, strerror(-res), res); |
c15a6b00 JS |
6375 | return res; |
6376 | } | |
daa1fcd6 | 6377 | res = g_camera2[cameraId]->constructStaticInfo(&(mCameraInfo[cameraId]), cameraId, false); |
c15a6b00 JS |
6378 | if (res != OK) { |
6379 | ALOGE("%s: Unable to fill in static info: %s (%d)", | |
13d8c7b4 | 6380 | __FUNCTION__, strerror(-res), res); |
c15a6b00 JS |
6381 | return res; |
6382 | } | |
6383 | } | |
9dd63e1f | 6384 | info->static_camera_characteristics = mCameraInfo[cameraId]; |
13d8c7b4 | 6385 | return NO_ERROR; |
c15a6b00 JS |
6386 | } |
6387 | ||
/* SET_METHOD(m) expands to a GNU-style designated initializer that binds
 * the camera2_device_ops entry 'm' to its HAL2_device_##m wrapper above. */
#define SET_METHOD(m) m : HAL2_device_##m

/* Dispatch table installed into camera2_device_t::ops at open time;
 * every entry forwards into the ExynosCameraHWInterface2 instance
 * stored in the device's priv pointer. */
static camera2_device_ops_t camera2_device_ops = {
        SET_METHOD(set_request_queue_src_ops),
        SET_METHOD(notify_request_queue_not_empty),
        SET_METHOD(set_frame_queue_dst_ops),
        SET_METHOD(get_in_progress_count),
        SET_METHOD(flush_captures_in_progress),
        SET_METHOD(construct_default_request),
        SET_METHOD(allocate_stream),
        SET_METHOD(register_stream_buffers),
        SET_METHOD(release_stream),
        SET_METHOD(allocate_reprocess_stream),
        SET_METHOD(allocate_reprocess_stream_from_stream),
        SET_METHOD(release_reprocess_stream),
        SET_METHOD(trigger_action),
        SET_METHOD(set_notify_callback),
        SET_METHOD(get_metadata_vendor_tag_ops),
        SET_METHOD(dump),
        /* Not implemented by this HAL. */
        get_instance_metadata : NULL
};

#undef SET_METHOD
6411 | ||
6412 | ||
6413 | static int HAL2_camera_device_open(const struct hw_module_t* module, | |
6414 | const char *id, | |
6415 | struct hw_device_t** device) | |
6416 | { | |
c15a6b00 | 6417 | int cameraId = atoi(id); |
6044e509 | 6418 | int openInvalid = 0; |
9dd63e1f | 6419 | |
e43660b0 AR |
6420 | Mutex::Autolock lock(g_camera_mutex); |
6421 | if (g_camera_vaild) { | |
6422 | ALOGE("ERR(%s): Can't open, other camera is in use", __FUNCTION__); | |
a6302fad | 6423 | return -EUSERS; |
e43660b0 | 6424 | } |
b5237e6b | 6425 | g_camera_vaild = false; |
0eb27a9d | 6426 | ALOGD("\n\n>>> I'm Samsung's CameraHAL_2(ID:%d) <<<\n\n", cameraId); |
c15a6b00 | 6427 | if (cameraId < 0 || cameraId >= HAL2_getNumberOfCameras()) { |
13d8c7b4 | 6428 | ALOGE("ERR(%s):Invalid camera ID %s", __FUNCTION__, id); |
c15a6b00 JS |
6429 | return -EINVAL; |
6430 | } | |
6431 | ||
0eb27a9d | 6432 | ALOGD("g_cam2_device : 0x%08x", (unsigned int)g_cam2_device); |
c15a6b00 JS |
6433 | if (g_cam2_device) { |
6434 | if (obj(g_cam2_device)->getCameraId() == cameraId) { | |
0eb27a9d | 6435 | ALOGD("DEBUG(%s):returning existing camera ID %s", __FUNCTION__, id); |
c15a6b00 JS |
6436 | goto done; |
6437 | } else { | |
0eb27a9d | 6438 | ALOGD("(%s): START waiting for cam device free", __FUNCTION__); |
9dd63e1f | 6439 | while (g_cam2_device) |
041f38de | 6440 | usleep(SIG_WAITING_TICK); |
0eb27a9d | 6441 | ALOGD("(%s): END waiting for cam device free", __FUNCTION__); |
c15a6b00 JS |
6442 | } |
6443 | } | |
6444 | ||
6445 | g_cam2_device = (camera2_device_t *)malloc(sizeof(camera2_device_t)); | |
ad37861e | 6446 | ALOGV("g_cam2_device : 0x%08x", (unsigned int)g_cam2_device); |
9dd63e1f | 6447 | |
c15a6b00 JS |
6448 | if (!g_cam2_device) |
6449 | return -ENOMEM; | |
6450 | ||
6451 | g_cam2_device->common.tag = HARDWARE_DEVICE_TAG; | |
6452 | g_cam2_device->common.version = CAMERA_DEVICE_API_VERSION_2_0; | |
6453 | g_cam2_device->common.module = const_cast<hw_module_t *>(module); | |
6454 | g_cam2_device->common.close = HAL2_camera_device_close; | |
6455 | ||
6456 | g_cam2_device->ops = &camera2_device_ops; | |
6457 | ||
13d8c7b4 | 6458 | ALOGV("DEBUG(%s):open camera2 %s", __FUNCTION__, id); |
c15a6b00 | 6459 | |
6044e509 SK |
6460 | g_cam2_device->priv = new ExynosCameraHWInterface2(cameraId, g_cam2_device, g_camera2[cameraId], &openInvalid); |
6461 | if (!openInvalid) { | |
5506cebf | 6462 | ALOGE("DEBUG(%s): ExynosCameraHWInterface2 creation failed", __FUNCTION__); |
6f19b6cf | 6463 | return -ENODEV; |
6044e509 | 6464 | } |
c15a6b00 JS |
6465 | done: |
6466 | *device = (hw_device_t *)g_cam2_device; | |
13d8c7b4 | 6467 | ALOGV("DEBUG(%s):opened camera2 %s (%p)", __FUNCTION__, id, *device); |
b5237e6b | 6468 | g_camera_vaild = true; |
c15a6b00 JS |
6469 | |
6470 | return 0; | |
6471 | } | |
6472 | ||
6473 | ||
/* Module method table: the framework calls open() with the camera id
 * string to obtain a camera2_device_t. */
static hw_module_methods_t camera_module_methods = {
            open : HAL2_camera_device_open
};
6477 | ||
extern "C" {
    /* Module descriptor exported under the well-known symbol name
     * HAL_MODULE_INFO_SYM ("HMI"), which the Android HAL loader
     * resolves via dlsym() to discover this camera module. */
    struct camera_module HAL_MODULE_INFO_SYM = {
      common : {
          tag : HARDWARE_MODULE_TAG,
          module_api_version : CAMERA_MODULE_API_VERSION_2_0,
          hal_api_version : HARDWARE_HAL_API_VERSION,
          id : CAMERA_HARDWARE_MODULE_ID,
          name : "Exynos Camera HAL2",
          author : "Samsung Corporation",
          methods : &camera_module_methods,
          dso : NULL,
          reserved : {0},
      },
      get_number_of_cameras : HAL2_getNumberOfCameras,
      get_camera_info : HAL2_getCameraInfo,
      set_callbacks : NULL,       /* module-level callbacks not used */
      get_vendor_tag_ops : NULL,  /* no vendor tags exported */
      open_legacy : NULL,         /* no HAL1-compatibility open path */
      reserved : {0}
    };
}
6499 | ||
6500 | }; // namespace android |