Commit | Line | Data |
---|---|---|
c15a6b00 JS |
1 | /* |
2 | ** | |
3 | ** Copyright 2008, The Android Open Source Project | |
4 | ** Copyright 2012, Samsung Electronics Co. LTD | |
5 | ** | |
6 | ** Licensed under the Apache License, Version 2.0 (the "License"); | |
7 | ** you may not use this file except in compliance with the License. | |
8 | ** You may obtain a copy of the License at | |
9 | ** | |
10 | ** http://www.apache.org/licenses/LICENSE-2.0 | |
11 | ** | |
12 | ** Unless required by applicable law or agreed to in writing, software | |
13 | ** distributed under the License is distributed on an "AS IS" BASIS, | |
14 | ** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |
15 | ** See the License for the specific language governing permissions and | |
16 | ** limitations under the License. | |
17 | */ | |
18 | ||
19 | /*! | |
20 | * \file ExynosCameraHWInterface2.cpp | |
21 | * \brief source file for Android Camera API 2.0 HAL | |
22 | * \author Sungjoong Kang(sj3.kang@samsung.com) | |
13d8c7b4 | 23 | * \date 2012/07/10 |
c15a6b00 JS |
24 | * |
25 | * <b>Revision History: </b> | |
26 | * - 2012/05/31 : Sungjoong Kang(sj3.kang@samsung.com) \n | |
27 | * Initial Release | |
13d8c7b4 SK |
28 | * |
29 | * - 2012/07/10 : Sungjoong Kang(sj3.kang@samsung.com) \n | |
30 | * 2nd Release | |
31 | * | |
c15a6b00 JS |
32 | */ |
33 | ||
34 | //#define LOG_NDEBUG 0 | |
9dd63e1f | 35 | #define LOG_TAG "ExynosCameraHAL2" |
be8daa96 | 36 | #include <sys/time.h> |
c15a6b00 | 37 | #include <utils/Log.h> |
3db6fe61 | 38 | #include <math.h> |
c15a6b00 JS |
39 | |
40 | #include "ExynosCameraHWInterface2.h" | |
41 | #include "exynos_format.h" | |
42 | ||
c15a6b00 JS |
43 | namespace android { |
44 | ||
9dd63e1f SK |
45 | void m_savePostView(const char *fname, uint8_t *buf, uint32_t size) |
46 | { | |
47 | int nw; | |
48 | int cnt = 0; | |
49 | uint32_t written = 0; | |
50 | ||
ad37861e | 51 | ALOGV("opening file [%s], address[%x], size(%d)", fname, (unsigned int)buf, size); |
9dd63e1f SK |
52 | int fd = open(fname, O_RDWR | O_CREAT, 0644); |
53 | if (fd < 0) { | |
54 | ALOGE("failed to create file [%s]: %s", fname, strerror(errno)); | |
55 | return; | |
56 | } | |
57 | ||
ad37861e | 58 | ALOGV("writing %d bytes to file [%s]", size, fname); |
9dd63e1f SK |
59 | while (written < size) { |
60 | nw = ::write(fd, buf + written, size - written); | |
61 | if (nw < 0) { | |
62 | ALOGE("failed to write to file %d [%s]: %s",written,fname, strerror(errno)); | |
63 | break; | |
64 | } | |
65 | written += nw; | |
66 | cnt++; | |
67 | } | |
ad37861e | 68 | ALOGV("done writing %d bytes to file [%s] in %d passes",size, fname, cnt); |
9dd63e1f SK |
69 | ::close(fd); |
70 | } | |
71 | ||
c15a6b00 JS |
72 | int get_pixel_depth(uint32_t fmt) |
73 | { | |
74 | int depth = 0; | |
75 | ||
76 | switch (fmt) { | |
77 | case V4L2_PIX_FMT_JPEG: | |
78 | depth = 8; | |
79 | break; | |
80 | ||
81 | case V4L2_PIX_FMT_NV12: | |
82 | case V4L2_PIX_FMT_NV21: | |
83 | case V4L2_PIX_FMT_YUV420: | |
84 | case V4L2_PIX_FMT_YVU420M: | |
85 | case V4L2_PIX_FMT_NV12M: | |
86 | case V4L2_PIX_FMT_NV12MT: | |
87 | depth = 12; | |
88 | break; | |
89 | ||
90 | case V4L2_PIX_FMT_RGB565: | |
91 | case V4L2_PIX_FMT_YUYV: | |
92 | case V4L2_PIX_FMT_YVYU: | |
93 | case V4L2_PIX_FMT_UYVY: | |
94 | case V4L2_PIX_FMT_VYUY: | |
95 | case V4L2_PIX_FMT_NV16: | |
96 | case V4L2_PIX_FMT_NV61: | |
97 | case V4L2_PIX_FMT_YUV422P: | |
98 | case V4L2_PIX_FMT_SBGGR10: | |
99 | case V4L2_PIX_FMT_SBGGR12: | |
100 | case V4L2_PIX_FMT_SBGGR16: | |
101 | depth = 16; | |
102 | break; | |
103 | ||
104 | case V4L2_PIX_FMT_RGB32: | |
105 | depth = 32; | |
106 | break; | |
107 | default: | |
108 | ALOGE("Get depth failed(format : %d)", fmt); | |
109 | break; | |
110 | } | |
111 | ||
112 | return depth; | |
13d8c7b4 | 113 | } |
c15a6b00 JS |
114 | |
115 | int cam_int_s_fmt(node_info_t *node) | |
116 | { | |
117 | struct v4l2_format v4l2_fmt; | |
118 | unsigned int framesize; | |
119 | int ret; | |
120 | ||
121 | memset(&v4l2_fmt, 0, sizeof(struct v4l2_format)); | |
122 | ||
123 | v4l2_fmt.type = node->type; | |
124 | framesize = (node->width * node->height * get_pixel_depth(node->format)) / 8; | |
125 | ||
126 | if (node->planes >= 1) { | |
127 | v4l2_fmt.fmt.pix_mp.width = node->width; | |
128 | v4l2_fmt.fmt.pix_mp.height = node->height; | |
129 | v4l2_fmt.fmt.pix_mp.pixelformat = node->format; | |
130 | v4l2_fmt.fmt.pix_mp.field = V4L2_FIELD_ANY; | |
131 | } else { | |
13d8c7b4 | 132 | ALOGE("%s:S_FMT, Out of bound : Number of element plane",__FUNCTION__); |
c15a6b00 JS |
133 | } |
134 | ||
135 | /* Set up for capture */ | |
136 | ret = exynos_v4l2_s_fmt(node->fd, &v4l2_fmt); | |
137 | ||
138 | if (ret < 0) | |
13d8c7b4 | 139 | ALOGE("%s: exynos_v4l2_s_fmt fail (%d)",__FUNCTION__, ret); |
c15a6b00 | 140 | |
be494d19 | 141 | |
c15a6b00 JS |
142 | return ret; |
143 | } | |
144 | ||
145 | int cam_int_reqbufs(node_info_t *node) | |
146 | { | |
147 | struct v4l2_requestbuffers req; | |
148 | int ret; | |
149 | ||
150 | req.count = node->buffers; | |
151 | req.type = node->type; | |
152 | req.memory = node->memory; | |
153 | ||
154 | ret = exynos_v4l2_reqbufs(node->fd, &req); | |
155 | ||
156 | if (ret < 0) | |
13d8c7b4 | 157 | ALOGE("%s: VIDIOC_REQBUFS (fd:%d) failed (%d)",__FUNCTION__,node->fd, ret); |
c15a6b00 JS |
158 | |
159 | return req.count; | |
160 | } | |
161 | ||
162 | int cam_int_qbuf(node_info_t *node, int index) | |
163 | { | |
164 | struct v4l2_buffer v4l2_buf; | |
165 | struct v4l2_plane planes[VIDEO_MAX_PLANES]; | |
166 | int i; | |
167 | int ret = 0; | |
168 | ||
169 | v4l2_buf.m.planes = planes; | |
170 | v4l2_buf.type = node->type; | |
171 | v4l2_buf.memory = node->memory; | |
172 | v4l2_buf.index = index; | |
173 | v4l2_buf.length = node->planes; | |
174 | ||
175 | for(i = 0; i < node->planes; i++){ | |
13d8c7b4 SK |
176 | v4l2_buf.m.planes[i].m.fd = (int)(node->buffer[index].fd.extFd[i]); |
177 | v4l2_buf.m.planes[i].length = (unsigned long)(node->buffer[index].size.extS[i]); | |
c15a6b00 JS |
178 | } |
179 | ||
180 | ret = exynos_v4l2_qbuf(node->fd, &v4l2_buf); | |
181 | ||
182 | if (ret < 0) | |
13d8c7b4 | 183 | ALOGE("%s: cam_int_qbuf failed (index:%d)(ret:%d)",__FUNCTION__, index, ret); |
c15a6b00 JS |
184 | |
185 | return ret; | |
186 | } | |
187 | ||
188 | int cam_int_streamon(node_info_t *node) | |
189 | { | |
190 | enum v4l2_buf_type type = node->type; | |
191 | int ret; | |
192 | ||
be494d19 | 193 | |
c15a6b00 JS |
194 | ret = exynos_v4l2_streamon(node->fd, type); |
195 | ||
196 | if (ret < 0) | |
ad37861e | 197 | ALOGE("%s: VIDIOC_STREAMON failed [%d] (%d)",__FUNCTION__, node->fd,ret); |
c15a6b00 JS |
198 | |
199 | ALOGV("On streaming I/O... ... fd(%d)", node->fd); | |
200 | ||
201 | return ret; | |
202 | } | |
203 | ||
13d8c7b4 SK |
204 | int cam_int_streamoff(node_info_t *node) |
205 | { | |
ad37861e SK |
206 | enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; |
207 | int ret; | |
13d8c7b4 | 208 | |
be494d19 | 209 | |
ad37861e SK |
210 | ALOGV("Off streaming I/O... fd(%d)", node->fd); |
211 | ret = exynos_v4l2_streamoff(node->fd, type); | |
13d8c7b4 SK |
212 | |
213 | if (ret < 0) | |
214 | ALOGE("%s: VIDIOC_STREAMOFF failed (%d)",__FUNCTION__, ret); | |
215 | ||
ad37861e | 216 | return ret; |
13d8c7b4 SK |
217 | } |
218 | ||
9dd63e1f SK |
219 | int isp_int_streamoff(node_info_t *node) |
220 | { | |
ad37861e SK |
221 | enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; |
222 | int ret; | |
9dd63e1f | 223 | |
ad37861e SK |
224 | ALOGV("Off streaming I/O... fd(%d)", node->fd); |
225 | ret = exynos_v4l2_streamoff(node->fd, type); | |
9dd63e1f SK |
226 | |
227 | if (ret < 0) | |
228 | ALOGE("%s: VIDIOC_STREAMOFF failed (%d)",__FUNCTION__, ret); | |
229 | ||
ad37861e | 230 | return ret; |
9dd63e1f SK |
231 | } |
232 | ||
c15a6b00 JS |
233 | int cam_int_dqbuf(node_info_t *node) |
234 | { | |
235 | struct v4l2_buffer v4l2_buf; | |
236 | struct v4l2_plane planes[VIDEO_MAX_PLANES]; | |
237 | int ret; | |
238 | ||
239 | v4l2_buf.type = node->type; | |
240 | v4l2_buf.memory = node->memory; | |
241 | v4l2_buf.m.planes = planes; | |
242 | v4l2_buf.length = node->planes; | |
243 | ||
244 | ret = exynos_v4l2_dqbuf(node->fd, &v4l2_buf); | |
245 | if (ret < 0) | |
13d8c7b4 | 246 | ALOGE("%s: VIDIOC_DQBUF failed (%d)",__FUNCTION__, ret); |
c15a6b00 JS |
247 | |
248 | return v4l2_buf.index; | |
249 | } | |
250 | ||
feb7df4c SK |
251 | int cam_int_dqbuf(node_info_t *node, int num_plane) |
252 | { | |
253 | struct v4l2_buffer v4l2_buf; | |
254 | struct v4l2_plane planes[VIDEO_MAX_PLANES]; | |
255 | int ret; | |
256 | ||
257 | v4l2_buf.type = node->type; | |
258 | v4l2_buf.memory = node->memory; | |
259 | v4l2_buf.m.planes = planes; | |
260 | v4l2_buf.length = num_plane; | |
261 | ||
262 | ret = exynos_v4l2_dqbuf(node->fd, &v4l2_buf); | |
263 | if (ret < 0) | |
264 | ALOGE("%s: VIDIOC_DQBUF failed (%d)",__FUNCTION__, ret); | |
265 | ||
266 | return v4l2_buf.index; | |
267 | } | |
268 | ||
c15a6b00 JS |
269 | int cam_int_s_input(node_info_t *node, int index) |
270 | { | |
271 | int ret; | |
13d8c7b4 | 272 | |
c15a6b00 JS |
273 | ret = exynos_v4l2_s_input(node->fd, index); |
274 | if (ret < 0) | |
13d8c7b4 | 275 | ALOGE("%s: VIDIOC_S_INPUT failed (%d)",__FUNCTION__, ret); |
c15a6b00 JS |
276 | |
277 | return ret; | |
278 | } | |
279 | ||
280 | ||
281 | gralloc_module_t const* ExynosCameraHWInterface2::m_grallocHal; | |
282 | ||
283 | RequestManager::RequestManager(SignalDrivenThread* main_thread): | |
a3bcc37b | 284 | m_vdisEnable(false), |
1422aff9 | 285 | m_lastCompletedFrameCnt(-1), |
2bdec060 SK |
286 | m_lastAeMode(0), |
287 | m_lastAaMode(0), | |
288 | m_lastAwbMode(0), | |
289 | m_lastAeComp(0), | |
1422aff9 | 290 | m_vdisBubbleEn(false) |
c15a6b00 JS |
291 | { |
292 | m_metadataConverter = new MetadataConverter; | |
293 | m_mainThread = main_thread; | |
2adfa429 | 294 | ResetEntry(); |
ad37861e | 295 | m_sensorPipelineSkipCnt = 0; |
c15a6b00 JS |
296 | return; |
297 | } | |
298 | ||
299 | RequestManager::~RequestManager() | |
300 | { | |
6d8e5b08 SK |
301 | ALOGV("%s", __FUNCTION__); |
302 | if (m_metadataConverter != NULL) { | |
303 | delete m_metadataConverter; | |
304 | m_metadataConverter = NULL; | |
305 | } | |
306 | ||
52f54308 | 307 | releaseSensorQ(); |
c15a6b00 JS |
308 | return; |
309 | } | |
310 | ||
2adfa429 JS |
311 | void RequestManager::ResetEntry() |
312 | { | |
313 | Mutex::Autolock lock(m_requestMutex); | |
1264ab16 | 314 | Mutex::Autolock lock2(m_numOfEntriesLock); |
2adfa429 JS |
315 | for (int i=0 ; i<NUM_MAX_REQUEST_MGR_ENTRY; i++) { |
316 | memset(&(entries[i]), 0x00, sizeof(request_manager_entry_t)); | |
317 | entries[i].internal_shot.shot.ctl.request.frameCount = -1; | |
318 | } | |
319 | m_numOfEntries = 0; | |
320 | m_entryInsertionIndex = -1; | |
321 | m_entryProcessingIndex = -1; | |
322 | m_entryFrameOutputIndex = -1; | |
323 | } | |
324 | ||
c15a6b00 JS |
325 | int RequestManager::GetNumEntries() |
326 | { | |
1264ab16 | 327 | Mutex::Autolock lock(m_numOfEntriesLock); |
c15a6b00 JS |
328 | return m_numOfEntries; |
329 | } | |
330 | ||
9dd63e1f SK |
331 | void RequestManager::SetDefaultParameters(int cropX) |
332 | { | |
333 | m_cropX = cropX; | |
334 | } | |
335 | ||
c15a6b00 JS |
336 | bool RequestManager::IsRequestQueueFull() |
337 | { | |
338 | Mutex::Autolock lock(m_requestMutex); | |
1264ab16 | 339 | Mutex::Autolock lock2(m_numOfEntriesLock); |
c15a6b00 JS |
340 | if (m_numOfEntries>=NUM_MAX_REQUEST_MGR_ENTRY) |
341 | return true; | |
342 | else | |
343 | return false; | |
344 | } | |
345 | ||
ca714238 | 346 | void RequestManager::RegisterRequest(camera_metadata_t * new_request, int * afMode, uint32_t * afRegion) |
c15a6b00 | 347 | { |
13d8c7b4 SK |
348 | ALOGV("DEBUG(%s):", __FUNCTION__); |
349 | ||
c15a6b00 | 350 | Mutex::Autolock lock(m_requestMutex); |
1264ab16 | 351 | Mutex::Autolock lock2(m_numOfEntriesLock); |
13d8c7b4 | 352 | |
c15a6b00 | 353 | request_manager_entry * newEntry = NULL; |
9dd63e1f | 354 | int newInsertionIndex = GetNextIndex(m_entryInsertionIndex); |
ca714238 | 355 | ALOGV("DEBUG(%s): got lock, new insertIndex(%d), cnt before reg(%d)", __FUNCTION__,newInsertionIndex, m_numOfEntries ); |
13d8c7b4 | 356 | |
c15a6b00 | 357 | |
c15a6b00 JS |
358 | newEntry = &(entries[newInsertionIndex]); |
359 | ||
360 | if (newEntry->status!=EMPTY) { | |
13d8c7b4 SK |
361 | ALOGV("DEBUG(%s): Circular buffer abnormal ", __FUNCTION__); |
362 | return; | |
c15a6b00 JS |
363 | } |
364 | newEntry->status = REGISTERED; | |
365 | newEntry->original_request = new_request; | |
ad37861e | 366 | memset(&(newEntry->internal_shot), 0, sizeof(struct camera2_shot_ext)); |
13d8c7b4 | 367 | m_metadataConverter->ToInternalShot(new_request, &(newEntry->internal_shot)); |
a85ec381 SK |
368 | newEntry->output_stream_count = 0; |
369 | if (newEntry->internal_shot.shot.ctl.request.outputStreams[0] & MASK_OUTPUT_SCP) | |
370 | newEntry->output_stream_count++; | |
371 | ||
372 | if (newEntry->internal_shot.shot.ctl.request.outputStreams[0] & MASK_OUTPUT_SCC) | |
373 | newEntry->output_stream_count++; | |
c15a6b00 JS |
374 | |
375 | m_numOfEntries++; | |
376 | m_entryInsertionIndex = newInsertionIndex; | |
377 | ||
13d8c7b4 | 378 | |
ca714238 SK |
379 | *afMode = (int)(newEntry->internal_shot.shot.ctl.aa.afMode); |
380 | afRegion[0] = newEntry->internal_shot.shot.ctl.aa.afRegions[0]; | |
381 | afRegion[1] = newEntry->internal_shot.shot.ctl.aa.afRegions[1]; | |
382 | afRegion[2] = newEntry->internal_shot.shot.ctl.aa.afRegions[2]; | |
383 | afRegion[3] = newEntry->internal_shot.shot.ctl.aa.afRegions[3]; | |
c15a6b00 | 384 | ALOGV("## RegisterReq DONE num(%d), insert(%d), processing(%d), frame(%d), (frameCnt(%d))", |
be494d19 | 385 | m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex, newEntry->internal_shot.shot.ctl.request.frameCount); |
c15a6b00 JS |
386 | } |
387 | ||
388 | void RequestManager::DeregisterRequest(camera_metadata_t ** deregistered_request) | |
389 | { | |
13d8c7b4 | 390 | ALOGV("DEBUG(%s):", __FUNCTION__); |
ad37861e SK |
391 | int frame_index; |
392 | request_manager_entry * currentEntry; | |
c15a6b00 | 393 | |
ad37861e | 394 | Mutex::Autolock lock(m_requestMutex); |
1264ab16 | 395 | Mutex::Autolock lock2(m_numOfEntriesLock); |
13d8c7b4 | 396 | |
f9a06609 | 397 | frame_index = GetCompletedIndex(); |
ad37861e | 398 | currentEntry = &(entries[frame_index]); |
f9a06609 | 399 | if (currentEntry->status != COMPLETED) { |
5c88d1f2 C |
400 | CAM_LOGD("DBG(%s): Circular buffer abnormal. processing(%d), frame(%d), status(%d) ", __FUNCTION__, |
401 | m_entryProcessingIndex, frame_index,(int)(currentEntry->status)); | |
13d8c7b4 | 402 | return; |
c15a6b00 | 403 | } |
13d8c7b4 SK |
404 | if (deregistered_request) *deregistered_request = currentEntry->original_request; |
405 | ||
041f38de SK |
406 | m_lastCompletedFrameCnt = currentEntry->internal_shot.shot.ctl.request.frameCount; |
407 | ||
c15a6b00 JS |
408 | currentEntry->status = EMPTY; |
409 | currentEntry->original_request = NULL; | |
be494d19 SK |
410 | memset(&(currentEntry->internal_shot), 0, sizeof(struct camera2_shot_ext)); |
411 | currentEntry->internal_shot.shot.ctl.request.frameCount = -1; | |
c15a6b00 JS |
412 | currentEntry->output_stream_count = 0; |
413 | m_numOfEntries--; | |
414 | ALOGV("## DeRegistReq DONE num(%d), insert(%d), processing(%d), frame(%d)", | |
415 | m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex); | |
13d8c7b4 | 416 | |
041f38de | 417 | CheckCompleted(GetNextIndex(frame_index)); |
c15a6b00 | 418 | return; |
c15a6b00 JS |
419 | } |
420 | ||
13d8c7b4 | 421 | bool RequestManager::PrepareFrame(size_t* num_entries, size_t* frame_size, |
0f26b20f | 422 | camera_metadata_t ** prepared_frame, int afState) |
c15a6b00 | 423 | { |
13d8c7b4 | 424 | ALOGV("DEBUG(%s):", __FUNCTION__); |
c15a6b00 JS |
425 | Mutex::Autolock lock(m_requestMutex); |
426 | status_t res = NO_ERROR; | |
f9a06609 | 427 | int tempFrameOutputIndex = GetCompletedIndex(); |
13d8c7b4 SK |
428 | request_manager_entry * currentEntry = &(entries[tempFrameOutputIndex]); |
429 | ALOGV("DEBUG(%s): processing(%d), frameOut(%d), insert(%d) recentlycompleted(%d)", __FUNCTION__, | |
430 | m_entryProcessingIndex, m_entryFrameOutputIndex, m_entryInsertionIndex, m_completedIndex); | |
431 | ||
f9a06609 | 432 | if (currentEntry->status != COMPLETED) { |
ad37861e | 433 | ALOGV("DBG(%s): Circular buffer abnormal status(%d)", __FUNCTION__, (int)(currentEntry->status)); |
13d8c7b4 SK |
434 | |
435 | return false; | |
436 | } | |
437 | m_entryFrameOutputIndex = tempFrameOutputIndex; | |
07b3ad1c | 438 | m_tempFrameMetadata = place_camera_metadata(m_tempFrameMetadataBuf, 2000, 35, 500); //estimated |
0f26b20f | 439 | add_camera_metadata_entry(m_tempFrameMetadata, ANDROID_CONTROL_AF_STATE, &afState, 1); |
13d8c7b4 | 440 | res = m_metadataConverter->ToDynamicMetadata(&(currentEntry->internal_shot), |
c15a6b00 JS |
441 | m_tempFrameMetadata); |
442 | if (res!=NO_ERROR) { | |
13d8c7b4 SK |
443 | ALOGE("ERROR(%s): ToDynamicMetadata (%d) ", __FUNCTION__, res); |
444 | return false; | |
c15a6b00 JS |
445 | } |
446 | *num_entries = get_camera_metadata_entry_count(m_tempFrameMetadata); | |
447 | *frame_size = get_camera_metadata_size(m_tempFrameMetadata); | |
448 | *prepared_frame = m_tempFrameMetadata; | |
5506cebf SK |
449 | ALOGV("## PrepareFrame DONE: frameOut(%d) frameCnt-req(%d) timestamp(%lld)", m_entryFrameOutputIndex, |
450 | currentEntry->internal_shot.shot.ctl.request.frameCount, currentEntry->internal_shot.shot.dm.sensor.timeStamp); | |
9dd63e1f | 451 | // Dump(); |
13d8c7b4 | 452 | return true; |
c15a6b00 JS |
453 | } |
454 | ||
ca714238 | 455 | int RequestManager::MarkProcessingRequest(ExynosBuffer* buf) |
c15a6b00 | 456 | { |
13d8c7b4 | 457 | struct camera2_shot_ext * shot_ext; |
b56dcc00 | 458 | struct camera2_shot_ext * request_shot; |
13d8c7b4 | 459 | int targetStreamIndex = 0; |
ad37861e | 460 | request_manager_entry * newEntry = NULL; |
0f26b20f | 461 | static int count = 0; |
13d8c7b4 | 462 | |
52f54308 | 463 | Mutex::Autolock lock(m_requestMutex); |
1264ab16 | 464 | Mutex::Autolock lock2(m_numOfEntriesLock); |
13d8c7b4 | 465 | if (m_numOfEntries == 0) { |
4aa4d739 | 466 | CAM_LOGD("DEBUG(%s): Request Manager Empty ", __FUNCTION__); |
13d8c7b4 SK |
467 | return -1; |
468 | } | |
469 | ||
470 | if ((m_entryProcessingIndex == m_entryInsertionIndex) | |
be494d19 | 471 | && (entries[m_entryProcessingIndex].status == REQUESTED || entries[m_entryProcessingIndex].status == CAPTURED)) { |
da7ca692 | 472 | ALOGV("## MarkProcReq skipping(request underrun) - num(%d), insert(%d), processing(%d), frame(%d)", |
13d8c7b4 SK |
473 | m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex); |
474 | return -1; | |
475 | } | |
c15a6b00 | 476 | |
9dd63e1f | 477 | int newProcessingIndex = GetNextIndex(m_entryProcessingIndex); |
ad37861e | 478 | ALOGV("DEBUG(%s): index(%d)", __FUNCTION__, newProcessingIndex); |
c15a6b00 | 479 | |
c15a6b00 | 480 | newEntry = &(entries[newProcessingIndex]); |
ad37861e | 481 | request_shot = &(newEntry->internal_shot); |
be494d19 | 482 | if (newEntry->status != REGISTERED) { |
5c88d1f2 C |
483 | CAM_LOGD("DEBUG(%s)(%d): Circular buffer abnormal, numOfEntries(%d), status(%d)", __FUNCTION__, newProcessingIndex, m_numOfEntries, newEntry->status); |
484 | for (int i = 0; i < NUM_MAX_REQUEST_MGR_ENTRY; i++) { | |
485 | CAM_LOGD("DBG: entrie[%d].stream output cnt = %d, framecnt(%d)", i, entries[i].output_stream_count, entries[i].internal_shot.shot.ctl.request.frameCount); | |
486 | } | |
13d8c7b4 | 487 | return -1; |
c15a6b00 | 488 | } |
ad37861e | 489 | |
be494d19 | 490 | newEntry->status = REQUESTED; |
c15a6b00 | 491 | |
ad37861e | 492 | shot_ext = (struct camera2_shot_ext *)buf->virt.extP[1]; |
13d8c7b4 | 493 | |
ad37861e SK |
494 | memset(shot_ext, 0x00, sizeof(struct camera2_shot_ext)); |
495 | shot_ext->shot.ctl.request.frameCount = request_shot->shot.ctl.request.frameCount; | |
13d8c7b4 | 496 | shot_ext->request_sensor = 1; |
9dd63e1f SK |
497 | shot_ext->dis_bypass = 1; |
498 | shot_ext->dnr_bypass = 1; | |
ad37861e SK |
499 | shot_ext->fd_bypass = 1; |
500 | shot_ext->setfile = 0; | |
501 | ||
5506cebf SK |
502 | targetStreamIndex = newEntry->internal_shot.shot.ctl.request.outputStreams[0]; |
503 | shot_ext->shot.ctl.request.outputStreams[0] = targetStreamIndex; | |
504 | if (targetStreamIndex & MASK_OUTPUT_SCP) | |
505 | shot_ext->request_scp = 1; | |
13d8c7b4 | 506 | |
5506cebf SK |
507 | if (targetStreamIndex & MASK_OUTPUT_SCC) |
508 | shot_ext->request_scc = 1; | |
509 | ||
510 | if (shot_ext->shot.ctl.stats.faceDetectMode != FACEDETECT_MODE_OFF) | |
511 | shot_ext->fd_bypass = 0; | |
13d8c7b4 | 512 | |
0f26b20f SK |
513 | if (count == 0){ |
514 | shot_ext->shot.ctl.aa.mode = AA_CONTROL_AUTO; | |
515 | } else | |
516 | shot_ext->shot.ctl.aa.mode = AA_CONTROL_NONE; | |
517 | ||
518 | count++; | |
ad37861e SK |
519 | shot_ext->shot.ctl.request.metadataMode = METADATA_MODE_FULL; |
520 | shot_ext->shot.ctl.stats.faceDetectMode = FACEDETECT_MODE_FULL; | |
521 | shot_ext->shot.magicNumber = 0x23456789; | |
522 | shot_ext->shot.ctl.sensor.exposureTime = 0; | |
523 | shot_ext->shot.ctl.sensor.frameDuration = 33*1000*1000; | |
524 | shot_ext->shot.ctl.sensor.sensitivity = 0; | |
525 | ||
e4657e32 SK |
526 | |
527 | shot_ext->shot.ctl.scaler.cropRegion[0] = newEntry->internal_shot.shot.ctl.scaler.cropRegion[0]; | |
528 | shot_ext->shot.ctl.scaler.cropRegion[1] = newEntry->internal_shot.shot.ctl.scaler.cropRegion[1]; | |
529 | shot_ext->shot.ctl.scaler.cropRegion[2] = newEntry->internal_shot.shot.ctl.scaler.cropRegion[2]; | |
13d8c7b4 | 530 | |
ad37861e SK |
531 | m_entryProcessingIndex = newProcessingIndex; |
532 | return newProcessingIndex; | |
c15a6b00 JS |
533 | } |
534 | ||
2adfa429 | 535 | void RequestManager::NotifyStreamOutput(int frameCnt) |
c15a6b00 | 536 | { |
9dd63e1f SK |
537 | int index; |
538 | ||
2adfa429 JS |
539 | Mutex::Autolock lock(m_requestMutex); |
540 | ALOGV("DEBUG(%s): frameCnt(%d)", __FUNCTION__, frameCnt); | |
9dd63e1f SK |
541 | |
542 | index = FindEntryIndexByFrameCnt(frameCnt); | |
543 | if (index == -1) { | |
544 | ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt); | |
545 | return; | |
546 | } | |
2adfa429 | 547 | ALOGV("DEBUG(%s): frameCnt(%d), last cnt (%d)", __FUNCTION__, frameCnt, entries[index].output_stream_count); |
9dd63e1f | 548 | |
be494d19 SK |
549 | entries[index].output_stream_count--; //TODO : match stream id also |
550 | CheckCompleted(index); | |
13d8c7b4 SK |
551 | } |
552 | ||
553 | void RequestManager::CheckCompleted(int index) | |
554 | { | |
041f38de SK |
555 | if ((entries[index].status == METADONE || entries[index].status == COMPLETED) |
556 | && (entries[index].output_stream_count <= 0)){ | |
557 | ALOGV("(%s): Completed(index:%d)(frameCnt:%d)", __FUNCTION__, | |
2adfa429 | 558 | index, entries[index].internal_shot.shot.ctl.request.frameCount ); |
041f38de | 559 | entries[index].status = COMPLETED; |
1422aff9 | 560 | if (m_lastCompletedFrameCnt + 1 == (int)entries[index].internal_shot.shot.ctl.request.frameCount) |
041f38de | 561 | m_mainThread->SetSignal(SIGNAL_MAIN_STREAM_OUTPUT_DONE); |
2adfa429 | 562 | } |
c15a6b00 | 563 | } |
9dd63e1f | 564 | |
f9a06609 | 565 | int RequestManager::GetCompletedIndex() |
ad37861e | 566 | { |
041f38de | 567 | return FindEntryIndexByFrameCnt(m_lastCompletedFrameCnt + 1); |
ad37861e SK |
568 | } |
569 | ||
52f54308 SK |
570 | void RequestManager::pushSensorQ(int index) |
571 | { | |
572 | Mutex::Autolock lock(m_requestMutex); | |
573 | m_sensorQ.push_back(index); | |
574 | } | |
575 | ||
576 | int RequestManager::popSensorQ() | |
577 | { | |
578 | List<int>::iterator sensor_token; | |
579 | int index; | |
580 | ||
581 | Mutex::Autolock lock(m_requestMutex); | |
582 | ||
583 | if(m_sensorQ.size() == 0) | |
584 | return -1; | |
585 | ||
586 | sensor_token = m_sensorQ.begin()++; | |
587 | index = *sensor_token; | |
588 | m_sensorQ.erase(sensor_token); | |
589 | ||
590 | return (index); | |
591 | } | |
592 | ||
593 | void RequestManager::releaseSensorQ() | |
594 | { | |
595 | List<int>::iterator r; | |
596 | ||
597 | Mutex::Autolock lock(m_requestMutex); | |
0eb27a9d | 598 | ALOGV("(%s)m_sensorQ.size : %d", __FUNCTION__, m_sensorQ.size()); |
52f54308 SK |
599 | |
600 | while(m_sensorQ.size() > 0){ | |
601 | r = m_sensorQ.begin()++; | |
602 | m_sensorQ.erase(r); | |
603 | } | |
604 | return; | |
605 | } | |
606 | ||
ad37861e | 607 | void RequestManager::ApplyDynamicMetadata(struct camera2_shot_ext *shot_ext) |
13d8c7b4 | 608 | { |
9dd63e1f | 609 | int index; |
b56dcc00 SK |
610 | struct camera2_shot_ext * request_shot; |
611 | nsecs_t timeStamp; | |
ad37861e | 612 | int i; |
13d8c7b4 | 613 | |
52f54308 | 614 | Mutex::Autolock lock(m_requestMutex); |
ad37861e | 615 | ALOGV("DEBUG(%s): frameCnt(%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount); |
9dd63e1f | 616 | |
ad37861e | 617 | for (i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) { |
492a2506 MS |
618 | if (entries[i].internal_shot.shot.ctl.request.frameCount |
619 | == shot_ext->shot.ctl.request.frameCount) { | |
620 | if (entries[i].status == CAPTURED) { | |
621 | entries[i].status = METADONE; | |
622 | break; | |
623 | } | |
624 | if (entries[i].status == METADONE) { | |
625 | return; | |
626 | } | |
2adfa429 | 627 | } |
ad37861e SK |
628 | } |
629 | ||
630 | if (i == NUM_MAX_REQUEST_MGR_ENTRY){ | |
631 | ALOGE("[%s] no entry found(framecount:%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount); | |
9dd63e1f | 632 | return; |
13d8c7b4 | 633 | } |
9dd63e1f | 634 | |
ad37861e | 635 | request_manager_entry * newEntry = &(entries[i]); |
b56dcc00 | 636 | request_shot = &(newEntry->internal_shot); |
9dd63e1f | 637 | |
b56dcc00 | 638 | timeStamp = request_shot->shot.dm.sensor.timeStamp; |
ad37861e | 639 | memcpy(&(request_shot->shot.dm), &(shot_ext->shot.dm), sizeof(struct camera2_dm)); |
b56dcc00 | 640 | request_shot->shot.dm.sensor.timeStamp = timeStamp; |
5506cebf | 641 | m_lastTimeStamp = timeStamp; |
ad37861e | 642 | CheckCompleted(i); |
13d8c7b4 SK |
643 | } |
644 | ||
53f62ad9 | 645 | void RequestManager::UpdateIspParameters(struct camera2_shot_ext *shot_ext, int frameCnt, ctl_request_info_t *ctl_info) |
13d8c7b4 | 646 | { |
9dd63e1f | 647 | int index, targetStreamIndex; |
b56dcc00 | 648 | struct camera2_shot_ext * request_shot; |
9dd63e1f SK |
649 | |
650 | ALOGV("DEBUG(%s): updating info with frameCnt(%d)", __FUNCTION__, frameCnt); | |
651 | if (frameCnt < 0) | |
13d8c7b4 | 652 | return; |
9dd63e1f SK |
653 | |
654 | index = FindEntryIndexByFrameCnt(frameCnt); | |
655 | if (index == -1) { | |
656 | ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt); | |
657 | return; | |
658 | } | |
659 | ||
13d8c7b4 | 660 | request_manager_entry * newEntry = &(entries[index]); |
ad37861e | 661 | request_shot = &(newEntry->internal_shot); |
2bdec060 | 662 | memcpy(&(shot_ext->shot.ctl), &(request_shot->shot.ctl), sizeof(struct camera2_ctl)); |
2adfa429 | 663 | shot_ext->shot.ctl.request.frameCount = frameCnt; |
13d8c7b4 | 664 | shot_ext->request_sensor = 1; |
ad37861e SK |
665 | shot_ext->dis_bypass = 1; |
666 | shot_ext->dnr_bypass = 1; | |
667 | shot_ext->fd_bypass = 1; | |
10e122bd | 668 | shot_ext->drc_bypass = 1; |
ad37861e SK |
669 | shot_ext->setfile = 0; |
670 | ||
13d8c7b4 SK |
671 | shot_ext->request_scc = 0; |
672 | shot_ext->request_scp = 0; | |
ad37861e | 673 | |
5506cebf SK |
674 | shot_ext->isReprocessing = request_shot->isReprocessing; |
675 | shot_ext->reprocessInput = request_shot->reprocessInput; | |
9dd63e1f | 676 | shot_ext->shot.ctl.request.outputStreams[0] = 0; |
9dd63e1f | 677 | |
48728d49 SK |
678 | shot_ext->awb_mode_dm = request_shot->awb_mode_dm; |
679 | ||
e4657e32 SK |
680 | shot_ext->shot.ctl.scaler.cropRegion[0] = request_shot->shot.ctl.scaler.cropRegion[0]; |
681 | shot_ext->shot.ctl.scaler.cropRegion[1] = request_shot->shot.ctl.scaler.cropRegion[1]; | |
682 | shot_ext->shot.ctl.scaler.cropRegion[2] = request_shot->shot.ctl.scaler.cropRegion[2]; | |
683 | ||
53f62ad9 YJ |
684 | // mapping flash UI mode from aeMode |
685 | if (request_shot->shot.ctl.aa.aeMode >= AA_AEMODE_ON) { | |
4a9565ae | 686 | if (request_shot->shot.ctl.aa.captureIntent == AA_CAPTURE_INTENT_PREVIEW) |
73f5ad60 | 687 | ctl_info->flash.i_flashMode = request_shot->shot.ctl.aa.aeMode; |
a0648fc7 | 688 | else if (request_shot->shot.ctl.aa.captureIntent == AA_CAPTURE_INTENT_VIDEO_RECORD) |
73f5ad60 | 689 | ctl_info->flash.i_flashMode = request_shot->shot.ctl.aa.aeMode; |
53f62ad9 YJ |
690 | request_shot->shot.ctl.aa.aeMode = AA_AEMODE_ON; |
691 | } | |
53f62ad9 YJ |
692 | |
693 | // Apply ae/awb lock or unlock | |
e117f756 YJ |
694 | if (request_shot->ae_lock == AEMODE_LOCK_ON) |
695 | request_shot->shot.ctl.aa.aeMode = AA_AEMODE_LOCKED; | |
696 | if (request_shot->awb_lock == AWBMODE_LOCK_ON) | |
697 | request_shot->shot.ctl.aa.awbMode = AA_AWBMODE_LOCKED; | |
698 | ||
2bdec060 SK |
699 | if (m_lastAaMode == request_shot->shot.ctl.aa.mode) { |
700 | shot_ext->shot.ctl.aa.mode = (enum aa_mode)(0); | |
701 | } | |
702 | else { | |
703 | shot_ext->shot.ctl.aa.mode = request_shot->shot.ctl.aa.mode; | |
704 | m_lastAaMode = (int)(shot_ext->shot.ctl.aa.mode); | |
705 | } | |
706 | if (m_lastAeMode == request_shot->shot.ctl.aa.aeMode) { | |
707 | shot_ext->shot.ctl.aa.aeMode = (enum aa_aemode)(0); | |
708 | } | |
709 | else { | |
710 | shot_ext->shot.ctl.aa.aeMode = request_shot->shot.ctl.aa.aeMode; | |
711 | m_lastAeMode = (int)(shot_ext->shot.ctl.aa.aeMode); | |
712 | } | |
713 | if (m_lastAwbMode == request_shot->shot.ctl.aa.awbMode) { | |
714 | shot_ext->shot.ctl.aa.awbMode = (enum aa_awbmode)(0); | |
715 | } | |
716 | else { | |
717 | shot_ext->shot.ctl.aa.awbMode = request_shot->shot.ctl.aa.awbMode; | |
718 | m_lastAwbMode = (int)(shot_ext->shot.ctl.aa.awbMode); | |
719 | } | |
720 | if (m_lastAeComp == request_shot->shot.ctl.aa.aeExpCompensation) { | |
721 | shot_ext->shot.ctl.aa.aeExpCompensation = 0; | |
722 | } | |
723 | else { | |
724 | shot_ext->shot.ctl.aa.aeExpCompensation = request_shot->shot.ctl.aa.aeExpCompensation; | |
725 | m_lastAeComp = (int)(shot_ext->shot.ctl.aa.aeExpCompensation); | |
726 | } | |
ef6f83ca | 727 | |
a3bcc37b | 728 | if (request_shot->shot.ctl.aa.videoStabilizationMode && m_vdisEnable) { |
5c88d1f2 C |
729 | m_vdisBubbleEn = true; |
730 | shot_ext->dis_bypass = 0; | |
7ef20f42 | 731 | shot_ext->dnr_bypass = 0; |
5c88d1f2 C |
732 | } else { |
733 | m_vdisBubbleEn = false; | |
734 | shot_ext->dis_bypass = 1; | |
7ef20f42 | 735 | shot_ext->dnr_bypass = 1; |
5c88d1f2 | 736 | } |
5c88d1f2 | 737 | |
ef6f83ca SK |
738 | shot_ext->shot.ctl.aa.afTrigger = 0; |
739 | ||
5506cebf SK |
740 | targetStreamIndex = newEntry->internal_shot.shot.ctl.request.outputStreams[0]; |
741 | shot_ext->shot.ctl.request.outputStreams[0] = targetStreamIndex; | |
742 | if (targetStreamIndex & MASK_OUTPUT_SCP) | |
743 | shot_ext->request_scp = 1; | |
13d8c7b4 | 744 | |
5506cebf SK |
745 | if (targetStreamIndex & MASK_OUTPUT_SCC) |
746 | shot_ext->request_scc = 1; | |
747 | ||
748 | if (shot_ext->shot.ctl.stats.faceDetectMode != FACEDETECT_MODE_OFF) | |
749 | shot_ext->fd_bypass = 0; | |
750 | ||
6ba9ef65 SK |
751 | shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = request_shot->shot.ctl.aa.aeTargetFpsRange[0]; |
752 | shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = request_shot->shot.ctl.aa.aeTargetFpsRange[1]; | |
5506cebf SK |
753 | |
754 | ALOGV("(%s): applied aa(%d) aemode(%d) expComp(%d), awb(%d) afmode(%d), ", __FUNCTION__, | |
755 | (int)(shot_ext->shot.ctl.aa.mode), (int)(shot_ext->shot.ctl.aa.aeMode), | |
756 | (int)(shot_ext->shot.ctl.aa.aeExpCompensation), (int)(shot_ext->shot.ctl.aa.awbMode), | |
757 | (int)(shot_ext->shot.ctl.aa.afMode)); | |
13d8c7b4 SK |
758 | } |
759 | ||
5c88d1f2 C |
// Reports whether a VDIS "bubble" frame is currently in effect.
// NOTE(review): despite the name, this returns m_vdisBubbleEn -- the flag set
// when a request has videoStabilizationMode on AND m_vdisEnable is true (see
// the shot-apply path above) -- not m_vdisEnable itself. Confirm callers
// expect the bubble flag rather than the raw enable switch.
bool RequestManager::IsVdisEnable(void)
{
    return m_vdisBubbleEn;
}
5c88d1f2 | 764 | |
9dd63e1f SK |
765 | int RequestManager::FindEntryIndexByFrameCnt(int frameCnt) |
766 | { | |
767 | for (int i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) { | |
1422aff9 | 768 | if ((int)entries[i].internal_shot.shot.ctl.request.frameCount == frameCnt) |
9dd63e1f SK |
769 | return i; |
770 | } | |
771 | return -1; | |
772 | } | |
773 | ||
774 | void RequestManager::RegisterTimestamp(int frameCnt, nsecs_t * frameTime) | |
13d8c7b4 | 775 | { |
9dd63e1f SK |
776 | int index = FindEntryIndexByFrameCnt(frameCnt); |
777 | if (index == -1) { | |
778 | ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt); | |
779 | return; | |
780 | } | |
781 | ||
13d8c7b4 | 782 | request_manager_entry * currentEntry = &(entries[index]); |
a8be0011 SK |
783 | if (currentEntry->internal_shot.isReprocessing == 1) { |
784 | ALOGV("DEBUG(%s): REPROCESSING : preserving timestamp for reqIndex(%d) frameCnt(%d) (%lld)", __FUNCTION__, | |
be494d19 | 785 | index, frameCnt, currentEntry->internal_shot.shot.dm.sensor.timeStamp); |
a8be0011 SK |
786 | } else { |
787 | currentEntry->internal_shot.shot.dm.sensor.timeStamp = *((uint64_t*)frameTime); | |
788 | ALOGV("DEBUG(%s): applied timestamp for reqIndex(%d) frameCnt(%d) (%lld)", __FUNCTION__, | |
789 | index, frameCnt, currentEntry->internal_shot.shot.dm.sensor.timeStamp); | |
790 | } | |
13d8c7b4 SK |
791 | } |
792 | ||
5506cebf SK |
793 | |
794 | nsecs_t RequestManager::GetTimestampByFrameCnt(int frameCnt) | |
795 | { | |
796 | int index = FindEntryIndexByFrameCnt(frameCnt); | |
797 | if (index == -1) { | |
798 | ALOGE("ERR(%s): Cannot find entry for frameCnt(%d) returning saved time(%lld)", __FUNCTION__, frameCnt, m_lastTimeStamp); | |
799 | return m_lastTimeStamp; | |
800 | } | |
801 | else | |
802 | return GetTimestamp(index); | |
803 | } | |
804 | ||
805 | nsecs_t RequestManager::GetTimestamp(int index) | |
13d8c7b4 | 806 | { |
5f643a75 | 807 | Mutex::Autolock lock(m_requestMutex); |
54f4971e SK |
808 | if (index < 0 || index >= NUM_MAX_REQUEST_MGR_ENTRY) { |
809 | ALOGE("ERR(%s): Request entry outside of bounds (%d)", __FUNCTION__, index); | |
9dd63e1f SK |
810 | return 0; |
811 | } | |
812 | ||
13d8c7b4 | 813 | request_manager_entry * currentEntry = &(entries[index]); |
5f643a75 | 814 | nsecs_t frameTime = currentEntry->internal_shot.shot.dm.sensor.timeStamp; |
5506cebf SK |
815 | if (frameTime == 0) { |
816 | ALOGV("DEBUG(%s): timestamp null, returning saved value", __FUNCTION__); | |
817 | frameTime = m_lastTimeStamp; | |
818 | } | |
9dd63e1f | 819 | ALOGV("DEBUG(%s): Returning timestamp for reqIndex(%d) (%lld)", __FUNCTION__, index, frameTime); |
13d8c7b4 SK |
820 | return frameTime; |
821 | } | |
822 | ||
2f4d175d SK |
823 | uint8_t RequestManager::GetOutputStreamByFrameCnt(int frameCnt) |
824 | { | |
825 | int index = FindEntryIndexByFrameCnt(frameCnt); | |
826 | if (index == -1) { | |
827 | ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt); | |
828 | return 0; | |
829 | } | |
830 | else | |
831 | return GetOutputStream(index); | |
832 | } | |
833 | ||
834 | uint8_t RequestManager::GetOutputStream(int index) | |
835 | { | |
836 | Mutex::Autolock lock(m_requestMutex); | |
837 | if (index < 0 || index >= NUM_MAX_REQUEST_MGR_ENTRY) { | |
838 | ALOGE("ERR(%s): Request entry outside of bounds (%d)", __FUNCTION__, index); | |
839 | return 0; | |
840 | } | |
841 | ||
842 | request_manager_entry * currentEntry = &(entries[index]); | |
843 | return currentEntry->internal_shot.shot.ctl.request.outputStreams[0]; | |
844 | } | |
845 | ||
69d1e6e9 SK |
846 | camera2_shot_ext * RequestManager::GetInternalShotExtByFrameCnt(int frameCnt) |
847 | { | |
848 | int index = FindEntryIndexByFrameCnt(frameCnt); | |
849 | if (index == -1) { | |
850 | ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt); | |
851 | return 0; | |
852 | } | |
853 | else | |
854 | return GetInternalShotExt(index); | |
855 | } | |
856 | ||
857 | camera2_shot_ext * RequestManager::GetInternalShotExt(int index) | |
858 | { | |
859 | Mutex::Autolock lock(m_requestMutex); | |
860 | if (index < 0 || index >= NUM_MAX_REQUEST_MGR_ENTRY) { | |
861 | ALOGE("ERR(%s): Request entry outside of bounds (%d)", __FUNCTION__, index); | |
862 | return 0; | |
863 | } | |
864 | ||
865 | request_manager_entry * currentEntry = &(entries[index]); | |
866 | return ¤tEntry->internal_shot; | |
867 | } | |
868 | ||
// Maps a shot completed by the driver (shot_ext) back to its request entry.
// On the first match in REQUESTED state the entry is advanced to CAPTURED and
// its frameCount returned. With `drain` set, an entry already at CAPTURED or
// beyond also resolves (used when flushing the pipeline). Returns -1 when the
// table is empty or no usable entry matches.
// Holds both m_requestMutex and m_numOfEntriesLock for the whole scan.
int RequestManager::FindFrameCnt(struct camera2_shot_ext * shot_ext, bool drain)
{
    Mutex::Autolock lock(m_requestMutex);
    Mutex::Autolock lock2(m_numOfEntriesLock);
    int i;

    if (m_numOfEntries == 0) {
        CAM_LOGD("DBG(%s): No Entry found", __FUNCTION__);
        return -1;
    }

    for (i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
        // Skip entries for other frames.
        if(entries[i].internal_shot.shot.ctl.request.frameCount != shot_ext->shot.ctl.request.frameCount)
            continue;

        // Normal path: request was queued and has now come back captured.
        if (entries[i].status == REQUESTED) {
            entries[i].status = CAPTURED;
            return entries[i].internal_shot.shot.ctl.request.frameCount;
        }
        // Drain path: accept entries that already progressed past CAPTURED.
        if (drain && (entries[i].status >= CAPTURED)) {
            return entries[i].internal_shot.shot.ctl.request.frameCount;
        }
        // Matching frame number but unexpected state; log and keep scanning.
        CAM_LOGE("ERR(%s): frameCount(%d), index(%d), status(%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount, i, entries[i].status);

    }
    CAM_LOGD("(%s): No Entry found frame count(%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);

    return -1;
}
13d8c7b4 | 898 | |
b5237e6b SK |
899 | void RequestManager::SetInitialSkip(int count) |
900 | { | |
901 | ALOGV("(%s): Pipeline Restarting. setting cnt(%d) - current(%d)", __FUNCTION__, count, m_sensorPipelineSkipCnt); | |
902 | if (count > m_sensorPipelineSkipCnt) | |
903 | m_sensorPipelineSkipCnt = count; | |
904 | } | |
905 | ||
ad37861e SK |
906 | int RequestManager::GetSkipCnt() |
907 | { | |
908 | ALOGV("(%s): skip cnt(%d)", __FUNCTION__, m_sensorPipelineSkipCnt); | |
909 | if (m_sensorPipelineSkipCnt == 0) | |
910 | return m_sensorPipelineSkipCnt; | |
911 | else | |
912 | return --m_sensorPipelineSkipCnt; | |
913 | } | |
914 | ||
13d8c7b4 SK |
915 | void RequestManager::Dump(void) |
916 | { | |
13d8c7b4 SK |
917 | int i = 0; |
918 | request_manager_entry * currentEntry; | |
1264ab16 | 919 | Mutex::Autolock lock(m_numOfEntriesLock); |
ad37861e | 920 | ALOGD("## Dump totalentry(%d), insert(%d), processing(%d), frame(%d)", |
13d8c7b4 SK |
921 | m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex); |
922 | ||
923 | for (i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) { | |
924 | currentEntry = &(entries[i]); | |
5506cebf | 925 | ALOGD("[%2d] status[%d] frameCnt[%3d] numOutput[%d] outstream[0]-%x ", i, |
be494d19 | 926 | currentEntry->status, currentEntry->internal_shot.shot.ctl.request.frameCount, |
ad37861e | 927 | currentEntry->output_stream_count, |
5506cebf | 928 | currentEntry->internal_shot.shot.ctl.request.outputStreams[0]); |
13d8c7b4 SK |
929 | } |
930 | } | |
c15a6b00 | 931 | |
9dd63e1f SK |
932 | int RequestManager::GetNextIndex(int index) |
933 | { | |
934 | index++; | |
935 | if (index >= NUM_MAX_REQUEST_MGR_ENTRY) | |
936 | index = 0; | |
937 | ||
938 | return index; | |
939 | } | |
940 | ||
f9a06609 SK |
941 | int RequestManager::GetPrevIndex(int index) |
942 | { | |
943 | index--; | |
944 | if (index < 0) | |
945 | index = NUM_MAX_REQUEST_MGR_ENTRY-1; | |
946 | ||
947 | return index; | |
948 | } | |
949 | ||
// Constructs the HAL2 camera device:
//  - loads the gralloc HAL module and creates an ION client,
//  - creates BayerBufManager / MainThread / RequestManager,
//  - initializes the ISP chain (result reported to the caller through
//    *openInvalid; on failure only the video nodes are closed and the
//    object is left partially constructed),
//  - on success starts the main and sensor threads, initializes the two
//    CSC (color space conversion) contexts on fixed GSC nodes, sets fixed
//    EXIF attributes, and clears the flash/AE/AF/scene control state.
// NOTE(review): the failure branch relies on InitializeISPChain() returning
// a negative value -- see the note on that function.
ExynosCameraHWInterface2::ExynosCameraHWInterface2(int cameraId, camera2_device_t *dev, ExynosCamera2 * camera, int *openInvalid):
            m_requestQueueOps(NULL),
            m_frameQueueOps(NULL),
            m_callbackCookie(NULL),
            m_numOfRemainingReqInSvc(0),
            m_isRequestQueuePending(false),
            m_isRequestQueueNull(true),
            m_halDevice(dev),
            m_ionCameraClient(0),
            m_isIspStarted(false),
            m_sccLocalBufferValid(false),
            m_cameraId(cameraId),
            m_scp_closing(false),
            m_scp_closed(false),
            m_wideAspect(false),
            m_zoomRatio(1),
            m_vdisBubbleCnt(0),
            m_vdisDupFrame(0),
            m_jpegEncodingCount(0),
            m_scpForceSuspended(false),
            m_afState(HAL_AFSTATE_INACTIVE),
            m_afTriggerId(0),
            m_afMode(NO_CHANGE),
            m_afMode2(NO_CHANGE),
            m_IsAfModeUpdateRequired(false),
            m_IsAfTriggerRequired(false),
            m_IsAfLockRequired(false),
            m_serviceAfState(ANDROID_CONTROL_AF_STATE_INACTIVE),
            m_afPendingTriggerId(0),
            m_afModeWaitingCnt(0),
            m_scpOutputSignalCnt(0),
            m_scpOutputImageCnt(0),
            m_nightCaptureCnt(0),
            m_nightCaptureFrameCnt(0),
            m_lastSceneMode(0),
            m_thumbNailW(160),
            m_thumbNailH(120)
{
    ALOGD("(%s): ENTER", __FUNCTION__);
    int ret = 0;
    int res = 0;

    m_exynosPictureCSC = NULL;
    m_exynosVideoCSC = NULL;

    // Lazily load the gralloc module (m_grallocHal is shared static state).
    if (!m_grallocHal) {
        ret = hw_get_module(GRALLOC_HARDWARE_MODULE_ID, (const hw_module_t **)&m_grallocHal);
        if (ret)
            ALOGE("ERR(%s):Fail on loading gralloc HAL", __FUNCTION__);
    }

    m_camera2 = camera;
    m_ionCameraClient = createIonClient(m_ionCameraClient);
    if(m_ionCameraClient == 0)
        ALOGE("ERR(%s):Fail on ion_client_create", __FUNCTION__);


    m_BayerManager = new BayerBufManager();
    m_mainThread = new MainThread(this);
    m_requestManager = new RequestManager((SignalDrivenThread*)(m_mainThread.get()));
    *openInvalid = InitializeISPChain();
    if (*openInvalid < 0) {
        ALOGD("(%s): ISP chain init failed. exiting", __FUNCTION__);
        // clean process
        // 1. close video nodes
        // SCP
        res = exynos_v4l2_close(m_camera_info.scp.fd);
        if (res != NO_ERROR ) {
            ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
        }
        // SCC
        res = exynos_v4l2_close(m_camera_info.capture.fd);
        if (res != NO_ERROR ) {
            ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
        }
        // Sensor
        res = exynos_v4l2_close(m_camera_info.sensor.fd);
        if (res != NO_ERROR ) {
            ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
        }
        // ISP
        res = exynos_v4l2_close(m_camera_info.isp.fd);
        if (res != NO_ERROR ) {
            ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
        }
    } else {
        // ISP chain is up: start worker threads and set up color converters.
        m_sensorThread = new SensorThread(this);
        m_mainThread->Start("MainThread", PRIORITY_DEFAULT, 0);
        m_sensorThread->Start("SensorThread", PRIORITY_DEFAULT, 0);
        ALOGV("DEBUG(%s): created sensorthread ", __FUNCTION__);

        for (int i = 0 ; i < STREAM_ID_LAST+1 ; i++)
            m_subStreams[i].type = SUBSTREAM_TYPE_NONE;
        CSC_METHOD cscMethod = CSC_METHOD_HW;
        m_exynosPictureCSC = csc_init(cscMethod);
        if (m_exynosPictureCSC == NULL)
            ALOGE("ERR(%s): csc_init() fail", __FUNCTION__);
        csc_set_hw_property(m_exynosPictureCSC, CSC_HW_PROPERTY_FIXED_NODE, PICTURE_GSC_NODE_NUM);
        csc_set_hw_property(m_exynosPictureCSC, CSC_HW_PROPERTY_HW_TYPE, CSC_HW_TYPE_GSCALER);

        m_exynosVideoCSC = csc_init(cscMethod);
        if (m_exynosVideoCSC == NULL)
            ALOGE("ERR(%s): csc_init() fail", __FUNCTION__);
        csc_set_hw_property(m_exynosVideoCSC, CSC_HW_PROPERTY_FIXED_NODE, VIDEO_GSC_NODE_NUM);
        csc_set_hw_property(m_exynosVideoCSC, CSC_HW_PROPERTY_HW_TYPE, CSC_HW_TYPE_GSCALER);

        m_setExifFixedAttribute();

        // contol information clear
        // flash
        m_ctlInfo.flash.i_flashMode = AA_AEMODE_ON;
        m_ctlInfo.flash.m_afFlashDoneFlg= false;
        m_ctlInfo.flash.m_flashEnableFlg = false;
        m_ctlInfo.flash.m_flashFrameCount = 0;
        m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_NONE;
        m_ctlInfo.flash.m_flashTimeOut = 0;
        m_ctlInfo.flash.m_flashDecisionResult = false;
        m_ctlInfo.flash.m_flashTorchMode = false;
        m_ctlInfo.flash.m_precaptureState = 0;
        m_ctlInfo.flash.m_precaptureTriggerId = 0;
        // ae
        m_ctlInfo.ae.aeStateNoti = AE_STATE_INACTIVE;
        // af
        m_ctlInfo.af.m_afTriggerTimeOut = 0;
        // scene
        m_ctlInfo.scene.prevSceneMode = AA_SCENE_MODE_MAX;
    }
    ALOGD("(%s): EXIT", __FUNCTION__);
}
1079 | ||
// Destructor: all teardown (threads, buffers, video nodes, ION client) is
// delegated to release().
ExynosCameraHWInterface2::~ExynosCameraHWInterface2()
{
    ALOGD("(%s): ENTER", __FUNCTION__);
    this->release();
    ALOGD("(%s): EXIT", __FUNCTION__);
}
1086 | ||
// Tears down the whole HAL instance in a deliberate order:
//  1. signal all worker threads (stream 1, stream 0, sensor, main) to stop,
//  2. deinit the CSC contexts,
//  3. busy-wait for each thread to terminate before dropping its reference,
//  4. delete the request/bayer managers and free sensor + SCC buffers,
//  5. close every V4L2 node and destroy the ION client.
// The signal/wait split lets all threads wind down in parallel before the
// blocking waits begin.
void ExynosCameraHWInterface2::release()
{
    int i, res;
    ALOGD("(HAL2::release): ENTER");

    // Phase 1: ask every thread to terminate (non-blocking).
    if (m_streamThreads[1] != NULL) {
        m_streamThreads[1]->release();
        m_streamThreads[1]->SetSignal(SIGNAL_THREAD_TERMINATE);
    }

    if (m_streamThreads[0] != NULL) {
        m_streamThreads[0]->release();
        m_streamThreads[0]->SetSignal(SIGNAL_THREAD_TERMINATE);
    }

    if (m_sensorThread != NULL) {
        m_sensorThread->release();
    }

    if (m_mainThread != NULL) {
        m_mainThread->release();
    }

    // Phase 2: drop the color-space converter contexts.
    if (m_exynosPictureCSC)
        csc_deinit(m_exynosPictureCSC);
    m_exynosPictureCSC = NULL;

    if (m_exynosVideoCSC)
        csc_deinit(m_exynosVideoCSC);
    m_exynosVideoCSC = NULL;

    // Phase 3: block until each thread reports termination, then release it.
    if (m_streamThreads[1] != NULL) {
        ALOGD("(HAL2::release): START Waiting for (indirect) stream thread 1 termination");
        while (!m_streamThreads[1]->IsTerminated())
            usleep(SIG_WAITING_TICK);
        ALOGD("(HAL2::release): END Waiting for (indirect) stream thread 1 termination");
        m_streamThreads[1] = NULL;
    }

    if (m_streamThreads[0] != NULL) {
        ALOGD("(HAL2::release): START Waiting for (indirect) stream thread 0 termination");
        while (!m_streamThreads[0]->IsTerminated())
            usleep(SIG_WAITING_TICK);
        ALOGD("(HAL2::release): END Waiting for (indirect) stream thread 0 termination");
        m_streamThreads[0] = NULL;
    }

    if (m_sensorThread != NULL) {
        ALOGD("(HAL2::release): START Waiting for (indirect) sensor thread termination");
        while (!m_sensorThread->IsTerminated())
            usleep(SIG_WAITING_TICK);
        ALOGD("(HAL2::release): END Waiting for (indirect) sensor thread termination");
        m_sensorThread = NULL;
    }

    if (m_mainThread != NULL) {
        ALOGD("(HAL2::release): START Waiting for (indirect) main thread termination");
        while (!m_mainThread->IsTerminated())
            usleep(SIG_WAITING_TICK);
        ALOGD("(HAL2::release): END Waiting for (indirect) main thread termination");
        m_mainThread = NULL;
    }

    // Phase 4: managers and buffer memory (safe now that no thread runs).
    if (m_requestManager != NULL) {
        delete m_requestManager;
        m_requestManager = NULL;
    }

    if (m_BayerManager != NULL) {
        delete m_BayerManager;
        m_BayerManager = NULL;
    }
    for (i = 0; i < NUM_BAYER_BUFFERS; i++)
        freeCameraMemory(&m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes);

    // SCC buffers live either in the local cache (m_sccLocalBuffer, plane
    // count depends on frame-sync build) or directly in capture.buffer[].
    if (m_sccLocalBufferValid) {
        for (i = 0; i < NUM_SCC_BUFFERS; i++)
#ifdef ENABLE_FRAME_SYNC
            freeCameraMemory(&m_sccLocalBuffer[i], 2);
#else
            freeCameraMemory(&m_sccLocalBuffer[i], 1);
#endif
    }
    else {
        for (i = 0; i < NUM_SCC_BUFFERS; i++)
            freeCameraMemory(&m_camera_info.capture.buffer[i], m_camera_info.capture.planes);
    }

    // Phase 5: close all V4L2 nodes and the ION client.
    ALOGV("DEBUG(%s): calling exynos_v4l2_close - sensor", __FUNCTION__);
    res = exynos_v4l2_close(m_camera_info.sensor.fd);
    if (res != NO_ERROR ) {
        ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
    }

    ALOGV("DEBUG(%s): calling exynos_v4l2_close - isp", __FUNCTION__);
    res = exynos_v4l2_close(m_camera_info.isp.fd);
    if (res != NO_ERROR ) {
        ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
    }

    ALOGV("DEBUG(%s): calling exynos_v4l2_close - capture", __FUNCTION__);
    res = exynos_v4l2_close(m_camera_info.capture.fd);
    if (res != NO_ERROR ) {
        ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
    }

    ALOGV("DEBUG(%s): calling exynos_v4l2_close - scp", __FUNCTION__);
    res = exynos_v4l2_close(m_camera_info.scp.fd);
    if (res != NO_ERROR ) {
        ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
    }
    ALOGV("DEBUG(%s): calling deleteIonClient", __FUNCTION__);
    deleteIonClient(m_ionCameraClient);

    ALOGD("(HAL2::release): EXIT");
}
1203 | ||
6044e509 | 1204 | int ExynosCameraHWInterface2::InitializeISPChain() |
ad37861e SK |
1205 | { |
1206 | char node_name[30]; | |
1207 | int fd = 0; | |
1208 | int i; | |
6044e509 | 1209 | int ret = 0; |
ad37861e SK |
1210 | |
1211 | /* Open Sensor */ | |
1212 | memset(&node_name, 0x00, sizeof(char[30])); | |
1213 | sprintf(node_name, "%s%d", NODE_PREFIX, 40); | |
1214 | fd = exynos_v4l2_open(node_name, O_RDWR, 0); | |
1215 | ||
1216 | if (fd < 0) { | |
1217 | ALOGE("ERR(%s): failed to open sensor video node (%s) fd (%d)", __FUNCTION__,node_name, fd); | |
1218 | } | |
1219 | else { | |
1220 | ALOGV("DEBUG(%s): sensor video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd); | |
1221 | } | |
1222 | m_camera_info.sensor.fd = fd; | |
1223 | ||
1224 | /* Open ISP */ | |
1225 | memset(&node_name, 0x00, sizeof(char[30])); | |
1226 | sprintf(node_name, "%s%d", NODE_PREFIX, 41); | |
1227 | fd = exynos_v4l2_open(node_name, O_RDWR, 0); | |
1228 | ||
1229 | if (fd < 0) { | |
1230 | ALOGE("ERR(%s): failed to open isp video node (%s) fd (%d)", __FUNCTION__,node_name, fd); | |
1231 | } | |
1232 | else { | |
1233 | ALOGV("DEBUG(%s): isp video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd); | |
1234 | } | |
1235 | m_camera_info.isp.fd = fd; | |
1236 | ||
1237 | /* Open ScalerC */ | |
1238 | memset(&node_name, 0x00, sizeof(char[30])); | |
1239 | sprintf(node_name, "%s%d", NODE_PREFIX, 42); | |
1240 | fd = exynos_v4l2_open(node_name, O_RDWR, 0); | |
1241 | ||
1242 | if (fd < 0) { | |
1243 | ALOGE("ERR(%s): failed to open capture video node (%s) fd (%d)", __FUNCTION__,node_name, fd); | |
1244 | } | |
1245 | else { | |
1246 | ALOGV("DEBUG(%s): capture video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd); | |
1247 | } | |
1248 | m_camera_info.capture.fd = fd; | |
1249 | ||
1250 | /* Open ScalerP */ | |
1251 | memset(&node_name, 0x00, sizeof(char[30])); | |
1252 | sprintf(node_name, "%s%d", NODE_PREFIX, 44); | |
1253 | fd = exynos_v4l2_open(node_name, O_RDWR, 0); | |
1254 | if (fd < 0) { | |
1255 | ALOGE("DEBUG(%s): failed to open preview video node (%s) fd (%d)", __FUNCTION__,node_name, fd); | |
1256 | } | |
1257 | else { | |
1258 | ALOGV("DEBUG(%s): preview video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd); | |
1259 | } | |
5506cebf | 1260 | m_camera_info.scp.fd = fd; |
ad37861e SK |
1261 | |
1262 | if(m_cameraId == 0) | |
1263 | m_camera_info.sensor_id = SENSOR_NAME_S5K4E5; | |
1264 | else | |
1265 | m_camera_info.sensor_id = SENSOR_NAME_S5K6A3; | |
1266 | ||
1267 | memset(&m_camera_info.dummy_shot, 0x00, sizeof(struct camera2_shot_ext)); | |
1268 | m_camera_info.dummy_shot.shot.ctl.request.metadataMode = METADATA_MODE_FULL; | |
1269 | m_camera_info.dummy_shot.shot.magicNumber = 0x23456789; | |
1270 | ||
1271 | m_camera_info.dummy_shot.dis_bypass = 1; | |
1272 | m_camera_info.dummy_shot.dnr_bypass = 1; | |
1273 | m_camera_info.dummy_shot.fd_bypass = 1; | |
1274 | ||
1275 | /*sensor setting*/ | |
1276 | m_camera_info.dummy_shot.shot.ctl.sensor.exposureTime = 0; | |
1277 | m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 0; | |
1278 | m_camera_info.dummy_shot.shot.ctl.sensor.sensitivity = 0; | |
1279 | ||
1280 | m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[0] = 0; | |
1281 | m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[1] = 0; | |
1282 | ||
1283 | /*request setting*/ | |
1284 | m_camera_info.dummy_shot.request_sensor = 1; | |
1285 | m_camera_info.dummy_shot.request_scc = 0; | |
1286 | m_camera_info.dummy_shot.request_scp = 0; | |
1287 | m_camera_info.dummy_shot.shot.ctl.request.outputStreams[0] = 0; | |
ad37861e SK |
1288 | |
1289 | m_camera_info.sensor.width = m_camera2->getSensorRawW(); | |
1290 | m_camera_info.sensor.height = m_camera2->getSensorRawH(); | |
1291 | ||
1292 | m_camera_info.sensor.format = V4L2_PIX_FMT_SBGGR16; | |
1293 | m_camera_info.sensor.planes = 2; | |
1294 | m_camera_info.sensor.buffers = NUM_BAYER_BUFFERS; | |
1295 | m_camera_info.sensor.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; | |
1296 | m_camera_info.sensor.memory = V4L2_MEMORY_DMABUF; | |
ad37861e SK |
1297 | |
1298 | for(i = 0; i < m_camera_info.sensor.buffers; i++){ | |
5c664f4c | 1299 | int res; |
ad37861e SK |
1300 | initCameraMemory(&m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes); |
1301 | m_camera_info.sensor.buffer[i].size.extS[0] = m_camera_info.sensor.width*m_camera_info.sensor.height*2; | |
1302 | m_camera_info.sensor.buffer[i].size.extS[1] = 8*1024; // HACK, driver use 8*1024, should be use predefined value | |
5c664f4c AR |
1303 | res = allocCameraMemory(m_ionCameraClient, &m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes, 1<<1); |
1304 | if (res) { | |
1305 | ALOGE("ERROR(%s): failed to allocateCameraMemory for sensor buffer %d", __FUNCTION__, i); | |
1306 | // Free allocated sensor buffers | |
1307 | for (int j = 0; j < i; j++) { | |
1308 | freeCameraMemory(&m_camera_info.sensor.buffer[j], m_camera_info.sensor.planes); | |
1309 | } | |
1310 | return false; | |
1311 | } | |
ad37861e SK |
1312 | } |
1313 | ||
1314 | m_camera_info.isp.width = m_camera_info.sensor.width; | |
1315 | m_camera_info.isp.height = m_camera_info.sensor.height; | |
1316 | m_camera_info.isp.format = m_camera_info.sensor.format; | |
1317 | m_camera_info.isp.planes = m_camera_info.sensor.planes; | |
1318 | m_camera_info.isp.buffers = m_camera_info.sensor.buffers; | |
1319 | m_camera_info.isp.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; | |
1320 | m_camera_info.isp.memory = V4L2_MEMORY_DMABUF; | |
ad37861e SK |
1321 | |
1322 | for(i = 0; i < m_camera_info.isp.buffers; i++){ | |
1323 | initCameraMemory(&m_camera_info.isp.buffer[i], m_camera_info.isp.planes); | |
1324 | m_camera_info.isp.buffer[i].size.extS[0] = m_camera_info.sensor.buffer[i].size.extS[0]; | |
1325 | m_camera_info.isp.buffer[i].size.extS[1] = m_camera_info.sensor.buffer[i].size.extS[1]; | |
1326 | m_camera_info.isp.buffer[i].fd.extFd[0] = m_camera_info.sensor.buffer[i].fd.extFd[0]; | |
1327 | m_camera_info.isp.buffer[i].fd.extFd[1] = m_camera_info.sensor.buffer[i].fd.extFd[1]; | |
1328 | m_camera_info.isp.buffer[i].virt.extP[0] = m_camera_info.sensor.buffer[i].virt.extP[0]; | |
1329 | m_camera_info.isp.buffer[i].virt.extP[1] = m_camera_info.sensor.buffer[i].virt.extP[1]; | |
1330 | }; | |
1331 | ||
1332 | /* init ISP */ | |
6044e509 SK |
1333 | ret = cam_int_s_input(&(m_camera_info.isp), m_camera_info.sensor_id); |
1334 | if (ret < 0) { | |
1335 | ALOGE("ERR(%s): cam_int_s_input(%d) failed!!!! ", __FUNCTION__, m_camera_info.sensor_id); | |
1336 | return false; | |
1337 | } | |
ad37861e SK |
1338 | cam_int_s_fmt(&(m_camera_info.isp)); |
1339 | ALOGV("DEBUG(%s): isp calling reqbuf", __FUNCTION__); | |
1340 | cam_int_reqbufs(&(m_camera_info.isp)); | |
1341 | ALOGV("DEBUG(%s): isp calling querybuf", __FUNCTION__); | |
1342 | ALOGV("DEBUG(%s): isp mem alloc done", __FUNCTION__); | |
1343 | ||
1344 | /* init Sensor */ | |
1345 | cam_int_s_input(&(m_camera_info.sensor), m_camera_info.sensor_id); | |
1346 | ALOGV("DEBUG(%s): sensor s_input done", __FUNCTION__); | |
1347 | if (cam_int_s_fmt(&(m_camera_info.sensor))< 0) { | |
1348 | ALOGE("ERR(%s): sensor s_fmt fail", __FUNCTION__); | |
1349 | } | |
1350 | ALOGV("DEBUG(%s): sensor s_fmt done", __FUNCTION__); | |
1351 | cam_int_reqbufs(&(m_camera_info.sensor)); | |
1352 | ALOGV("DEBUG(%s): sensor reqbuf done", __FUNCTION__); | |
1353 | for (i = 0; i < m_camera_info.sensor.buffers; i++) { | |
1354 | ALOGV("DEBUG(%s): sensor initial QBUF [%d]", __FUNCTION__, i); | |
ad37861e SK |
1355 | m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 33*1000*1000; // apply from frame #1 |
1356 | m_camera_info.dummy_shot.shot.ctl.request.frameCount = -1; | |
52f54308 SK |
1357 | memcpy( m_camera_info.sensor.buffer[i].virt.extP[1], &(m_camera_info.dummy_shot), |
1358 | sizeof(struct camera2_shot_ext)); | |
ad37861e | 1359 | } |
52f54308 SK |
1360 | |
1361 | for (i = 0; i < NUM_MIN_SENSOR_QBUF; i++) | |
1362 | cam_int_qbuf(&(m_camera_info.sensor), i); | |
1363 | ||
1364 | for (i = NUM_MIN_SENSOR_QBUF; i < m_camera_info.sensor.buffers; i++) | |
1365 | m_requestManager->pushSensorQ(i); | |
1366 | ||
5506cebf | 1367 | ALOGV("== stream_on :: sensor"); |
ad37861e | 1368 | cam_int_streamon(&(m_camera_info.sensor)); |
5506cebf | 1369 | m_camera_info.sensor.status = true; |
ad37861e SK |
1370 | |
1371 | /* init Capture */ | |
1372 | m_camera_info.capture.width = m_camera2->getSensorW(); | |
1373 | m_camera_info.capture.height = m_camera2->getSensorH(); | |
1374 | m_camera_info.capture.format = V4L2_PIX_FMT_YUYV; | |
feb7df4c SK |
1375 | #ifdef ENABLE_FRAME_SYNC |
1376 | m_camera_info.capture.planes = 2; | |
1377 | #else | |
ad37861e | 1378 | m_camera_info.capture.planes = 1; |
feb7df4c | 1379 | #endif |
ac8c2060 | 1380 | m_camera_info.capture.buffers = NUM_SCC_BUFFERS; |
ad37861e SK |
1381 | m_camera_info.capture.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; |
1382 | m_camera_info.capture.memory = V4L2_MEMORY_DMABUF; | |
ad37861e | 1383 | |
5506cebf SK |
1384 | m_camera_info.capture.status = false; |
1385 | ||
1386 | return true; | |
1387 | } | |
1388 | ||
// (Re)starts the SCC (capture) stream thread and its V4L2 node.
// threadExists == false: first start -- creates and starts the thread,
// allocates the capture buffers once, and caches them in m_sccLocalBuffer
// so a later restart can reuse them.
// threadExists == true: restart -- reattaches the cached buffers to the
// capture node (error logged if the cache was never filled).
// In both cases the node is reconfigured (s_input/s_fmt/reqbufs), all
// buffers are queued, streaming is turned on, and the stream parameters are
// handed to the thread.
void ExynosCameraHWInterface2::StartSCCThread(bool threadExists)
{
    ALOGV("(%s)", __FUNCTION__);
    StreamThread *AllocatedStream;
    stream_parameters_t newParameters;
    uint32_t format_actual;


    if (!threadExists) {
        m_streamThreads[1] = new StreamThread(this, 1);
    }
    AllocatedStream = (StreamThread*)(m_streamThreads[1].get());
    if (!threadExists) {
        AllocatedStream->Start("StreamThread", PRIORITY_DEFAULT, 0);
        m_streamThreadInitialize((SignalDrivenThread*)AllocatedStream);
        AllocatedStream->m_numRegisteredStream = 1;
    }
    AllocatedStream->m_index = 1;

    format_actual = HAL_PIXEL_FORMAT_YCbCr_422_I; // YUYV

    // Stream parameters for the SCC output (full sensor resolution).
    newParameters.width = m_camera2->getSensorW();
    newParameters.height = m_camera2->getSensorH();
    newParameters.format = format_actual;
    newParameters.streamOps = NULL;
    newParameters.numHwBuffers = NUM_SCC_BUFFERS;
#ifdef ENABLE_FRAME_SYNC
    newParameters.planes = 2;
#else
    newParameters.planes = 1;
#endif

    newParameters.numSvcBufsInHal = 0;

    newParameters.node = &m_camera_info.capture;

    AllocatedStream->streamType = STREAM_TYPE_INDIRECT;
    ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, AllocatedStream->m_numRegisteredStream);

    if (!threadExists) {
        // First start: allocate ION-backed capture buffers and cache them.
        if (!m_sccLocalBufferValid) {
            for (int i = 0; i < m_camera_info.capture.buffers; i++){
                initCameraMemory(&m_camera_info.capture.buffer[i], newParameters.node->planes);
                m_camera_info.capture.buffer[i].size.extS[0] = m_camera_info.capture.width*m_camera_info.capture.height*2;
#ifdef ENABLE_FRAME_SYNC
                m_camera_info.capture.buffer[i].size.extS[1] = 4*1024; // HACK, driver use 4*1024, should be use predefined value
                allocCameraMemory(m_ionCameraClient, &m_camera_info.capture.buffer[i], m_camera_info.capture.planes, 1<<1);
#else
                allocCameraMemory(m_ionCameraClient, &m_camera_info.capture.buffer[i], m_camera_info.capture.planes);
#endif
                m_sccLocalBuffer[i] = m_camera_info.capture.buffer[i];
            }
            m_sccLocalBufferValid = true;
        }
    } else {
        // Restart: reuse the buffers cached on the first start.
        if (m_sccLocalBufferValid) {
            for (int i = 0; i < m_camera_info.capture.buffers; i++)
                m_camera_info.capture.buffer[i] = m_sccLocalBuffer[i];
        } else {
            ALOGE("(%s): SCC Thread starting with no buffer", __FUNCTION__);
        }
    }
    // Reconfigure the capture node and queue every buffer to the driver.
    cam_int_s_input(newParameters.node, m_camera_info.sensor_id);
    m_camera_info.capture.buffers = NUM_SCC_BUFFERS;
    cam_int_s_fmt(newParameters.node);
    ALOGV("DEBUG(%s): capture calling reqbuf", __FUNCTION__);
    cam_int_reqbufs(newParameters.node);
    ALOGV("DEBUG(%s): capture calling querybuf", __FUNCTION__);

    for (int i = 0; i < newParameters.node->buffers; i++) {
        ALOGV("DEBUG(%s): capture initial QBUF [%d]", __FUNCTION__, i);
        cam_int_qbuf(newParameters.node, i);
        newParameters.svcBufStatus[i] = ON_DRIVER;
    }

    ALOGV("== stream_on :: capture");
    if (cam_int_streamon(newParameters.node) < 0) {
        ALOGE("ERR(%s): capture stream on fail", __FUNCTION__);
    } else {
        m_camera_info.capture.status = true;
    }

    AllocatedStream->setParameter(&newParameters);
    AllocatedStream->m_activated = true;
    AllocatedStream->m_isBufferInit = true;
}
1475 | ||
/*
 * Starts the ISP processing chain.
 *
 * Order matters here: the ISP node must be streaming (VIDIOC_STREAMON)
 * before the sensor subdev is told to start streaming via the
 * V4L2_CID_IS_S_STREAM control — reordering these calls is not safe.
 * No error checking is performed; failures are left to the driver/log.
 */
void ExynosCameraHWInterface2::StartISP()
{
    ALOGV("== stream_on :: isp");
    // Kick off the ISP capture node.
    cam_int_streamon(&(m_camera_info.isp));
    // Tell the FIMC-IS firmware (via the sensor fd) to start streaming.
    exynos_v4l2_s_ctrl(m_camera_info.sensor.fd, V4L2_CID_IS_S_STREAM, IS_ENABLE_STREAM);
}
1482 | ||
c15a6b00 JS |
/*
 * Returns the numeric id of the camera this HAL instance was opened for.
 * Simple accessor; m_cameraId is set elsewhere (constructor, not visible here).
 */
int ExynosCameraHWInterface2::getCameraId() const
{
    return m_cameraId;
}
c15a6b00 JS |
1487 | |
1488 | int ExynosCameraHWInterface2::setRequestQueueSrcOps(const camera2_request_queue_src_ops_t *request_src_ops) | |
1489 | { | |
13d8c7b4 | 1490 | ALOGV("DEBUG(%s):", __FUNCTION__); |
c15a6b00 JS |
1491 | if ((NULL != request_src_ops) && (NULL != request_src_ops->dequeue_request) |
1492 | && (NULL != request_src_ops->free_request) && (NULL != request_src_ops->request_count)) { | |
1493 | m_requestQueueOps = (camera2_request_queue_src_ops_t*)request_src_ops; | |
1494 | return 0; | |
1495 | } | |
1496 | else { | |
13d8c7b4 | 1497 | ALOGE("DEBUG(%s):setRequestQueueSrcOps : NULL arguments", __FUNCTION__); |
c15a6b00 JS |
1498 | return 1; |
1499 | } | |
1500 | } | |
1501 | ||
/*
 * Framework callback: the request queue has become non-empty.
 *
 * On first invocation after open/stop this performs the full pipeline
 * bring-up (ISP reqbufs, sensor format/buffers/streamon, SCC capture node
 * setup, ISP start, sensor thread start) before signalling the main thread
 * to start draining requests. The V4L2 sequencing (s_fmt -> reqbufs ->
 * qbuf -> streamon) is mandated by the driver and must not be reordered.
 *
 * Returns 0 always (including the queue-ops-not-set early-out).
 */
int ExynosCameraHWInterface2::notifyRequestQueueNotEmpty()
{
    int i = 0;

    ALOGV("DEBUG(%s):setting [SIGNAL_MAIN_REQ_Q_NOT_EMPTY] current(%d)", __FUNCTION__, m_requestManager->GetNumEntries());
    // Both queue ops tables must be registered before requests can flow.
    if ((NULL==m_frameQueueOps)|| (NULL==m_requestQueueOps)) {
        ALOGE("DEBUG(%s):queue ops NULL. ignoring request", __FUNCTION__);
        return 0;
    }
    m_isRequestQueueNull = false;
    // Queue was fully drained: reset the initial-frame skip counter.
    if (m_requestManager->GetNumEntries() == 0)
        m_requestManager->SetInitialSkip(0);

    if (m_isIspStarted == false) {
        /* isp */
        m_camera_info.sensor.buffers = NUM_BAYER_BUFFERS;
        m_camera_info.isp.buffers = m_camera_info.sensor.buffers;
        cam_int_s_fmt(&(m_camera_info.isp));
        cam_int_reqbufs(&(m_camera_info.isp));

        /* sensor */
        if (m_camera_info.sensor.status == false) {
            cam_int_s_fmt(&(m_camera_info.sensor));
            cam_int_reqbufs(&(m_camera_info.sensor));

            // Seed every sensor buffer's metadata plane (extP[1]) with the
            // dummy shot so the driver has valid per-frame controls.
            for (i = 0; i < m_camera_info.sensor.buffers; i++) {
                ALOGV("DEBUG(%s): sensor initial QBUF [%d]", __FUNCTION__, i);
                m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 33*1000*1000; // apply from frame #1
                m_camera_info.dummy_shot.shot.ctl.request.frameCount = -1;
                memcpy( m_camera_info.sensor.buffer[i].virt.extP[1], &(m_camera_info.dummy_shot),
                        sizeof(struct camera2_shot_ext));
            }
            // Queue only the minimum set to the driver; the rest are parked
            // in the request manager's sensor queue for later use.
            for (i = 0; i < NUM_MIN_SENSOR_QBUF; i++)
                cam_int_qbuf(&(m_camera_info.sensor), i);

            for (i = NUM_MIN_SENSOR_QBUF; i < m_camera_info.sensor.buffers; i++)
                m_requestManager->pushSensorQ(i);
            ALOGV("DEBUG(%s): calling sensor streamon", __FUNCTION__);
            cam_int_streamon(&(m_camera_info.sensor));
            m_camera_info.sensor.status = true;
        }
    }
    // Ensure the SCC (capture) stream thread exists and its node is streaming.
    if (!(m_streamThreads[1].get())) {
        ALOGV("DEBUG(%s): stream thread 1 not exist. starting without stream", __FUNCTION__);
        StartSCCThread(false);
    } else {
        if (m_streamThreads[1]->m_activated == false) {
            ALOGV("DEBUG(%s): stream thread 1 suspended. restarting", __FUNCTION__);
            StartSCCThread(true);
        } else {
            if (m_camera_info.capture.status == false) {
                m_camera_info.capture.buffers = NUM_SCC_BUFFERS;
                cam_int_s_fmt(&(m_camera_info.capture));
                ALOGV("DEBUG(%s): capture calling reqbuf", __FUNCTION__);
                cam_int_reqbufs(&(m_camera_info.capture));
                ALOGV("DEBUG(%s): capture calling querybuf", __FUNCTION__);

                if (m_streamThreads[1]->streamType == STREAM_TYPE_DIRECT) {
                    // ZSL direct output: re-queue the service (gralloc)
                    // buffers to the capture node by fd.
                    StreamThread * targetStream = m_streamThreads[1].get();
                    stream_parameters_t *targetStreamParms = &(targetStream->m_parameters);
                    node_info_t *currentNode = targetStreamParms->node;

                    struct v4l2_buffer v4l2_buf;
                    struct v4l2_plane planes[VIDEO_MAX_PLANES];

                    for (i = 0 ; i < targetStreamParms->numSvcBuffers ; i++) {
                        v4l2_buf.m.planes = planes;
                        v4l2_buf.type = currentNode->type;
                        v4l2_buf.memory = currentNode->memory;

                        v4l2_buf.length = currentNode->planes;
                        v4l2_buf.index = i;
                        ExynosBuffer metaBuf = targetStreamParms->metaBuffers[i];

                        // Only the first currentNode->buffers buffers go to
                        // the driver; the remainder stay with the service.
                        if (i < currentNode->buffers) {
#ifdef ENABLE_FRAME_SYNC
                            // NOTE(review): plane order is fd0, fd2, fd1 —
                            // presumably matching the driver's plane layout;
                            // confirm before changing.
                            v4l2_buf.m.planes[0].m.fd = targetStreamParms->svcBuffers[i].fd.extFd[0];
                            v4l2_buf.m.planes[2].m.fd = targetStreamParms->svcBuffers[i].fd.extFd[1];
                            v4l2_buf.m.planes[1].m.fd = targetStreamParms->svcBuffers[i].fd.extFd[2];
                            // Append the frame-sync metadata plane last.
                            v4l2_buf.length += targetStreamParms->metaPlanes;
                            v4l2_buf.m.planes[v4l2_buf.length-1].m.fd = metaBuf.fd.extFd[0];
                            v4l2_buf.m.planes[v4l2_buf.length-1].length = metaBuf.size.extS[0];

                            ALOGV("Qbuf metaBuf: fd(%d), length(%d) plane(%d)", metaBuf.fd.extFd[0], metaBuf.size.extS[0], v4l2_buf.length);
#endif
                            if (exynos_v4l2_qbuf(currentNode->fd, &v4l2_buf) < 0) {
                                ALOGE("ERR(%s): exynos_v4l2_qbuf() fail fd(%d)", __FUNCTION__, currentNode->fd);
                            }
                            ALOGV("DEBUG(%s): exynos_v4l2_qbuf() success fd(%d)", __FUNCTION__, currentNode->fd);
                            targetStreamParms->svcBufStatus[i] = REQUIRES_DQ_FROM_SVC;
                        }
                        else {
                            targetStreamParms->svcBufStatus[i] = ON_SERVICE;
                        }

                    }

                } else {
                    // Indirect (HAL-owned) buffers: plain initial QBUF.
                    // (This inner 'int i' shadows the function-scope 'i'.)
                    for (int i = 0; i < m_camera_info.capture.buffers; i++) {
                        ALOGV("DEBUG(%s): capture initial QBUF [%d]", __FUNCTION__, i);
                        cam_int_qbuf(&(m_camera_info.capture), i);
                    }
                }
                ALOGV("== stream_on :: capture");
                if (cam_int_streamon(&(m_camera_info.capture)) < 0) {
                    ALOGE("ERR(%s): capture stream on fail", __FUNCTION__);
                } else {
                    m_camera_info.capture.status = true;
                }
            }
            // Clear any pending forced-suspend flag on the SCP path.
            if (m_scpForceSuspended) {
                m_scpForceSuspended = false;
            }
        }
    }
    if (m_isIspStarted == false) {
        StartISP();
        ALOGV("DEBUG(%s):starting sensor thread", __FUNCTION__);
        // Skip the first frames after ISP start while exposure settles.
        m_requestManager->SetInitialSkip(6);
        m_sensorThread->Start("SensorThread", PRIORITY_DEFAULT, 0);
        m_isIspStarted = true;
    }
    // Wake the main thread to begin consuming the request queue.
    m_mainThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY);
    return 0;
}
1627 | ||
1628 | int ExynosCameraHWInterface2::setFrameQueueDstOps(const camera2_frame_queue_dst_ops_t *frame_dst_ops) | |
1629 | { | |
13d8c7b4 | 1630 | ALOGV("DEBUG(%s):", __FUNCTION__); |
c15a6b00 JS |
1631 | if ((NULL != frame_dst_ops) && (NULL != frame_dst_ops->dequeue_frame) |
1632 | && (NULL != frame_dst_ops->cancel_frame) && (NULL !=frame_dst_ops->enqueue_frame)) { | |
1633 | m_frameQueueOps = (camera2_frame_queue_dst_ops_t *)frame_dst_ops; | |
1634 | return 0; | |
1635 | } | |
1636 | else { | |
13d8c7b4 | 1637 | ALOGE("DEBUG(%s):setFrameQueueDstOps : NULL arguments", __FUNCTION__); |
c15a6b00 JS |
1638 | return 1; |
1639 | } | |
1640 | } | |
1641 | ||
1642 | int ExynosCameraHWInterface2::getInProgressCount() | |
1643 | { | |
1264ab16 AR |
1644 | int inProgressJpeg; |
1645 | int inProgressCount; | |
1646 | ||
1647 | { | |
1648 | Mutex::Autolock lock(m_jpegEncoderLock); | |
1649 | inProgressJpeg = m_jpegEncodingCount; | |
1650 | inProgressCount = m_requestManager->GetNumEntries(); | |
1651 | } | |
32cf9401 | 1652 | ALOGV("DEBUG(%s): # of dequeued req (%d) jpeg(%d) = (%d)", __FUNCTION__, |
1264ab16 AR |
1653 | inProgressCount, inProgressJpeg, (inProgressCount + inProgressJpeg)); |
1654 | return (inProgressCount + inProgressJpeg); | |
c15a6b00 JS |
1655 | } |
1656 | ||
/*
 * Camera2 HAL hook for flushing in-flight captures.
 * Intentionally a no-op in this HAL; always reports success (0).
 */
int ExynosCameraHWInterface2::flushCapturesInProgress()
{
    return 0;
}
1661 | ||
c15a6b00 JS |
1662 | int ExynosCameraHWInterface2::constructDefaultRequest(int request_template, camera_metadata_t **request) |
1663 | { | |
13d8c7b4 | 1664 | ALOGV("DEBUG(%s): making template (%d) ", __FUNCTION__, request_template); |
c15a6b00 JS |
1665 | |
1666 | if (request == NULL) return BAD_VALUE; | |
1667 | if (request_template < 0 || request_template >= CAMERA2_TEMPLATE_COUNT) { | |
1668 | return BAD_VALUE; | |
1669 | } | |
1670 | status_t res; | |
1671 | // Pass 1, calculate size and allocate | |
daa1fcd6 | 1672 | res = m_camera2->constructDefaultRequest(request_template, |
c15a6b00 JS |
1673 | request, |
1674 | true); | |
1675 | if (res != OK) { | |
1676 | return res; | |
1677 | } | |
1678 | // Pass 2, build request | |
daa1fcd6 | 1679 | res = m_camera2->constructDefaultRequest(request_template, |
c15a6b00 JS |
1680 | request, |
1681 | false); | |
1682 | if (res != OK) { | |
1683 | ALOGE("Unable to populate new request for template %d", | |
1684 | request_template); | |
1685 | } | |
1686 | ||
1687 | return res; | |
1688 | } | |
1689 | ||
1690 | int ExynosCameraHWInterface2::allocateStream(uint32_t width, uint32_t height, int format, const camera2_stream_ops_t *stream_ops, | |
1691 | uint32_t *stream_id, uint32_t *format_actual, uint32_t *usage, uint32_t *max_buffers) | |
1692 | { | |
ed4ad5fe | 1693 | ALOGD("(%s): stream width(%d) height(%d) format(%x)", __FUNCTION__, width, height, format); |
5506cebf | 1694 | bool useDirectOutput = false; |
13d8c7b4 SK |
1695 | StreamThread *AllocatedStream; |
1696 | stream_parameters_t newParameters; | |
5506cebf SK |
1697 | substream_parameters_t *subParameters; |
1698 | StreamThread *parentStream; | |
1699 | status_t res; | |
1700 | int allocCase = 0; | |
c15a6b00 | 1701 | |
5506cebf SK |
1702 | if ((format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED || format == CAMERA2_HAL_PIXEL_FORMAT_OPAQUE) && |
1703 | m_camera2->isSupportedResolution(width, height)) { | |
9dd63e1f SK |
1704 | if (!(m_streamThreads[0].get())) { |
1705 | ALOGV("DEBUG(%s): stream 0 not exist", __FUNCTION__); | |
1706 | allocCase = 0; | |
13d8c7b4 SK |
1707 | } |
1708 | else { | |
6bbb593a | 1709 | if ((m_streamThreads[0].get())->m_activated == true) { |
9dd63e1f SK |
1710 | ALOGV("DEBUG(%s): stream 0 exists and activated.", __FUNCTION__); |
1711 | allocCase = 1; | |
1712 | } | |
1713 | else { | |
1714 | ALOGV("DEBUG(%s): stream 0 exists and deactivated.", __FUNCTION__); | |
1715 | allocCase = 2; | |
1716 | } | |
13d8c7b4 | 1717 | } |
5506cebf SK |
1718 | |
1719 | // TODO : instead of that, use calculate aspect ratio and selection with calculated ratio. | |
1720 | if ((width == 1920 && height == 1080) || (width == 1280 && height == 720) | |
1721 | || (width == 720 && height == 480) || (width == 1440 && height == 960) | |
1722 | || (width == 1344 && height == 896)) { | |
6d8e5b08 | 1723 | m_wideAspect = true; |
5506cebf | 1724 | } else { |
6d8e5b08 SK |
1725 | m_wideAspect = false; |
1726 | } | |
1727 | ALOGV("DEBUG(%s): m_wideAspect (%d)", __FUNCTION__, m_wideAspect); | |
1728 | ||
9dd63e1f | 1729 | if (allocCase == 0 || allocCase == 2) { |
5506cebf | 1730 | *stream_id = STREAM_ID_PREVIEW; |
9dd63e1f | 1731 | |
5506cebf | 1732 | m_streamThreads[0] = new StreamThread(this, *stream_id); |
9dd63e1f | 1733 | |
5506cebf | 1734 | AllocatedStream = (StreamThread*)(m_streamThreads[0].get()); |
053d38cf | 1735 | AllocatedStream->Start("StreamThread", PRIORITY_DEFAULT, 0); |
5506cebf SK |
1736 | m_streamThreadInitialize((SignalDrivenThread*)AllocatedStream); |
1737 | ||
1738 | *format_actual = HAL_PIXEL_FORMAT_EXYNOS_YV12; | |
1739 | *usage = GRALLOC_USAGE_SW_WRITE_OFTEN; | |
6568c0f1 AR |
1740 | if (m_wideAspect) |
1741 | *usage |= GRALLOC_USAGE_PRIVATE_CHROMA; | |
a0ea191c | 1742 | *max_buffers = 7; |
5506cebf SK |
1743 | |
1744 | newParameters.width = width; | |
1745 | newParameters.height = height; | |
1746 | newParameters.format = *format_actual; | |
1747 | newParameters.streamOps = stream_ops; | |
1748 | newParameters.usage = *usage; | |
ac8c2060 | 1749 | newParameters.numHwBuffers = NUM_SCP_BUFFERS; |
5506cebf SK |
1750 | newParameters.numOwnSvcBuffers = *max_buffers; |
1751 | newParameters.planes = NUM_PLANES(*format_actual); | |
1752 | newParameters.metaPlanes = 1; | |
1753 | newParameters.numSvcBufsInHal = 0; | |
a85ec381 | 1754 | newParameters.minUndequedBuffer = 3; |
bf96172c | 1755 | newParameters.needsIonMap = true; |
5506cebf SK |
1756 | |
1757 | newParameters.node = &m_camera_info.scp; | |
1758 | newParameters.node->type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; | |
1759 | newParameters.node->memory = V4L2_MEMORY_DMABUF; | |
1760 | ||
1761 | AllocatedStream->streamType = STREAM_TYPE_DIRECT; | |
1762 | AllocatedStream->m_index = 0; | |
9dd63e1f SK |
1763 | AllocatedStream->setParameter(&newParameters); |
1764 | AllocatedStream->m_activated = true; | |
5506cebf SK |
1765 | AllocatedStream->m_numRegisteredStream = 1; |
1766 | ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, AllocatedStream->m_numRegisteredStream); | |
ad37861e SK |
1767 | m_requestManager->SetDefaultParameters(m_camera2->getSensorW()); |
1768 | m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[2] = m_camera2->getSensorW(); | |
5506cebf SK |
1769 | if (m_subStreams[STREAM_ID_RECORD].type != SUBSTREAM_TYPE_NONE) |
1770 | AllocatedStream->attachSubStream(STREAM_ID_RECORD, 10); | |
1771 | if (m_subStreams[STREAM_ID_PRVCB].type != SUBSTREAM_TYPE_NONE) | |
1772 | AllocatedStream->attachSubStream(STREAM_ID_PRVCB, 70); | |
a3bcc37b AR |
1773 | |
1774 | // set video stabilization killswitch | |
1775 | m_requestManager->m_vdisEnable = width > 352 && height > 288; | |
1776 | ||
9dd63e1f | 1777 | return 0; |
5506cebf SK |
1778 | } else if (allocCase == 1) { |
1779 | *stream_id = STREAM_ID_RECORD; | |
1780 | ||
1781 | subParameters = &m_subStreams[STREAM_ID_RECORD]; | |
1782 | memset(subParameters, 0, sizeof(substream_parameters_t)); | |
1783 | ||
9dd63e1f SK |
1784 | parentStream = (StreamThread*)(m_streamThreads[0].get()); |
1785 | if (!parentStream) { | |
1786 | return 1; | |
9dd63e1f | 1787 | } |
9dd63e1f | 1788 | |
804236a7 | 1789 | *format_actual = HAL_PIXEL_FORMAT_YCbCr_420_SP; // NV12M |
6bbb593a | 1790 | *usage = GRALLOC_USAGE_SW_WRITE_OFTEN; |
6568c0f1 AR |
1791 | if (m_wideAspect) |
1792 | *usage |= GRALLOC_USAGE_PRIVATE_CHROMA; | |
a0ea191c | 1793 | *max_buffers = 7; |
9dd63e1f | 1794 | |
5506cebf SK |
1795 | subParameters->type = SUBSTREAM_TYPE_RECORD; |
1796 | subParameters->width = width; | |
1797 | subParameters->height = height; | |
1798 | subParameters->format = *format_actual; | |
1799 | subParameters->svcPlanes = NUM_PLANES(*format_actual); | |
1800 | subParameters->streamOps = stream_ops; | |
1801 | subParameters->usage = *usage; | |
1802 | subParameters->numOwnSvcBuffers = *max_buffers; | |
1803 | subParameters->numSvcBufsInHal = 0; | |
1804 | subParameters->needBufferInit = false; | |
1805 | subParameters->minUndequedBuffer = 2; | |
1806 | ||
1807 | res = parentStream->attachSubStream(STREAM_ID_RECORD, 20); | |
1808 | if (res != NO_ERROR) { | |
1809 | ALOGE("(%s): substream attach failed. res(%d)", __FUNCTION__, res); | |
1810 | return 1; | |
1811 | } | |
1812 | ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, parentStream->m_numRegisteredStream); | |
1813 | ALOGV("(%s): Enabling Record", __FUNCTION__); | |
9dd63e1f SK |
1814 | return 0; |
1815 | } | |
13d8c7b4 | 1816 | } |
b8d41ae2 | 1817 | else if ((format == CAMERA2_HAL_PIXEL_FORMAT_ZSL) |
1422aff9 | 1818 | && ((int32_t)width == m_camera2->getSensorW()) && ((int32_t)height == m_camera2->getSensorH())) { |
13d8c7b4 | 1819 | |
5506cebf SK |
1820 | if (!(m_streamThreads[1].get())) { |
1821 | ALOGV("DEBUG(%s): stream thread 1 not exist", __FUNCTION__); | |
1822 | useDirectOutput = true; | |
6d8e5b08 SK |
1823 | } |
1824 | else { | |
5506cebf | 1825 | ALOGV("DEBUG(%s): stream thread 1 exists and deactivated.", __FUNCTION__); |
90e439c1 | 1826 | useDirectOutput = false; |
5506cebf SK |
1827 | } |
1828 | if (useDirectOutput) { | |
1829 | *stream_id = STREAM_ID_ZSL; | |
1830 | ||
053d38cf | 1831 | m_streamThreads[1] = new StreamThread(this, *stream_id); |
5506cebf | 1832 | AllocatedStream = (StreamThread*)(m_streamThreads[1].get()); |
053d38cf | 1833 | AllocatedStream->Start("StreamThread", PRIORITY_DEFAULT, 0); |
5506cebf SK |
1834 | m_streamThreadInitialize((SignalDrivenThread*)AllocatedStream); |
1835 | ||
1836 | *format_actual = HAL_PIXEL_FORMAT_EXYNOS_YV12; | |
5506cebf SK |
1837 | |
1838 | *format_actual = HAL_PIXEL_FORMAT_YCbCr_422_I; // YUYV | |
1839 | *usage = GRALLOC_USAGE_SW_WRITE_OFTEN; | |
6568c0f1 AR |
1840 | if (m_wideAspect) |
1841 | *usage |= GRALLOC_USAGE_PRIVATE_CHROMA; | |
a0ea191c | 1842 | *max_buffers = 7; |
5506cebf SK |
1843 | |
1844 | newParameters.width = width; | |
1845 | newParameters.height = height; | |
1846 | newParameters.format = *format_actual; | |
1847 | newParameters.streamOps = stream_ops; | |
1848 | newParameters.usage = *usage; | |
ac8c2060 | 1849 | newParameters.numHwBuffers = NUM_SCC_BUFFERS; |
5506cebf SK |
1850 | newParameters.numOwnSvcBuffers = *max_buffers; |
1851 | newParameters.planes = NUM_PLANES(*format_actual); | |
1852 | newParameters.metaPlanes = 1; | |
1853 | ||
1854 | newParameters.numSvcBufsInHal = 0; | |
a85ec381 | 1855 | newParameters.minUndequedBuffer = 2; |
bf96172c | 1856 | newParameters.needsIonMap = false; |
5506cebf SK |
1857 | |
1858 | newParameters.node = &m_camera_info.capture; | |
1859 | newParameters.node->type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; | |
1860 | newParameters.node->memory = V4L2_MEMORY_DMABUF; | |
1861 | ||
1862 | AllocatedStream->streamType = STREAM_TYPE_DIRECT; | |
1863 | AllocatedStream->m_index = 1; | |
1864 | AllocatedStream->setParameter(&newParameters); | |
1865 | AllocatedStream->m_activated = true; | |
b8d41ae2 | 1866 | AllocatedStream->m_numRegisteredStream = 1; |
5506cebf SK |
1867 | ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, AllocatedStream->m_numRegisteredStream); |
1868 | return 0; | |
90e439c1 SK |
1869 | } else { |
1870 | bool bJpegExists = false; | |
1871 | AllocatedStream = (StreamThread*)(m_streamThreads[1].get()); | |
1872 | subParameters = &m_subStreams[STREAM_ID_JPEG]; | |
1873 | if (subParameters->type == SUBSTREAM_TYPE_JPEG) { | |
1874 | ALOGD("(%s): jpeg stream exists", __FUNCTION__); | |
1875 | bJpegExists = true; | |
1876 | AllocatedStream->detachSubStream(STREAM_ID_JPEG); | |
1877 | } | |
1878 | AllocatedStream->m_releasing = true; | |
1879 | ALOGD("START stream thread 1 release %d", __LINE__); | |
1880 | do { | |
1881 | AllocatedStream->release(); | |
041f38de | 1882 | usleep(SIG_WAITING_TICK); |
90e439c1 SK |
1883 | } while (AllocatedStream->m_releasing); |
1884 | ALOGD("END stream thread 1 release %d", __LINE__); | |
1885 | ||
1886 | *stream_id = STREAM_ID_ZSL; | |
1887 | ||
1888 | m_streamThreadInitialize((SignalDrivenThread*)AllocatedStream); | |
1889 | ||
1890 | *format_actual = HAL_PIXEL_FORMAT_EXYNOS_YV12; | |
90e439c1 SK |
1891 | |
1892 | *format_actual = HAL_PIXEL_FORMAT_YCbCr_422_I; // YUYV | |
1893 | *usage = GRALLOC_USAGE_SW_WRITE_OFTEN; | |
6568c0f1 AR |
1894 | if (m_wideAspect) |
1895 | *usage |= GRALLOC_USAGE_PRIVATE_CHROMA; | |
a0ea191c | 1896 | *max_buffers = 7; |
90e439c1 SK |
1897 | |
1898 | newParameters.width = width; | |
1899 | newParameters.height = height; | |
1900 | newParameters.format = *format_actual; | |
1901 | newParameters.streamOps = stream_ops; | |
1902 | newParameters.usage = *usage; | |
1903 | newParameters.numHwBuffers = NUM_SCC_BUFFERS; | |
1904 | newParameters.numOwnSvcBuffers = *max_buffers; | |
1905 | newParameters.planes = NUM_PLANES(*format_actual); | |
1906 | newParameters.metaPlanes = 1; | |
1907 | ||
1908 | newParameters.numSvcBufsInHal = 0; | |
bf96172c SK |
1909 | newParameters.minUndequedBuffer = 2; |
1910 | newParameters.needsIonMap = false; | |
90e439c1 SK |
1911 | |
1912 | newParameters.node = &m_camera_info.capture; | |
1913 | newParameters.node->type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; | |
1914 | newParameters.node->memory = V4L2_MEMORY_DMABUF; | |
1915 | ||
1916 | AllocatedStream->streamType = STREAM_TYPE_DIRECT; | |
1917 | AllocatedStream->m_index = 1; | |
1918 | AllocatedStream->setParameter(&newParameters); | |
1919 | AllocatedStream->m_activated = true; | |
1920 | AllocatedStream->m_numRegisteredStream = 1; | |
1921 | if (bJpegExists) { | |
1922 | AllocatedStream->attachSubStream(STREAM_ID_JPEG, 10); | |
1923 | } | |
1924 | ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, AllocatedStream->m_numRegisteredStream); | |
1925 | return 0; | |
1926 | ||
6d8e5b08 | 1927 | } |
5506cebf SK |
1928 | } |
1929 | else if (format == HAL_PIXEL_FORMAT_BLOB | |
1930 | && m_camera2->isSupportedJpegResolution(width, height)) { | |
1931 | *stream_id = STREAM_ID_JPEG; | |
6d8e5b08 | 1932 | |
5506cebf SK |
1933 | subParameters = &m_subStreams[*stream_id]; |
1934 | memset(subParameters, 0, sizeof(substream_parameters_t)); | |
9dd63e1f | 1935 | |
5506cebf SK |
1936 | if (!(m_streamThreads[1].get())) { |
1937 | ALOGV("DEBUG(%s): stream thread 1 not exist", __FUNCTION__); | |
1938 | StartSCCThread(false); | |
1939 | } | |
1940 | else if (m_streamThreads[1]->m_activated == false) { | |
1941 | ALOGV("DEBUG(%s): stream thread 1 suspended. restarting", __FUNCTION__); | |
1942 | StartSCCThread(true); | |
1943 | } | |
1944 | parentStream = (StreamThread*)(m_streamThreads[1].get()); | |
13d8c7b4 SK |
1945 | |
1946 | *format_actual = HAL_PIXEL_FORMAT_BLOB; | |
13d8c7b4 | 1947 | *usage = GRALLOC_USAGE_SW_WRITE_OFTEN; |
6568c0f1 AR |
1948 | if (m_wideAspect) |
1949 | *usage |= GRALLOC_USAGE_PRIVATE_CHROMA; | |
a0ea191c | 1950 | *max_buffers = 5; |
13d8c7b4 | 1951 | |
5506cebf SK |
1952 | subParameters->type = SUBSTREAM_TYPE_JPEG; |
1953 | subParameters->width = width; | |
1954 | subParameters->height = height; | |
1955 | subParameters->format = *format_actual; | |
1956 | subParameters->svcPlanes = 1; | |
1957 | subParameters->streamOps = stream_ops; | |
1958 | subParameters->usage = *usage; | |
1959 | subParameters->numOwnSvcBuffers = *max_buffers; | |
1960 | subParameters->numSvcBufsInHal = 0; | |
1961 | subParameters->needBufferInit = false; | |
1962 | subParameters->minUndequedBuffer = 2; | |
1963 | ||
1964 | res = parentStream->attachSubStream(STREAM_ID_JPEG, 10); | |
1965 | if (res != NO_ERROR) { | |
1966 | ALOGE("(%s): substream attach failed. res(%d)", __FUNCTION__, res); | |
1967 | return 1; | |
1968 | } | |
1969 | ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, parentStream->m_numRegisteredStream); | |
1970 | ALOGV("(%s): Enabling Jpeg", __FUNCTION__); | |
13d8c7b4 SK |
1971 | return 0; |
1972 | } | |
74d78ebe | 1973 | else if (format == HAL_PIXEL_FORMAT_YCrCb_420_SP || format == HAL_PIXEL_FORMAT_YV12) { |
5506cebf SK |
1974 | *stream_id = STREAM_ID_PRVCB; |
1975 | ||
1976 | subParameters = &m_subStreams[STREAM_ID_PRVCB]; | |
1977 | memset(subParameters, 0, sizeof(substream_parameters_t)); | |
1978 | ||
74d78ebe SK |
1979 | parentStream = (StreamThread*)(m_streamThreads[0].get()); |
1980 | if (!parentStream) { | |
74d78ebe SK |
1981 | return 1; |
1982 | } | |
74d78ebe SK |
1983 | |
1984 | *format_actual = format; | |
1985 | *usage = GRALLOC_USAGE_SW_WRITE_OFTEN; | |
6568c0f1 AR |
1986 | if (m_wideAspect) |
1987 | *usage |= GRALLOC_USAGE_PRIVATE_CHROMA; | |
a0ea191c | 1988 | *max_buffers = 7; |
5506cebf SK |
1989 | |
1990 | subParameters->type = SUBSTREAM_TYPE_PRVCB; | |
1991 | subParameters->width = width; | |
1992 | subParameters->height = height; | |
1993 | subParameters->format = *format_actual; | |
1994 | subParameters->svcPlanes = NUM_PLANES(*format_actual); | |
1995 | subParameters->streamOps = stream_ops; | |
1996 | subParameters->usage = *usage; | |
1997 | subParameters->numOwnSvcBuffers = *max_buffers; | |
1998 | subParameters->numSvcBufsInHal = 0; | |
1999 | subParameters->needBufferInit = false; | |
2000 | subParameters->minUndequedBuffer = 2; | |
2001 | ||
2002 | if (format == HAL_PIXEL_FORMAT_YCrCb_420_SP) { | |
2003 | subParameters->internalFormat = HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP; | |
2004 | subParameters->internalPlanes = NUM_PLANES(HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP); | |
2005 | } | |
2006 | else { | |
2007 | subParameters->internalFormat = HAL_PIXEL_FORMAT_EXYNOS_YV12; | |
2008 | subParameters->internalPlanes = NUM_PLANES(HAL_PIXEL_FORMAT_EXYNOS_YV12); | |
2009 | } | |
74d78ebe | 2010 | |
5506cebf SK |
2011 | res = parentStream->attachSubStream(STREAM_ID_PRVCB, 20); |
2012 | if (res != NO_ERROR) { | |
2013 | ALOGE("(%s): substream attach failed. res(%d)", __FUNCTION__, res); | |
2014 | return 1; | |
74d78ebe | 2015 | } |
5506cebf SK |
2016 | ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, parentStream->m_numRegisteredStream); |
2017 | ALOGV("(%s): Enabling previewcb", __FUNCTION__); | |
74d78ebe SK |
2018 | return 0; |
2019 | } | |
ed4ad5fe | 2020 | ALOGE("(%s): Unsupported Pixel Format", __FUNCTION__); |
5506cebf | 2021 | return 1; |
c15a6b00 JS |
2022 | } |
2023 | ||
13d8c7b4 SK |
2024 | int ExynosCameraHWInterface2::registerStreamBuffers(uint32_t stream_id, |
2025 | int num_buffers, buffer_handle_t *registeringBuffers) | |
c15a6b00 | 2026 | { |
13d8c7b4 SK |
2027 | int i,j; |
2028 | void *virtAddr[3]; | |
5506cebf SK |
2029 | int plane_index = 0; |
2030 | StreamThread * targetStream; | |
13d8c7b4 SK |
2031 | stream_parameters_t *targetStreamParms; |
2032 | node_info_t *currentNode; | |
2033 | ||
c15a6b00 JS |
2034 | struct v4l2_buffer v4l2_buf; |
2035 | struct v4l2_plane planes[VIDEO_MAX_PLANES]; | |
13d8c7b4 | 2036 | |
ed4ad5fe | 2037 | ALOGD("(%s): stream_id(%d), num_buff(%d), handle(%x) ", __FUNCTION__, |
13d8c7b4 SK |
2038 | stream_id, num_buffers, (uint32_t)registeringBuffers); |
2039 | ||
5506cebf SK |
2040 | if (stream_id == STREAM_ID_PREVIEW && m_streamThreads[0].get()) { |
2041 | targetStream = m_streamThreads[0].get(); | |
13d8c7b4 | 2042 | targetStreamParms = &(m_streamThreads[0]->m_parameters); |
5c88d1f2 | 2043 | |
13d8c7b4 | 2044 | } |
5506cebf SK |
2045 | else if (stream_id == STREAM_ID_JPEG || stream_id == STREAM_ID_RECORD || stream_id == STREAM_ID_PRVCB) { |
2046 | substream_parameters_t *targetParms; | |
2047 | targetParms = &m_subStreams[stream_id]; | |
9dd63e1f | 2048 | |
5506cebf | 2049 | targetParms->numSvcBuffers = num_buffers; |
9dd63e1f | 2050 | |
5506cebf SK |
2051 | for (i = 0 ; i < targetParms->numSvcBuffers ; i++) { |
2052 | ALOGV("(%s): registering substream(%d) Buffers[%d] (%x) ", __FUNCTION__, | |
2053 | i, stream_id, (uint32_t)(registeringBuffers[i])); | |
9dd63e1f SK |
2054 | if (m_grallocHal) { |
2055 | if (m_grallocHal->lock(m_grallocHal, registeringBuffers[i], | |
5506cebf SK |
2056 | targetParms->usage, 0, 0, |
2057 | targetParms->width, targetParms->height, virtAddr) != 0) { | |
9dd63e1f SK |
2058 | ALOGE("ERR(%s): could not obtain gralloc buffer", __FUNCTION__); |
2059 | } | |
2060 | else { | |
2061 | ExynosBuffer currentBuf; | |
2062 | const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(registeringBuffers[i]); | |
5506cebf SK |
2063 | if (targetParms->svcPlanes == 1) { |
2064 | currentBuf.fd.extFd[0] = priv_handle->fd; | |
2065 | currentBuf.size.extS[0] = priv_handle->size; | |
2066 | currentBuf.size.extS[1] = 0; | |
2067 | currentBuf.size.extS[2] = 0; | |
2068 | } else if (targetParms->svcPlanes == 2) { | |
2069 | currentBuf.fd.extFd[0] = priv_handle->fd; | |
2070 | currentBuf.fd.extFd[1] = priv_handle->fd1; | |
2071 | ||
2072 | } else if (targetParms->svcPlanes == 3) { | |
2073 | currentBuf.fd.extFd[0] = priv_handle->fd; | |
2074 | currentBuf.fd.extFd[1] = priv_handle->fd1; | |
2075 | currentBuf.fd.extFd[2] = priv_handle->fd2; | |
2076 | } | |
2077 | for (plane_index = 0 ; plane_index < targetParms->svcPlanes ; plane_index++) { | |
9dd63e1f | 2078 | currentBuf.virt.extP[plane_index] = (char *)virtAddr[plane_index]; |
0d220b42 | 2079 | CAM_LOGV("DEBUG(%s): plane(%d): fd(%d) addr(%x) size(%d)", |
804236a7 | 2080 | __FUNCTION__, plane_index, currentBuf.fd.extFd[plane_index], |
5506cebf | 2081 | (unsigned int)currentBuf.virt.extP[plane_index], currentBuf.size.extS[plane_index]); |
9dd63e1f | 2082 | } |
5506cebf SK |
2083 | targetParms->svcBufStatus[i] = ON_SERVICE; |
2084 | targetParms->svcBuffers[i] = currentBuf; | |
2085 | targetParms->svcBufHandle[i] = registeringBuffers[i]; | |
9dd63e1f SK |
2086 | } |
2087 | } | |
2088 | } | |
5506cebf | 2089 | targetParms->needBufferInit = true; |
9dd63e1f SK |
2090 | return 0; |
2091 | } | |
5506cebf SK |
2092 | else if (stream_id == STREAM_ID_ZSL && m_streamThreads[1].get()) { |
2093 | targetStream = m_streamThreads[1].get(); | |
2094 | targetStreamParms = &(m_streamThreads[1]->m_parameters); | |
74d78ebe | 2095 | } |
13d8c7b4 | 2096 | else { |
ed4ad5fe | 2097 | ALOGE("(%s): unregistered stream id (%d)", __FUNCTION__, stream_id); |
be494d19 | 2098 | return 1; |
13d8c7b4 | 2099 | } |
c15a6b00 | 2100 | |
5506cebf | 2101 | if (targetStream->streamType == STREAM_TYPE_DIRECT) { |
13d8c7b4 SK |
2102 | if (num_buffers < targetStreamParms->numHwBuffers) { |
2103 | ALOGE("ERR(%s) registering insufficient num of buffers (%d) < (%d)", | |
2104 | __FUNCTION__, num_buffers, targetStreamParms->numHwBuffers); | |
be494d19 | 2105 | return 1; |
13d8c7b4 SK |
2106 | } |
2107 | } | |
0d220b42 | 2108 | CAM_LOGV("DEBUG(%s): format(%x) width(%d), height(%d) svcPlanes(%d)", |
5506cebf SK |
2109 | __FUNCTION__, targetStreamParms->format, targetStreamParms->width, |
2110 | targetStreamParms->height, targetStreamParms->planes); | |
13d8c7b4 | 2111 | targetStreamParms->numSvcBuffers = num_buffers; |
5506cebf SK |
2112 | currentNode = targetStreamParms->node; |
2113 | currentNode->width = targetStreamParms->width; | |
2114 | currentNode->height = targetStreamParms->height; | |
2115 | currentNode->format = HAL_PIXEL_FORMAT_2_V4L2_PIX(targetStreamParms->format); | |
2116 | currentNode->planes = targetStreamParms->planes; | |
13d8c7b4 | 2117 | currentNode->buffers = targetStreamParms->numHwBuffers; |
5506cebf SK |
2118 | cam_int_s_input(currentNode, m_camera_info.sensor_id); |
2119 | cam_int_s_fmt(currentNode); | |
2120 | cam_int_reqbufs(currentNode); | |
2121 | for (i = 0 ; i < targetStreamParms->numSvcBuffers ; i++) { | |
13d8c7b4 SK |
2122 | ALOGV("DEBUG(%s): registering Stream Buffers[%d] (%x) ", __FUNCTION__, |
2123 | i, (uint32_t)(registeringBuffers[i])); | |
13d8c7b4 SK |
2124 | v4l2_buf.m.planes = planes; |
2125 | v4l2_buf.type = currentNode->type; | |
2126 | v4l2_buf.memory = currentNode->memory; | |
2127 | v4l2_buf.index = i; | |
2128 | v4l2_buf.length = currentNode->planes; | |
c15a6b00 | 2129 | |
13d8c7b4 | 2130 | ExynosBuffer currentBuf; |
feb7df4c | 2131 | ExynosBuffer metaBuf; |
13d8c7b4 SK |
2132 | const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(registeringBuffers[i]); |
2133 | ||
2134 | m_getAlignedYUVSize(currentNode->format, | |
2135 | currentNode->width, currentNode->height, ¤tBuf); | |
24231221 | 2136 | |
37e122d5 SK |
2137 | ALOGV("DEBUG(%s): ion_size(%d), stride(%d), ", __FUNCTION__, priv_handle->size, priv_handle->stride); |
2138 | if (currentNode->planes == 1) { | |
74d78ebe SK |
2139 | v4l2_buf.m.planes[0].m.fd = priv_handle->fd; |
2140 | currentBuf.fd.extFd[0] = priv_handle->fd; | |
37e122d5 SK |
2141 | currentBuf.size.extS[0] = priv_handle->size; |
2142 | currentBuf.size.extS[1] = 0; | |
2143 | currentBuf.size.extS[2] = 0; | |
74d78ebe SK |
2144 | } else if (currentNode->planes == 2) { |
2145 | v4l2_buf.m.planes[0].m.fd = priv_handle->fd; | |
2146 | v4l2_buf.m.planes[1].m.fd = priv_handle->fd1; | |
2147 | currentBuf.fd.extFd[0] = priv_handle->fd; | |
2148 | currentBuf.fd.extFd[1] = priv_handle->fd1; | |
2149 | ||
2150 | } else if (currentNode->planes == 3) { | |
2151 | v4l2_buf.m.planes[0].m.fd = priv_handle->fd; | |
2152 | v4l2_buf.m.planes[2].m.fd = priv_handle->fd1; | |
2153 | v4l2_buf.m.planes[1].m.fd = priv_handle->fd2; | |
2154 | currentBuf.fd.extFd[0] = priv_handle->fd; | |
2155 | currentBuf.fd.extFd[2] = priv_handle->fd1; | |
2156 | currentBuf.fd.extFd[1] = priv_handle->fd2; | |
37e122d5 | 2157 | } |
0d220b42 | 2158 | |
5506cebf | 2159 | for (plane_index = 0 ; plane_index < (int)v4l2_buf.length ; plane_index++) { |
bf96172c SK |
2160 | if (targetStreamParms->needsIonMap) |
2161 | currentBuf.virt.extP[plane_index] = (char *)ion_map(currentBuf.fd.extFd[plane_index], currentBuf.size.extS[plane_index], 0); | |
13d8c7b4 | 2162 | v4l2_buf.m.planes[plane_index].length = currentBuf.size.extS[plane_index]; |
bf96172c | 2163 | ALOGV("(%s): MAPPING plane(%d): fd(%d) addr(%x), length(%d)", |
13d8c7b4 SK |
2164 | __FUNCTION__, plane_index, v4l2_buf.m.planes[plane_index].m.fd, |
2165 | (unsigned int)currentBuf.virt.extP[plane_index], | |
2166 | v4l2_buf.m.planes[plane_index].length); | |
2167 | } | |
c15a6b00 | 2168 | |
5506cebf | 2169 | if (i < currentNode->buffers) { |
feb7df4c SK |
2170 | |
2171 | ||
2172 | #ifdef ENABLE_FRAME_SYNC | |
5506cebf SK |
2173 | /* add plane for metadata*/ |
2174 | metaBuf.size.extS[0] = 4*1024; | |
2175 | allocCameraMemory(m_ionCameraClient , &metaBuf, 1, 1<<0); | |
feb7df4c | 2176 | |
5506cebf SK |
2177 | v4l2_buf.length += targetStreamParms->metaPlanes; |
2178 | v4l2_buf.m.planes[v4l2_buf.length-1].m.fd = metaBuf.fd.extFd[0]; | |
2179 | v4l2_buf.m.planes[v4l2_buf.length-1].length = metaBuf.size.extS[0]; | |
feb7df4c | 2180 | |
5506cebf | 2181 | ALOGV("Qbuf metaBuf: fd(%d), length(%d) plane(%d)", metaBuf.fd.extFd[0], metaBuf.size.extS[0], v4l2_buf.length); |
feb7df4c | 2182 | #endif |
5506cebf SK |
2183 | if (exynos_v4l2_qbuf(currentNode->fd, &v4l2_buf) < 0) { |
2184 | ALOGE("ERR(%s): stream id(%d) exynos_v4l2_qbuf() fail fd(%d)", | |
2185 | __FUNCTION__, stream_id, currentNode->fd); | |
13d8c7b4 | 2186 | } |
5506cebf SK |
2187 | ALOGV("DEBUG(%s): stream id(%d) exynos_v4l2_qbuf() success fd(%d)", |
2188 | __FUNCTION__, stream_id, currentNode->fd); | |
2189 | targetStreamParms->svcBufStatus[i] = REQUIRES_DQ_FROM_SVC; | |
13d8c7b4 | 2190 | } |
5506cebf | 2191 | else { |
13d8c7b4 | 2192 | targetStreamParms->svcBufStatus[i] = ON_SERVICE; |
c15a6b00 | 2193 | } |
5506cebf | 2194 | |
13d8c7b4 | 2195 | targetStreamParms->svcBuffers[i] = currentBuf; |
feb7df4c | 2196 | targetStreamParms->metaBuffers[i] = metaBuf; |
13d8c7b4 SK |
2197 | targetStreamParms->svcBufHandle[i] = registeringBuffers[i]; |
2198 | } | |
6d8e5b08 | 2199 | |
5506cebf SK |
2200 | ALOGV("DEBUG(%s): calling streamon stream id = %d", __FUNCTION__, stream_id); |
2201 | cam_int_streamon(targetStreamParms->node); | |
ad37861e | 2202 | ALOGV("DEBUG(%s): calling streamon END", __FUNCTION__); |
5506cebf | 2203 | currentNode->status = true; |
13d8c7b4 | 2204 | ALOGV("DEBUG(%s): END registerStreamBuffers", __FUNCTION__); |
ad37861e | 2205 | |
c15a6b00 JS |
2206 | return 0; |
2207 | } | |
2208 | ||
/*
 * Releases a service-side stream by id.
 *
 * Main streams (PREVIEW, ZSL) decrement the owning StreamThread's registered
 * count and unmap any ion-mapped service buffers; substreams (JPEG, RECORD,
 * PRVCB) free their scratch buffers, clear their parameter slot and detach
 * from the parent StreamThread. After the per-id handling, idle stream
 * threads are released and, for the main preview stream, the sensor thread
 * is drained first.
 *
 * Returns 0 on success, 1 on an unknown id or a failed substream detach,
 * NO_ERROR (0) when the target stream no longer exists.
 */
int ExynosCameraHWInterface2::releaseStream(uint32_t stream_id)
{
    StreamThread *targetStream;
    status_t res = NO_ERROR;
    ALOGD("(%s): stream_id(%d)", __FUNCTION__, stream_id);
    bool releasingScpMain = false;

    if (stream_id == STREAM_ID_PREVIEW) {
        /* Main SCP stream: thread 0 owns it. */
        targetStream = (StreamThread*)(m_streamThreads[0].get());
        if (!targetStream) {
            ALOGW("(%s): Stream Not Exists", __FUNCTION__);
            return NO_ERROR;
        }
        targetStream->m_numRegisteredStream--;
        ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, targetStream->m_numRegisteredStream);
        releasingScpMain = true;
        /* Undo the ion_map() done at buffer registration time. */
        if (targetStream->m_parameters.needsIonMap) {
            for (int i = 0; i < targetStream->m_parameters.numSvcBuffers; i++) {
                for (int j = 0; j < targetStream->m_parameters.planes; j++) {
                    ion_unmap(targetStream->m_parameters.svcBuffers[i].virt.extP[j],
                                    targetStream->m_parameters.svcBuffers[i].size.extS[j]);
                    ALOGV("(%s) ummap stream buffer[%d], plane(%d), fd %d vaddr %x", __FUNCTION__, i, j,
                                  targetStream->m_parameters.svcBuffers[i].fd.extFd[j], (unsigned int)(targetStream->m_parameters.svcBuffers[i].virt.extP[j]));
                }
            }
        }
    } else if (stream_id == STREAM_ID_JPEG) {
        /* JPEG substream of thread 1: free the resize scratch buffer. */
        if (m_resizeBuf.size.s != 0) {
            freeCameraMemory(&m_resizeBuf, 1);
        }
        memset(&m_subStreams[stream_id], 0, sizeof(substream_parameters_t));

        targetStream = (StreamThread*)(m_streamThreads[1].get());
        if (!targetStream) {
            ALOGW("(%s): Stream Not Exists", __FUNCTION__);
            return NO_ERROR;
        }

        if (targetStream->detachSubStream(stream_id) != NO_ERROR) {
            ALOGE("(%s): substream detach failed. res(%d)", __FUNCTION__, res);
            return 1;
        }
        ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, targetStream->m_numRegisteredStream);
        /* JPEG release never tears down the stream threads below. */
        return 0;
    } else if (stream_id == STREAM_ID_RECORD) {
        /* Recording substream of thread 0. */
        memset(&m_subStreams[stream_id], 0, sizeof(substream_parameters_t));

        targetStream = (StreamThread*)(m_streamThreads[0].get());
        if (!targetStream) {
            ALOGW("(%s): Stream Not Exists", __FUNCTION__);
            return NO_ERROR;
        }

        if (targetStream->detachSubStream(stream_id) != NO_ERROR) {
            ALOGE("(%s): substream detach failed. res(%d)", __FUNCTION__, res);
            return 1;
        }

        /* Parent still has registered streams -> nothing more to do. */
        if (targetStream->m_numRegisteredStream != 0)
            return 0;
    } else if (stream_id == STREAM_ID_PRVCB) {
        /* Preview-callback substream of thread 0: free its internal buffer. */
        if (m_previewCbBuf.size.s != 0) {
            freeCameraMemory(&m_previewCbBuf, m_subStreams[stream_id].internalPlanes);
        }
        memset(&m_subStreams[stream_id], 0, sizeof(substream_parameters_t));

        targetStream = (StreamThread*)(m_streamThreads[0].get());
        if (!targetStream) {
            ALOGW("(%s): Stream Not Exists", __FUNCTION__);
            return NO_ERROR;
        }

        if (targetStream->detachSubStream(stream_id) != NO_ERROR) {
            ALOGE("(%s): substream detach failed. res(%d)", __FUNCTION__, res);
            return 1;
        }

        if (targetStream->m_numRegisteredStream != 0)
            return 0;
    } else if (stream_id == STREAM_ID_ZSL) {
        /* ZSL main stream: thread 1 owns it; mirror the PREVIEW unmap path. */
        targetStream = (StreamThread*)(m_streamThreads[1].get());
        if (!targetStream) {
            ALOGW("(%s): Stream Not Exists", __FUNCTION__);
            return NO_ERROR;
        }

        targetStream->m_numRegisteredStream--;
        ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, targetStream->m_numRegisteredStream);
        if (targetStream->m_parameters.needsIonMap) {
            for (int i = 0; i < targetStream->m_parameters.numSvcBuffers; i++) {
                for (int j = 0; j < targetStream->m_parameters.planes; j++) {
                    ion_unmap(targetStream->m_parameters.svcBuffers[i].virt.extP[j],
                                    targetStream->m_parameters.svcBuffers[i].size.extS[j]);
                    ALOGV("(%s) ummap stream buffer[%d], plane(%d), fd %d vaddr %x", __FUNCTION__, i, j,
                                  targetStream->m_parameters.svcBuffers[i].fd.extFd[j], (unsigned int)(targetStream->m_parameters.svcBuffers[i].virt.extP[j]));
                }
            }
        }
    } else {
        ALOGE("ERR:(%s): wrong stream id (%d)", __FUNCTION__, stream_id);
        return 1;
    }

    /* Tearing down the main preview stream: drain the sensor thread first
     * so it stops producing frames for the streams we are about to stop. */
    if (m_sensorThread != NULL && releasingScpMain) {
        m_sensorThread->release();
        ALOGD("(%s): START Waiting for (indirect) sensor thread termination", __FUNCTION__);
        while (!m_sensorThread->IsTerminated())
            usleep(SIG_WAITING_TICK);
        ALOGD("(%s): END Waiting for (indirect) sensor thread termination", __FUNCTION__);
    }

    /* Deactivate stream thread 1 (capture path) once it has no clients. */
    if (m_streamThreads[1]->m_numRegisteredStream == 0 && m_streamThreads[1]->m_activated) {
        ALOGV("(%s): deactivating stream thread 1 ", __FUNCTION__);
        targetStream = (StreamThread*)(m_streamThreads[1].get());
        targetStream->m_releasing = true;
        ALOGD("START stream thread release %d", __LINE__);
        do {
            targetStream->release();
            usleep(SIG_WAITING_TICK);
        } while (targetStream->m_releasing);
        m_camera_info.capture.status = false;
        ALOGD("END stream thread release %d", __LINE__);
    }

    /* Deactivate and terminate stream thread 0 (preview path). */
    if (releasingScpMain || (m_streamThreads[0].get() != NULL && m_streamThreads[0]->m_numRegisteredStream == 0 && m_streamThreads[0]->m_activated)) {
        ALOGV("(%s): deactivating stream thread 0", __FUNCTION__);
        targetStream = (StreamThread*)(m_streamThreads[0].get());
        targetStream->m_releasing = true;
        ALOGD("(%s): START Waiting for (indirect) stream thread release - line(%d)", __FUNCTION__, __LINE__);
        do {
            targetStream->release();
            usleep(SIG_WAITING_TICK);
        } while (targetStream->m_releasing);
        ALOGD("(%s): END Waiting for (indirect) stream thread release - line(%d)", __FUNCTION__, __LINE__);
        targetStream->SetSignal(SIGNAL_THREAD_TERMINATE);

        /* NOTE(review): targetStream was already dereferenced above, so this
         * null-check is always true (dead code) — confirm and simplify. */
        if (targetStream != NULL) {
            ALOGD("(%s): START Waiting for (indirect) stream thread termination", __FUNCTION__);
            while (!targetStream->IsTerminated())
                usleep(SIG_WAITING_TICK);
            ALOGD("(%s): END Waiting for (indirect) stream thread termination", __FUNCTION__);
            m_streamThreads[0] = NULL;
        }
        /* Capture is still live: remember to suspend SCP when it finishes. */
        if (m_camera_info.capture.status == true) {
            m_scpForceSuspended = true;
        }
        m_isIspStarted = false;
    }
    ALOGV("(%s): END", __FUNCTION__);
    return 0;
}
2360 | ||
2361 | int ExynosCameraHWInterface2::allocateReprocessStream( | |
1422aff9 MS |
2362 | uint32_t /*width*/, uint32_t /*height*/, uint32_t /*format*/, |
2363 | const camera2_stream_in_ops_t* /*reprocess_stream_ops*/, | |
2364 | uint32_t* /*stream_id*/, uint32_t* /*consumer_usage*/, uint32_t* /*max_buffers*/) | |
c15a6b00 | 2365 | { |
13d8c7b4 | 2366 | ALOGV("DEBUG(%s):", __FUNCTION__); |
c15a6b00 JS |
2367 | return 0; |
2368 | } | |
2369 | ||
5506cebf SK |
2370 | int ExynosCameraHWInterface2::allocateReprocessStreamFromStream( |
2371 | uint32_t output_stream_id, | |
2372 | const camera2_stream_in_ops_t *reprocess_stream_ops, | |
2373 | // outputs | |
2374 | uint32_t *stream_id) | |
2375 | { | |
ed4ad5fe | 2376 | ALOGD("(%s): output_stream_id(%d)", __FUNCTION__, output_stream_id); |
5506cebf SK |
2377 | *stream_id = STREAM_ID_JPEG_REPROCESS; |
2378 | ||
2379 | m_reprocessStreamId = *stream_id; | |
2380 | m_reprocessOps = reprocess_stream_ops; | |
2381 | m_reprocessOutputStreamId = output_stream_id; | |
2382 | return 0; | |
2383 | } | |
2384 | ||
c15a6b00 JS |
2385 | int ExynosCameraHWInterface2::releaseReprocessStream(uint32_t stream_id) |
2386 | { | |
ed4ad5fe | 2387 | ALOGD("(%s): stream_id(%d)", __FUNCTION__, stream_id); |
5506cebf SK |
2388 | if (stream_id == STREAM_ID_JPEG_REPROCESS) { |
2389 | m_reprocessStreamId = 0; | |
2390 | m_reprocessOps = NULL; | |
2391 | m_reprocessOutputStreamId = 0; | |
2392 | return 0; | |
2393 | } | |
2394 | return 1; | |
c15a6b00 JS |
2395 | } |
2396 | ||
2397 | int ExynosCameraHWInterface2::triggerAction(uint32_t trigger_id, int ext1, int ext2) | |
2398 | { | |
ca714238 | 2399 | Mutex::Autolock lock(m_afModeTriggerLock); |
0f26b20f SK |
2400 | ALOGV("DEBUG(%s): id(%x), %d, %d", __FUNCTION__, trigger_id, ext1, ext2); |
2401 | ||
2402 | switch (trigger_id) { | |
2403 | case CAMERA2_TRIGGER_AUTOFOCUS: | |
2404 | ALOGV("DEBUG(%s):TRIGGER_AUTOFOCUS id(%d)", __FUNCTION__, ext1); | |
ca714238 | 2405 | OnAfTrigger(ext1); |
0f26b20f SK |
2406 | break; |
2407 | ||
2408 | case CAMERA2_TRIGGER_CANCEL_AUTOFOCUS: | |
2409 | ALOGV("DEBUG(%s):CANCEL_AUTOFOCUS id(%d)", __FUNCTION__, ext1); | |
2410 | OnAfCancel(ext1); | |
2411 | break; | |
e117f756 YJ |
2412 | case CAMERA2_TRIGGER_PRECAPTURE_METERING: |
2413 | ALOGV("DEBUG(%s):CAMERA2_TRIGGER_PRECAPTURE_METERING id(%d)", __FUNCTION__, ext1); | |
2414 | OnPrecaptureMeteringTriggerStart(ext1); | |
2415 | break; | |
0f26b20f SK |
2416 | default: |
2417 | break; | |
2418 | } | |
c15a6b00 JS |
2419 | return 0; |
2420 | } | |
2421 | ||
2422 | int ExynosCameraHWInterface2::setNotifyCallback(camera2_notify_callback notify_cb, void *user) | |
2423 | { | |
0f26b20f | 2424 | ALOGV("DEBUG(%s): cb_addr(%x)", __FUNCTION__, (unsigned int)notify_cb); |
c15a6b00 JS |
2425 | m_notifyCb = notify_cb; |
2426 | m_callbackCookie = user; | |
2427 | return 0; | |
2428 | } | |
2429 | ||
2430 | int ExynosCameraHWInterface2::getMetadataVendorTagOps(vendor_tag_query_ops_t **ops) | |
2431 | { | |
13d8c7b4 | 2432 | ALOGV("DEBUG(%s):", __FUNCTION__); |
cf95ef60 | 2433 | *ops = NULL; |
c15a6b00 JS |
2434 | return 0; |
2435 | } | |
2436 | ||
1422aff9 | 2437 | int ExynosCameraHWInterface2::dump(int /*fd*/) |
c15a6b00 | 2438 | { |
13d8c7b4 | 2439 | ALOGV("DEBUG(%s):", __FUNCTION__); |
c15a6b00 JS |
2440 | return 0; |
2441 | } | |
2442 | ||
13d8c7b4 SK |
2443 | void ExynosCameraHWInterface2::m_getAlignedYUVSize(int colorFormat, int w, int h, ExynosBuffer *buf) |
2444 | { | |
2445 | switch (colorFormat) { | |
2446 | // 1p | |
2447 | case V4L2_PIX_FMT_RGB565 : | |
2448 | case V4L2_PIX_FMT_YUYV : | |
2449 | case V4L2_PIX_FMT_UYVY : | |
2450 | case V4L2_PIX_FMT_VYUY : | |
2451 | case V4L2_PIX_FMT_YVYU : | |
2452 | buf->size.extS[0] = FRAME_SIZE(V4L2_PIX_2_HAL_PIXEL_FORMAT(colorFormat), w, h); | |
2453 | buf->size.extS[1] = 0; | |
2454 | buf->size.extS[2] = 0; | |
2455 | break; | |
2456 | // 2p | |
2457 | case V4L2_PIX_FMT_NV12 : | |
2458 | case V4L2_PIX_FMT_NV12T : | |
2459 | case V4L2_PIX_FMT_NV21 : | |
2460 | buf->size.extS[0] = ALIGN(w, 16) * ALIGN(h, 16); | |
2461 | buf->size.extS[1] = ALIGN(w/2, 16) * ALIGN(h/2, 16); | |
2462 | buf->size.extS[2] = 0; | |
2463 | break; | |
2464 | case V4L2_PIX_FMT_NV12M : | |
2465 | case V4L2_PIX_FMT_NV12MT_16X16 : | |
9dd63e1f | 2466 | case V4L2_PIX_FMT_NV21M: |
13d8c7b4 SK |
2467 | buf->size.extS[0] = ALIGN(w, 16) * ALIGN(h, 16); |
2468 | buf->size.extS[1] = ALIGN(buf->size.extS[0] / 2, 256); | |
2469 | buf->size.extS[2] = 0; | |
2470 | break; | |
2471 | case V4L2_PIX_FMT_NV16 : | |
2472 | case V4L2_PIX_FMT_NV61 : | |
2473 | buf->size.extS[0] = ALIGN(w, 16) * ALIGN(h, 16); | |
2474 | buf->size.extS[1] = ALIGN(w, 16) * ALIGN(h, 16); | |
2475 | buf->size.extS[2] = 0; | |
2476 | break; | |
2477 | // 3p | |
2478 | case V4L2_PIX_FMT_YUV420 : | |
2479 | case V4L2_PIX_FMT_YVU420 : | |
2480 | buf->size.extS[0] = (w * h); | |
2481 | buf->size.extS[1] = (w * h) >> 2; | |
2482 | buf->size.extS[2] = (w * h) >> 2; | |
2483 | break; | |
2484 | case V4L2_PIX_FMT_YUV420M: | |
2485 | case V4L2_PIX_FMT_YVU420M : | |
4a3f1820 SK |
2486 | buf->size.extS[0] = ALIGN(w, 32) * ALIGN(h, 16); |
2487 | buf->size.extS[1] = ALIGN(w/2, 16) * ALIGN(h/2, 8); | |
2488 | buf->size.extS[2] = ALIGN(w/2, 16) * ALIGN(h/2, 8); | |
2489 | break; | |
13d8c7b4 | 2490 | case V4L2_PIX_FMT_YUV422P : |
0d220b42 | 2491 | buf->size.extS[0] = ALIGN(w, 16) * ALIGN(h, 16); |
13d8c7b4 SK |
2492 | buf->size.extS[1] = ALIGN(w/2, 16) * ALIGN(h/2, 8); |
2493 | buf->size.extS[2] = ALIGN(w/2, 16) * ALIGN(h/2, 8); | |
2494 | break; | |
2495 | default: | |
2496 | ALOGE("ERR(%s):unmatched colorFormat(%d)", __FUNCTION__, colorFormat); | |
2497 | return; | |
2498 | break; | |
2499 | } | |
2500 | } | |
c15a6b00 | 2501 | |
13d8c7b4 SK |
2502 | bool ExynosCameraHWInterface2::m_getRatioSize(int src_w, int src_h, |
2503 | int dst_w, int dst_h, | |
2504 | int *crop_x, int *crop_y, | |
2505 | int *crop_w, int *crop_h, | |
2506 | int zoom) | |
c15a6b00 | 2507 | { |
13d8c7b4 SK |
2508 | *crop_w = src_w; |
2509 | *crop_h = src_h; | |
2510 | ||
2511 | if ( src_w != dst_w | |
2512 | || src_h != dst_h) { | |
2513 | float src_ratio = 1.0f; | |
2514 | float dst_ratio = 1.0f; | |
2515 | ||
2516 | // ex : 1024 / 768 | |
2517 | src_ratio = (float)src_w / (float)src_h; | |
2518 | ||
2519 | // ex : 352 / 288 | |
2520 | dst_ratio = (float)dst_w / (float)dst_h; | |
2521 | ||
2522 | if (dst_w * dst_h < src_w * src_h) { | |
2523 | if (dst_ratio <= src_ratio) { | |
2524 | // shrink w | |
2525 | *crop_w = src_h * dst_ratio; | |
2526 | *crop_h = src_h; | |
2527 | } else { | |
2528 | // shrink h | |
2529 | *crop_w = src_w; | |
2530 | *crop_h = src_w / dst_ratio; | |
c15a6b00 | 2531 | } |
13d8c7b4 SK |
2532 | } else { |
2533 | if (dst_ratio <= src_ratio) { | |
2534 | // shrink w | |
2535 | *crop_w = src_h * dst_ratio; | |
2536 | *crop_h = src_h; | |
2537 | } else { | |
2538 | // shrink h | |
2539 | *crop_w = src_w; | |
2540 | *crop_h = src_w / dst_ratio; | |
c15a6b00 JS |
2541 | } |
2542 | } | |
c15a6b00 JS |
2543 | } |
2544 | ||
13d8c7b4 SK |
2545 | if (zoom != 0) { |
2546 | float zoomLevel = ((float)zoom + 10.0) / 10.0; | |
2547 | *crop_w = (int)((float)*crop_w / zoomLevel); | |
2548 | *crop_h = (int)((float)*crop_h / zoomLevel); | |
2549 | } | |
2550 | ||
2551 | #define CAMERA_CROP_WIDTH_RESTRAIN_NUM (0x2) | |
2552 | unsigned int w_align = (*crop_w & (CAMERA_CROP_WIDTH_RESTRAIN_NUM - 1)); | |
2553 | if (w_align != 0) { | |
2554 | if ( (CAMERA_CROP_WIDTH_RESTRAIN_NUM >> 1) <= w_align | |
1422aff9 | 2555 | && (int)(*crop_w + (CAMERA_CROP_WIDTH_RESTRAIN_NUM - w_align)) <= dst_w) { |
13d8c7b4 SK |
2556 | *crop_w += (CAMERA_CROP_WIDTH_RESTRAIN_NUM - w_align); |
2557 | } | |
2558 | else | |
2559 | *crop_w -= w_align; | |
2560 | } | |
2561 | ||
2562 | #define CAMERA_CROP_HEIGHT_RESTRAIN_NUM (0x2) | |
2563 | unsigned int h_align = (*crop_h & (CAMERA_CROP_HEIGHT_RESTRAIN_NUM - 1)); | |
2564 | if (h_align != 0) { | |
2565 | if ( (CAMERA_CROP_HEIGHT_RESTRAIN_NUM >> 1) <= h_align | |
1422aff9 | 2566 | && (int)(*crop_h + (CAMERA_CROP_HEIGHT_RESTRAIN_NUM - h_align)) <= dst_h) { |
13d8c7b4 SK |
2567 | *crop_h += (CAMERA_CROP_HEIGHT_RESTRAIN_NUM - h_align); |
2568 | } | |
2569 | else | |
2570 | *crop_h -= h_align; | |
2571 | } | |
2572 | ||
2573 | *crop_x = (src_w - *crop_w) >> 1; | |
2574 | *crop_y = (src_h - *crop_h) >> 1; | |
2575 | ||
2576 | if (*crop_x & (CAMERA_CROP_WIDTH_RESTRAIN_NUM >> 1)) | |
2577 | *crop_x -= 1; | |
2578 | ||
2579 | if (*crop_y & (CAMERA_CROP_HEIGHT_RESTRAIN_NUM >> 1)) | |
2580 | *crop_y -= 1; | |
2581 | ||
2582 | return true; | |
2583 | } | |
2584 | ||
9dd63e1f | 2585 | BayerBufManager::BayerBufManager() |
13d8c7b4 | 2586 | { |
9dd63e1f SK |
2587 | ALOGV("DEBUG(%s): ", __FUNCTION__); |
2588 | for (int i = 0; i < NUM_BAYER_BUFFERS ; i++) { | |
2589 | entries[i].status = BAYER_ON_HAL_EMPTY; | |
2590 | entries[i].reqFrameCnt = 0; | |
13d8c7b4 | 2591 | } |
9dd63e1f SK |
2592 | sensorEnqueueHead = 0; |
2593 | sensorDequeueHead = 0; | |
2594 | ispEnqueueHead = 0; | |
2595 | ispDequeueHead = 0; | |
2596 | numOnSensor = 0; | |
2597 | numOnIsp = 0; | |
2598 | numOnHalFilled = 0; | |
2599 | numOnHalEmpty = NUM_BAYER_BUFFERS; | |
13d8c7b4 SK |
2600 | } |
2601 | ||
6d8e5b08 SK |
2602 | BayerBufManager::~BayerBufManager() |
2603 | { | |
2604 | ALOGV("%s", __FUNCTION__); | |
2605 | } | |
2606 | ||
9dd63e1f | 2607 | int BayerBufManager::GetIndexForSensorEnqueue() |
13d8c7b4 | 2608 | { |
9dd63e1f SK |
2609 | int ret = 0; |
2610 | if (numOnHalEmpty == 0) | |
2611 | ret = -1; | |
2612 | else | |
2613 | ret = sensorEnqueueHead; | |
2614 | ALOGV("DEBUG(%s): returning (%d)", __FUNCTION__, ret); | |
2615 | return ret; | |
13d8c7b4 SK |
2616 | } |
2617 | ||
9dd63e1f | 2618 | int BayerBufManager::MarkSensorEnqueue(int index) |
13d8c7b4 | 2619 | { |
9dd63e1f SK |
2620 | ALOGV("DEBUG(%s) : BayerIndex[%d] ", __FUNCTION__, index); |
2621 | ||
2622 | // sanity check | |
2623 | if (index != sensorEnqueueHead) { | |
2624 | ALOGV("DEBUG(%s) : Abnormal BayerIndex[%d] - expected[%d]", __FUNCTION__, index, sensorEnqueueHead); | |
2625 | return -1; | |
2626 | } | |
2627 | if (entries[index].status != BAYER_ON_HAL_EMPTY) { | |
2628 | ALOGV("DEBUG(%s) : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__, | |
2629 | index, entries[index].status, BAYER_ON_HAL_EMPTY); | |
2630 | return -1; | |
13d8c7b4 | 2631 | } |
13d8c7b4 | 2632 | |
9dd63e1f SK |
2633 | entries[index].status = BAYER_ON_SENSOR; |
2634 | entries[index].reqFrameCnt = 0; | |
2635 | numOnHalEmpty--; | |
2636 | numOnSensor++; | |
2637 | sensorEnqueueHead = GetNextIndex(index); | |
2638 | ALOGV("DEBUG(%s) END: HAL-e(%d) HAL-f(%d) Sensor(%d) ISP(%d) ", | |
2639 | __FUNCTION__, numOnHalEmpty, numOnHalFilled, numOnSensor, numOnIsp); | |
2640 | return 0; | |
2641 | } | |
13d8c7b4 | 2642 | |
/*
 * Records that buffer `index` has come back from the sensor filled.
 * The slot must currently be BAYER_ON_SENSOR; on success it moves to
 * BAYER_ON_HAL_FILLED and the counters are updated. Returns 0 on
 * success, -1 on a state mismatch.
 *
 * NOTE(review): the reqFrameCnt parameter is only logged and is never
 * stored into entries[index].reqFrameCnt, so GetIndexForIspEnqueue()
 * will report whatever the slot previously held (0 after enqueue).
 * Confirm whether this is intentional. timeStamp is unused.
 */
int BayerBufManager::MarkSensorDequeue(int index, int reqFrameCnt, nsecs_t* /*timeStamp*/)
{
    ALOGV("DEBUG(%s) : BayerIndex[%d] reqFrameCnt(%d)", __FUNCTION__, index, reqFrameCnt);

    if (entries[index].status != BAYER_ON_SENSOR) {
        ALOGE("DEBUG(%s) : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
            index, entries[index].status, BAYER_ON_SENSOR);
        return -1;
    }

    /* Ownership transfer: sensor -> HAL(filled). */
    entries[index].status = BAYER_ON_HAL_FILLED;
    numOnHalFilled++;
    numOnSensor--;

    return 0;
}
2659 | ||
2660 | int BayerBufManager::GetIndexForIspEnqueue(int *reqFrameCnt) | |
2661 | { | |
2662 | int ret = 0; | |
2663 | if (numOnHalFilled == 0) | |
2664 | ret = -1; | |
2665 | else { | |
2666 | *reqFrameCnt = entries[ispEnqueueHead].reqFrameCnt; | |
2667 | ret = ispEnqueueHead; | |
13d8c7b4 | 2668 | } |
9dd63e1f | 2669 | ALOGV("DEBUG(%s): returning BayerIndex[%d]", __FUNCTION__, ret); |
13d8c7b4 SK |
2670 | return ret; |
2671 | } | |
2672 | ||
9dd63e1f SK |
2673 | int BayerBufManager::GetIndexForIspDequeue(int *reqFrameCnt) |
2674 | { | |
2675 | int ret = 0; | |
2676 | if (numOnIsp == 0) | |
2677 | ret = -1; | |
2678 | else { | |
2679 | *reqFrameCnt = entries[ispDequeueHead].reqFrameCnt; | |
2680 | ret = ispDequeueHead; | |
2681 | } | |
2682 | ALOGV("DEBUG(%s): returning BayerIndex[%d]", __FUNCTION__, ret); | |
2683 | return ret; | |
2684 | } | |
13d8c7b4 | 2685 | |
9dd63e1f | 2686 | int BayerBufManager::MarkIspEnqueue(int index) |
13d8c7b4 | 2687 | { |
9dd63e1f SK |
2688 | ALOGV("DEBUG(%s) : BayerIndex[%d] ", __FUNCTION__, index); |
2689 | ||
2690 | // sanity check | |
2691 | if (index != ispEnqueueHead) { | |
2692 | ALOGV("DEBUG(%s) : Abnormal BayerIndex[%d] - expected[%d]", __FUNCTION__, index, ispEnqueueHead); | |
13d8c7b4 SK |
2693 | return -1; |
2694 | } | |
9dd63e1f SK |
2695 | if (entries[index].status != BAYER_ON_HAL_FILLED) { |
2696 | ALOGV("DEBUG(%s) : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__, | |
2697 | index, entries[index].status, BAYER_ON_HAL_FILLED); | |
2698 | return -1; | |
13d8c7b4 SK |
2699 | } |
2700 | ||
9dd63e1f SK |
2701 | entries[index].status = BAYER_ON_ISP; |
2702 | numOnHalFilled--; | |
2703 | numOnIsp++; | |
2704 | ispEnqueueHead = GetNextIndex(index); | |
2705 | ALOGV("DEBUG(%s) END: HAL-e(%d) HAL-f(%d) Sensor(%d) ISP(%d) ", | |
2706 | __FUNCTION__, numOnHalEmpty, numOnHalFilled, numOnSensor, numOnIsp); | |
2707 | return 0; | |
2708 | } | |
2709 | ||
2710 | int BayerBufManager::MarkIspDequeue(int index) | |
2711 | { | |
2712 | ALOGV("DEBUG(%s) : BayerIndex[%d]", __FUNCTION__, index); | |
2713 | ||
2714 | // sanity check | |
2715 | if (index != ispDequeueHead) { | |
2716 | ALOGV("DEBUG(%s) : Abnormal BayerIndex[%d] - expected[%d]", __FUNCTION__, index, ispDequeueHead); | |
2717 | return -1; | |
13d8c7b4 | 2718 | } |
9dd63e1f SK |
2719 | if (entries[index].status != BAYER_ON_ISP) { |
2720 | ALOGV("DEBUG(%s) : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__, | |
2721 | index, entries[index].status, BAYER_ON_ISP); | |
13d8c7b4 SK |
2722 | return -1; |
2723 | } | |
2724 | ||
9dd63e1f SK |
2725 | entries[index].status = BAYER_ON_HAL_EMPTY; |
2726 | entries[index].reqFrameCnt = 0; | |
2727 | numOnHalEmpty++; | |
2728 | numOnIsp--; | |
2729 | ispDequeueHead = GetNextIndex(index); | |
2730 | ALOGV("DEBUG(%s) END: HAL-e(%d) HAL-f(%d) Sensor(%d) ISP(%d) ", | |
2731 | __FUNCTION__, numOnHalEmpty, numOnHalFilled, numOnSensor, numOnIsp); | |
2732 | return 0; | |
2733 | } | |
13d8c7b4 | 2734 | |
9dd63e1f SK |
2735 | int BayerBufManager::GetNumOnSensor() |
2736 | { | |
2737 | return numOnSensor; | |
13d8c7b4 SK |
2738 | } |
2739 | ||
9dd63e1f | 2740 | int BayerBufManager::GetNumOnHalFilled() |
13d8c7b4 | 2741 | { |
9dd63e1f SK |
2742 | return numOnHalFilled; |
2743 | } | |
2744 | ||
2745 | int BayerBufManager::GetNumOnIsp() | |
2746 | { | |
2747 | return numOnIsp; | |
2748 | } | |
2749 | ||
2750 | int BayerBufManager::GetNextIndex(int index) | |
2751 | { | |
2752 | index++; | |
2753 | if (index >= NUM_BAYER_BUFFERS) | |
2754 | index = 0; | |
2755 | ||
2756 | return index; | |
2757 | } | |
2758 | ||
/*
 * Main thread body, driven by signals from SignalDrivenThread.
 *
 * Handles three signals:
 *  - SIGNAL_THREAD_RELEASE: acknowledges release and requests termination.
 *  - SIGNAL_MAIN_REQ_Q_NOT_EMPTY: pulls one request from the service queue,
 *    registers it (capturing AF mode/region under the AF lock), kicks the
 *    sensor thread, and re-signals itself while the request manager has room.
 *  - SIGNAL_MAIN_STREAM_OUTPUT_DONE: finalizes one request — prepares the
 *    result metadata, deregisters and frees the service request, dequeues a
 *    frame buffer from the service, appends the prepared metadata, and
 *    enqueues the completed frame back.
 */
void ExynosCameraHWInterface2::m_mainThreadFunc(SignalDrivenThread * self)
{
    camera_metadata_t *currentRequest = NULL;
    camera_metadata_t *currentFrame = NULL;
    size_t numEntries = 0;
    size_t frameSize = 0;
    camera_metadata_t * preparedFrame = NULL;
    camera_metadata_t *deregisteredRequest = NULL;
    uint32_t currentSignal = self->GetProcessingSignal();
    MainThread * selfThread = ((MainThread*)self);
    int res = 0;

    int ret;
    int afMode;
    uint32_t afRegion[4];

    ALOGV("DEBUG(%s): m_mainThreadFunc (%x)", __FUNCTION__, currentSignal);

    if (currentSignal & SIGNAL_THREAD_RELEASE) {
        ALOGV("DEBUG(%s): processing SIGNAL_THREAD_RELEASE", __FUNCTION__);

        ALOGV("DEBUG(%s): processing SIGNAL_THREAD_RELEASE DONE", __FUNCTION__);
        selfThread->SetSignal(SIGNAL_THREAD_TERMINATE);
        return;
    }

    if (currentSignal & SIGNAL_MAIN_REQ_Q_NOT_EMPTY) {
        ALOGV("DEBUG(%s): MainThread processing SIGNAL_MAIN_REQ_Q_NOT_EMPTY", __FUNCTION__);
        if (m_requestManager->IsRequestQueueFull()==false) {
            /* AF mode/region are applied atomically with request registration. */
            Mutex::Autolock lock(m_afModeTriggerLock);
            m_requestQueueOps->dequeue_request(m_requestQueueOps, &currentRequest);
            if (NULL == currentRequest) {
                ALOGD("DEBUG(%s)(0x%x): No more service requests left in the queue ", __FUNCTION__, currentSignal);
                m_isRequestQueueNull = true;
                /* With VDIS active, insert a bubble frame while idle. */
                if (m_requestManager->IsVdisEnable())
                    m_vdisBubbleCnt = 1;
            }
            else {
                m_requestManager->RegisterRequest(currentRequest, &afMode, afRegion);

                SetAfMode((enum aa_afmode)afMode);
                SetAfRegion(afRegion);

                m_numOfRemainingReqInSvc = m_requestQueueOps->request_count(m_requestQueueOps);
                ALOGV("DEBUG(%s): remaining req cnt (%d)", __FUNCTION__, m_numOfRemainingReqInSvc);
                if (m_requestManager->IsRequestQueueFull()==false)
                    selfThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY); // dequeue repeatedly

                m_sensorThread->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING);
            }
        }
        else {
            /* Internal queue full: remember to retry once space frees up. */
            m_isRequestQueuePending = true;
        }
    }

    if (currentSignal & SIGNAL_MAIN_STREAM_OUTPUT_DONE) {
        ALOGV("DEBUG(%s): MainThread processing SIGNAL_MAIN_STREAM_OUTPUT_DONE", __FUNCTION__);
        /*while (1)*/ {
            /* Build the result metadata (including current AF state). */
            ret = m_requestManager->PrepareFrame(&numEntries, &frameSize, &preparedFrame, GetAfStateForService());
            if (ret == false)
                CAM_LOGE("ERR(%s): PrepareFrame ret = %d", __FUNCTION__, ret);

            m_requestManager->DeregisterRequest(&deregisteredRequest);

            /* Hand the original request buffer back to the service. */
            ret = m_requestQueueOps->free_request(m_requestQueueOps, deregisteredRequest);
            if (ret < 0)
                CAM_LOGE("ERR(%s): free_request ret = %d", __FUNCTION__, ret);

            /* Get an empty frame buffer sized for the prepared metadata. */
            ret = m_frameQueueOps->dequeue_frame(m_frameQueueOps, numEntries, frameSize, &currentFrame);
            if (ret < 0)
                CAM_LOGE("ERR(%s): dequeue_frame ret = %d", __FUNCTION__, ret);

            if (currentFrame==NULL) {
                ALOGV("DBG(%s): frame dequeue returned NULL",__FUNCTION__ );
            }
            else {
                ALOGV("DEBUG(%s): frame dequeue done. numEntries(%d) frameSize(%d)",__FUNCTION__ , numEntries, frameSize);
            }
            res = append_camera_metadata(currentFrame, preparedFrame);
            if (res==0) {
                ALOGV("DEBUG(%s): frame metadata append success",__FUNCTION__);
                m_frameQueueOps->enqueue_frame(m_frameQueueOps, currentFrame);
            }
            else {
                ALOGE("ERR(%s): frame metadata append fail (%d)",__FUNCTION__, res);
            }
        }
        /* More service requests may be waiting: keep the pump running. */
        if (!m_isRequestQueueNull) {
            selfThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY);
        }

        if (getInProgressCount()>0) {
            ALOGV("DEBUG(%s): STREAM_OUTPUT_DONE and signalling REQ_PROCESSING",__FUNCTION__);
            m_sensorThread->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING);
        }
    }
    ALOGV("DEBUG(%s): MainThread Exit", __FUNCTION__);
    return;
}
c15a6b00 | 2859 | |
13d8c7b4 SK |
/*
 * Debug helper: dumps the key fields of a camera2 shot to the log.
 * Prints three sections: the common magic number, the control (ctl)
 * request the HAL queued, and the dynamic metadata (dm) returned by
 * firmware, plus the per-stream output request flags. Read-only; no
 * state is modified.
 */
void ExynosCameraHWInterface2::DumpInfoWithShot(struct camera2_shot_ext * shot_ext)
{
    // Common section: magic number used to sanity-check the shot struct.
    ALOGD("#### common Section");
    ALOGD("#### magic(%x) ",
        shot_ext->shot.magicNumber);
    // Control section: what the HAL asked the ISP/sensor to do.
    ALOGD("#### ctl Section");
    ALOGD("#### meta(%d) aper(%f) exp(%lld) duration(%lld) ISO(%d) AWB(%d)",
        shot_ext->shot.ctl.request.metadataMode,
        shot_ext->shot.ctl.lens.aperture,
        shot_ext->shot.ctl.sensor.exposureTime,
        shot_ext->shot.ctl.sensor.frameDuration,
        shot_ext->shot.ctl.sensor.sensitivity,
        shot_ext->shot.ctl.aa.awbMode);

    // Per-shot output routing: sensor / SCP (preview) / SCC (capture)
    // request flags and the first requested output stream mask.
    ALOGD("#### OutputStream Sensor(%d) SCP(%d) SCC(%d) streams(%x)",
        shot_ext->request_sensor, shot_ext->request_scp, shot_ext->request_scc,
        shot_ext->shot.ctl.request.outputStreams[0]);

    // Dynamic-metadata section: what the firmware actually applied/measured.
    ALOGD("#### DM Section");
    ALOGD("#### meta(%d) aper(%f) exp(%lld) duration(%lld) ISO(%d) timestamp(%lld) AWB(%d) cnt(%d)",
        shot_ext->shot.dm.request.metadataMode,
        shot_ext->shot.dm.lens.aperture,
        shot_ext->shot.dm.sensor.exposureTime,
        shot_ext->shot.dm.sensor.frameDuration,
        shot_ext->shot.dm.sensor.sensitivity,
        shot_ext->shot.dm.sensor.timeStamp,
        shot_ext->shot.dm.aa.awbMode,
        shot_ext->shot.dm.request.frameCount );
}
2889 | ||
/*
 * Flash pre-capture state machine: SETTER side.
 * Called on the outgoing shot (before it is queued to the ISP) to program
 * the aeflashMode / request_scc / request_scp fields appropriate for the
 * current flash state (m_ctlInfo.flash.m_flashCnt), and to advance the
 * state for "auto transition" steps. The matching *_WAIT states are
 * advanced by the listener functions that inspect the returned dm data
 * (m_preCaptureListenerSensor / m_preCaptureListenerISP).
 */
void ExynosCameraHWInterface2::m_preCaptureSetter(struct camera2_shot_ext * shot_ext)
{
    // Flash
    switch (m_ctlInfo.flash.m_flashCnt) {
    case IS_FLASH_STATE_ON:
        ALOGV("(%s): [Flash] Flash ON for Capture (%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
        // check AF locked
        if (m_ctlInfo.flash.m_precaptureTriggerId > 0) {
            // Pre-capture trigger pending: wait m_flashTimeOut frames
            // (countdown below) before actually turning the flash on.
            if (m_ctlInfo.flash.m_flashTimeOut == 0) {
                if (m_ctlInfo.flash.i_flashMode == AA_AEMODE_ON_ALWAYS_FLASH) {
                    shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_ON_ALWAYS;
                    // Re-arm the timeout for the next wait phase.
                    m_ctlInfo.flash.m_flashTimeOut = 5;
                } else
                    shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_ON;
                m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON_WAIT;
            } else {
                m_ctlInfo.flash.m_flashTimeOut--;
            }
        } else {
            // No pre-capture trigger: turn the flash on immediately.
            if (m_ctlInfo.flash.i_flashMode == AA_AEMODE_ON_ALWAYS_FLASH) {
                shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_ON_ALWAYS;
                m_ctlInfo.flash.m_flashTimeOut = 5;
            } else
                shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_ON;
            m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON_WAIT;
        }
        break;
    case IS_FLASH_STATE_ON_WAIT:
        // Waiting for the ISP decision; advanced by m_preCaptureListenerISP.
        break;
    case IS_FLASH_STATE_ON_DONE:
        if (!m_ctlInfo.flash.m_afFlashDoneFlg)
            // auto transition at pre-capture trigger
            m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_AE_AWB_LOCK;
        break;
    case IS_FLASH_STATE_AUTO_AE_AWB_LOCK:
        ALOGV("(%s): [Flash] IS_FLASH_AF_AUTO_AE_AWB_LOCK (%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
        // Switch flash to AUTO and lock AWB before the capture sequence.
        shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_AUTO;
        //shot_ext->shot.ctl.aa.aeMode = AA_AEMODE_LOCKED;
        shot_ext->shot.ctl.aa.awbMode = AA_AWBMODE_LOCKED;
        m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AE_AWB_LOCK_WAIT;
        break;
    case IS_FLASH_STATE_AE_AWB_LOCK_WAIT:
    case IS_FLASH_STATE_AUTO_WAIT:
        // While waiting, send "no change" (0) so queued shots don't
        // overwrite the locked AE/AWB modes.
        shot_ext->shot.ctl.aa.aeMode =(enum aa_aemode)0;
        shot_ext->shot.ctl.aa.awbMode = (enum aa_awbmode)0;
        break;
    case IS_FLASH_STATE_AUTO_DONE:
        ALOGV("(%s): [Flash] IS_FLASH_AF_AUTO DONE (%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
        shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_OFF;
        break;
    case IS_FLASH_STATE_AUTO_OFF:
        ALOGV("(%s): [Flash] IS_FLASH_AF_AUTO Clear (%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
        shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_OFF;
        // Auto sequence finished without capture: disable flash handling.
        m_ctlInfo.flash.m_flashEnableFlg = false;
        break;
    case IS_FLASH_STATE_CAPTURE:
        ALOGV("(%s): [Flash] IS_FLASH_CAPTURE (%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
        // Start the main-flash capture: fire the flash and suppress both
        // output streams until the flash output is stable.
        m_ctlInfo.flash.m_flashTimeOut = FLASH_STABLE_WAIT_TIMEOUT;
        shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_CAPTURE;
        shot_ext->request_scc = 0;
        shot_ext->request_scp = 0;
        m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE_WAIT; // auto transition
        break;
    case IS_FLASH_STATE_CAPTURE_WAIT:
        // Keep both streams suppressed while waiting for firingStable.
        shot_ext->request_scc = 0;
        shot_ext->request_scp = 0;
        break;
    case IS_FLASH_STATE_CAPTURE_JPEG:
        ALOGV("(%s): [Flash] Flash Capture (%d)!!!!!", __FUNCTION__, (FLASH_STABLE_WAIT_TIMEOUT -m_ctlInfo.flash.m_flashTimeOut));
        // Flash is stable: request both preview (SCP) and capture (SCC)
        // outputs for the actual JPEG frame.
        shot_ext->request_scc = 1;
        shot_ext->request_scp = 1;
        m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE_END;  // auto transition
        break;
    case IS_FLASH_STATE_CAPTURE_END:
        ALOGV("(%s): [Flash] Flash Capture END (%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
        // Capture done: turn the flash off and reset the whole sequence.
        shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_OFF;
        shot_ext->request_scc = 0;
        shot_ext->request_scp = 0;
        m_ctlInfo.flash.m_flashEnableFlg = false;
        m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_NONE;
        m_ctlInfo.flash.m_afFlashDoneFlg= false;
        break;
    case IS_FLASH_STATE_NONE:
        break;
    default:
        ALOGE("(%s): [Flash] flash state error!! (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt);
    }
}
2978 | ||
e117f756 | 2979 | void ExynosCameraHWInterface2::m_preCaptureListenerSensor(struct camera2_shot_ext * shot_ext) |
6f19b6cf | 2980 | { |
e117f756 YJ |
2981 | // Flash |
2982 | switch (m_ctlInfo.flash.m_flashCnt) { | |
2983 | case IS_FLASH_STATE_AUTO_WAIT: | |
2984 | if (m_ctlInfo.flash.m_flashDecisionResult) { | |
2985 | if (shot_ext->shot.dm.flash.flashMode == CAM2_FLASH_MODE_OFF) { | |
2986 | m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_DONE; | |
73f5ad60 | 2987 | ALOGV("(%s): [Flash] Lis : AUTO -> OFF (%d)", __FUNCTION__, shot_ext->shot.dm.flash.flashMode); |
9257e29e | 2988 | } else { |
73f5ad60 | 2989 | ALOGV("(%s): [Flash] Waiting : AUTO -> OFF", __FUNCTION__); |
9257e29e | 2990 | } |
e117f756 YJ |
2991 | } else { |
2992 | //If flash isn't activated at flash auto mode, skip flash auto control | |
2993 | m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_DONE; | |
73f5ad60 | 2994 | ALOGV("(%s): [Flash] Skip : AUTO -> OFF", __FUNCTION__); |
9257e29e | 2995 | } |
e117f756 | 2996 | break; |
9257e29e | 2997 | } |
9257e29e YJ |
2998 | } |
2999 | ||
e117f756 | 3000 | void ExynosCameraHWInterface2::m_preCaptureListenerISP(struct camera2_shot_ext * shot_ext) |
9257e29e | 3001 | { |
e117f756 YJ |
3002 | // Flash |
3003 | switch (m_ctlInfo.flash.m_flashCnt) { | |
3004 | case IS_FLASH_STATE_ON_WAIT: | |
3005 | if (shot_ext->shot.dm.flash.decision > 0) { | |
3006 | // store decision result to skip capture sequenece | |
73f5ad60 | 3007 | ALOGV("(%s): [Flash] IS_FLASH_ON, decision - %d", __FUNCTION__, shot_ext->shot.dm.flash.decision); |
e117f756 YJ |
3008 | if (shot_ext->shot.dm.flash.decision == 2) |
3009 | m_ctlInfo.flash.m_flashDecisionResult = false; | |
3010 | else | |
3011 | m_ctlInfo.flash.m_flashDecisionResult = true; | |
3012 | m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON_DONE; | |
3013 | } else { | |
3014 | if (m_ctlInfo.flash.m_flashTimeOut == 0) { | |
73f5ad60 | 3015 | ALOGV("(%s): [Flash] Timeout IS_FLASH_ON, decision is false setting", __FUNCTION__); |
e117f756 YJ |
3016 | m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON_DONE; |
3017 | m_ctlInfo.flash.m_flashDecisionResult = false; | |
caea49e6 | 3018 | } else { |
e117f756 | 3019 | m_ctlInfo.flash.m_flashTimeOut--; |
6f19b6cf | 3020 | } |
6f19b6cf | 3021 | } |
e117f756 YJ |
3022 | break; |
3023 | case IS_FLASH_STATE_AE_AWB_LOCK_WAIT: | |
3024 | if (shot_ext->shot.dm.aa.awbMode == AA_AWBMODE_LOCKED) { | |
73f5ad60 | 3025 | ALOGV("(%s): [Flash] FLASH_AUTO_AE_AWB_LOCK_WAIT - %d", __FUNCTION__, shot_ext->shot.dm.aa.awbMode); |
e117f756 YJ |
3026 | m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_WAIT; |
3027 | } else { | |
73f5ad60 | 3028 | ALOGV("(%s): [Flash] Waiting : AA_AWBMODE_LOCKED", __FUNCTION__); |
e117f756 YJ |
3029 | } |
3030 | break; | |
3031 | case IS_FLASH_STATE_CAPTURE_WAIT: | |
3032 | if (m_ctlInfo.flash.m_flashDecisionResult) { | |
3033 | if (shot_ext->shot.dm.flash.firingStable) { | |
3034 | m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE_JPEG; | |
6f19b6cf | 3035 | } else { |
9257e29e | 3036 | if (m_ctlInfo.flash.m_flashTimeOut == 0) { |
e117f756 YJ |
3037 | ALOGE("(%s): [Flash] Wait firingStable time-out!!", __FUNCTION__); |
3038 | m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE_JPEG; | |
9257e29e | 3039 | } else { |
e117f756 | 3040 | ALOGV("(%s): [Flash] Wait firingStable - %d", __FUNCTION__, m_ctlInfo.flash.m_flashTimeOut); |
9257e29e YJ |
3041 | m_ctlInfo.flash.m_flashTimeOut--; |
3042 | } | |
6f19b6cf | 3043 | } |
e117f756 YJ |
3044 | } else { |
3045 | m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE_JPEG; | |
6f19b6cf | 3046 | } |
e117f756 | 3047 | break; |
6f19b6cf | 3048 | } |
6f19b6cf YJ |
3049 | } |
3050 | ||
cdd53a9f YJ |
3051 | void ExynosCameraHWInterface2::m_preCaptureAeState(struct camera2_shot_ext * shot_ext) |
3052 | { | |
3053 | switch (m_ctlInfo.flash.i_flashMode) { | |
3054 | case AA_AEMODE_ON: | |
3055 | // At flash off mode, capture can be done as zsl capture | |
3056 | shot_ext->shot.dm.aa.aeState = AE_STATE_CONVERGED; | |
3057 | break; | |
3058 | case AA_AEMODE_ON_AUTO_FLASH: | |
3059 | // At flash auto mode, main flash have to be done if pre-flash was done. | |
3060 | if (m_ctlInfo.flash.m_flashDecisionResult && m_ctlInfo.flash.m_afFlashDoneFlg) | |
3061 | shot_ext->shot.dm.aa.aeState = AE_STATE_FLASH_REQUIRED; | |
1422aff9 MS |
3062 | // FALLTHRU |
3063 | default: | |
cdd53a9f YJ |
3064 | break; |
3065 | } | |
3066 | } | |
3067 | ||
4a9565ae YJ |
3068 | void ExynosCameraHWInterface2::m_updateAfRegion(struct camera2_shot_ext * shot_ext) |
3069 | { | |
ca714238 SK |
3070 | shot_ext->shot.ctl.aa.afRegions[0] = currentAfRegion[0]; |
3071 | shot_ext->shot.ctl.aa.afRegions[1] = currentAfRegion[1]; | |
3072 | shot_ext->shot.ctl.aa.afRegions[2] = currentAfRegion[2]; | |
3073 | shot_ext->shot.ctl.aa.afRegions[3] = currentAfRegion[3]; | |
3074 | } | |
3075 | ||
3076 | void ExynosCameraHWInterface2::SetAfRegion(uint32_t * afRegion) | |
3077 | { | |
3078 | currentAfRegion[0] = afRegion[0]; | |
3079 | currentAfRegion[1] = afRegion[1]; | |
3080 | currentAfRegion[2] = afRegion[2]; | |
3081 | currentAfRegion[3] = afRegion[3]; | |
4a9565ae YJ |
3082 | } |
3083 | ||
8a3fc5dd | 3084 | void ExynosCameraHWInterface2::m_afTrigger(struct camera2_shot_ext * shot_ext, int mode) |
4a9565ae YJ |
3085 | { |
3086 | if (m_afState == HAL_AFSTATE_SCANNING) { | |
3087 | ALOGD("(%s): restarting trigger ", __FUNCTION__); | |
8a3fc5dd | 3088 | } else if (!mode) { |
4a9565ae YJ |
3089 | if (m_afState != HAL_AFSTATE_NEEDS_COMMAND) |
3090 | ALOGD("(%s): wrong trigger state %d", __FUNCTION__, m_afState); | |
3091 | else | |
40acdcc8 | 3092 | m_afState = HAL_AFSTATE_STARTED; |
4a9565ae | 3093 | } |
40acdcc8 | 3094 | ALOGD("### AF Triggering with mode (%d) (%d)", m_afMode, m_afState); |
4a9565ae YJ |
3095 | shot_ext->shot.ctl.aa.afTrigger = 1; |
3096 | shot_ext->shot.ctl.aa.afMode = m_afMode; | |
3097 | m_IsAfTriggerRequired = false; | |
3098 | } | |
3099 | ||
13d8c7b4 SK |
3100 | void ExynosCameraHWInterface2::m_sensorThreadFunc(SignalDrivenThread * self) |
3101 | { | |
3102 | uint32_t currentSignal = self->GetProcessingSignal(); | |
3103 | SensorThread * selfThread = ((SensorThread*)self); | |
3104 | int index; | |
ad37861e | 3105 | int index_isp; |
13d8c7b4 SK |
3106 | status_t res; |
3107 | nsecs_t frameTime; | |
3108 | int bayersOnSensor = 0, bayersOnIsp = 0; | |
ad37861e SK |
3109 | int j = 0; |
3110 | bool isCapture = false; | |
13d8c7b4 SK |
3111 | ALOGV("DEBUG(%s): m_sensorThreadFunc (%x)", __FUNCTION__, currentSignal); |
3112 | ||
3113 | if (currentSignal & SIGNAL_THREAD_RELEASE) { | |
2c872806 | 3114 | CAM_LOGD("(%s): ENTER processing SIGNAL_THREAD_RELEASE", __FUNCTION__); |
13d8c7b4 | 3115 | |
9dd63e1f | 3116 | ALOGV("(%s): calling sensor streamoff", __FUNCTION__); |
13d8c7b4 | 3117 | cam_int_streamoff(&(m_camera_info.sensor)); |
9dd63e1f | 3118 | ALOGV("(%s): calling sensor streamoff done", __FUNCTION__); |
b5237e6b SK |
3119 | |
3120 | m_camera_info.sensor.buffers = 0; | |
3121 | ALOGV("DEBUG(%s): sensor calling reqbuf 0 ", __FUNCTION__); | |
3122 | cam_int_reqbufs(&(m_camera_info.sensor)); | |
3123 | ALOGV("DEBUG(%s): sensor calling reqbuf 0 done", __FUNCTION__); | |
5506cebf | 3124 | m_camera_info.sensor.status = false; |
ad37861e | 3125 | |
9dd63e1f SK |
3126 | ALOGV("(%s): calling ISP streamoff", __FUNCTION__); |
3127 | isp_int_streamoff(&(m_camera_info.isp)); | |
3128 | ALOGV("(%s): calling ISP streamoff done", __FUNCTION__); | |
ad37861e | 3129 | |
b5237e6b SK |
3130 | m_camera_info.isp.buffers = 0; |
3131 | ALOGV("DEBUG(%s): isp calling reqbuf 0 ", __FUNCTION__); | |
3132 | cam_int_reqbufs(&(m_camera_info.isp)); | |
3133 | ALOGV("DEBUG(%s): isp calling reqbuf 0 done", __FUNCTION__); | |
3134 | ||
13d8c7b4 | 3135 | exynos_v4l2_s_ctrl(m_camera_info.sensor.fd, V4L2_CID_IS_S_STREAM, IS_DISABLE_STREAM); |
13d8c7b4 | 3136 | |
52f54308 | 3137 | m_requestManager->releaseSensorQ(); |
2adfa429 | 3138 | m_requestManager->ResetEntry(); |
ad37861e | 3139 | ALOGV("(%s): EXIT processing SIGNAL_THREAD_RELEASE", __FUNCTION__); |
13d8c7b4 SK |
3140 | selfThread->SetSignal(SIGNAL_THREAD_TERMINATE); |
3141 | return; | |
3142 | } | |
3143 | ||
3144 | if (currentSignal & SIGNAL_SENSOR_START_REQ_PROCESSING) | |
3145 | { | |
3146 | ALOGV("DEBUG(%s): SensorThread processing SIGNAL_SENSOR_START_REQ_PROCESSING", __FUNCTION__); | |
9dd63e1f | 3147 | int targetStreamIndex = 0, i=0; |
ad37861e | 3148 | int matchedFrameCnt = -1, processingReqIndex; |
13d8c7b4 | 3149 | struct camera2_shot_ext *shot_ext; |
ad37861e | 3150 | struct camera2_shot_ext *shot_ext_capture; |
0f26b20f | 3151 | bool triggered = false; |
9dd63e1f | 3152 | |
ad37861e | 3153 | /* dqbuf from sensor */ |
5506cebf | 3154 | ALOGV("Sensor DQbuf start"); |
13d8c7b4 | 3155 | index = cam_int_dqbuf(&(m_camera_info.sensor)); |
52f54308 SK |
3156 | m_requestManager->pushSensorQ(index); |
3157 | ALOGV("Sensor DQbuf done(%d)", index); | |
9dd63e1f | 3158 | shot_ext = (struct camera2_shot_ext *)(m_camera_info.sensor.buffer[index].virt.extP[1]); |
ad37861e | 3159 | |
a15b4e3f SK |
3160 | if (m_nightCaptureCnt != 0) { |
3161 | matchedFrameCnt = m_nightCaptureFrameCnt; | |
e117f756 | 3162 | } else if (m_ctlInfo.flash.m_flashCnt >= IS_FLASH_STATE_CAPTURE) { |
9a710a45 | 3163 | matchedFrameCnt = m_ctlInfo.flash.m_flashFrameCount; |
caea49e6 | 3164 | ALOGV("Skip frame, request is fixed at %d", matchedFrameCnt); |
6f19b6cf | 3165 | } else { |
492a2506 | 3166 | matchedFrameCnt = m_requestManager->FindFrameCnt(shot_ext, m_isRequestQueueNull); |
a15b4e3f | 3167 | } |
ad37861e | 3168 | |
5c88d1f2 C |
3169 | if (matchedFrameCnt == -1 && m_vdisBubbleCnt > 0) { |
3170 | matchedFrameCnt = m_vdisDupFrame; | |
3171 | } | |
5c88d1f2 | 3172 | |
a07cbd98 | 3173 | if (matchedFrameCnt != -1) { |
ce77365b | 3174 | if (m_vdisBubbleCnt == 0 || m_vdisDupFrame != matchedFrameCnt) { |
5c88d1f2 C |
3175 | frameTime = systemTime(); |
3176 | m_requestManager->RegisterTimestamp(matchedFrameCnt, &frameTime); | |
3177 | m_requestManager->UpdateIspParameters(shot_ext, matchedFrameCnt, &m_ctlInfo); | |
ce77365b HC |
3178 | } else { |
3179 | ALOGV("bubble for vids: m_vdisBubbleCnt %d, matchedFrameCnt %d", m_vdisDupFrame, matchedFrameCnt); | |
5c88d1f2 | 3180 | } |
9a710a45 | 3181 | |
40acdcc8 YJ |
3182 | // face af mode setting in case of face priority scene mode |
3183 | if (m_ctlInfo.scene.prevSceneMode != shot_ext->shot.ctl.aa.sceneMode) { | |
3184 | ALOGV("(%s): Scene mode changed (%d)", __FUNCTION__, shot_ext->shot.ctl.aa.sceneMode); | |
3185 | m_ctlInfo.scene.prevSceneMode = shot_ext->shot.ctl.aa.sceneMode; | |
3186 | } | |
3187 | ||
308291de | 3188 | m_zoomRatio = (float)m_camera2->getSensorW() / (float)shot_ext->shot.ctl.scaler.cropRegion[2]; |
e4657e32 SK |
3189 | float zoomLeft, zoomTop, zoomWidth, zoomHeight; |
3190 | int crop_x = 0, crop_y = 0, crop_w = 0, crop_h = 0; | |
3191 | ||
3192 | m_getRatioSize(m_camera2->getSensorW(), m_camera2->getSensorH(), | |
5506cebf | 3193 | m_streamThreads[0]->m_parameters.width, m_streamThreads[0]->m_parameters.height, |
e4657e32 SK |
3194 | &crop_x, &crop_y, |
3195 | &crop_w, &crop_h, | |
3196 | 0); | |
3197 | ||
5506cebf | 3198 | if (m_streamThreads[0]->m_parameters.width >= m_streamThreads[0]->m_parameters.height) { |
308291de | 3199 | zoomWidth = m_camera2->getSensorW() / m_zoomRatio; |
e4657e32 | 3200 | zoomHeight = zoomWidth * |
5506cebf | 3201 | m_streamThreads[0]->m_parameters.height / m_streamThreads[0]->m_parameters.width; |
e4657e32 | 3202 | } else { |
308291de | 3203 | zoomHeight = m_camera2->getSensorH() / m_zoomRatio; |
e4657e32 | 3204 | zoomWidth = zoomHeight * |
5506cebf | 3205 | m_streamThreads[0]->m_parameters.width / m_streamThreads[0]->m_parameters.height; |
e4657e32 SK |
3206 | } |
3207 | zoomLeft = (crop_w - zoomWidth) / 2; | |
3208 | zoomTop = (crop_h - zoomHeight) / 2; | |
3209 | ||
1422aff9 | 3210 | int32_t new_cropRegion[3] = { (int32_t)zoomLeft, (int32_t)zoomTop, (int32_t)zoomWidth }; |
e4657e32 | 3211 | |
408f6161 HC |
3212 | int cropCompensation = (new_cropRegion[0] * 2 + new_cropRegion[2]) - ALIGN(crop_w, 4); |
3213 | if (cropCompensation) | |
3214 | new_cropRegion[2] -= cropCompensation; | |
308291de | 3215 | |
e4657e32 SK |
3216 | shot_ext->shot.ctl.scaler.cropRegion[0] = new_cropRegion[0]; |
3217 | shot_ext->shot.ctl.scaler.cropRegion[1] = new_cropRegion[1]; | |
3218 | shot_ext->shot.ctl.scaler.cropRegion[2] = new_cropRegion[2]; | |
8a3fc5dd | 3219 | if (m_IsAfModeUpdateRequired && (m_ctlInfo.flash.m_precaptureTriggerId == 0)) { |
ca714238 | 3220 | ALOGD("### Applying AF Mode change(Mode %d) ", m_afMode); |
0f26b20f SK |
3221 | shot_ext->shot.ctl.aa.afMode = m_afMode; |
3222 | if (m_afMode == AA_AFMODE_CONTINUOUS_VIDEO || m_afMode == AA_AFMODE_CONTINUOUS_PICTURE) { | |
ed4ad5fe | 3223 | ALOGD("### With Automatic triger for continuous modes"); |
0f26b20f SK |
3224 | m_afState = HAL_AFSTATE_STARTED; |
3225 | shot_ext->shot.ctl.aa.afTrigger = 1; | |
3226 | triggered = true; | |
40acdcc8 YJ |
3227 | if ((m_ctlInfo.scene.prevSceneMode == AA_SCENE_MODE_UNSUPPORTED) || |
3228 | (m_ctlInfo.scene.prevSceneMode == AA_SCENE_MODE_FACE_PRIORITY)) { | |
3229 | switch (m_afMode) { | |
3230 | case AA_AFMODE_CONTINUOUS_PICTURE: | |
6bd0cd3b | 3231 | shot_ext->shot.ctl.aa.afMode = AA_AFMODE_CONTINUOUS_PICTURE; |
40acdcc8 | 3232 | ALOGD("### Face AF Mode change (Mode %d) ", shot_ext->shot.ctl.aa.afMode); |
1422aff9 MS |
3233 | // FALLTHRU |
3234 | default: | |
40acdcc8 | 3235 | break; |
40acdcc8 YJ |
3236 | } |
3237 | } | |
cdd53a9f YJ |
3238 | // reset flash result |
3239 | if (m_ctlInfo.flash.m_afFlashDoneFlg) { | |
3240 | m_ctlInfo.flash.m_flashEnableFlg = false; | |
3241 | m_ctlInfo.flash.m_afFlashDoneFlg = false; | |
3242 | m_ctlInfo.flash.m_flashDecisionResult = false; | |
d6d94475 | 3243 | m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_NONE; |
cdd53a9f | 3244 | } |
40acdcc8 | 3245 | m_ctlInfo.af.m_afTriggerTimeOut = 1; |
0f26b20f | 3246 | } |
40acdcc8 | 3247 | |
0f26b20f | 3248 | m_IsAfModeUpdateRequired = false; |
311d52eb | 3249 | // support inifinity focus mode |
fdbaf5d2 | 3250 | if ((m_afMode == AA_AFMODE_MANUAL) && ( shot_ext->shot.ctl.lens.focusDistance == 0)) { |
311d52eb YJ |
3251 | shot_ext->shot.ctl.aa.afMode = AA_AFMODE_INFINITY; |
3252 | shot_ext->shot.ctl.aa.afTrigger = 1; | |
3253 | triggered = true; | |
3254 | } | |
0f26b20f SK |
3255 | if (m_afMode2 != NO_CHANGE) { |
3256 | enum aa_afmode tempAfMode = m_afMode2; | |
3257 | m_afMode2 = NO_CHANGE; | |
3258 | SetAfMode(tempAfMode); | |
3259 | } | |
3260 | } | |
3261 | else { | |
3262 | shot_ext->shot.ctl.aa.afMode = NO_CHANGE; | |
3263 | } | |
3264 | if (m_IsAfTriggerRequired) { | |
e117f756 | 3265 | if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) { |
4a9565ae | 3266 | // flash case |
e117f756 | 3267 | if (m_ctlInfo.flash.m_flashCnt == IS_FLASH_STATE_ON_DONE) { |
4a9565ae YJ |
3268 | if ((m_afMode != AA_AFMODE_AUTO) && (m_afMode != AA_AFMODE_MACRO)) { |
3269 | // Flash is enabled and start AF | |
8a3fc5dd | 3270 | m_afTrigger(shot_ext, 1); |
6f19b6cf | 3271 | } else { |
ca714238 | 3272 | m_afTrigger(shot_ext, 0); |
6f19b6cf | 3273 | } |
6f19b6cf YJ |
3274 | } |
3275 | } else { | |
4a9565ae | 3276 | // non-flash case |
ca714238 | 3277 | m_afTrigger(shot_ext, 0); |
9900d0c4 | 3278 | } |
4a9565ae | 3279 | } else { |
8e2c2fdb | 3280 | shot_ext->shot.ctl.aa.afTrigger = 0; |
0f26b20f | 3281 | } |
5506cebf SK |
3282 | |
3283 | if (m_wideAspect) { | |
3284 | shot_ext->setfile = ISS_SUB_SCENARIO_VIDEO; | |
5506cebf SK |
3285 | } else { |
3286 | shot_ext->setfile = ISS_SUB_SCENARIO_STILL; | |
0f26b20f SK |
3287 | } |
3288 | if (triggered) | |
3289 | shot_ext->shot.ctl.aa.afTrigger = 1; | |
3290 | ||
3291 | // TODO : check collision with AFMode Update | |
3292 | if (m_IsAfLockRequired) { | |
3293 | shot_ext->shot.ctl.aa.afMode = AA_AFMODE_OFF; | |
3294 | m_IsAfLockRequired = false; | |
3295 | } | |
4ed2f103 | 3296 | ALOGV("### Isp Qbuf start(%d) count (%d), SCP(%d) SCC(%d) DIS(%d) shot_size(%d)", |
ad37861e SK |
3297 | index, |
3298 | shot_ext->shot.ctl.request.frameCount, | |
3299 | shot_ext->request_scp, | |
3300 | shot_ext->request_scc, | |
3301 | shot_ext->dis_bypass, sizeof(camera2_shot)); | |
4a9565ae YJ |
3302 | |
3303 | // update AF region | |
3304 | m_updateAfRegion(shot_ext); | |
3305 | ||
572470e2 | 3306 | m_lastSceneMode = shot_ext->shot.ctl.aa.sceneMode; |
6bf36b60 SK |
3307 | if (shot_ext->shot.ctl.aa.sceneMode == AA_SCENE_MODE_NIGHT |
3308 | && shot_ext->shot.ctl.aa.aeMode == AA_AEMODE_LOCKED) | |
3309 | shot_ext->shot.ctl.aa.aeMode = AA_AEMODE_ON; | |
a15b4e3f | 3310 | if (m_nightCaptureCnt == 0) { |
5506cebf | 3311 | if (shot_ext->shot.ctl.aa.captureIntent == AA_CAPTURE_INTENT_STILL_CAPTURE |
a15b4e3f SK |
3312 | && shot_ext->shot.ctl.aa.sceneMode == AA_SCENE_MODE_NIGHT) { |
3313 | shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_NIGHT_CAPTURE; | |
1c5e692d | 3314 | shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 2; |
a15b4e3f SK |
3315 | shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30; |
3316 | m_nightCaptureCnt = 4; | |
3317 | m_nightCaptureFrameCnt = matchedFrameCnt; | |
3318 | shot_ext->request_scc = 0; | |
3319 | } | |
3320 | } | |
3321 | else if (m_nightCaptureCnt == 1) { | |
3322 | shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_NIGHT_CAPTURE; | |
1c5e692d SK |
3323 | shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 30; |
3324 | shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30; | |
a15b4e3f | 3325 | m_nightCaptureCnt--; |
2f4d175d | 3326 | m_nightCaptureFrameCnt = 0; |
a15b4e3f SK |
3327 | shot_ext->request_scc = 1; |
3328 | } | |
6bf36b60 SK |
3329 | else if (m_nightCaptureCnt == 2) { |
3330 | shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_NIGHT_CAPTURE; | |
1c5e692d SK |
3331 | shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 2; |
3332 | shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30; | |
6bf36b60 SK |
3333 | m_nightCaptureCnt--; |
3334 | shot_ext->request_scc = 0; | |
3335 | } | |
1c5e692d | 3336 | else if (m_nightCaptureCnt == 3) { |
a15b4e3f | 3337 | shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_NIGHT_CAPTURE; |
1c5e692d SK |
3338 | shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 2; |
3339 | shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30; | |
3340 | m_nightCaptureCnt--; | |
3341 | shot_ext->request_scc = 0; | |
3342 | } | |
3343 | else if (m_nightCaptureCnt == 4) { | |
3344 | shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_NIGHT_CAPTURE; | |
3345 | shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 2; | |
3346 | shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30; | |
a15b4e3f SK |
3347 | m_nightCaptureCnt--; |
3348 | shot_ext->request_scc = 0; | |
3349 | } | |
6f19b6cf | 3350 | |
3c17a3f7 SK |
3351 | switch (shot_ext->shot.ctl.aa.aeTargetFpsRange[1]) { |
3352 | case 15: | |
3353 | shot_ext->shot.ctl.sensor.frameDuration = (66666 * 1000); | |
3354 | break; | |
3355 | ||
3356 | case 24: | |
3357 | shot_ext->shot.ctl.sensor.frameDuration = (41666 * 1000); | |
3358 | break; | |
3359 | ||
3360 | case 25: | |
3361 | shot_ext->shot.ctl.sensor.frameDuration = (40000 * 1000); | |
3362 | break; | |
3363 | ||
3364 | case 30: | |
3365 | default: | |
3366 | shot_ext->shot.ctl.sensor.frameDuration = (33333 * 1000); | |
3367 | break; | |
3368 | } | |
3369 | shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30; | |
3370 | ||
6f19b6cf YJ |
3371 | // Flash mode |
3372 | // Keep and Skip request_scc = 1 at flash enable mode to operate flash sequence | |
e117f756 YJ |
3373 | if ((m_ctlInfo.flash.i_flashMode >= AA_AEMODE_ON_AUTO_FLASH) |
3374 | && (shot_ext->shot.ctl.aa.captureIntent == AA_CAPTURE_INTENT_STILL_CAPTURE) | |
6f19b6cf | 3375 | && (m_cameraId == 0)) { |
e117f756 YJ |
3376 | if (!m_ctlInfo.flash.m_flashDecisionResult) { |
3377 | m_ctlInfo.flash.m_flashEnableFlg = false; | |
3378 | m_ctlInfo.flash.m_afFlashDoneFlg = false; | |
d6d94475 | 3379 | m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_NONE; |
cdd53a9f YJ |
3380 | } else if ((m_ctlInfo.flash.m_flashCnt == IS_FLASH_STATE_AUTO_DONE) || |
3381 | (m_ctlInfo.flash.m_flashCnt == IS_FLASH_STATE_AUTO_OFF)) { | |
40acdcc8 | 3382 | ALOGD("(%s): [Flash] Flash capture start : skip request scc 1#####", __FUNCTION__); |
6f19b6cf | 3383 | shot_ext->request_scc = 0; |
9a710a45 YJ |
3384 | m_ctlInfo.flash.m_flashFrameCount = matchedFrameCnt; |
3385 | m_ctlInfo.flash.m_flashEnableFlg = true; | |
e117f756 YJ |
3386 | m_ctlInfo.flash.m_afFlashDoneFlg = false; |
3387 | m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE; | |
73f5ad60 | 3388 | } else if (m_ctlInfo.flash.m_flashCnt < IS_FLASH_STATE_AUTO_DONE) { |
40acdcc8 YJ |
3389 | ALOGE("(%s): [Flash] Flash capture Error- wrong state !!!!!! (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt); |
3390 | shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_OFF; | |
3391 | m_ctlInfo.flash.m_flashEnableFlg = false; | |
40acdcc8 | 3392 | m_ctlInfo.flash.m_afFlashDoneFlg= false; |
d6d94475 | 3393 | m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_NONE; |
caea49e6 | 3394 | } |
4a9565ae YJ |
3395 | } else if (shot_ext->shot.ctl.aa.captureIntent == AA_CAPTURE_INTENT_STILL_CAPTURE) { |
3396 | m_ctlInfo.flash.m_flashDecisionResult = false; | |
caea49e6 YJ |
3397 | } |
3398 | ||
34d2b94a SK |
3399 | if (shot_ext->shot.ctl.flash.flashMode == CAM2_FLASH_MODE_TORCH) { |
3400 | if (m_ctlInfo.flash.m_flashTorchMode == false) { | |
3401 | m_ctlInfo.flash.m_flashTorchMode = true; | |
3402 | } | |
caea49e6 | 3403 | } else { |
34d2b94a | 3404 | if (m_ctlInfo.flash.m_flashTorchMode == true) { |
caea49e6 YJ |
3405 | shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_OFF; |
3406 | shot_ext->shot.ctl.flash.firingPower = 0; | |
3407 | m_ctlInfo.flash.m_flashTorchMode = false; | |
3408 | } else { | |
3409 | shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_NOP; | |
6f19b6cf YJ |
3410 | } |
3411 | } | |
3412 | ||
5506cebf | 3413 | if (shot_ext->isReprocessing) { |
69d1e6e9 | 3414 | ALOGV("(%s): Sending signal for Reprocess request", __FUNCTION__); |
5506cebf SK |
3415 | m_currentReprocessOutStreams = shot_ext->shot.ctl.request.outputStreams[0]; |
3416 | shot_ext->request_scp = 0; | |
3417 | shot_ext->request_scc = 0; | |
3418 | m_reprocessingFrameCnt = shot_ext->shot.ctl.request.frameCount; | |
69d1e6e9 | 3419 | m_ctlInfo.flash.m_flashDecisionResult = false; |
d6d94475 MS |
3420 | void *shot = m_requestManager->GetInternalShotExtByFrameCnt(m_reprocessingFrameCnt); |
3421 | if (!shot) { // m_isRequestQueueNull reuse current | |
3422 | ALOGD("(%s): isReprocessing: " | |
3423 | "m_reprocessingFrameCnt missing, using shot_ext", | |
3424 | __FUNCTION__); | |
3425 | shot = shot_ext; | |
3426 | } | |
3427 | memcpy(&m_jpegMetadata, shot, sizeof(struct camera2_shot_ext)); | |
5506cebf | 3428 | m_streamThreads[1]->SetSignal(SIGNAL_STREAM_REPROCESSING_START); |
cdd53a9f YJ |
3429 | m_ctlInfo.flash.m_flashEnableFlg = false; |
3430 | } | |
3431 | ||
3432 | if (m_ctlInfo.flash.m_flashEnableFlg) { | |
3433 | m_preCaptureListenerSensor(shot_ext); | |
3434 | m_preCaptureSetter(shot_ext); | |
5506cebf | 3435 | } |
fdbaf5d2 | 3436 | |
8e2c2fdb SK |
3437 | ALOGV("(%s): queued aa(%d) aemode(%d) awb(%d) afmode(%d) trigger(%d)", __FUNCTION__, |
3438 | (int)(shot_ext->shot.ctl.aa.mode), (int)(shot_ext->shot.ctl.aa.aeMode), | |
3439 | (int)(shot_ext->shot.ctl.aa.awbMode), (int)(shot_ext->shot.ctl.aa.afMode), | |
3440 | (int)(shot_ext->shot.ctl.aa.afTrigger)); | |
7d0efb59 | 3441 | |
5c88d1f2 C |
3442 | if (m_vdisBubbleCnt > 0 && m_vdisDupFrame == matchedFrameCnt) { |
3443 | shot_ext->dis_bypass = 1; | |
9c046e3a | 3444 | shot_ext->dnr_bypass = 1; |
5c88d1f2 C |
3445 | shot_ext->request_scp = 0; |
3446 | shot_ext->request_scc = 0; | |
3447 | m_vdisBubbleCnt--; | |
3448 | matchedFrameCnt = -1; | |
3449 | } else { | |
3450 | m_vdisDupFrame = matchedFrameCnt; | |
3451 | } | |
f9a06609 SK |
3452 | if (m_scpForceSuspended) |
3453 | shot_ext->request_scc = 0; | |
5c88d1f2 | 3454 | |
5506cebf | 3455 | uint32_t current_scp = shot_ext->request_scp; |
a85ec381 | 3456 | uint32_t current_scc = shot_ext->request_scc; |
7d0efb59 | 3457 | |
c0b6e17e | 3458 | if (shot_ext->shot.dm.request.frameCount == 0) { |
4aa4d739 | 3459 | CAM_LOGE("ERR(%s): dm.request.frameCount = %d", __FUNCTION__, shot_ext->shot.dm.request.frameCount); |
c0b6e17e C |
3460 | } |
3461 | ||
ad37861e | 3462 | cam_int_qbuf(&(m_camera_info.isp), index); |
13d8c7b4 | 3463 | |
ad37861e SK |
3464 | ALOGV("### isp DQBUF start"); |
3465 | index_isp = cam_int_dqbuf(&(m_camera_info.isp)); | |
ef6f83ca | 3466 | |
ad37861e | 3467 | shot_ext = (struct camera2_shot_ext *)(m_camera_info.isp.buffer[index_isp].virt.extP[1]); |
13d8c7b4 | 3468 | |
e117f756 YJ |
3469 | if (m_ctlInfo.flash.m_flashEnableFlg) |
3470 | m_preCaptureListenerISP(shot_ext); | |
9a710a45 | 3471 | |
7ef20f42 | 3472 | ALOGV("### Isp DQbuf done(%d) count (%d), SCP(%d) SCC(%d) dis_bypass(%d) dnr_bypass(%d) shot_size(%d)", |
ad37861e SK |
3473 | index, |
3474 | shot_ext->shot.ctl.request.frameCount, | |
3475 | shot_ext->request_scp, | |
3476 | shot_ext->request_scc, | |
7ef20f42 HC |
3477 | shot_ext->dis_bypass, |
3478 | shot_ext->dnr_bypass, sizeof(camera2_shot)); | |
fdbaf5d2 | 3479 | |
ef6f83ca | 3480 | ALOGV("(%s): DM aa(%d) aemode(%d) awb(%d) afmode(%d)", __FUNCTION__, |
8e2c2fdb SK |
3481 | (int)(shot_ext->shot.dm.aa.mode), (int)(shot_ext->shot.dm.aa.aeMode), |
3482 | (int)(shot_ext->shot.dm.aa.awbMode), | |
3483 | (int)(shot_ext->shot.dm.aa.afMode)); | |
13d8c7b4 | 3484 | |
2f4d175d | 3485 | #ifndef ENABLE_FRAME_SYNC |
5506cebf | 3486 | m_currentOutputStreams = shot_ext->shot.ctl.request.outputStreams[0]; |
2f4d175d | 3487 | #endif |
2adfa429 | 3488 | |
fd2d78a2 SK |
3489 | if (!shot_ext->fd_bypass) { |
3490 | /* FD orientation axis transformation */ | |
3491 | for (int i=0; i < CAMERA2_MAX_FACES; i++) { | |
3492 | if (shot_ext->shot.dm.stats.faceRectangles[i][0] > 0) | |
3493 | shot_ext->shot.dm.stats.faceRectangles[i][0] = (m_camera2->m_curCameraInfo->sensorW | |
ef6f83ca | 3494 | * shot_ext->shot.dm.stats.faceRectangles[i][0]) |
5506cebf | 3495 | / m_streamThreads[0].get()->m_parameters.width; |
fd2d78a2 SK |
3496 | if (shot_ext->shot.dm.stats.faceRectangles[i][1] > 0) |
3497 | shot_ext->shot.dm.stats.faceRectangles[i][1] = (m_camera2->m_curCameraInfo->sensorH | |
ef6f83ca | 3498 | * shot_ext->shot.dm.stats.faceRectangles[i][1]) |
5506cebf | 3499 | / m_streamThreads[0].get()->m_parameters.height; |
fd2d78a2 SK |
3500 | if (shot_ext->shot.dm.stats.faceRectangles[i][2] > 0) |
3501 | shot_ext->shot.dm.stats.faceRectangles[i][2] = (m_camera2->m_curCameraInfo->sensorW | |
ef6f83ca | 3502 | * shot_ext->shot.dm.stats.faceRectangles[i][2]) |
5506cebf | 3503 | / m_streamThreads[0].get()->m_parameters.width; |
fd2d78a2 SK |
3504 | if (shot_ext->shot.dm.stats.faceRectangles[i][3] > 0) |
3505 | shot_ext->shot.dm.stats.faceRectangles[i][3] = (m_camera2->m_curCameraInfo->sensorH | |
ef6f83ca | 3506 | * shot_ext->shot.dm.stats.faceRectangles[i][3]) |
5506cebf | 3507 | / m_streamThreads[0].get()->m_parameters.height; |
fd2d78a2 SK |
3508 | } |
3509 | } | |
cdd53a9f YJ |
3510 | // aeState control |
3511 | if (shot_ext->shot.ctl.aa.sceneMode != AA_SCENE_MODE_NIGHT) | |
3512 | m_preCaptureAeState(shot_ext); | |
47d3a1ea | 3513 | |
275c9744 | 3514 | // At scene mode face priority |
40acdcc8 YJ |
3515 | if (shot_ext->shot.dm.aa.afMode == AA_AFMODE_CONTINUOUS_PICTURE_FACE) |
3516 | shot_ext->shot.dm.aa.afMode = AA_AFMODE_CONTINUOUS_PICTURE; | |
275c9744 | 3517 | |
48728d49 | 3518 | if (matchedFrameCnt != -1 && m_nightCaptureCnt == 0 && (m_ctlInfo.flash.m_flashCnt < IS_FLASH_STATE_CAPTURE)) { |
a15b4e3f SK |
3519 | m_requestManager->ApplyDynamicMetadata(shot_ext); |
3520 | } | |
69d1e6e9 SK |
3521 | |
3522 | if (current_scc != shot_ext->request_scc) { | |
3523 | ALOGD("(%s): scc frame drop1 request_scc(%d to %d)", | |
3524 | __FUNCTION__, current_scc, shot_ext->request_scc); | |
3525 | m_requestManager->NotifyStreamOutput(shot_ext->shot.ctl.request.frameCount); | |
3526 | } | |
3527 | if (shot_ext->request_scc) { | |
3528 | ALOGV("send SIGNAL_STREAM_DATA_COMING (SCC)"); | |
3529 | if (shot_ext->shot.ctl.request.outputStreams[0] & STREAM_MASK_JPEG) { | |
d6d94475 MS |
3530 | void *shot = shot_ext; |
3531 | if (m_ctlInfo.flash.m_flashCnt < IS_FLASH_STATE_CAPTURE) { | |
3532 | shot = m_requestManager->GetInternalShotExtByFrameCnt(m_reprocessingFrameCnt); | |
3533 | if (!shot) { // m_isRequestQueueNull reuse current | |
3534 | ALOGD("(%s): request_scc: " | |
3535 | "m_reprocessingFrameCnt missing, using shot_ext", | |
3536 | __FUNCTION__); | |
3537 | shot = shot_ext; | |
3538 | } | |
3539 | } | |
3540 | memcpy(&m_jpegMetadata, shot, sizeof(struct camera2_shot_ext)); | |
69d1e6e9 SK |
3541 | } |
3542 | m_streamThreads[1]->SetSignal(SIGNAL_STREAM_DATA_COMING); | |
3543 | } | |
3544 | if (current_scp != shot_ext->request_scp) { | |
3545 | ALOGD("(%s): scp frame drop1 request_scp(%d to %d)", | |
3546 | __FUNCTION__, current_scp, shot_ext->request_scp); | |
3547 | m_requestManager->NotifyStreamOutput(shot_ext->shot.ctl.request.frameCount); | |
3548 | } | |
3549 | if (shot_ext->request_scp) { | |
3550 | ALOGV("send SIGNAL_STREAM_DATA_COMING (SCP)"); | |
3551 | m_streamThreads[0]->SetSignal(SIGNAL_STREAM_DATA_COMING); | |
3552 | } | |
3553 | ||
3554 | ALOGV("(%s): SCP_CLOSING check sensor(%d) scc(%d) scp(%d) ", __FUNCTION__, | |
3555 | shot_ext->request_sensor, shot_ext->request_scc, shot_ext->request_scp); | |
3556 | if (shot_ext->request_scc + shot_ext->request_scp + shot_ext->request_sensor == 0) { | |
3557 | ALOGV("(%s): SCP_CLOSING check OK ", __FUNCTION__); | |
3558 | m_scp_closed = true; | |
3559 | } | |
3560 | else | |
3561 | m_scp_closed = false; | |
3562 | ||
0f26b20f | 3563 | OnAfNotification(shot_ext->shot.dm.aa.afState); |
10e122bd SK |
3564 | OnPrecaptureMeteringNotificationISP(); |
3565 | } else { | |
8b5b8078 | 3566 | memcpy(&shot_ext->shot.ctl, &m_camera_info.dummy_shot.shot.ctl, sizeof(struct camera2_ctl)); |
10e122bd SK |
3567 | shot_ext->shot.ctl.request.frameCount = 0xfffffffe; |
3568 | shot_ext->request_sensor = 1; | |
3569 | shot_ext->dis_bypass = 1; | |
3570 | shot_ext->dnr_bypass = 1; | |
3571 | shot_ext->fd_bypass = 1; | |
3572 | shot_ext->drc_bypass = 1; | |
3573 | shot_ext->request_scc = 0; | |
3574 | shot_ext->request_scp = 0; | |
3575 | if (m_wideAspect) { | |
3576 | shot_ext->setfile = ISS_SUB_SCENARIO_VIDEO; | |
10e122bd SK |
3577 | } else { |
3578 | shot_ext->setfile = ISS_SUB_SCENARIO_STILL; | |
3579 | } | |
572470e2 SK |
3580 | shot_ext->shot.ctl.aa.sceneMode = (enum aa_scene_mode)m_lastSceneMode; |
3581 | if (shot_ext->shot.ctl.aa.sceneMode == AA_SCENE_MODE_NIGHT_CAPTURE || shot_ext->shot.ctl.aa.sceneMode == AA_SCENE_MODE_NIGHT) { | |
3582 | shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 8; | |
3583 | shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30; | |
3584 | } | |
10e122bd | 3585 | shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_OFF; |
cdd53a9f | 3586 | shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_OFF; |
10e122bd | 3587 | ALOGV("### isp QBUF start (bubble)"); |
8b5b8078 HC |
3588 | ALOGV("bubble: queued aa(%d) aemode(%d) awb(%d) afmode(%d) trigger(%d)", |
3589 | (int)(shot_ext->shot.ctl.aa.mode), (int)(shot_ext->shot.ctl.aa.aeMode), | |
3590 | (int)(shot_ext->shot.ctl.aa.awbMode), (int)(shot_ext->shot.ctl.aa.afMode), | |
3591 | (int)(shot_ext->shot.ctl.aa.afTrigger)); | |
3592 | ||
10e122bd SK |
3593 | cam_int_qbuf(&(m_camera_info.isp), index); |
3594 | ALOGV("### isp DQBUF start (bubble)"); | |
3595 | index_isp = cam_int_dqbuf(&(m_camera_info.isp)); | |
3596 | shot_ext = (struct camera2_shot_ext *)(m_camera_info.isp.buffer[index_isp].virt.extP[1]); | |
8b5b8078 HC |
3597 | ALOGV("bubble: DM aa(%d) aemode(%d) awb(%d) afmode(%d)", |
3598 | (int)(shot_ext->shot.dm.aa.mode), (int)(shot_ext->shot.dm.aa.aeMode), | |
3599 | (int)(shot_ext->shot.dm.aa.awbMode), | |
3600 | (int)(shot_ext->shot.dm.aa.afMode)); | |
3601 | ||
10e122bd | 3602 | OnAfNotification(shot_ext->shot.dm.aa.afState); |
ad37861e | 3603 | } |
13d8c7b4 | 3604 | |
52f54308 SK |
3605 | index = m_requestManager->popSensorQ(); |
3606 | if(index < 0){ | |
3607 | ALOGE("sensorQ is empty"); | |
3608 | return; | |
3609 | } | |
3610 | ||
ca714238 | 3611 | processingReqIndex = m_requestManager->MarkProcessingRequest(&(m_camera_info.sensor.buffer[index])); |
ad37861e SK |
3612 | shot_ext = (struct camera2_shot_ext *)(m_camera_info.sensor.buffer[index].virt.extP[1]); |
3613 | if (m_scp_closing || m_scp_closed) { | |
3614 | ALOGD("(%s): SCP_CLOSING(%d) SCP_CLOSED(%d)", __FUNCTION__, m_scp_closing, m_scp_closed); | |
3615 | shot_ext->request_scc = 0; | |
3616 | shot_ext->request_scp = 0; | |
3617 | shot_ext->request_sensor = 0; | |
3618 | } | |
ad37861e | 3619 | cam_int_qbuf(&(m_camera_info.sensor), index); |
52f54308 | 3620 | ALOGV("Sensor Qbuf done(%d)", index); |
c15a6b00 | 3621 | |
ef6f83ca SK |
3622 | if (!m_scp_closing |
3623 | && ((matchedFrameCnt == -1) || (processingReqIndex == -1))){ | |
da7ca692 | 3624 | ALOGV("make bubble shot: matchedFramcnt(%d) processingReqIndex(%d)", |
ef6f83ca | 3625 | matchedFrameCnt, processingReqIndex); |
ad37861e SK |
3626 | selfThread->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING); |
3627 | } | |
c15a6b00 | 3628 | } |
ad37861e SK |
3629 | return; |
3630 | } | |
9dd63e1f | 3631 | |
86646da4 SK |
3632 | void ExynosCameraHWInterface2::m_streamBufferInit(SignalDrivenThread *self) |
3633 | { | |
3634 | uint32_t currentSignal = self->GetProcessingSignal(); | |
3635 | StreamThread * selfThread = ((StreamThread*)self); | |
3636 | stream_parameters_t *selfStreamParms = &(selfThread->m_parameters); | |
5506cebf SK |
3637 | node_info_t *currentNode = selfStreamParms->node; |
3638 | substream_parameters_t *subParms; | |
86646da4 SK |
3639 | buffer_handle_t * buf = NULL; |
3640 | status_t res; | |
3641 | void *virtAddr[3]; | |
3642 | int i, j; | |
3643 | int index; | |
3644 | nsecs_t timestamp; | |
3645 | ||
3646 | if (!(selfThread->m_isBufferInit)) | |
3647 | { | |
3648 | for ( i=0 ; i < selfStreamParms->numSvcBuffers; i++) { | |
3649 | res = selfStreamParms->streamOps->dequeue_buffer(selfStreamParms->streamOps, &buf); | |
3650 | if (res != NO_ERROR || buf == NULL) { | |
3651 | ALOGE("ERR(%s): Init: unable to dequeue buffer : %d",__FUNCTION__ , res); | |
3652 | return; | |
3653 | } | |
3654 | ALOGV("DEBUG(%s): got buf(%x) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf), | |
3655 | ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts); | |
3656 | ||
5506cebf | 3657 | index = selfThread->findBufferIndex(buf); |
86646da4 SK |
3658 | if (index == -1) { |
3659 | ALOGE("ERR(%s): could not find buffer index", __FUNCTION__); | |
3660 | } | |
3661 | else { | |
3662 | ALOGV("DEBUG(%s): found buffer index[%d] - status(%d)", | |
3663 | __FUNCTION__, index, selfStreamParms->svcBufStatus[index]); | |
3664 | if (selfStreamParms->svcBufStatus[index]== REQUIRES_DQ_FROM_SVC) | |
3665 | selfStreamParms->svcBufStatus[index] = ON_DRIVER; | |
3666 | else if (selfStreamParms->svcBufStatus[index]== ON_SERVICE) | |
3667 | selfStreamParms->svcBufStatus[index] = ON_HAL; | |
3668 | else { | |
3669 | ALOGV("DBG(%s): buffer status abnormal (%d) " | |
3670 | , __FUNCTION__, selfStreamParms->svcBufStatus[index]); | |
3671 | } | |
3672 | selfStreamParms->numSvcBufsInHal++; | |
86646da4 | 3673 | } |
5506cebf | 3674 | selfStreamParms->bufIndex = 0; |
86646da4 SK |
3675 | } |
3676 | selfThread->m_isBufferInit = true; | |
3677 | } | |
5506cebf SK |
3678 | for (int i = 0 ; i < NUM_MAX_SUBSTREAM ; i++) { |
3679 | if (selfThread->m_attachedSubStreams[i].streamId == -1) | |
3680 | continue; | |
86646da4 | 3681 | |
5506cebf SK |
3682 | subParms = &m_subStreams[selfThread->m_attachedSubStreams[i].streamId]; |
3683 | if (subParms->type && subParms->needBufferInit) { | |
3684 | ALOGV("(%s): [subStream] (id:%d) Buffer Initialization numsvcbuf(%d)", | |
3685 | __FUNCTION__, selfThread->m_attachedSubStreams[i].streamId, subParms->numSvcBuffers); | |
74d78ebe SK |
3686 | int checkingIndex = 0; |
3687 | bool found = false; | |
5506cebf SK |
3688 | for ( i = 0 ; i < subParms->numSvcBuffers; i++) { |
3689 | res = subParms->streamOps->dequeue_buffer(subParms->streamOps, &buf); | |
74d78ebe SK |
3690 | if (res != NO_ERROR || buf == NULL) { |
3691 | ALOGE("ERR(%s): Init: unable to dequeue buffer : %d",__FUNCTION__ , res); | |
3692 | return; | |
3693 | } | |
5506cebf SK |
3694 | subParms->numSvcBufsInHal++; |
3695 | ALOGV("DEBUG(%s): [subStream] got buf(%x) bufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf), | |
3696 | subParms->numSvcBufsInHal, ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts); | |
74d78ebe SK |
3697 | |
3698 | if (m_grallocHal->lock(m_grallocHal, *buf, | |
5506cebf SK |
3699 | subParms->usage, 0, 0, |
3700 | subParms->width, subParms->height, virtAddr) != 0) { | |
74d78ebe SK |
3701 | ALOGE("ERR(%s): could not obtain gralloc buffer", __FUNCTION__); |
3702 | } | |
3703 | else { | |
5506cebf | 3704 | ALOGV("DEBUG(%s): [subStream] locked img buf plane0(%x) plane1(%x) plane2(%x)", |
74d78ebe SK |
3705 | __FUNCTION__, (unsigned int)virtAddr[0], (unsigned int)virtAddr[1], (unsigned int)virtAddr[2]); |
3706 | } | |
3707 | found = false; | |
5506cebf SK |
3708 | for (checkingIndex = 0; checkingIndex < subParms->numSvcBuffers ; checkingIndex++) { |
3709 | if (subParms->svcBufHandle[checkingIndex] == *buf ) { | |
74d78ebe SK |
3710 | found = true; |
3711 | break; | |
3712 | } | |
3713 | } | |
5506cebf | 3714 | ALOGV("DEBUG(%s): [subStream] found(%d) - index[%d]", __FUNCTION__, found, checkingIndex); |
74d78ebe SK |
3715 | if (!found) break; |
3716 | ||
3717 | index = checkingIndex; | |
3718 | ||
3719 | if (index == -1) { | |
3720 | ALOGV("ERR(%s): could not find buffer index", __FUNCTION__); | |
3721 | } | |
3722 | else { | |
3723 | ALOGV("DEBUG(%s): found buffer index[%d] - status(%d)", | |
5506cebf SK |
3724 | __FUNCTION__, index, subParms->svcBufStatus[index]); |
3725 | if (subParms->svcBufStatus[index]== ON_SERVICE) | |
3726 | subParms->svcBufStatus[index] = ON_HAL; | |
74d78ebe SK |
3727 | else { |
3728 | ALOGV("DBG(%s): buffer status abnormal (%d) " | |
5506cebf | 3729 | , __FUNCTION__, subParms->svcBufStatus[index]); |
74d78ebe | 3730 | } |
5506cebf | 3731 | if (*buf != subParms->svcBufHandle[index]) |
74d78ebe SK |
3732 | ALOGV("DBG(%s): different buf_handle index ", __FUNCTION__); |
3733 | else | |
3734 | ALOGV("DEBUG(%s): same buf_handle index", __FUNCTION__); | |
3735 | } | |
5506cebf | 3736 | subParms->svcBufIndex = 0; |
74d78ebe | 3737 | } |
5506cebf SK |
3738 | if (subParms->type == SUBSTREAM_TYPE_JPEG) { |
3739 | m_resizeBuf.size.extS[0] = ALIGN(subParms->width, 16) * ALIGN(subParms->height, 16) * 2; | |
3740 | m_resizeBuf.size.extS[1] = 0; | |
3741 | m_resizeBuf.size.extS[2] = 0; | |
3742 | ||
3743 | if (allocCameraMemory(m_ionCameraClient, &m_resizeBuf, 1) == -1) { | |
3744 | ALOGE("ERR(%s): Failed to allocate resize buf", __FUNCTION__); | |
3745 | } | |
3746 | } | |
3747 | if (subParms->type == SUBSTREAM_TYPE_PRVCB) { | |
3748 | m_getAlignedYUVSize(HAL_PIXEL_FORMAT_2_V4L2_PIX(subParms->internalFormat), subParms->width, | |
3749 | subParms->height, &m_previewCbBuf); | |
86646da4 | 3750 | |
5506cebf SK |
3751 | if (allocCameraMemory(m_ionCameraClient, &m_previewCbBuf, subParms->internalPlanes) == -1) { |
3752 | ALOGE("ERR(%s): Failed to allocate prvcb buf", __FUNCTION__); | |
3753 | } | |
3754 | } | |
3755 | subParms->needBufferInit= false; | |
3756 | } | |
3757 | } | |
86646da4 SK |
3758 | } |
3759 | ||
c15a6b00 JS |
3760 | void ExynosCameraHWInterface2::m_streamThreadInitialize(SignalDrivenThread * self) |
3761 | { | |
13d8c7b4 SK |
3762 | StreamThread * selfThread = ((StreamThread*)self); |
3763 | ALOGV("DEBUG(%s): ", __FUNCTION__ ); | |
3764 | memset(&(selfThread->m_parameters), 0, sizeof(stream_parameters_t)); | |
3765 | selfThread->m_isBufferInit = false; | |
5506cebf SK |
3766 | for (int i = 0 ; i < NUM_MAX_SUBSTREAM ; i++) { |
3767 | selfThread->m_attachedSubStreams[i].streamId = -1; | |
3768 | selfThread->m_attachedSubStreams[i].priority = 0; | |
3769 | } | |
c15a6b00 JS |
3770 | return; |
3771 | } | |
3772 | ||
5506cebf SK |
3773 | int ExynosCameraHWInterface2::m_runSubStreamFunc(StreamThread *selfThread, ExynosBuffer *srcImageBuf, |
3774 | int stream_id, nsecs_t frameTimeStamp) | |
c15a6b00 | 3775 | { |
5506cebf SK |
3776 | substream_parameters_t *subParms = &m_subStreams[stream_id]; |
3777 | ||
3778 | switch (stream_id) { | |
c15a6b00 | 3779 | |
5506cebf SK |
3780 | case STREAM_ID_JPEG: |
3781 | return m_jpegCreator(selfThread, srcImageBuf, frameTimeStamp); | |
c15a6b00 | 3782 | |
5506cebf SK |
3783 | case STREAM_ID_RECORD: |
3784 | return m_recordCreator(selfThread, srcImageBuf, frameTimeStamp); | |
c15a6b00 | 3785 | |
5506cebf SK |
3786 | case STREAM_ID_PRVCB: |
3787 | return m_prvcbCreator(selfThread, srcImageBuf, frameTimeStamp); | |
3788 | ||
3789 | default: | |
3790 | return 0; | |
c15a6b00 | 3791 | } |
5506cebf SK |
3792 | } |
3793 | void ExynosCameraHWInterface2::m_streamFunc_direct(SignalDrivenThread *self) | |
3794 | { | |
3795 | uint32_t currentSignal = self->GetProcessingSignal(); | |
3796 | StreamThread * selfThread = ((StreamThread*)self); | |
3797 | stream_parameters_t *selfStreamParms = &(selfThread->m_parameters); | |
3798 | node_info_t *currentNode = selfStreamParms->node; | |
3799 | int i = 0; | |
3800 | nsecs_t frameTimeStamp; | |
13d8c7b4 | 3801 | |
b55ed664 | 3802 | if (currentSignal & SIGNAL_THREAD_RELEASE) { |
5506cebf | 3803 | CAM_LOGD("(%s): [%d] START SIGNAL_THREAD_RELEASE", __FUNCTION__, selfThread->m_index); |
b55ed664 SK |
3804 | |
3805 | if (selfThread->m_isBufferInit) { | |
a8be0011 SK |
3806 | if (!(currentNode->fd == m_camera_info.capture.fd && m_camera_info.capture.status == false)) { |
3807 | ALOGV("(%s): [%d] calling streamoff (fd:%d)", __FUNCTION__, | |
5506cebf | 3808 | selfThread->m_index, currentNode->fd); |
a8be0011 SK |
3809 | if (cam_int_streamoff(currentNode) < 0 ) { |
3810 | ALOGE("ERR(%s): stream off fail", __FUNCTION__); | |
3811 | } | |
3812 | ALOGV("(%s): [%d] streamoff done and calling reqbuf 0 (fd:%d)", __FUNCTION__, | |
3813 | selfThread->m_index, currentNode->fd); | |
3814 | currentNode->buffers = 0; | |
3815 | cam_int_reqbufs(currentNode); | |
3816 | ALOGV("(%s): [%d] reqbuf 0 DONE (fd:%d)", __FUNCTION__, | |
3817 | selfThread->m_index, currentNode->fd); | |
3818 | } | |
b55ed664 SK |
3819 | } |
3820 | #ifdef ENABLE_FRAME_SYNC | |
3821 | // free metabuffers | |
5506cebf SK |
3822 | for (i = 0; i < NUM_MAX_CAMERA_BUFFERS; i++) |
3823 | if (selfStreamParms->metaBuffers[i].fd.extFd[0] != 0) { | |
b55ed664 SK |
3824 | freeCameraMemory(&(selfStreamParms->metaBuffers[i]), 1); |
3825 | selfStreamParms->metaBuffers[i].fd.extFd[0] = 0; | |
3826 | selfStreamParms->metaBuffers[i].size.extS[0] = 0; | |
3827 | } | |
3828 | #endif | |
3829 | selfThread->m_isBufferInit = false; | |
b55ed664 | 3830 | selfThread->m_releasing = false; |
5506cebf SK |
3831 | selfThread->m_activated = false; |
3832 | ALOGV("(%s): [%d] END SIGNAL_THREAD_RELEASE", __FUNCTION__, selfThread->m_index); | |
3833 | return; | |
3834 | } | |
3835 | if (currentSignal & SIGNAL_STREAM_REPROCESSING_START) { | |
3836 | status_t res; | |
3837 | buffer_handle_t * buf = NULL; | |
3838 | bool found = false; | |
3839 | ALOGV("(%s): streamthread[%d] START SIGNAL_STREAM_REPROCESSING_START", | |
3840 | __FUNCTION__, selfThread->m_index); | |
3841 | res = m_reprocessOps->acquire_buffer(m_reprocessOps, &buf); | |
3842 | if (res != NO_ERROR || buf == NULL) { | |
3843 | ALOGE("ERR(%s): [reprocess] unable to acquire_buffer : %d",__FUNCTION__ , res); | |
3844 | return; | |
3845 | } | |
3846 | const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf); | |
3847 | int checkingIndex = 0; | |
3848 | for (checkingIndex = 0; checkingIndex < selfStreamParms->numSvcBuffers ; checkingIndex++) { | |
3849 | if (priv_handle->fd == selfStreamParms->svcBuffers[checkingIndex].fd.extFd[0] ) { | |
3850 | found = true; | |
3851 | break; | |
3852 | } | |
3853 | } | |
3854 | ALOGV("DEBUG(%s): dequeued buf %x => found(%d) index(%d) ", | |
3855 | __FUNCTION__, (unsigned int)buf, found, checkingIndex); | |
b55ed664 | 3856 | |
5506cebf SK |
3857 | if (!found) return; |
3858 | ||
3859 | for (int i = 0 ; i < NUM_MAX_SUBSTREAM ; i++) { | |
3860 | if (selfThread->m_attachedSubStreams[i].streamId == -1) | |
3861 | continue; | |
3862 | ||
3863 | #ifdef ENABLE_FRAME_SYNC | |
a8be0011 | 3864 | frameTimeStamp = m_requestManager->GetTimestampByFrameCnt(m_reprocessingFrameCnt); |
5506cebf SK |
3865 | m_requestManager->NotifyStreamOutput(m_reprocessingFrameCnt); |
3866 | #else | |
3867 | frameTimeStamp = m_requestManager->GetTimestamp(m_requestManager->GetFrameIndex()); | |
3868 | #endif | |
3869 | if (m_currentReprocessOutStreams & (1<<selfThread->m_attachedSubStreams[i].streamId)) | |
3870 | m_runSubStreamFunc(selfThread, &(selfStreamParms->svcBuffers[checkingIndex]), | |
3871 | selfThread->m_attachedSubStreams[i].streamId, frameTimeStamp); | |
3872 | } | |
3873 | ||
3874 | res = m_reprocessOps->release_buffer(m_reprocessOps, buf); | |
3875 | if (res != NO_ERROR) { | |
3876 | ALOGE("ERR(%s): [reprocess] unable to release_buffer : %d",__FUNCTION__ , res); | |
3877 | return; | |
3878 | } | |
3879 | ALOGV("(%s): streamthread[%d] END SIGNAL_STREAM_REPROCESSING_START", | |
3880 | __FUNCTION__,selfThread->m_index); | |
b55ed664 SK |
3881 | |
3882 | return; | |
3883 | } | |
13d8c7b4 | 3884 | if (currentSignal & SIGNAL_STREAM_DATA_COMING) { |
c15a6b00 | 3885 | buffer_handle_t * buf = NULL; |
5506cebf | 3886 | status_t res = 0; |
c15a6b00 JS |
3887 | int i, j; |
3888 | int index; | |
ad37861e | 3889 | nsecs_t timestamp; |
5506cebf | 3890 | #ifdef ENABLE_FRAME_SYNC |
feb7df4c | 3891 | camera2_stream *frame; |
2f4d175d | 3892 | uint8_t currentOutputStreams; |
a85ec381 | 3893 | bool directOutputEnabled = false; |
5506cebf | 3894 | #endif |
c0b6e17e | 3895 | int numOfUndqbuf = 0; |
c0b6e17e | 3896 | |
5506cebf | 3897 | ALOGV("(%s): streamthread[%d] START SIGNAL_STREAM_DATA_COMING", __FUNCTION__,selfThread->m_index); |
ad37861e | 3898 | |
86646da4 | 3899 | m_streamBufferInit(self); |
c15a6b00 | 3900 | |
b5237e6b | 3901 | do { |
5506cebf SK |
3902 | ALOGV("DEBUG(%s): streamthread[%d] type(%d) DQBUF START ",__FUNCTION__, |
3903 | selfThread->m_index, selfThread->streamType); | |
b5237e6b | 3904 | |
feb7df4c | 3905 | #ifdef ENABLE_FRAME_SYNC |
5506cebf SK |
3906 | selfStreamParms->bufIndex = cam_int_dqbuf(currentNode, selfStreamParms->planes + selfStreamParms->metaPlanes); |
3907 | frame = (struct camera2_stream *)(selfStreamParms->metaBuffers[selfStreamParms->bufIndex].virt.extP[0]); | |
5506cebf | 3908 | frameTimeStamp = m_requestManager->GetTimestampByFrameCnt(frame->rcount); |
2f4d175d SK |
3909 | currentOutputStreams = m_requestManager->GetOutputStreamByFrameCnt(frame->rcount); |
3910 | ALOGV("frame count streamthread[%d] : %d, outputStream(%x)", selfThread->m_index, frame->rcount, currentOutputStreams); | |
a85ec381 SK |
3911 | if (((currentOutputStreams & STREAM_MASK_PREVIEW) && selfThread->m_index == 0)|| |
3912 | ((currentOutputStreams & STREAM_MASK_ZSL) && selfThread->m_index == 1)) { | |
3913 | directOutputEnabled = true; | |
3914 | } | |
3915 | if (!directOutputEnabled) { | |
3916 | if (!m_nightCaptureFrameCnt) | |
3917 | m_requestManager->NotifyStreamOutput(frame->rcount); | |
3918 | } | |
feb7df4c | 3919 | #else |
5506cebf SK |
3920 | selfStreamParms->bufIndex = cam_int_dqbuf(currentNode); |
3921 | frameTimeStamp = m_requestManager->GetTimestamp(m_requestManager->GetFrameIndex()) | |
feb7df4c | 3922 | #endif |
5506cebf SK |
3923 | ALOGV("DEBUG(%s): streamthread[%d] DQBUF done index(%d) sigcnt(%d)",__FUNCTION__, |
3924 | selfThread->m_index, selfStreamParms->bufIndex, m_scpOutputSignalCnt); | |
804236a7 | 3925 | |
5506cebf | 3926 | if (selfStreamParms->svcBufStatus[selfStreamParms->bufIndex] != ON_DRIVER) |
86646da4 | 3927 | ALOGV("DBG(%s): DQed buffer status abnormal (%d) ", |
5506cebf SK |
3928 | __FUNCTION__, selfStreamParms->svcBufStatus[selfStreamParms->bufIndex]); |
3929 | selfStreamParms->svcBufStatus[selfStreamParms->bufIndex] = ON_HAL; | |
b5237e6b | 3930 | |
5506cebf SK |
3931 | for (int i = 0 ; i < NUM_MAX_SUBSTREAM ; i++) { |
3932 | if (selfThread->m_attachedSubStreams[i].streamId == -1) | |
3933 | continue; | |
5506cebf | 3934 | #ifdef ENABLE_FRAME_SYNC |
2f4d175d | 3935 | if (currentOutputStreams & (1<<selfThread->m_attachedSubStreams[i].streamId)) { |
5506cebf SK |
3936 | m_runSubStreamFunc(selfThread, &(selfStreamParms->svcBuffers[selfStreamParms->bufIndex]), |
3937 | selfThread->m_attachedSubStreams[i].streamId, frameTimeStamp); | |
b5237e6b | 3938 | } |
2f4d175d SK |
3939 | #else |
3940 | if (m_currentOutputStreams & (1<<selfThread->m_attachedSubStreams[i].streamId)) { | |
3941 | m_runSubStreamFunc(selfThread, &(selfStreamParms->svcBuffers[selfStreamParms->bufIndex]), | |
3942 | selfThread->m_attachedSubStreams[i].streamId, frameTimeStamp); | |
3943 | } | |
3944 | #endif | |
86646da4 | 3945 | } |
c0b6e17e | 3946 | |
5506cebf | 3947 | if (m_requestManager->GetSkipCnt() <= 0) { |
5506cebf | 3948 | #ifdef ENABLE_FRAME_SYNC |
2f4d175d | 3949 | if ((currentOutputStreams & STREAM_MASK_PREVIEW) && selfThread->m_index == 0) { |
5506cebf | 3950 | ALOGV("** Display Preview(frameCnt:%d)", frame->rcount); |
2f4d175d SK |
3951 | res = selfStreamParms->streamOps->enqueue_buffer(selfStreamParms->streamOps, |
3952 | frameTimeStamp, | |
3953 | &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex])); | |
3954 | } | |
3955 | else if ((currentOutputStreams & STREAM_MASK_ZSL) && selfThread->m_index == 1) { | |
c48f0170 | 3956 | ALOGV("** SCC output (frameCnt:%d)", frame->rcount); |
2f4d175d SK |
3957 | res = selfStreamParms->streamOps->enqueue_buffer(selfStreamParms->streamOps, |
3958 | frameTimeStamp, | |
3959 | &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex])); | |
3960 | } | |
a85ec381 SK |
3961 | else { |
3962 | res = selfStreamParms->streamOps->cancel_buffer(selfStreamParms->streamOps, | |
3963 | &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex])); | |
3964 | ALOGV("DEBUG(%s): streamthread[%d] cancel_buffer to svc done res(%d)", __FUNCTION__, selfThread->m_index, res); | |
3965 | } | |
5506cebf | 3966 | #else |
2f4d175d | 3967 | if ((m_currentOutputStreams & STREAM_MASK_PREVIEW) && selfThread->m_index == 0) { |
5506cebf | 3968 | ALOGV("** Display Preview(frameCnt:%d)", m_requestManager->GetFrameIndex()); |
5506cebf SK |
3969 | res = selfStreamParms->streamOps->enqueue_buffer(selfStreamParms->streamOps, |
3970 | frameTimeStamp, | |
3971 | &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex])); | |
74d78ebe | 3972 | } |
5506cebf | 3973 | else if ((m_currentOutputStreams & STREAM_MASK_ZSL) && selfThread->m_index == 1) { |
5506cebf | 3974 | ALOGV("** SCC output (frameCnt:%d), last(%d)", m_requestManager->GetFrameIndex()); |
5506cebf SK |
3975 | res = selfStreamParms->streamOps->enqueue_buffer(selfStreamParms->streamOps, |
3976 | frameTimeStamp, | |
3977 | &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex])); | |
74d78ebe | 3978 | } |
2f4d175d | 3979 | #endif |
5506cebf | 3980 | ALOGV("DEBUG(%s): streamthread[%d] enqueue_buffer to svc done res(%d)", __FUNCTION__, selfThread->m_index, res); |
86646da4 SK |
3981 | } |
3982 | else { | |
3983 | res = selfStreamParms->streamOps->cancel_buffer(selfStreamParms->streamOps, | |
5506cebf SK |
3984 | &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex])); |
3985 | ALOGV("DEBUG(%s): streamthread[%d] cancel_buffer to svc done res(%d)", __FUNCTION__, selfThread->m_index, res); | |
86646da4 | 3986 | } |
2f4d175d | 3987 | #ifdef ENABLE_FRAME_SYNC |
a85ec381 SK |
3988 | if (directOutputEnabled) { |
3989 | if (!m_nightCaptureFrameCnt) | |
3990 | m_requestManager->NotifyStreamOutput(frame->rcount); | |
3991 | } | |
2f4d175d | 3992 | #endif |
86646da4 | 3993 | if (res == 0) { |
5506cebf | 3994 | selfStreamParms->svcBufStatus[selfStreamParms->bufIndex] = ON_SERVICE; |
86646da4 SK |
3995 | selfStreamParms->numSvcBufsInHal--; |
3996 | } | |
3997 | else { | |
5506cebf | 3998 | selfStreamParms->svcBufStatus[selfStreamParms->bufIndex] = ON_HAL; |
b5237e6b | 3999 | } |
86646da4 | 4000 | |
c15a6b00 | 4001 | } |
ce8e830a | 4002 | while(0); |
b5237e6b | 4003 | |
1422aff9 | 4004 | while ((selfStreamParms->numSvcBufsInHal - (selfStreamParms->numSvcBuffers - NUM_SCP_BUFFERS)) |
a85ec381 | 4005 | < selfStreamParms->minUndequedBuffer) { |
86646da4 SK |
4006 | res = selfStreamParms->streamOps->dequeue_buffer(selfStreamParms->streamOps, &buf); |
4007 | if (res != NO_ERROR || buf == NULL) { | |
a85ec381 | 4008 | ALOGV("DEBUG(%s): streamthread[%d] dequeue_buffer fail res(%d) numInHal(%d)",__FUNCTION__ , selfThread->m_index, res, selfStreamParms->numSvcBufsInHal); |
86646da4 SK |
4009 | break; |
4010 | } | |
4011 | selfStreamParms->numSvcBufsInHal++; | |
5506cebf | 4012 | ALOGV("DEBUG(%s): streamthread[%d] got buf(%x) numInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, |
86646da4 SK |
4013 | selfThread->m_index, (uint32_t)(*buf), selfStreamParms->numSvcBufsInHal, |
4014 | ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts); | |
4015 | const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf); | |
4016 | ||
4017 | bool found = false; | |
4018 | int checkingIndex = 0; | |
4019 | for (checkingIndex = 0; checkingIndex < selfStreamParms->numSvcBuffers ; checkingIndex++) { | |
4020 | if (priv_handle->fd == selfStreamParms->svcBuffers[checkingIndex].fd.extFd[0] ) { | |
4021 | found = true; | |
9dd63e1f SK |
4022 | break; |
4023 | } | |
86646da4 | 4024 | } |
86646da4 | 4025 | if (!found) break; |
5506cebf SK |
4026 | selfStreamParms->bufIndex = checkingIndex; |
4027 | if (selfStreamParms->bufIndex < selfStreamParms->numHwBuffers) { | |
86646da4 | 4028 | uint32_t plane_index = 0; |
5506cebf | 4029 | ExynosBuffer* currentBuf = &(selfStreamParms->svcBuffers[selfStreamParms->bufIndex]); |
86646da4 SK |
4030 | struct v4l2_buffer v4l2_buf; |
4031 | struct v4l2_plane planes[VIDEO_MAX_PLANES]; | |
13d8c7b4 | 4032 | |
86646da4 SK |
4033 | v4l2_buf.m.planes = planes; |
4034 | v4l2_buf.type = currentNode->type; | |
4035 | v4l2_buf.memory = currentNode->memory; | |
5506cebf | 4036 | v4l2_buf.index = selfStreamParms->bufIndex; |
86646da4 SK |
4037 | v4l2_buf.length = currentNode->planes; |
4038 | ||
5506cebf SK |
4039 | v4l2_buf.m.planes[0].m.fd = priv_handle->fd; |
4040 | v4l2_buf.m.planes[2].m.fd = priv_handle->fd1; | |
4041 | v4l2_buf.m.planes[1].m.fd = priv_handle->fd2; | |
4042 | for (plane_index=0 ; plane_index < v4l2_buf.length ; plane_index++) { | |
4043 | v4l2_buf.m.planes[plane_index].length = currentBuf->size.extS[plane_index]; | |
4044 | } | |
4045 | #ifdef ENABLE_FRAME_SYNC | |
4046 | /* add plane for metadata*/ | |
4047 | v4l2_buf.length += selfStreamParms->metaPlanes; | |
4048 | v4l2_buf.m.planes[v4l2_buf.length-1].m.fd = selfStreamParms->metaBuffers[selfStreamParms->bufIndex].fd.extFd[0]; | |
4049 | v4l2_buf.m.planes[v4l2_buf.length-1].length = selfStreamParms->metaBuffers[selfStreamParms->bufIndex].size.extS[0]; | |
4050 | #endif | |
4051 | if (exynos_v4l2_qbuf(currentNode->fd, &v4l2_buf) < 0) { | |
4052 | ALOGE("ERR(%s): streamthread[%d] exynos_v4l2_qbuf() fail", | |
4053 | __FUNCTION__, selfThread->m_index); | |
4054 | return; | |
4055 | } | |
4056 | selfStreamParms->svcBufStatus[selfStreamParms->bufIndex] = ON_DRIVER; | |
4057 | ALOGV("DEBUG(%s): streamthread[%d] QBUF done index(%d)", | |
4058 | __FUNCTION__, selfThread->m_index, selfStreamParms->bufIndex); | |
4059 | } | |
4060 | } | |
4061 | ||
4062 | ALOGV("(%s): streamthread[%d] END SIGNAL_STREAM_DATA_COMING", __FUNCTION__,selfThread->m_index); | |
4063 | } | |
4064 | return; | |
4065 | } | |
4066 | ||
/*
 * Signal handler body for "indirect" stream threads (streams whose output is
 * fanned out to substreams rather than queued straight to a display surface).
 *
 * Handles two signals from the owning SignalDrivenThread:
 *  - SIGNAL_THREAD_RELEASE: stream off the V4L2 node (with special-case
 *    bookkeeping when the node is the shared capture node), free its driver
 *    buffers via reqbufs(0), and clear the thread's activation flags.
 *  - SIGNAL_STREAM_DATA_COMING: dequeue one filled buffer from the driver,
 *    look up its timestamp (and, with ENABLE_FRAME_SYNC, its target output
 *    stream mask from the embedded camera2_stream metadata), run every
 *    attached substream that the mask selects, then requeue the buffer.
 */
void ExynosCameraHWInterface2::m_streamFunc_indirect(SignalDrivenThread *self)
{
    uint32_t currentSignal = self->GetProcessingSignal();
    StreamThread * selfThread = ((StreamThread*)self);
    stream_parameters_t *selfStreamParms = &(selfThread->m_parameters);
    node_info_t *currentNode = selfStreamParms->node;


    if (currentSignal & SIGNAL_THREAD_RELEASE) {
        CAM_LOGV("(%s): [%d] START SIGNAL_THREAD_RELEASE", __FUNCTION__, selfThread->m_index);

        if (selfThread->m_isBufferInit) {
            if (currentNode->fd == m_camera_info.capture.fd) {
                // This node is the shared capture node: only stream off if it
                // is currently streaming, and record the new state so other
                // users of the capture node see it.
                if (m_camera_info.capture.status == true) {
                    ALOGV("DEBUG(%s): calling streamthread[%d] streamoff (fd:%d)", __FUNCTION__,
                    selfThread->m_index, currentNode->fd);
                    if (cam_int_streamoff(currentNode) < 0 ){
                        ALOGE("ERR(%s): stream off fail", __FUNCTION__);
                    } else {
                        m_camera_info.capture.status = false;
                    }
                }
            } else {
                // Private node: stream off unconditionally.
                ALOGV("DEBUG(%s): calling streamthread[%d] streamoff (fd:%d)", __FUNCTION__,
                selfThread->m_index, currentNode->fd);
                if (cam_int_streamoff(currentNode) < 0 ){
                    ALOGE("ERR(%s): stream off fail", __FUNCTION__);
                }
            }
            ALOGV("DEBUG(%s): calling streamthread[%d] streamoff done", __FUNCTION__, selfThread->m_index);
            ALOGV("DEBUG(%s): calling streamthread[%d] reqbuf 0 (fd:%d)", __FUNCTION__,
                    selfThread->m_index, currentNode->fd);
            // reqbufs with count 0 releases the driver-side buffers.
            currentNode->buffers = 0;
            cam_int_reqbufs(currentNode);
            ALOGV("DEBUG(%s): calling streamthread[%d] reqbuf 0 DONE(fd:%d)", __FUNCTION__,
                    selfThread->m_index, currentNode->fd);
        }

        selfThread->m_isBufferInit = false;
        selfThread->m_releasing = false;
        selfThread->m_activated = false;
        ALOGV("(%s): [%d] END SIGNAL_THREAD_RELEASE", __FUNCTION__, selfThread->m_index);
        return;
    }

    if (currentSignal & SIGNAL_STREAM_DATA_COMING) {
#ifdef ENABLE_FRAME_SYNC
        camera2_stream *frame;
        uint8_t currentOutputStreams;
#endif
        nsecs_t frameTimeStamp;

        ALOGV("DEBUG(%s): streamthread[%d] processing SIGNAL_STREAM_DATA_COMING",
            __FUNCTION__,selfThread->m_index);

        m_streamBufferInit(self);

        ALOGV("DEBUG(%s): streamthread[%d] DQBUF START", __FUNCTION__, selfThread->m_index);
        selfStreamParms->bufIndex = cam_int_dqbuf(currentNode);
        ALOGV("DEBUG(%s): streamthread[%d] DQBUF done index(%d)",__FUNCTION__,
            selfThread->m_index, selfStreamParms->bufIndex);

#ifdef ENABLE_FRAME_SYNC
        // The last plane of the buffer carries embedded camera2_stream
        // metadata; its rcount identifies the originating request.
        frame = (struct camera2_stream *)(currentNode->buffer[selfStreamParms->bufIndex].virt.extP[selfStreamParms->planes -1]);
        frameTimeStamp = m_requestManager->GetTimestampByFrameCnt(frame->rcount);
        currentOutputStreams = m_requestManager->GetOutputStreamByFrameCnt(frame->rcount);
        ALOGV("frame count(SCC) : %d outputStream(%x)", frame->rcount, currentOutputStreams);
#else
        frameTimeStamp = m_requestManager->GetTimestamp(m_requestManager->GetFrameIndex());
#endif

        // Dispatch the dequeued image to each attached substream selected by
        // the current request's output-stream bitmask.
        for (int i = 0 ; i < NUM_MAX_SUBSTREAM ; i++) {
            if (selfThread->m_attachedSubStreams[i].streamId == -1)
                continue;
#ifdef ENABLE_FRAME_SYNC
            if (currentOutputStreams & (1<<selfThread->m_attachedSubStreams[i].streamId)) {
                m_requestManager->NotifyStreamOutput(frame->rcount);
                m_runSubStreamFunc(selfThread, &(currentNode->buffer[selfStreamParms->bufIndex]),
                    selfThread->m_attachedSubStreams[i].streamId, frameTimeStamp);
            }
#else
            if (m_currentOutputStreams & (1<<selfThread->m_attachedSubStreams[i].streamId)) {
                m_runSubStreamFunc(selfThread, &(currentNode->buffer[selfStreamParms->bufIndex]),
                    selfThread->m_attachedSubStreams[i].streamId, frameTimeStamp);
            }
#endif
        }
        // Give the buffer back to the driver for reuse.
        cam_int_qbuf(currentNode, selfStreamParms->bufIndex);
        ALOGV("DEBUG(%s): streamthread[%d] QBUF DONE", __FUNCTION__, selfThread->m_index);



        ALOGV("DEBUG(%s): streamthread[%d] processing SIGNAL_STREAM_DATA_COMING DONE",
            __FUNCTION__, selfThread->m_index);
    }


    return;
}
4166 | ||
5506cebf | 4167 | void ExynosCameraHWInterface2::m_streamThreadFunc(SignalDrivenThread * self) |
86646da4 SK |
4168 | { |
4169 | uint32_t currentSignal = self->GetProcessingSignal(); | |
4170 | StreamThread * selfThread = ((StreamThread*)self); | |
4171 | stream_parameters_t *selfStreamParms = &(selfThread->m_parameters); | |
5506cebf SK |
4172 | node_info_t *currentNode = selfStreamParms->node; |
4173 | ||
4174 | ALOGV("DEBUG(%s): m_streamThreadFunc[%d] (%x)", __FUNCTION__, selfThread->m_index, currentSignal); | |
86646da4 | 4175 | |
5506cebf SK |
4176 | // Do something in Child thread handler |
4177 | // Should change function to class that inherited StreamThread class to support dynamic stream allocation | |
4178 | if (selfThread->streamType == STREAM_TYPE_DIRECT) { | |
4179 | m_streamFunc_direct(self); | |
4180 | } else if (selfThread->streamType == STREAM_TYPE_INDIRECT) { | |
4181 | m_streamFunc_indirect(self); | |
4182 | } | |
86646da4 | 4183 | |
5506cebf SK |
4184 | return; |
4185 | } | |
4186 | int ExynosCameraHWInterface2::m_jpegCreator(StreamThread *selfThread, ExynosBuffer *srcImageBuf, nsecs_t frameTimeStamp) | |
4187 | { | |
4188 | stream_parameters_t *selfStreamParms = &(selfThread->m_parameters); | |
4189 | substream_parameters_t *subParms = &m_subStreams[STREAM_ID_JPEG]; | |
4190 | status_t res; | |
4191 | ExynosRect jpegRect; | |
4192 | bool found = false; | |
de48e362 | 4193 | int srcW, srcH, srcCropX, srcCropY; |
5506cebf SK |
4194 | int pictureW, pictureH, pictureFramesize = 0; |
4195 | int pictureFormat; | |
4196 | int cropX, cropY, cropW, cropH = 0; | |
4197 | ExynosBuffer resizeBufInfo; | |
4198 | ExynosRect m_jpegPictureRect; | |
4199 | buffer_handle_t * buf = NULL; | |
c06b3290 SK |
4200 | camera2_jpeg_blob * jpegBlob = NULL; |
4201 | int jpegBufSize = 0; | |
86646da4 | 4202 | |
5506cebf SK |
4203 | ALOGV("DEBUG(%s): index(%d)",__FUNCTION__, subParms->svcBufIndex); |
4204 | for (int i = 0 ; subParms->numSvcBuffers ; i++) { | |
4205 | if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_HAL) { | |
4206 | found = true; | |
4207 | break; | |
86646da4 | 4208 | } |
5506cebf SK |
4209 | subParms->svcBufIndex++; |
4210 | if (subParms->svcBufIndex >= subParms->numSvcBuffers) | |
4211 | subParms->svcBufIndex = 0; | |
4212 | } | |
4213 | if (!found) { | |
4214 | ALOGE("(%s): cannot find free svc buffer", __FUNCTION__); | |
4215 | subParms->svcBufIndex++; | |
4216 | return 1; | |
86646da4 SK |
4217 | } |
4218 | ||
1264ab16 AR |
4219 | { |
4220 | Mutex::Autolock lock(m_jpegEncoderLock); | |
4221 | m_jpegEncodingCount++; | |
4222 | } | |
32cf9401 | 4223 | |
de48e362 SK |
4224 | m_getRatioSize(selfStreamParms->width, selfStreamParms->height, |
4225 | m_streamThreads[0]->m_parameters.width, m_streamThreads[0]->m_parameters.height, | |
4226 | &srcCropX, &srcCropY, | |
4227 | &srcW, &srcH, | |
4228 | 0); | |
4229 | ||
5506cebf SK |
4230 | m_jpegPictureRect.w = subParms->width; |
4231 | m_jpegPictureRect.h = subParms->height; | |
7d0efb59 | 4232 | |
5506cebf SK |
4233 | ALOGV("DEBUG(%s):w = %d, h = %d, w = %d, h = %d", |
4234 | __FUNCTION__, selfStreamParms->width, selfStreamParms->height, | |
4235 | m_jpegPictureRect.w, m_jpegPictureRect.h); | |
7d0efb59 | 4236 | |
de48e362 | 4237 | m_getRatioSize(srcW, srcH, |
5506cebf SK |
4238 | m_jpegPictureRect.w, m_jpegPictureRect.h, |
4239 | &cropX, &cropY, | |
4240 | &pictureW, &pictureH, | |
4241 | 0); | |
4242 | pictureFormat = V4L2_PIX_FMT_YUYV; | |
4243 | pictureFramesize = FRAME_SIZE(V4L2_PIX_2_HAL_PIXEL_FORMAT(pictureFormat), pictureW, pictureH); | |
4244 | ||
4245 | if (m_exynosPictureCSC) { | |
4246 | float zoom_w = 0, zoom_h = 0; | |
4247 | if (m_zoomRatio == 0) | |
4248 | m_zoomRatio = 1; | |
4249 | ||
4250 | if (m_jpegPictureRect.w >= m_jpegPictureRect.h) { | |
4251 | zoom_w = pictureW / m_zoomRatio; | |
4252 | zoom_h = zoom_w * m_jpegPictureRect.h / m_jpegPictureRect.w; | |
4253 | } else { | |
4254 | zoom_h = pictureH / m_zoomRatio; | |
4255 | zoom_w = zoom_h * m_jpegPictureRect.w / m_jpegPictureRect.h; | |
7d0efb59 | 4256 | } |
de48e362 SK |
4257 | cropX = (srcW - zoom_w) / 2; |
4258 | cropY = (srcH - zoom_h) / 2; | |
5506cebf SK |
4259 | cropW = zoom_w; |
4260 | cropH = zoom_h; | |
4261 | ||
4262 | ALOGV("DEBUG(%s):cropX = %d, cropY = %d, cropW = %d, cropH = %d", | |
4263 | __FUNCTION__, cropX, cropY, cropW, cropH); | |
4264 | ||
4265 | csc_set_src_format(m_exynosPictureCSC, | |
de48e362 | 4266 | ALIGN(srcW, 16), ALIGN(srcH, 16), |
5506cebf SK |
4267 | cropX, cropY, cropW, cropH, |
4268 | V4L2_PIX_2_HAL_PIXEL_FORMAT(pictureFormat), | |
4269 | 0); | |
7d0efb59 | 4270 | |
5506cebf SK |
4271 | csc_set_dst_format(m_exynosPictureCSC, |
4272 | m_jpegPictureRect.w, m_jpegPictureRect.h, | |
4273 | 0, 0, m_jpegPictureRect.w, m_jpegPictureRect.h, | |
4274 | V4L2_PIX_2_HAL_PIXEL_FORMAT(V4L2_PIX_FMT_NV16), | |
4275 | 0); | |
4276 | for (int i = 0 ; i < 3 ; i++) | |
4277 | ALOGV("DEBUG(%s): m_pictureBuf.fd.extFd[%d]=%d ", | |
4278 | __FUNCTION__, i, srcImageBuf->fd.extFd[i]); | |
4279 | csc_set_src_buffer(m_exynosPictureCSC, | |
4280 | (void **)&srcImageBuf->fd.fd); | |
7d0efb59 | 4281 | |
5506cebf SK |
4282 | csc_set_dst_buffer(m_exynosPictureCSC, |
4283 | (void **)&m_resizeBuf.fd.fd); | |
4284 | for (int i = 0 ; i < 3 ; i++) | |
4285 | ALOGV("DEBUG(%s): m_resizeBuf.virt.extP[%d]=%d m_resizeBuf.size.extS[%d]=%d", | |
4286 | __FUNCTION__, i, m_resizeBuf.fd.extFd[i], i, m_resizeBuf.size.extS[i]); | |
7d0efb59 | 4287 | |
5506cebf SK |
4288 | if (csc_convert(m_exynosPictureCSC) != 0) |
4289 | ALOGE("ERR(%s): csc_convert() fail", __FUNCTION__); | |
4290 | ||
4291 | } | |
4292 | else { | |
4293 | ALOGE("ERR(%s): m_exynosPictureCSC == NULL", __FUNCTION__); | |
7d0efb59 C |
4294 | } |
4295 | ||
5506cebf | 4296 | resizeBufInfo = m_resizeBuf; |
86646da4 | 4297 | |
5506cebf | 4298 | m_getAlignedYUVSize(V4L2_PIX_FMT_NV16, m_jpegPictureRect.w, m_jpegPictureRect.h, &m_resizeBuf); |
86646da4 | 4299 | |
5506cebf SK |
4300 | for (int i = 1; i < 3; i++) { |
4301 | if (m_resizeBuf.size.extS[i] != 0) | |
4302 | m_resizeBuf.fd.extFd[i] = m_resizeBuf.fd.extFd[i-1] + m_resizeBuf.size.extS[i-1]; | |
86646da4 | 4303 | |
5506cebf SK |
4304 | ALOGV("(%s): m_resizeBuf.size.extS[%d] = %d", __FUNCTION__, i, m_resizeBuf.size.extS[i]); |
4305 | } | |
2adfa429 | 4306 | |
5506cebf SK |
4307 | jpegRect.w = m_jpegPictureRect.w; |
4308 | jpegRect.h = m_jpegPictureRect.h; | |
4309 | jpegRect.colorFormat = V4L2_PIX_FMT_NV16; | |
86646da4 | 4310 | |
5506cebf SK |
4311 | for (int j = 0 ; j < 3 ; j++) |
4312 | ALOGV("DEBUG(%s): dest buf node fd.extFd[%d]=%d size=%d virt=%x ", | |
4313 | __FUNCTION__, j, subParms->svcBuffers[subParms->svcBufIndex].fd.extFd[j], | |
4314 | (unsigned int)subParms->svcBuffers[subParms->svcBufIndex].size.extS[j], | |
4315 | (unsigned int)subParms->svcBuffers[subParms->svcBufIndex].virt.extP[j]); | |
86646da4 | 4316 | |
c06b3290 SK |
4317 | jpegBufSize = subParms->svcBuffers[subParms->svcBufIndex].size.extS[0]; |
4318 | if (yuv2Jpeg(&m_resizeBuf, &subParms->svcBuffers[subParms->svcBufIndex], &jpegRect) == false) { | |
5506cebf | 4319 | ALOGE("ERR(%s):yuv2Jpeg() fail", __FUNCTION__); |
c06b3290 SK |
4320 | } else { |
4321 | m_resizeBuf = resizeBufInfo; | |
2adfa429 | 4322 | |
c06b3290 SK |
4323 | int jpegSize = subParms->svcBuffers[subParms->svcBufIndex].size.s; |
4324 | ALOGD("(%s): (%d x %d) jpegbuf size(%d) encoded size(%d)", __FUNCTION__, | |
4325 | m_jpegPictureRect.w, m_jpegPictureRect.h, jpegBufSize, jpegSize); | |
4326 | char * jpegBuffer = (char*)(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0]); | |
4327 | jpegBlob = (camera2_jpeg_blob*)(&jpegBuffer[jpegBufSize - sizeof(camera2_jpeg_blob)]); | |
2adfa429 | 4328 | |
c06b3290 SK |
4329 | if (jpegBuffer[jpegSize-1] == 0) |
4330 | jpegSize--; | |
4331 | jpegBlob->jpeg_size = jpegSize; | |
4332 | jpegBlob->jpeg_blob_id = CAMERA2_JPEG_BLOB_ID; | |
4333 | } | |
cf593314 | 4334 | subParms->svcBuffers[subParms->svcBufIndex].size.extS[0] = jpegBufSize; |
5506cebf | 4335 | res = subParms->streamOps->enqueue_buffer(subParms->streamOps, frameTimeStamp, &(subParms->svcBufHandle[subParms->svcBufIndex])); |
be494d19 | 4336 | |
5506cebf SK |
4337 | ALOGV("DEBUG(%s): streamthread[%d] enqueue_buffer index(%d) to svc done res(%d)", |
4338 | __FUNCTION__, selfThread->m_index, subParms->svcBufIndex, res); | |
4339 | if (res == 0) { | |
4340 | subParms->svcBufStatus[subParms->svcBufIndex] = ON_SERVICE; | |
4341 | subParms->numSvcBufsInHal--; | |
4342 | } | |
4343 | else { | |
4344 | subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL; | |
4345 | } | |
86646da4 | 4346 | |
5506cebf SK |
4347 | while (subParms->numSvcBufsInHal <= subParms->minUndequedBuffer) |
4348 | { | |
4349 | bool found = false; | |
4350 | int checkingIndex = 0; | |
86646da4 | 4351 | |
5506cebf SK |
4352 | ALOGV("DEBUG(%s): jpeg currentBuf#(%d)", __FUNCTION__ , subParms->numSvcBufsInHal); |
4353 | ||
4354 | res = subParms->streamOps->dequeue_buffer(subParms->streamOps, &buf); | |
4355 | if (res != NO_ERROR || buf == NULL) { | |
4356 | ALOGV("DEBUG(%s): jpeg stream(%d) dequeue_buffer fail res(%d)",__FUNCTION__ , selfThread->m_index, res); | |
4357 | break; | |
4358 | } | |
4359 | const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf); | |
4360 | subParms->numSvcBufsInHal ++; | |
4361 | ALOGV("DEBUG(%s): jpeg got buf(%x) numBufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf), | |
4362 | subParms->numSvcBufsInHal, ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts); | |
4363 | ||
4364 | ||
4365 | for (checkingIndex = 0; checkingIndex < subParms->numSvcBuffers ; checkingIndex++) { | |
4366 | if (priv_handle->fd == subParms->svcBuffers[checkingIndex].fd.extFd[0] ) { | |
4367 | found = true; | |
4368 | break; | |
86646da4 | 4369 | } |
5506cebf SK |
4370 | } |
4371 | ALOGV("DEBUG(%s): jpeg dequeueed_buffer found index(%d)", __FUNCTION__, found); | |
be494d19 | 4372 | |
5506cebf SK |
4373 | if (!found) { |
4374 | break; | |
4375 | } | |
4376 | ||
4377 | subParms->svcBufIndex = checkingIndex; | |
4378 | if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_SERVICE) { | |
4379 | subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL; | |
4380 | } | |
4381 | else { | |
4382 | ALOGV("DEBUG(%s): jpeg bufstatus abnormal [%d] status = %d", __FUNCTION__, | |
4383 | subParms->svcBufIndex, subParms->svcBufStatus[subParms->svcBufIndex]); | |
4384 | } | |
4385 | } | |
1264ab16 AR |
4386 | { |
4387 | Mutex::Autolock lock(m_jpegEncoderLock); | |
4388 | m_jpegEncodingCount--; | |
4389 | } | |
5506cebf SK |
4390 | return 0; |
4391 | } | |
86646da4 | 4392 | |
5506cebf SK |
4393 | int ExynosCameraHWInterface2::m_recordCreator(StreamThread *selfThread, ExynosBuffer *srcImageBuf, nsecs_t frameTimeStamp) |
4394 | { | |
4395 | stream_parameters_t *selfStreamParms = &(selfThread->m_parameters); | |
4396 | substream_parameters_t *subParms = &m_subStreams[STREAM_ID_RECORD]; | |
4397 | status_t res; | |
4398 | ExynosRect jpegRect; | |
4399 | bool found = false; | |
4400 | int cropX, cropY, cropW, cropH = 0; | |
4401 | buffer_handle_t * buf = NULL; | |
86646da4 | 4402 | |
5506cebf SK |
4403 | ALOGV("DEBUG(%s): index(%d)",__FUNCTION__, subParms->svcBufIndex); |
4404 | for (int i = 0 ; subParms->numSvcBuffers ; i++) { | |
4405 | if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_HAL) { | |
4406 | found = true; | |
4407 | break; | |
4408 | } | |
4409 | subParms->svcBufIndex++; | |
4410 | if (subParms->svcBufIndex >= subParms->numSvcBuffers) | |
4411 | subParms->svcBufIndex = 0; | |
4412 | } | |
4413 | if (!found) { | |
4414 | ALOGE("(%s): cannot find free svc buffer", __FUNCTION__); | |
4415 | subParms->svcBufIndex++; | |
4416 | return 1; | |
4417 | } | |
86646da4 | 4418 | |
5506cebf SK |
4419 | if (m_exynosVideoCSC) { |
4420 | int videoW = subParms->width, videoH = subParms->height; | |
4421 | int cropX, cropY, cropW, cropH = 0; | |
4422 | int previewW = selfStreamParms->width, previewH = selfStreamParms->height; | |
4423 | m_getRatioSize(previewW, previewH, | |
4424 | videoW, videoH, | |
4425 | &cropX, &cropY, | |
4426 | &cropW, &cropH, | |
4427 | 0); | |
86646da4 | 4428 | |
5506cebf SK |
4429 | ALOGV("DEBUG(%s):cropX = %d, cropY = %d, cropW = %d, cropH = %d", |
4430 | __FUNCTION__, cropX, cropY, cropW, cropH); | |
86646da4 | 4431 | |
5506cebf | 4432 | csc_set_src_format(m_exynosVideoCSC, |
4a3f1820 | 4433 | ALIGN(previewW, 32), previewH, |
5506cebf SK |
4434 | cropX, cropY, cropW, cropH, |
4435 | selfStreamParms->format, | |
4436 | 0); | |
86646da4 | 4437 | |
5506cebf SK |
4438 | csc_set_dst_format(m_exynosVideoCSC, |
4439 | videoW, videoH, | |
4440 | 0, 0, videoW, videoH, | |
4441 | subParms->format, | |
4442 | 1); | |
86646da4 | 4443 | |
5506cebf SK |
4444 | csc_set_src_buffer(m_exynosVideoCSC, |
4445 | (void **)&srcImageBuf->fd.fd); | |
86646da4 | 4446 | |
5506cebf SK |
4447 | csc_set_dst_buffer(m_exynosVideoCSC, |
4448 | (void **)(&(subParms->svcBuffers[subParms->svcBufIndex].fd.fd))); | |
4449 | ||
4450 | if (csc_convert(m_exynosVideoCSC) != 0) { | |
4451 | ALOGE("ERR(%s):csc_convert() fail", __FUNCTION__); | |
4452 | } | |
4453 | else { | |
4454 | ALOGV("(%s):csc_convert() SUCCESS", __FUNCTION__); | |
86646da4 | 4455 | } |
5506cebf SK |
4456 | } |
4457 | else { | |
4458 | ALOGE("ERR(%s):m_exynosVideoCSC == NULL", __FUNCTION__); | |
4459 | } | |
86646da4 | 4460 | |
5506cebf | 4461 | res = subParms->streamOps->enqueue_buffer(subParms->streamOps, frameTimeStamp, &(subParms->svcBufHandle[subParms->svcBufIndex])); |
86646da4 | 4462 | |
5506cebf SK |
4463 | ALOGV("DEBUG(%s): streamthread[%d] enqueue_buffer index(%d) to svc done res(%d)", |
4464 | __FUNCTION__, selfThread->m_index, subParms->svcBufIndex, res); | |
4465 | if (res == 0) { | |
4466 | subParms->svcBufStatus[subParms->svcBufIndex] = ON_SERVICE; | |
4467 | subParms->numSvcBufsInHal--; | |
4468 | } | |
4469 | else { | |
4470 | subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL; | |
4471 | } | |
86646da4 | 4472 | |
5506cebf SK |
4473 | while (subParms->numSvcBufsInHal <= subParms->minUndequedBuffer) |
4474 | { | |
4475 | bool found = false; | |
4476 | int checkingIndex = 0; | |
86646da4 | 4477 | |
5506cebf SK |
4478 | ALOGV("DEBUG(%s): record currentBuf#(%d)", __FUNCTION__ , subParms->numSvcBufsInHal); |
4479 | ||
4480 | res = subParms->streamOps->dequeue_buffer(subParms->streamOps, &buf); | |
4481 | if (res != NO_ERROR || buf == NULL) { | |
4482 | ALOGV("DEBUG(%s): record stream(%d) dequeue_buffer fail res(%d)",__FUNCTION__ , selfThread->m_index, res); | |
4483 | break; | |
4484 | } | |
4485 | const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf); | |
4486 | subParms->numSvcBufsInHal ++; | |
4487 | ALOGV("DEBUG(%s): record got buf(%x) numBufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf), | |
4488 | subParms->numSvcBufsInHal, ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts); | |
4489 | ||
4490 | for (checkingIndex = 0; checkingIndex < subParms->numSvcBuffers ; checkingIndex++) { | |
4491 | if (priv_handle->fd == subParms->svcBuffers[checkingIndex].fd.extFd[0] ) { | |
4492 | found = true; | |
4493 | break; | |
86646da4 | 4494 | } |
13d8c7b4 | 4495 | } |
5506cebf | 4496 | ALOGV("DEBUG(%s): record dequeueed_buffer found(%d) index = %d", __FUNCTION__, found, checkingIndex); |
86646da4 | 4497 | |
5506cebf SK |
4498 | if (!found) { |
4499 | break; | |
4500 | } | |
86646da4 | 4501 | |
5506cebf SK |
4502 | subParms->svcBufIndex = checkingIndex; |
4503 | if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_SERVICE) { | |
4504 | subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL; | |
4505 | } | |
4506 | else { | |
4507 | ALOGV("DEBUG(%s): record bufstatus abnormal [%d] status = %d", __FUNCTION__, | |
4508 | subParms->svcBufIndex, subParms->svcBufStatus[subParms->svcBufIndex]); | |
4509 | } | |
4510 | } | |
4511 | return 0; | |
86646da4 SK |
4512 | } |
4513 | ||
5506cebf | 4514 | int ExynosCameraHWInterface2::m_prvcbCreator(StreamThread *selfThread, ExynosBuffer *srcImageBuf, nsecs_t frameTimeStamp) |
86646da4 | 4515 | { |
5506cebf SK |
4516 | stream_parameters_t *selfStreamParms = &(selfThread->m_parameters); |
4517 | substream_parameters_t *subParms = &m_subStreams[STREAM_ID_PRVCB]; | |
4518 | status_t res; | |
4519 | bool found = false; | |
4520 | int cropX, cropY, cropW, cropH = 0; | |
4521 | buffer_handle_t * buf = NULL; | |
86646da4 | 4522 | |
5506cebf SK |
4523 | ALOGV("DEBUG(%s): index(%d)",__FUNCTION__, subParms->svcBufIndex); |
4524 | for (int i = 0 ; subParms->numSvcBuffers ; i++) { | |
4525 | if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_HAL) { | |
4526 | found = true; | |
4527 | break; | |
4528 | } | |
4529 | subParms->svcBufIndex++; | |
4530 | if (subParms->svcBufIndex >= subParms->numSvcBuffers) | |
4531 | subParms->svcBufIndex = 0; | |
4532 | } | |
4533 | if (!found) { | |
4534 | ALOGE("(%s): cannot find free svc buffer", __FUNCTION__); | |
4535 | subParms->svcBufIndex++; | |
4536 | return 1; | |
4537 | } | |
86646da4 | 4538 | |
5506cebf SK |
4539 | if (subParms->format == HAL_PIXEL_FORMAT_YCrCb_420_SP) { |
4540 | if (m_exynosVideoCSC) { | |
4541 | int previewCbW = subParms->width, previewCbH = subParms->height; | |
4542 | int cropX, cropY, cropW, cropH = 0; | |
4543 | int previewW = selfStreamParms->width, previewH = selfStreamParms->height; | |
4544 | m_getRatioSize(previewW, previewH, | |
4545 | previewCbW, previewCbH, | |
4546 | &cropX, &cropY, | |
4547 | &cropW, &cropH, | |
4548 | 0); | |
86646da4 | 4549 | |
5506cebf SK |
4550 | ALOGV("DEBUG(%s):cropX = %d, cropY = %d, cropW = %d, cropH = %d", |
4551 | __FUNCTION__, cropX, cropY, cropW, cropH); | |
4552 | csc_set_src_format(m_exynosVideoCSC, | |
4a3f1820 | 4553 | ALIGN(previewW, 32), previewH, |
5506cebf SK |
4554 | cropX, cropY, cropW, cropH, |
4555 | selfStreamParms->format, | |
4556 | 0); | |
86646da4 | 4557 | |
5506cebf SK |
4558 | csc_set_dst_format(m_exynosVideoCSC, |
4559 | previewCbW, previewCbH, | |
4560 | 0, 0, previewCbW, previewCbH, | |
4561 | subParms->internalFormat, | |
4562 | 1); | |
86646da4 | 4563 | |
5506cebf SK |
4564 | csc_set_src_buffer(m_exynosVideoCSC, |
4565 | (void **)&srcImageBuf->fd.fd); | |
4566 | ||
4567 | csc_set_dst_buffer(m_exynosVideoCSC, | |
4568 | (void **)(&(m_previewCbBuf.fd.fd))); | |
4569 | ||
4570 | if (csc_convert(m_exynosVideoCSC) != 0) { | |
4571 | ALOGE("ERR(%s):previewcb csc_convert() fail", __FUNCTION__); | |
4572 | } | |
4573 | else { | |
4574 | ALOGV("(%s):previewcb csc_convert() SUCCESS", __FUNCTION__); | |
4575 | } | |
4576 | if (previewCbW == ALIGN(previewCbW, 16)) { | |
4577 | memcpy(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0], | |
4578 | m_previewCbBuf.virt.extP[0], previewCbW * previewCbH); | |
4579 | memcpy(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0] + previewCbW * previewCbH, | |
4580 | m_previewCbBuf.virt.extP[1], previewCbW * previewCbH / 2 ); | |
4581 | } | |
4582 | else { | |
4583 | // TODO : copy line by line ? | |
4584 | } | |
4585 | } | |
4586 | else { | |
4587 | ALOGE("ERR(%s):m_exynosVideoCSC == NULL", __FUNCTION__); | |
4588 | } | |
86646da4 | 4589 | } |
5506cebf SK |
4590 | else if (subParms->format == HAL_PIXEL_FORMAT_YV12) { |
4591 | int previewCbW = subParms->width, previewCbH = subParms->height; | |
4592 | int stride = ALIGN(previewCbW, 16); | |
4a3f1820 | 4593 | int uv_stride = ALIGN(previewCbW/2, 16); |
0d220b42 | 4594 | int c_stride = ALIGN(stride / 2, 16); |
4a3f1820 SK |
4595 | |
4596 | if (previewCbW == ALIGN(previewCbW, 32)) { | |
4597 | memcpy(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0], | |
4598 | srcImageBuf->virt.extP[0], stride * previewCbH); | |
4599 | memcpy(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0] + stride * previewCbH, | |
4600 | srcImageBuf->virt.extP[1], c_stride * previewCbH / 2 ); | |
4601 | memcpy(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0] + (stride * previewCbH) + (c_stride * previewCbH / 2), | |
4602 | srcImageBuf->virt.extP[2], c_stride * previewCbH / 2 ); | |
4603 | } else { | |
4604 | char * dstAddr = (char *)(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0]); | |
4605 | char * srcAddr = (char *)(srcImageBuf->virt.extP[0]); | |
4606 | for (int i = 0 ; i < previewCbH ; i++) { | |
4607 | memcpy(dstAddr, srcAddr, previewCbW); | |
4608 | dstAddr += stride; | |
4609 | srcAddr += ALIGN(stride, 32); | |
4610 | } | |
4611 | dstAddr = (char *)(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0] + stride * previewCbH); | |
4612 | srcAddr = (char *)(srcImageBuf->virt.extP[1]); | |
4613 | for (int i = 0 ; i < previewCbH/2 ; i++) { | |
4614 | memcpy(dstAddr, srcAddr, previewCbW/2); | |
4615 | dstAddr += c_stride; | |
4616 | srcAddr += uv_stride; | |
4617 | } | |
4618 | srcAddr = (char *)(srcImageBuf->virt.extP[2]); | |
4619 | for (int i = 0 ; i < previewCbH/2 ; i++) { | |
4620 | memcpy(dstAddr, srcAddr, previewCbW/2); | |
4621 | dstAddr += c_stride; | |
4622 | srcAddr += uv_stride; | |
4623 | } | |
4624 | } | |
5506cebf SK |
4625 | } |
4626 | res = subParms->streamOps->enqueue_buffer(subParms->streamOps, frameTimeStamp, &(subParms->svcBufHandle[subParms->svcBufIndex])); | |
4627 | ||
4628 | ALOGV("DEBUG(%s): streamthread[%d] enqueue_buffer index(%d) to svc done res(%d)", | |
4629 | __FUNCTION__, selfThread->m_index, subParms->svcBufIndex, res); | |
4630 | if (res == 0) { | |
4631 | subParms->svcBufStatus[subParms->svcBufIndex] = ON_SERVICE; | |
4632 | subParms->numSvcBufsInHal--; | |
4633 | } | |
4634 | else { | |
4635 | subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL; | |
86646da4 SK |
4636 | } |
4637 | ||
5506cebf SK |
4638 | while (subParms->numSvcBufsInHal <= subParms->minUndequedBuffer) |
4639 | { | |
4640 | bool found = false; | |
4641 | int checkingIndex = 0; | |
86646da4 | 4642 | |
5506cebf | 4643 | ALOGV("DEBUG(%s): prvcb currentBuf#(%d)", __FUNCTION__ , subParms->numSvcBufsInHal); |
86646da4 | 4644 | |
5506cebf SK |
4645 | res = subParms->streamOps->dequeue_buffer(subParms->streamOps, &buf); |
4646 | if (res != NO_ERROR || buf == NULL) { | |
4647 | ALOGV("DEBUG(%s): prvcb stream(%d) dequeue_buffer fail res(%d)",__FUNCTION__ , selfThread->m_index, res); | |
4648 | break; | |
4649 | } | |
4650 | const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf); | |
4651 | subParms->numSvcBufsInHal ++; | |
4652 | ALOGV("DEBUG(%s): prvcb got buf(%x) numBufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf), | |
4653 | subParms->numSvcBufsInHal, ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts); | |
86646da4 | 4654 | |
86646da4 | 4655 | |
5506cebf SK |
4656 | for (checkingIndex = 0; checkingIndex < subParms->numSvcBuffers ; checkingIndex++) { |
4657 | if (priv_handle->fd == subParms->svcBuffers[checkingIndex].fd.extFd[0] ) { | |
4658 | found = true; | |
4659 | break; | |
4660 | } | |
4661 | } | |
4662 | ALOGV("DEBUG(%s): prvcb dequeueed_buffer found(%d) index = %d", __FUNCTION__, found, checkingIndex); | |
4663 | ||
4664 | if (!found) { | |
4665 | break; | |
4666 | } | |
4667 | ||
4668 | subParms->svcBufIndex = checkingIndex; | |
4669 | if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_SERVICE) { | |
4670 | subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL; | |
4671 | } | |
4672 | else { | |
4673 | ALOGV("DEBUG(%s): prvcb bufstatus abnormal [%d] status = %d", __FUNCTION__, | |
4674 | subParms->svcBufIndex, subParms->svcBufStatus[subParms->svcBufIndex]); | |
4675 | } | |
4676 | } | |
4677 | return 0; | |
c15a6b00 JS |
4678 | } |
4679 | ||
2d5e6ec2 SK |
4680 | bool ExynosCameraHWInterface2::m_checkThumbnailSize(int w, int h) |
4681 | { | |
4682 | int sizeOfSupportList; | |
4683 | ||
4684 | //REAR Camera | |
4685 | if(this->getCameraId() == 0) { | |
17071e43 | 4686 | sizeOfSupportList = sizeof(SUPPORT_THUMBNAIL_REAR_SIZE) / (sizeof(int32_t)*2); |
2d5e6ec2 SK |
4687 | |
4688 | for(int i = 0; i < sizeOfSupportList; i++) { | |
4689 | if((SUPPORT_THUMBNAIL_REAR_SIZE[i][0] == w) &&(SUPPORT_THUMBNAIL_REAR_SIZE[i][1] == h)) | |
4690 | return true; | |
4691 | } | |
4692 | ||
4693 | } | |
4694 | else { | |
17071e43 | 4695 | sizeOfSupportList = sizeof(SUPPORT_THUMBNAIL_FRONT_SIZE) / (sizeof(int32_t)*2); |
2d5e6ec2 SK |
4696 | |
4697 | for(int i = 0; i < sizeOfSupportList; i++) { | |
4698 | if((SUPPORT_THUMBNAIL_FRONT_SIZE[i][0] == w) &&(SUPPORT_THUMBNAIL_FRONT_SIZE[i][1] == h)) | |
4699 | return true; | |
4700 | } | |
4701 | } | |
4702 | ||
4703 | return false; | |
4704 | } | |
13d8c7b4 SK |
4705 | bool ExynosCameraHWInterface2::yuv2Jpeg(ExynosBuffer *yuvBuf, |
4706 | ExynosBuffer *jpegBuf, | |
4707 | ExynosRect *rect) | |
4708 | { | |
4709 | unsigned char *addr; | |
4710 | ||
4711 | ExynosJpegEncoderForCamera jpegEnc; | |
4712 | bool ret = false; | |
4713 | int res = 0; | |
4714 | ||
4715 | unsigned int *yuvSize = yuvBuf->size.extS; | |
4716 | ||
4717 | if (jpegEnc.create()) { | |
9dd63e1f | 4718 | ALOGE("ERR(%s):jpegEnc.create() fail", __FUNCTION__); |
13d8c7b4 SK |
4719 | goto jpeg_encode_done; |
4720 | } | |
4721 | ||
87423e56 | 4722 | if (jpegEnc.setQuality(m_jpegMetadata.shot.ctl.jpeg.quality)) { |
9dd63e1f | 4723 | ALOGE("ERR(%s):jpegEnc.setQuality() fail", __FUNCTION__); |
13d8c7b4 SK |
4724 | goto jpeg_encode_done; |
4725 | } | |
4726 | ||
4727 | if (jpegEnc.setSize(rect->w, rect->h)) { | |
9dd63e1f | 4728 | ALOGE("ERR(%s):jpegEnc.setSize() fail", __FUNCTION__); |
13d8c7b4 SK |
4729 | goto jpeg_encode_done; |
4730 | } | |
4731 | ALOGV("%s : width = %d , height = %d\n", __FUNCTION__, rect->w, rect->h); | |
4732 | ||
4733 | if (jpegEnc.setColorFormat(rect->colorFormat)) { | |
9dd63e1f | 4734 | ALOGE("ERR(%s):jpegEnc.setColorFormat() fail", __FUNCTION__); |
13d8c7b4 SK |
4735 | goto jpeg_encode_done; |
4736 | } | |
13d8c7b4 SK |
4737 | |
4738 | if (jpegEnc.setJpegFormat(V4L2_PIX_FMT_JPEG_422)) { | |
9dd63e1f | 4739 | ALOGE("ERR(%s):jpegEnc.setJpegFormat() fail", __FUNCTION__); |
13d8c7b4 SK |
4740 | goto jpeg_encode_done; |
4741 | } | |
13d8c7b4 | 4742 | |
48728d49 | 4743 | if((m_jpegMetadata.shot.ctl.jpeg.thumbnailSize[0] != 0) && (m_jpegMetadata.shot.ctl.jpeg.thumbnailSize[1] != 0)) { |
2d5e6ec2 | 4744 | mExifInfo.enableThumb = true; |
48728d49 | 4745 | if(!m_checkThumbnailSize(m_jpegMetadata.shot.ctl.jpeg.thumbnailSize[0], m_jpegMetadata.shot.ctl.jpeg.thumbnailSize[1])) { |
4017b08c SK |
4746 | // in the case of unsupported parameter, disable thumbnail |
4747 | mExifInfo.enableThumb = false; | |
2d5e6ec2 | 4748 | } else { |
48728d49 SK |
4749 | m_thumbNailW = m_jpegMetadata.shot.ctl.jpeg.thumbnailSize[0]; |
4750 | m_thumbNailH = m_jpegMetadata.shot.ctl.jpeg.thumbnailSize[1]; | |
2d5e6ec2 SK |
4751 | } |
4752 | ||
4753 | ALOGV("(%s) m_thumbNailW = %d, m_thumbNailH = %d", __FUNCTION__, m_thumbNailW, m_thumbNailH); | |
4754 | ||
4755 | } else { | |
4756 | mExifInfo.enableThumb = false; | |
4757 | } | |
54f4971e | 4758 | |
2d5e6ec2 SK |
4759 | if (jpegEnc.setThumbnailSize(m_thumbNailW, m_thumbNailH)) { |
4760 | ALOGE("ERR(%s):jpegEnc.setThumbnailSize(%d, %d) fail", __FUNCTION__, m_thumbNailH, m_thumbNailH); | |
54f4971e SK |
4761 | goto jpeg_encode_done; |
4762 | } | |
4763 | ||
2d5e6ec2 | 4764 | ALOGV("(%s):jpegEnc.setThumbnailSize(%d, %d) ", __FUNCTION__, m_thumbNailW, m_thumbNailW); |
87423e56 | 4765 | if (jpegEnc.setThumbnailQuality(m_jpegMetadata.shot.ctl.jpeg.thumbnailQuality)) { |
54f4971e SK |
4766 | ALOGE("ERR(%s):jpegEnc.setThumbnailQuality fail", __FUNCTION__); |
4767 | goto jpeg_encode_done; | |
4768 | } | |
13d8c7b4 | 4769 | |
54f4971e | 4770 | m_setExifChangedAttribute(&mExifInfo, rect, &m_jpegMetadata); |
9dd63e1f | 4771 | ALOGV("DEBUG(%s):calling jpegEnc.setInBuf() yuvSize(%d)", __FUNCTION__, *yuvSize); |
54f4971e | 4772 | if (jpegEnc.setInBuf((int *)&(yuvBuf->fd.fd), &(yuvBuf->virt.p), (int *)yuvSize)) { |
9dd63e1f | 4773 | ALOGE("ERR(%s):jpegEnc.setInBuf() fail", __FUNCTION__); |
13d8c7b4 SK |
4774 | goto jpeg_encode_done; |
4775 | } | |
54f4971e | 4776 | if (jpegEnc.setOutBuf(jpegBuf->fd.fd, jpegBuf->virt.p, jpegBuf->size.extS[0] + jpegBuf->size.extS[1] + jpegBuf->size.extS[2])) { |
9dd63e1f | 4777 | ALOGE("ERR(%s):jpegEnc.setOutBuf() fail", __FUNCTION__); |
13d8c7b4 SK |
4778 | goto jpeg_encode_done; |
4779 | } | |
13d8c7b4 SK |
4780 | |
4781 | if (jpegEnc.updateConfig()) { | |
9dd63e1f | 4782 | ALOGE("ERR(%s):jpegEnc.updateConfig() fail", __FUNCTION__); |
13d8c7b4 SK |
4783 | goto jpeg_encode_done; |
4784 | } | |
4785 | ||
1422aff9 | 4786 | if ((res = jpegEnc.encode((int *)&jpegBuf->size.s, &mExifInfo))) { |
9dd63e1f | 4787 | ALOGE("ERR(%s):jpegEnc.encode() fail ret(%d)", __FUNCTION__, res); |
13d8c7b4 SK |
4788 | goto jpeg_encode_done; |
4789 | } | |
4790 | ||
4791 | ret = true; | |
4792 | ||
4793 | jpeg_encode_done: | |
4794 | ||
4795 | if (jpegEnc.flagCreate() == true) | |
4796 | jpegEnc.destroy(); | |
4797 | ||
4798 | return ret; | |
4799 | } | |
4800 | ||
e117f756 YJ |
4801 | void ExynosCameraHWInterface2::OnPrecaptureMeteringTriggerStart(int id) |
4802 | { | |
4803 | m_ctlInfo.flash.m_precaptureTriggerId = id; | |
73f5ad60 | 4804 | m_ctlInfo.ae.aeStateNoti = AE_STATE_INACTIVE; |
e117f756 YJ |
4805 | if ((m_ctlInfo.flash.i_flashMode >= AA_AEMODE_ON_AUTO_FLASH) && (m_cameraId == 0)) { |
4806 | // flash is required | |
4807 | switch (m_ctlInfo.flash.m_flashCnt) { | |
4808 | case IS_FLASH_STATE_AUTO_DONE: | |
d91c0269 | 4809 | case IS_FLASH_STATE_AUTO_OFF: |
e117f756 YJ |
4810 | // Flash capture sequence, AF flash was executed before |
4811 | break; | |
4812 | default: | |
4813 | // Full flash sequence | |
4814 | m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON; | |
4815 | m_ctlInfo.flash.m_flashEnableFlg = true; | |
8a3fc5dd | 4816 | m_ctlInfo.flash.m_flashTimeOut = 0; |
e117f756 YJ |
4817 | } |
4818 | } else { | |
73f5ad60 YJ |
4819 | // Skip pre-capture in case of non-flash. |
4820 | ALOGV("[PreCap] Flash OFF mode "); | |
e117f756 YJ |
4821 | m_ctlInfo.flash.m_flashEnableFlg = false; |
4822 | m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_NONE; | |
e117f756 | 4823 | } |
73f5ad60 YJ |
4824 | ALOGV("[PreCap] OnPrecaptureMeteringTriggerStart (ID %d) (flag : %d) (cnt : %d)", id, m_ctlInfo.flash.m_flashEnableFlg, m_ctlInfo.flash.m_flashCnt); |
4825 | OnPrecaptureMeteringNotificationSensor(); | |
e117f756 | 4826 | } |
13d8c7b4 | 4827 | |
0f26b20f SK |
4828 | void ExynosCameraHWInterface2::OnAfTrigger(int id) |
4829 | { | |
8e2c2fdb | 4830 | m_afTriggerId = id; |
6f19b6cf | 4831 | |
0f26b20f SK |
4832 | switch (m_afMode) { |
4833 | case AA_AFMODE_AUTO: | |
4834 | case AA_AFMODE_MACRO: | |
34d2b94a | 4835 | case AA_AFMODE_MANUAL: |
73f5ad60 | 4836 | ALOGV("[AF] OnAfTrigger - AUTO,MACRO,OFF (Mode %d) ", m_afMode); |
caea49e6 YJ |
4837 | // If flash is enable, Flash operation is executed before triggering AF |
4838 | if ((m_ctlInfo.flash.i_flashMode >= AA_AEMODE_ON_AUTO_FLASH) | |
e117f756 | 4839 | && (m_ctlInfo.flash.m_flashEnableFlg == false) |
caea49e6 | 4840 | && (m_cameraId == 0)) { |
73f5ad60 | 4841 | ALOGV("[Flash] AF Flash start with Mode (%d)", m_afMode); |
e117f756 YJ |
4842 | m_ctlInfo.flash.m_flashEnableFlg = true; |
4843 | m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON; | |
caea49e6 | 4844 | m_ctlInfo.flash.m_flashDecisionResult = false; |
e117f756 | 4845 | m_ctlInfo.flash.m_afFlashDoneFlg = true; |
caea49e6 | 4846 | } |
0f26b20f SK |
4847 | OnAfTriggerAutoMacro(id); |
4848 | break; | |
4849 | case AA_AFMODE_CONTINUOUS_VIDEO: | |
73f5ad60 | 4850 | ALOGV("[AF] OnAfTrigger - AA_AFMODE_CONTINUOUS_VIDEO (Mode %d) ", m_afMode); |
0f26b20f SK |
4851 | OnAfTriggerCAFVideo(id); |
4852 | break; | |
4853 | case AA_AFMODE_CONTINUOUS_PICTURE: | |
73f5ad60 | 4854 | ALOGV("[AF] OnAfTrigger - AA_AFMODE_CONTINUOUS_PICTURE (Mode %d) ", m_afMode); |
0f26b20f SK |
4855 | OnAfTriggerCAFPicture(id); |
4856 | break; | |
8e2c2fdb | 4857 | |
34d2b94a | 4858 | case AA_AFMODE_OFF: |
0f26b20f SK |
4859 | default: |
4860 | break; | |
4861 | } | |
4862 | } | |
4863 | ||
1422aff9 | 4864 | void ExynosCameraHWInterface2::OnAfTriggerAutoMacro(int /*id*/) |
0f26b20f SK |
4865 | { |
4866 | int nextState = NO_TRANSITION; | |
0f26b20f SK |
4867 | |
4868 | switch (m_afState) { | |
4869 | case HAL_AFSTATE_INACTIVE: | |
6caa0c80 SK |
4870 | case HAL_AFSTATE_PASSIVE_FOCUSED: |
4871 | case HAL_AFSTATE_SCANNING: | |
0f26b20f SK |
4872 | nextState = HAL_AFSTATE_NEEDS_COMMAND; |
4873 | m_IsAfTriggerRequired = true; | |
4874 | break; | |
4875 | case HAL_AFSTATE_NEEDS_COMMAND: | |
4876 | nextState = NO_TRANSITION; | |
4877 | break; | |
4878 | case HAL_AFSTATE_STARTED: | |
4879 | nextState = NO_TRANSITION; | |
4880 | break; | |
0f26b20f SK |
4881 | case HAL_AFSTATE_LOCKED: |
4882 | nextState = HAL_AFSTATE_NEEDS_COMMAND; | |
4883 | m_IsAfTriggerRequired = true; | |
4884 | break; | |
4885 | case HAL_AFSTATE_FAILED: | |
4886 | nextState = HAL_AFSTATE_NEEDS_COMMAND; | |
4887 | m_IsAfTriggerRequired = true; | |
4888 | break; | |
4889 | default: | |
4890 | break; | |
4891 | } | |
4892 | ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState); | |
4893 | if (nextState != NO_TRANSITION) | |
4894 | m_afState = nextState; | |
4895 | } | |
4896 | ||
/*
 * AF trigger handling for CONTINUOUS_PICTURE mode.
 *
 * Converts the current HAL AF state into the next state and, where needed,
 * reports the result to the framework via SetAfStateForService(). When a
 * trigger lands mid-scan, an AF-assist flash sequence may be started first
 * (back camera only, flash-enabled AE mode, flash not already running).
 * NOTE: flash flag writes are ordered deliberately; do not reorder.
 */
void ExynosCameraHWInterface2::OnAfTriggerCAFPicture(int id)
{
    int nextState = NO_TRANSITION;

    switch (m_afState) {
    case HAL_AFSTATE_INACTIVE:
        // No passive scan has run yet: report an immediate failure.
        nextState = HAL_AFSTATE_FAILED;
        SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
        break;
    case HAL_AFSTATE_NEEDS_COMMAND:
        // not used
        break;
    case HAL_AFSTATE_STARTED:
        // Scan just started: wait for the scan to settle before deciding.
        nextState = HAL_AFSTATE_NEEDS_DETERMINATION;
        m_AfHwStateFailed = false;
        break;
    case HAL_AFSTATE_SCANNING:
        nextState = HAL_AFSTATE_NEEDS_DETERMINATION;
        m_AfHwStateFailed = false;
        // If flash is enable, Flash operation is executed before triggering AF
        if ((m_ctlInfo.flash.i_flashMode >= AA_AEMODE_ON_AUTO_FLASH)
                && (m_ctlInfo.flash.m_flashEnableFlg == false)
                && (m_cameraId == 0)) {
            ALOGV("[AF Flash] AF Flash start with Mode (%d) state (%d) id (%d)", m_afMode, m_afState, id);
            m_ctlInfo.flash.m_flashEnableFlg = true;
            m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON;
            m_ctlInfo.flash.m_flashDecisionResult = false;
            m_ctlInfo.flash.m_afFlashDoneFlg = true;
        }
        break;
    case HAL_AFSTATE_NEEDS_DETERMINATION:
        // Determination already pending: nothing further to do.
        nextState = NO_TRANSITION;
        break;
    case HAL_AFSTATE_PASSIVE_FOCUSED:
        // Passive scan finished: lock AF and report the cached HW result.
        m_IsAfLockRequired = true;
        if (m_AfHwStateFailed) {
            ALOGE("(%s): [CAF] LAST : fail", __FUNCTION__);
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
            nextState = HAL_AFSTATE_FAILED;
        }
        else {
            ALOGV("(%s): [CAF] LAST : success", __FUNCTION__);
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
            nextState = HAL_AFSTATE_LOCKED;
        }
        m_AfHwStateFailed = false;
        break;
    case HAL_AFSTATE_LOCKED:
        nextState = NO_TRANSITION;
        break;
    case HAL_AFSTATE_FAILED:
        nextState = NO_TRANSITION;
        break;
    default:
        break;
    }
    ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
    if (nextState != NO_TRANSITION)
        m_afState = nextState;
}
4957 | ||
4958 | ||
1422aff9 | 4959 | void ExynosCameraHWInterface2::OnAfTriggerCAFVideo(int /*id*/) |
0f26b20f SK |
4960 | { |
4961 | int nextState = NO_TRANSITION; | |
0f26b20f SK |
4962 | |
4963 | switch (m_afState) { | |
4964 | case HAL_AFSTATE_INACTIVE: | |
4965 | nextState = HAL_AFSTATE_FAILED; | |
4966 | SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED); | |
4967 | break; | |
4968 | case HAL_AFSTATE_NEEDS_COMMAND: | |
4969 | // not used | |
4970 | break; | |
4971 | case HAL_AFSTATE_STARTED: | |
4972 | m_IsAfLockRequired = true; | |
4973 | nextState = HAL_AFSTATE_FAILED; | |
4974 | SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED); | |
4975 | break; | |
4976 | case HAL_AFSTATE_SCANNING: | |
4977 | m_IsAfLockRequired = true; | |
4978 | nextState = HAL_AFSTATE_FAILED; | |
4979 | SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED); | |
4980 | break; | |
4981 | case HAL_AFSTATE_NEEDS_DETERMINATION: | |
4982 | // not used | |
4983 | break; | |
4984 | case HAL_AFSTATE_PASSIVE_FOCUSED: | |
4985 | m_IsAfLockRequired = true; | |
4986 | SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED); | |
4987 | nextState = HAL_AFSTATE_LOCKED; | |
4988 | break; | |
4989 | case HAL_AFSTATE_LOCKED: | |
4990 | nextState = NO_TRANSITION; | |
4991 | break; | |
4992 | case HAL_AFSTATE_FAILED: | |
4993 | nextState = NO_TRANSITION; | |
4994 | break; | |
4995 | default: | |
4996 | break; | |
4997 | } | |
4998 | ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState); | |
4999 | if (nextState != NO_TRANSITION) | |
5000 | m_afState = nextState; | |
5001 | } | |
5002 | ||
73f5ad60 YJ |
5003 | void ExynosCameraHWInterface2::OnPrecaptureMeteringNotificationSensor() |
5004 | { | |
5005 | if (m_ctlInfo.flash.m_precaptureTriggerId > 0) { | |
5006 | // Just noti of pre-capture start | |
5007 | if (m_ctlInfo.ae.aeStateNoti != AE_STATE_PRECAPTURE) { | |
5008 | m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE, | |
5009 | ANDROID_CONTROL_AE_STATE_PRECAPTURE, | |
5010 | m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie); | |
5011 | ALOGV("(%s) ANDROID_CONTROL_AE_STATE_PRECAPTURE (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt); | |
5012 | m_notifyCb(CAMERA2_MSG_AUTOWB, | |
5013 | ANDROID_CONTROL_AWB_STATE_CONVERGED, | |
5014 | m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie); | |
5015 | m_ctlInfo.ae.aeStateNoti = AE_STATE_PRECAPTURE; | |
5016 | } | |
5017 | } | |
5018 | } | |
5019 | ||
/*
 * ISP-side pre-capture notification.
 *
 * Completes (or advances) the pre-capture sequence started by
 * OnPrecaptureMeteringTriggerStart(). While a trigger is outstanding:
 *  - flash path: once the flash auto sequence is done, emit CONVERGED and
 *    clear the trigger; if PRECAPTURE was never notified, notify it first
 *    and wait for the next call. Capture-phase flash states here are
 *    unexpected and are recovered by forcing AUTO_DONE + CONVERGED.
 *  - non-flash path: emit CONVERGED once PRECAPTURE has been notified.
 * Clearing m_precaptureTriggerId to 0 marks the sequence finished.
 */
void ExynosCameraHWInterface2::OnPrecaptureMeteringNotificationISP()
{
    if (m_ctlInfo.flash.m_precaptureTriggerId > 0) {
        if (m_ctlInfo.flash.m_flashEnableFlg) {
            // flash case
            switch (m_ctlInfo.flash.m_flashCnt) {
            case IS_FLASH_STATE_AUTO_DONE:
            case IS_FLASH_STATE_AUTO_OFF:
                if (m_ctlInfo.ae.aeStateNoti == AE_STATE_PRECAPTURE) {
                    // End notification
                    m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE,
                            ANDROID_CONTROL_AE_STATE_CONVERGED,
                            m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
                    ALOGV("(%s) ANDROID_CONTROL_AE_STATE_CONVERGED (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt);
                    m_notifyCb(CAMERA2_MSG_AUTOWB,
                            ANDROID_CONTROL_AWB_STATE_CONVERGED,
                            m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
                    m_ctlInfo.flash.m_precaptureTriggerId = 0;
                } else {
                    // PRECAPTURE was never announced: announce it now and
                    // finish on a later call.
                    m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE,
                            ANDROID_CONTROL_AE_STATE_PRECAPTURE,
                            m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
                    ALOGV("(%s) ANDROID_CONTROL_AE_STATE_PRECAPTURE (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt);
                    m_notifyCb(CAMERA2_MSG_AUTOWB,
                            ANDROID_CONTROL_AWB_STATE_CONVERGED,
                            m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
                    m_ctlInfo.ae.aeStateNoti = AE_STATE_PRECAPTURE;
                }
                break;
            case IS_FLASH_STATE_CAPTURE:
            case IS_FLASH_STATE_CAPTURE_WAIT:
            case IS_FLASH_STATE_CAPTURE_JPEG:
            case IS_FLASH_STATE_CAPTURE_END:
                // Unexpected: pre-capture trigger while already in the
                // capture phase. Recover by declaring the sequence done.
                ALOGV("(%s) INVALID flash state count. (%d)", __FUNCTION__, (int)m_ctlInfo.flash.m_flashCnt);
                m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_DONE;
                m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE,
                        ANDROID_CONTROL_AE_STATE_CONVERGED,
                        m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
                m_notifyCb(CAMERA2_MSG_AUTOWB,
                        ANDROID_CONTROL_AWB_STATE_CONVERGED,
                        m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
                m_ctlInfo.flash.m_precaptureTriggerId = 0;
                break;
                // NOTE(review): other flash states intentionally fall
                // through with no action — sequence still in progress.
            }
        } else {
            // non-flash case
            if (m_ctlInfo.ae.aeStateNoti == AE_STATE_PRECAPTURE) {
                m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE,
                        ANDROID_CONTROL_AE_STATE_CONVERGED,
                        m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
                ALOGV("(%s) ANDROID_CONTROL_AE_STATE_CONVERGED (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt);
                m_notifyCb(CAMERA2_MSG_AUTOWB,
                        ANDROID_CONTROL_AWB_STATE_CONVERGED,
                        m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
                m_ctlInfo.flash.m_precaptureTriggerId = 0;
            }
        }
    }
}
5079 | ||
0f26b20f SK |
5080 | void ExynosCameraHWInterface2::OnAfNotification(enum aa_afstate noti) |
5081 | { | |
5082 | switch (m_afMode) { | |
5083 | case AA_AFMODE_AUTO: | |
5084 | case AA_AFMODE_MACRO: | |
5085 | OnAfNotificationAutoMacro(noti); | |
5086 | break; | |
5087 | case AA_AFMODE_CONTINUOUS_VIDEO: | |
5088 | OnAfNotificationCAFVideo(noti); | |
5089 | break; | |
5090 | case AA_AFMODE_CONTINUOUS_PICTURE: | |
5091 | OnAfNotificationCAFPicture(noti); | |
5092 | break; | |
5093 | case AA_AFMODE_OFF: | |
5094 | default: | |
5095 | break; | |
5096 | } | |
5097 | } | |
5098 | ||
/*
 * HW AF state notification handler for AUTO/MACRO (single-shot) modes.
 *
 * A transition table keyed on (m_afState, noti): nextState stays
 * NO_TRANSITION when the notification carries no new information, and
 * bWrongTransition flags notifications that are illegal for the current
 * state (logged and ignored). On scan completion, if an AF-assist flash
 * sequence is running, the flash sub-state machine is advanced first and
 * the final LOCKED/FAILED report is deferred until the flash auto
 * sequence is done.
 */
void ExynosCameraHWInterface2::OnAfNotificationAutoMacro(enum aa_afstate noti)
{
    int nextState = NO_TRANSITION;
    bool bWrongTransition = false;

    if (m_afState == HAL_AFSTATE_INACTIVE || m_afState == HAL_AFSTATE_NEEDS_COMMAND) {
        // No scan in progress: every notification is a no-op.
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
        case AA_AFSTATE_ACTIVE_SCAN:
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
        case AA_AFSTATE_AF_FAILED_FOCUS:
        default:
            nextState = NO_TRANSITION;
            break;
        }
    }
    else if (m_afState == HAL_AFSTATE_STARTED) {
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
            nextState = NO_TRANSITION;
            break;
        case AA_AFSTATE_ACTIVE_SCAN:
            // Hardware confirmed the scan began.
            nextState = HAL_AFSTATE_SCANNING;
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN);
            break;
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
            nextState = NO_TRANSITION;
            break;
        case AA_AFSTATE_AF_FAILED_FOCUS:
            nextState = NO_TRANSITION;
            break;
        default:
            bWrongTransition = true;
            break;
        }
    }
    else if (m_afState == HAL_AFSTATE_SCANNING) {
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
            bWrongTransition = true;
            break;
        case AA_AFSTATE_ACTIVE_SCAN:
            nextState = NO_TRANSITION;
            break;
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
            // If Flash mode is enable, after AF execute pre-capture metering
            if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
                switch (m_ctlInfo.flash.m_flashCnt) {
                case IS_FLASH_STATE_ON_DONE:
                    // Flash is lit: lock AE/AWB and wait before reporting.
                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_AE_AWB_LOCK;
                    nextState = NO_TRANSITION;
                    break;
                case IS_FLASH_STATE_AUTO_DONE:
                    // Flash metering finished: report focus success now.
                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF;
                    nextState = HAL_AFSTATE_LOCKED;
                    SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
                    break;
                default:
                    nextState = NO_TRANSITION;
                }
            } else {
                nextState = HAL_AFSTATE_LOCKED;
                SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
            }
            break;
        case AA_AFSTATE_AF_FAILED_FOCUS:
            // If Flash mode is enable, after AF execute pre-capture metering
            if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
                switch (m_ctlInfo.flash.m_flashCnt) {
                case IS_FLASH_STATE_ON_DONE:
                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_AE_AWB_LOCK;
                    nextState = NO_TRANSITION;
                    break;
                case IS_FLASH_STATE_AUTO_DONE:
                    // Flash metering finished: report focus failure now.
                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF;
                    nextState = HAL_AFSTATE_FAILED;
                    SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
                    break;
                default:
                    nextState = NO_TRANSITION;
                }
            } else {
                nextState = HAL_AFSTATE_FAILED;
                SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
            }
            break;
        default:
            bWrongTransition = true;
            break;
        }
    }
    else if (m_afState == HAL_AFSTATE_LOCKED) {
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
        case AA_AFSTATE_ACTIVE_SCAN:
            bWrongTransition = true;
            break;
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
            nextState = NO_TRANSITION;
            break;
        case AA_AFSTATE_AF_FAILED_FOCUS:
        default:
            bWrongTransition = true;
            break;
        }
    }
    else if (m_afState == HAL_AFSTATE_FAILED) {
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
        case AA_AFSTATE_ACTIVE_SCAN:
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
            bWrongTransition = true;
            break;
        case AA_AFSTATE_AF_FAILED_FOCUS:
            nextState = NO_TRANSITION;
            break;
        default:
            bWrongTransition = true;
            break;
        }
    }
    if (bWrongTransition) {
        ALOGV("(%s): Wrong Transition state(%d) noti(%d)", __FUNCTION__, m_afState, noti);
        return;
    }
    ALOGV("(%s): State (%d) -> (%d) by (%d)", __FUNCTION__, m_afState, nextState, noti);
    if (nextState != NO_TRANSITION)
        m_afState = nextState;
}
5228 | ||
5229 | void ExynosCameraHWInterface2::OnAfNotificationCAFPicture(enum aa_afstate noti) | |
5230 | { | |
5231 | int nextState = NO_TRANSITION; | |
5232 | bool bWrongTransition = false; | |
5233 | ||
5234 | if (m_afState == HAL_AFSTATE_INACTIVE) { | |
5235 | switch (noti) { | |
5236 | case AA_AFSTATE_INACTIVE: | |
5237 | case AA_AFSTATE_ACTIVE_SCAN: | |
5238 | case AA_AFSTATE_AF_ACQUIRED_FOCUS: | |
5239 | case AA_AFSTATE_AF_FAILED_FOCUS: | |
5240 | default: | |
5241 | nextState = NO_TRANSITION; | |
5242 | break; | |
5243 | } | |
40acdcc8 YJ |
5244 | // Check AF notification after triggering |
5245 | if (m_ctlInfo.af.m_afTriggerTimeOut > 0) { | |
5246 | if (m_ctlInfo.af.m_afTriggerTimeOut > 5) { | |
5247 | ALOGE("(%s) AF notification error - try to re-trigger mode (%)", __FUNCTION__, m_afMode); | |
5248 | SetAfMode(AA_AFMODE_OFF); | |
5249 | SetAfMode(m_afMode); | |
5250 | m_ctlInfo.af.m_afTriggerTimeOut = 0; | |
5251 | } else { | |
5252 | m_ctlInfo.af.m_afTriggerTimeOut++; | |
5253 | } | |
5254 | } | |
0f26b20f SK |
5255 | } |
5256 | else if (m_afState == HAL_AFSTATE_STARTED) { | |
5257 | switch (noti) { | |
5258 | case AA_AFSTATE_INACTIVE: | |
5259 | nextState = NO_TRANSITION; | |
5260 | break; | |
5261 | case AA_AFSTATE_ACTIVE_SCAN: | |
5262 | nextState = HAL_AFSTATE_SCANNING; | |
5263 | SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN); | |
40acdcc8 | 5264 | m_ctlInfo.af.m_afTriggerTimeOut = 0; |
0f26b20f SK |
5265 | break; |
5266 | case AA_AFSTATE_AF_ACQUIRED_FOCUS: | |
5267 | nextState = HAL_AFSTATE_PASSIVE_FOCUSED; | |
5268 | SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED); | |
40acdcc8 | 5269 | m_ctlInfo.af.m_afTriggerTimeOut = 0; |
0f26b20f SK |
5270 | break; |
5271 | case AA_AFSTATE_AF_FAILED_FOCUS: | |
cd13bb78 SK |
5272 | //nextState = HAL_AFSTATE_FAILED; |
5273 | //SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED); | |
5274 | nextState = NO_TRANSITION; | |
0f26b20f SK |
5275 | break; |
5276 | default: | |
5277 | bWrongTransition = true; | |
5278 | break; | |
5279 | } | |
5280 | } | |
5281 | else if (m_afState == HAL_AFSTATE_SCANNING) { | |
5282 | switch (noti) { | |
5283 | case AA_AFSTATE_INACTIVE: | |
9900d0c4 | 5284 | nextState = NO_TRANSITION; |
0f26b20f SK |
5285 | break; |
5286 | case AA_AFSTATE_ACTIVE_SCAN: | |
5287 | nextState = NO_TRANSITION; | |
9900d0c4 | 5288 | m_AfHwStateFailed = false; |
0f26b20f SK |
5289 | break; |
5290 | case AA_AFSTATE_AF_ACQUIRED_FOCUS: | |
5291 | nextState = HAL_AFSTATE_PASSIVE_FOCUSED; | |
9900d0c4 | 5292 | m_AfHwStateFailed = false; |
0f26b20f SK |
5293 | SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED); |
5294 | break; | |
5295 | case AA_AFSTATE_AF_FAILED_FOCUS: | |
9900d0c4 SK |
5296 | nextState = HAL_AFSTATE_PASSIVE_FOCUSED; |
5297 | m_AfHwStateFailed = true; | |
5298 | SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED); | |
0f26b20f SK |
5299 | break; |
5300 | default: | |
5301 | bWrongTransition = true; | |
5302 | break; | |
5303 | } | |
5304 | } | |
5305 | else if (m_afState == HAL_AFSTATE_PASSIVE_FOCUSED) { | |
5306 | switch (noti) { | |
5307 | case AA_AFSTATE_INACTIVE: | |
9900d0c4 | 5308 | nextState = NO_TRANSITION; |
0f26b20f SK |
5309 | break; |
5310 | case AA_AFSTATE_ACTIVE_SCAN: | |
5311 | nextState = HAL_AFSTATE_SCANNING; | |
9900d0c4 | 5312 | m_AfHwStateFailed = false; |
0f26b20f SK |
5313 | SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN); |
5314 | break; | |
5315 | case AA_AFSTATE_AF_ACQUIRED_FOCUS: | |
5316 | nextState = NO_TRANSITION; | |
9900d0c4 | 5317 | m_AfHwStateFailed = false; |
0f26b20f SK |
5318 | break; |
5319 | case AA_AFSTATE_AF_FAILED_FOCUS: | |
9900d0c4 SK |
5320 | nextState = NO_TRANSITION; |
5321 | m_AfHwStateFailed = true; | |
0f26b20f SK |
5322 | break; |
5323 | default: | |
5324 | bWrongTransition = true; | |
5325 | break; | |
5326 | } | |
5327 | } | |
5328 | else if (m_afState == HAL_AFSTATE_NEEDS_DETERMINATION) { | |
f7f8d321 YJ |
5329 | //Skip notification in case of flash, wait the end of flash on |
5330 | if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) { | |
5331 | if (m_ctlInfo.flash.m_flashCnt < IS_FLASH_STATE_ON_DONE) | |
5332 | return; | |
5333 | } | |
0f26b20f SK |
5334 | switch (noti) { |
5335 | case AA_AFSTATE_INACTIVE: | |
9900d0c4 | 5336 | nextState = NO_TRANSITION; |
0f26b20f SK |
5337 | break; |
5338 | case AA_AFSTATE_ACTIVE_SCAN: | |
5339 | nextState = NO_TRANSITION; | |
5340 | break; | |
5341 | case AA_AFSTATE_AF_ACQUIRED_FOCUS: | |
caea49e6 | 5342 | // If Flash mode is enable, after AF execute pre-capture metering |
e117f756 | 5343 | if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) { |
e117f756 YJ |
5344 | switch (m_ctlInfo.flash.m_flashCnt) { |
5345 | case IS_FLASH_STATE_ON_DONE: | |
f7f8d321 | 5346 | ALOGV("[AF Flash] AUTO start with Mode (%d) state (%d) noti (%d)", m_afMode, m_afState, (int)noti); |
e117f756 | 5347 | m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_AE_AWB_LOCK; |
4a9565ae | 5348 | nextState = NO_TRANSITION; |
e117f756 YJ |
5349 | break; |
5350 | case IS_FLASH_STATE_AUTO_DONE: | |
f7f8d321 | 5351 | ALOGV("[AF Flash] AUTO end with Mode (%d) state (%d) noti (%d)", m_afMode, m_afState, (int)noti); |
4a9565ae | 5352 | m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF; |
caea49e6 YJ |
5353 | m_IsAfLockRequired = true; |
5354 | nextState = HAL_AFSTATE_LOCKED; | |
5355 | SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED); | |
e117f756 YJ |
5356 | break; |
5357 | default: | |
caea49e6 YJ |
5358 | nextState = NO_TRANSITION; |
5359 | } | |
5360 | } else { | |
5361 | m_IsAfLockRequired = true; | |
5362 | nextState = HAL_AFSTATE_LOCKED; | |
5363 | SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED); | |
5364 | } | |
0f26b20f SK |
5365 | break; |
5366 | case AA_AFSTATE_AF_FAILED_FOCUS: | |
caea49e6 | 5367 | // If Flash mode is enable, after AF execute pre-capture metering |
e117f756 | 5368 | if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) { |
e117f756 YJ |
5369 | switch (m_ctlInfo.flash.m_flashCnt) { |
5370 | case IS_FLASH_STATE_ON_DONE: | |
f7f8d321 | 5371 | ALOGV("[AF Flash] AUTO start with Mode (%d) state (%d) noti (%d)", m_afMode, m_afState, (int)noti); |
e117f756 | 5372 | m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_AE_AWB_LOCK; |
4a9565ae | 5373 | nextState = NO_TRANSITION; |
e117f756 YJ |
5374 | break; |
5375 | case IS_FLASH_STATE_AUTO_DONE: | |
f7f8d321 | 5376 | ALOGV("[AF Flash] AUTO end with Mode (%d) state (%d) noti (%d)", m_afMode, m_afState, (int)noti); |
4a9565ae | 5377 | m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF; |
caea49e6 YJ |
5378 | m_IsAfLockRequired = true; |
5379 | nextState = HAL_AFSTATE_FAILED; | |
5380 | SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED); | |
e117f756 YJ |
5381 | break; |
5382 | default: | |
caea49e6 YJ |
5383 | nextState = NO_TRANSITION; |
5384 | } | |
5385 | } else { | |
5386 | m_IsAfLockRequired = true; | |
5387 | nextState = HAL_AFSTATE_FAILED; | |
5388 | SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED); | |
5389 | } | |
0f26b20f SK |
5390 | break; |
5391 | default: | |
5392 | bWrongTransition = true; | |
5393 | break; | |
5394 | } | |
5395 | } | |
5396 | else if (m_afState == HAL_AFSTATE_LOCKED) { | |
5397 | switch (noti) { | |
5398 | case AA_AFSTATE_INACTIVE: | |
5399 | nextState = NO_TRANSITION; | |
5400 | break; | |
5401 | case AA_AFSTATE_ACTIVE_SCAN: | |
5402 | bWrongTransition = true; | |
5403 | break; | |
5404 | case AA_AFSTATE_AF_ACQUIRED_FOCUS: | |
5405 | nextState = NO_TRANSITION; | |
5406 | break; | |
5407 | case AA_AFSTATE_AF_FAILED_FOCUS: | |
5408 | default: | |
5409 | bWrongTransition = true; | |
5410 | break; | |
5411 | } | |
5412 | } | |
5413 | else if (m_afState == HAL_AFSTATE_FAILED) { | |
5414 | switch (noti) { | |
5415 | case AA_AFSTATE_INACTIVE: | |
9900d0c4 SK |
5416 | bWrongTransition = true; |
5417 | break; | |
0f26b20f | 5418 | case AA_AFSTATE_ACTIVE_SCAN: |
9900d0c4 SK |
5419 | nextState = HAL_AFSTATE_SCANNING; |
5420 | break; | |
0f26b20f SK |
5421 | case AA_AFSTATE_AF_ACQUIRED_FOCUS: |
5422 | bWrongTransition = true; | |
5423 | break; | |
5424 | case AA_AFSTATE_AF_FAILED_FOCUS: | |
5425 | nextState = NO_TRANSITION; | |
5426 | break; | |
5427 | default: | |
5428 | bWrongTransition = true; | |
5429 | break; | |
5430 | } | |
5431 | } | |
5432 | if (bWrongTransition) { | |
5433 | ALOGV("(%s): Wrong Transition state(%d) noti(%d)", __FUNCTION__, m_afState, noti); | |
5434 | return; | |
5435 | } | |
5436 | ALOGV("(%s): State (%d) -> (%d) by (%d)", __FUNCTION__, m_afState, nextState, noti); | |
5437 | if (nextState != NO_TRANSITION) | |
5438 | m_afState = nextState; | |
5439 | } | |
5440 | ||
/*
 * HW AF state notification handler for CONTINUOUS_VIDEO mode.
 *
 * Transition table keyed on (m_afState, noti). Unlike the picture variant
 * there is no flash interplay and no m_AfHwStateFailed caching: a HW
 * failure while STARTED or PASSIVE_FOCUSED is reported immediately as
 * NOT_FOCUSED_LOCKED. bWrongTransition flags notifications illegal for
 * the current state (logged, then ignored).
 */
void ExynosCameraHWInterface2::OnAfNotificationCAFVideo(enum aa_afstate noti)
{
    int nextState = NO_TRANSITION;
    bool bWrongTransition = false;

    if (m_afState == HAL_AFSTATE_INACTIVE) {
        // No scan in progress: every notification is a no-op.
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
        case AA_AFSTATE_ACTIVE_SCAN:
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
        case AA_AFSTATE_AF_FAILED_FOCUS:
        default:
            nextState = NO_TRANSITION;
            break;
        }
    }
    else if (m_afState == HAL_AFSTATE_STARTED) {
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
            nextState = NO_TRANSITION;
            break;
        case AA_AFSTATE_ACTIVE_SCAN:
            // Hardware confirmed the passive scan began.
            nextState = HAL_AFSTATE_SCANNING;
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN);
            break;
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
            nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
            break;
        case AA_AFSTATE_AF_FAILED_FOCUS:
            nextState = HAL_AFSTATE_FAILED;
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
            break;
        default:
            bWrongTransition = true;
            break;
        }
    }
    else if (m_afState == HAL_AFSTATE_SCANNING) {
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
            bWrongTransition = true;
            break;
        case AA_AFSTATE_ACTIVE_SCAN:
            nextState = NO_TRANSITION;
            break;
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
            nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
            break;
        case AA_AFSTATE_AF_FAILED_FOCUS:
            // Keep scanning; video CAF does not latch a mid-scan failure.
            nextState = NO_TRANSITION;
            break;
        default:
            bWrongTransition = true;
            break;
        }
    }
    else if (m_afState == HAL_AFSTATE_PASSIVE_FOCUSED) {
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
            bWrongTransition = true;
            break;
        case AA_AFSTATE_ACTIVE_SCAN:
            // Scene changed: hardware restarted the passive scan.
            nextState = HAL_AFSTATE_SCANNING;
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN);
            break;
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
            nextState = NO_TRANSITION;
            break;
        case AA_AFSTATE_AF_FAILED_FOCUS:
            nextState = HAL_AFSTATE_FAILED;
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
            // TODO : needs NO_TRANSITION ?
            break;
        default:
            bWrongTransition = true;
            break;
        }
    }
    else if (m_afState == HAL_AFSTATE_NEEDS_DETERMINATION) {
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
            bWrongTransition = true;
            break;
        case AA_AFSTATE_ACTIVE_SCAN:
            nextState = NO_TRANSITION;
            break;
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
            // Determination resolved as success: lock the lens.
            m_IsAfLockRequired = true;
            nextState = HAL_AFSTATE_LOCKED;
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
            break;
        case AA_AFSTATE_AF_FAILED_FOCUS:
            nextState = HAL_AFSTATE_FAILED;
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
            break;
        default:
            bWrongTransition = true;
            break;
        }
    }
    else if (m_afState == HAL_AFSTATE_LOCKED) {
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
            nextState = NO_TRANSITION;
            break;
        case AA_AFSTATE_ACTIVE_SCAN:
            bWrongTransition = true;
            break;
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
            nextState = NO_TRANSITION;
            break;
        case AA_AFSTATE_AF_FAILED_FOCUS:
        default:
            bWrongTransition = true;
            break;
        }
    }
    else if (m_afState == HAL_AFSTATE_FAILED) {
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
        case AA_AFSTATE_ACTIVE_SCAN:
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
            bWrongTransition = true;
            break;
        case AA_AFSTATE_AF_FAILED_FOCUS:
            nextState = NO_TRANSITION;
            break;
        default:
            bWrongTransition = true;
            break;
        }
    }
    if (bWrongTransition) {
        ALOGV("(%s): Wrong Transition state(%d) noti(%d)", __FUNCTION__, m_afState, noti);
        return;
    }
    ALOGV("(%s): State (%d) -> (%d) by (%d)", __FUNCTION__, m_afState, nextState, noti);
    if (nextState != NO_TRANSITION)
        m_afState = nextState;
}
5583 | ||
5584 | void ExynosCameraHWInterface2::OnAfCancel(int id) | |
5585 | { | |
8e2c2fdb SK |
5586 | m_afTriggerId = id; |
5587 | ||
0f26b20f SK |
5588 | switch (m_afMode) { |
5589 | case AA_AFMODE_AUTO: | |
5590 | case AA_AFMODE_MACRO: | |
8e2c2fdb | 5591 | case AA_AFMODE_OFF: |
c48f0170 | 5592 | case AA_AFMODE_MANUAL: |
0f26b20f SK |
5593 | OnAfCancelAutoMacro(id); |
5594 | break; | |
5595 | case AA_AFMODE_CONTINUOUS_VIDEO: | |
5596 | OnAfCancelCAFVideo(id); | |
5597 | break; | |
5598 | case AA_AFMODE_CONTINUOUS_PICTURE: | |
5599 | OnAfCancelCAFPicture(id); | |
5600 | break; | |
0f26b20f SK |
5601 | default: |
5602 | break; | |
5603 | } | |
5604 | } | |
5605 | ||
1422aff9 | 5606 | void ExynosCameraHWInterface2::OnAfCancelAutoMacro(int /*id*/) |
0f26b20f SK |
5607 | { |
5608 | int nextState = NO_TRANSITION; | |
0f26b20f | 5609 | |
e117f756 YJ |
5610 | if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) { |
5611 | m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF; | |
6f19b6cf | 5612 | } |
0f26b20f SK |
5613 | switch (m_afState) { |
5614 | case HAL_AFSTATE_INACTIVE: | |
5615 | nextState = NO_TRANSITION; | |
cd13bb78 | 5616 | SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE); |
0f26b20f SK |
5617 | break; |
5618 | case HAL_AFSTATE_NEEDS_COMMAND: | |
5619 | case HAL_AFSTATE_STARTED: | |
5620 | case HAL_AFSTATE_SCANNING: | |
5621 | case HAL_AFSTATE_LOCKED: | |
5622 | case HAL_AFSTATE_FAILED: | |
5623 | SetAfMode(AA_AFMODE_OFF); | |
5624 | SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE); | |
5625 | nextState = HAL_AFSTATE_INACTIVE; | |
5626 | break; | |
5627 | default: | |
5628 | break; | |
5629 | } | |
5630 | ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState); | |
5631 | if (nextState != NO_TRANSITION) | |
5632 | m_afState = nextState; | |
5633 | } | |
5634 | ||
1422aff9 | 5635 | void ExynosCameraHWInterface2::OnAfCancelCAFPicture(int /*id*/) |
0f26b20f SK |
5636 | { |
5637 | int nextState = NO_TRANSITION; | |
0f26b20f SK |
5638 | |
5639 | switch (m_afState) { | |
5640 | case HAL_AFSTATE_INACTIVE: | |
5641 | nextState = NO_TRANSITION; | |
5642 | break; | |
5643 | case HAL_AFSTATE_NEEDS_COMMAND: | |
5644 | case HAL_AFSTATE_STARTED: | |
5645 | case HAL_AFSTATE_SCANNING: | |
5646 | case HAL_AFSTATE_LOCKED: | |
5647 | case HAL_AFSTATE_FAILED: | |
5648 | case HAL_AFSTATE_NEEDS_DETERMINATION: | |
5649 | case HAL_AFSTATE_PASSIVE_FOCUSED: | |
5650 | SetAfMode(AA_AFMODE_OFF); | |
5651 | SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE); | |
5652 | SetAfMode(AA_AFMODE_CONTINUOUS_PICTURE); | |
5653 | nextState = HAL_AFSTATE_INACTIVE; | |
5654 | break; | |
5655 | default: | |
5656 | break; | |
5657 | } | |
5658 | ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState); | |
5659 | if (nextState != NO_TRANSITION) | |
5660 | m_afState = nextState; | |
5661 | } | |
5662 | ||
1422aff9 | 5663 | void ExynosCameraHWInterface2::OnAfCancelCAFVideo(int /*id*/) |
0f26b20f SK |
5664 | { |
5665 | int nextState = NO_TRANSITION; | |
0f26b20f SK |
5666 | |
5667 | switch (m_afState) { | |
5668 | case HAL_AFSTATE_INACTIVE: | |
5669 | nextState = NO_TRANSITION; | |
5670 | break; | |
5671 | case HAL_AFSTATE_NEEDS_COMMAND: | |
5672 | case HAL_AFSTATE_STARTED: | |
5673 | case HAL_AFSTATE_SCANNING: | |
5674 | case HAL_AFSTATE_LOCKED: | |
5675 | case HAL_AFSTATE_FAILED: | |
5676 | case HAL_AFSTATE_NEEDS_DETERMINATION: | |
5677 | case HAL_AFSTATE_PASSIVE_FOCUSED: | |
5678 | SetAfMode(AA_AFMODE_OFF); | |
5679 | SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE); | |
5680 | SetAfMode(AA_AFMODE_CONTINUOUS_VIDEO); | |
5681 | nextState = HAL_AFSTATE_INACTIVE; | |
5682 | break; | |
5683 | default: | |
5684 | break; | |
5685 | } | |
5686 | ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState); | |
5687 | if (nextState != NO_TRANSITION) | |
5688 | m_afState = nextState; | |
5689 | } | |
5690 | ||
5691 | void ExynosCameraHWInterface2::SetAfStateForService(int newState) | |
5692 | { | |
8e2c2fdb SK |
5693 | if (m_serviceAfState != newState || newState == 0) |
5694 | m_notifyCb(CAMERA2_MSG_AUTOFOCUS, newState, m_afTriggerId, 0, m_callbackCookie); | |
0f26b20f | 5695 | m_serviceAfState = newState; |
0f26b20f SK |
5696 | } |
5697 | ||
// Return the AF state most recently reported to the camera service.
int ExynosCameraHWInterface2::GetAfStateForService()
{
    return m_serviceAfState;
}
5702 | ||
5703 | void ExynosCameraHWInterface2::SetAfMode(enum aa_afmode afMode) | |
5704 | { | |
5705 | if (m_afMode != afMode) { | |
ca714238 | 5706 | if (m_IsAfModeUpdateRequired && m_afMode != AA_AFMODE_OFF) { |
0f26b20f SK |
5707 | m_afMode2 = afMode; |
5708 | ALOGV("(%s): pending(%d) and new(%d)", __FUNCTION__, m_afMode, afMode); | |
5709 | } | |
5710 | else { | |
5711 | ALOGV("(%s): current(%d) new(%d)", __FUNCTION__, m_afMode, afMode); | |
5712 | m_IsAfModeUpdateRequired = true; | |
5713 | m_afMode = afMode; | |
c48f0170 SK |
5714 | SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE); |
5715 | m_afState = HAL_AFSTATE_INACTIVE; | |
8e2c2fdb | 5716 | } |
0f26b20f SK |
5717 | } |
5718 | } | |
5719 | ||
54f4971e SK |
5720 | void ExynosCameraHWInterface2::m_setExifFixedAttribute(void) |
5721 | { | |
5722 | char property[PROPERTY_VALUE_MAX]; | |
5723 | ||
5724 | //2 0th IFD TIFF Tags | |
5725 | //3 Maker | |
5726 | property_get("ro.product.brand", property, EXIF_DEF_MAKER); | |
5727 | strncpy((char *)mExifInfo.maker, property, | |
5728 | sizeof(mExifInfo.maker) - 1); | |
5729 | mExifInfo.maker[sizeof(mExifInfo.maker) - 1] = '\0'; | |
5730 | //3 Model | |
5731 | property_get("ro.product.model", property, EXIF_DEF_MODEL); | |
5732 | strncpy((char *)mExifInfo.model, property, | |
5733 | sizeof(mExifInfo.model) - 1); | |
5734 | mExifInfo.model[sizeof(mExifInfo.model) - 1] = '\0'; | |
5735 | //3 Software | |
5736 | property_get("ro.build.id", property, EXIF_DEF_SOFTWARE); | |
5737 | strncpy((char *)mExifInfo.software, property, | |
5738 | sizeof(mExifInfo.software) - 1); | |
5739 | mExifInfo.software[sizeof(mExifInfo.software) - 1] = '\0'; | |
5740 | ||
5741 | //3 YCbCr Positioning | |
5742 | mExifInfo.ycbcr_positioning = EXIF_DEF_YCBCR_POSITIONING; | |
5743 | ||
5744 | //2 0th IFD Exif Private Tags | |
5745 | //3 F Number | |
e00f6591 | 5746 | mExifInfo.fnumber.num = (uint32_t)(m_camera2->m_curCameraInfo->fnumber * EXIF_DEF_FNUMBER_DEN); |
54f4971e SK |
5747 | mExifInfo.fnumber.den = EXIF_DEF_FNUMBER_DEN; |
5748 | //3 Exposure Program | |
5749 | mExifInfo.exposure_program = EXIF_DEF_EXPOSURE_PROGRAM; | |
5750 | //3 Exif Version | |
5751 | memcpy(mExifInfo.exif_version, EXIF_DEF_EXIF_VERSION, sizeof(mExifInfo.exif_version)); | |
5752 | //3 Aperture | |
e00f6591 SK |
5753 | double av = APEX_FNUM_TO_APERTURE((double)mExifInfo.fnumber.num/mExifInfo.fnumber.den); |
5754 | mExifInfo.aperture.num = (uint32_t)(av*EXIF_DEF_APEX_DEN); | |
54f4971e SK |
5755 | mExifInfo.aperture.den = EXIF_DEF_APEX_DEN; |
5756 | //3 Maximum lens aperture | |
5757 | mExifInfo.max_aperture.num = mExifInfo.aperture.num; | |
5758 | mExifInfo.max_aperture.den = mExifInfo.aperture.den; | |
5759 | //3 Lens Focal Length | |
e00f6591 SK |
5760 | mExifInfo.focal_length.num = (uint32_t)(m_camera2->m_curCameraInfo->focalLength * 100); |
5761 | ||
54f4971e SK |
5762 | mExifInfo.focal_length.den = EXIF_DEF_FOCAL_LEN_DEN; |
5763 | //3 User Comments | |
5764 | strcpy((char *)mExifInfo.user_comment, EXIF_DEF_USERCOMMENTS); | |
5765 | //3 Color Space information | |
5766 | mExifInfo.color_space = EXIF_DEF_COLOR_SPACE; | |
5767 | //3 Exposure Mode | |
5768 | mExifInfo.exposure_mode = EXIF_DEF_EXPOSURE_MODE; | |
5769 | ||
5770 | //2 0th IFD GPS Info Tags | |
5771 | unsigned char gps_version[4] = { 0x02, 0x02, 0x00, 0x00 }; | |
5772 | memcpy(mExifInfo.gps_version_id, gps_version, sizeof(gps_version)); | |
5773 | ||
5774 | //2 1th IFD TIFF Tags | |
5775 | mExifInfo.compression_scheme = EXIF_DEF_COMPRESSION; | |
5776 | mExifInfo.x_resolution.num = EXIF_DEF_RESOLUTION_NUM; | |
5777 | mExifInfo.x_resolution.den = EXIF_DEF_RESOLUTION_DEN; | |
5778 | mExifInfo.y_resolution.num = EXIF_DEF_RESOLUTION_NUM; | |
5779 | mExifInfo.y_resolution.den = EXIF_DEF_RESOLUTION_DEN; | |
5780 | mExifInfo.resolution_unit = EXIF_DEF_RESOLUTION_UNIT; | |
5781 | } | |
5782 | ||
5783 | void ExynosCameraHWInterface2::m_setExifChangedAttribute(exif_attribute_t *exifInfo, ExynosRect *rect, | |
1422aff9 | 5784 | camera2_shot_ext *currentEntry) |
54f4971e | 5785 | { |
48728d49 SK |
5786 | camera2_dm *dm = &(currentEntry->shot.dm); |
5787 | camera2_ctl *ctl = &(currentEntry->shot.ctl); | |
54f4971e SK |
5788 | |
5789 | ALOGV("(%s): framecnt(%d) exp(%lld) iso(%d)", __FUNCTION__, ctl->request.frameCount, dm->sensor.exposureTime,dm->aa.isoValue ); | |
5790 | if (!ctl->request.frameCount) | |
5791 | return; | |
5792 | //2 0th IFD TIFF Tags | |
5793 | //3 Width | |
5794 | exifInfo->width = rect->w; | |
5795 | //3 Height | |
5796 | exifInfo->height = rect->h; | |
5797 | //3 Orientation | |
5798 | switch (ctl->jpeg.orientation) { | |
5799 | case 90: | |
5800 | exifInfo->orientation = EXIF_ORIENTATION_90; | |
5801 | break; | |
5802 | case 180: | |
5803 | exifInfo->orientation = EXIF_ORIENTATION_180; | |
5804 | break; | |
5805 | case 270: | |
5806 | exifInfo->orientation = EXIF_ORIENTATION_270; | |
5807 | break; | |
5808 | case 0: | |
5809 | default: | |
5810 | exifInfo->orientation = EXIF_ORIENTATION_UP; | |
5811 | break; | |
5812 | } | |
5813 | ||
5814 | //3 Date time | |
be8daa96 | 5815 | struct timeval rawtime; |
54f4971e | 5816 | struct tm *timeinfo; |
be8daa96 MS |
5817 | gettimeofday(&rawtime, NULL); |
5818 | timeinfo = localtime(&rawtime.tv_sec); | |
54f4971e | 5819 | strftime((char *)exifInfo->date_time, 20, "%Y:%m:%d %H:%M:%S", timeinfo); |
be8daa96 MS |
5820 | snprintf((char *)exifInfo->sub_sec, sizeof(exifInfo->sub_sec), "%03lu", |
5821 | (unsigned long)rawtime.tv_usec / 1000UL); | |
54f4971e SK |
5822 | |
5823 | //2 0th IFD Exif Private Tags | |
5824 | //3 Exposure Time | |
5825 | int shutterSpeed = (dm->sensor.exposureTime/1000); | |
5826 | ||
9a77d67e SK |
5827 | // To display exposure time just above 500ms as 1/2sec, not 1 sec. |
5828 | if (shutterSpeed > 500000) | |
5829 | shutterSpeed -= 100000; | |
5830 | ||
54f4971e SK |
5831 | if (shutterSpeed < 0) { |
5832 | shutterSpeed = 100; | |
5833 | } | |
5834 | ||
5835 | exifInfo->exposure_time.num = 1; | |
5836 | // x us -> 1/x s */ | |
5837 | //exifInfo->exposure_time.den = (uint32_t)(1000000 / shutterSpeed); | |
5838 | exifInfo->exposure_time.den = (uint32_t)((double)1000000 / shutterSpeed); | |
5839 | ||
5840 | //3 ISO Speed Rating | |
5841 | exifInfo->iso_speed_rating = dm->aa.isoValue; | |
5842 | ||
5843 | uint32_t av, tv, bv, sv, ev; | |
5844 | av = APEX_FNUM_TO_APERTURE((double)exifInfo->fnumber.num / exifInfo->fnumber.den); | |
5845 | tv = APEX_EXPOSURE_TO_SHUTTER((double)exifInfo->exposure_time.num / exifInfo->exposure_time.den); | |
5846 | sv = APEX_ISO_TO_FILMSENSITIVITY(exifInfo->iso_speed_rating); | |
5847 | bv = av + tv - sv; | |
5848 | ev = av + tv; | |
5849 | //ALOGD("Shutter speed=%d us, iso=%d", shutterSpeed, exifInfo->iso_speed_rating); | |
c06b3290 | 5850 | ALOGV("AV=%d, TV=%d, SV=%d", av, tv, sv); |
54f4971e SK |
5851 | |
5852 | //3 Shutter Speed | |
5853 | exifInfo->shutter_speed.num = tv * EXIF_DEF_APEX_DEN; | |
5854 | exifInfo->shutter_speed.den = EXIF_DEF_APEX_DEN; | |
5855 | //3 Brightness | |
5856 | exifInfo->brightness.num = bv*EXIF_DEF_APEX_DEN; | |
5857 | exifInfo->brightness.den = EXIF_DEF_APEX_DEN; | |
5858 | //3 Exposure Bias | |
5859 | if (ctl->aa.sceneMode== AA_SCENE_MODE_BEACH|| | |
5860 | ctl->aa.sceneMode== AA_SCENE_MODE_SNOW) { | |
5861 | exifInfo->exposure_bias.num = EXIF_DEF_APEX_DEN; | |
5862 | exifInfo->exposure_bias.den = EXIF_DEF_APEX_DEN; | |
5863 | } else { | |
5864 | exifInfo->exposure_bias.num = 0; | |
5865 | exifInfo->exposure_bias.den = 0; | |
5866 | } | |
5867 | //3 Metering Mode | |
5868 | /*switch (m_curCameraInfo->metering) { | |
5869 | case METERING_MODE_CENTER: | |
5870 | exifInfo->metering_mode = EXIF_METERING_CENTER; | |
5871 | break; | |
5872 | case METERING_MODE_MATRIX: | |
5873 | exifInfo->metering_mode = EXIF_METERING_MULTISPOT; | |
5874 | break; | |
5875 | case METERING_MODE_SPOT: | |
5876 | exifInfo->metering_mode = EXIF_METERING_SPOT; | |
5877 | break; | |
5878 | case METERING_MODE_AVERAGE: | |
5879 | default: | |
5880 | exifInfo->metering_mode = EXIF_METERING_AVERAGE; | |
5881 | break; | |
5882 | }*/ | |
5883 | exifInfo->metering_mode = EXIF_METERING_CENTER; | |
5884 | ||
5885 | //3 Flash | |
9257e29e YJ |
5886 | if (m_ctlInfo.flash.m_flashDecisionResult) |
5887 | exifInfo->flash = 1; | |
5888 | else | |
5889 | exifInfo->flash = EXIF_DEF_FLASH; | |
54f4971e SK |
5890 | |
5891 | //3 White Balance | |
48728d49 | 5892 | if (currentEntry->awb_mode_dm == AA_AWBMODE_WB_AUTO) |
54f4971e SK |
5893 | exifInfo->white_balance = EXIF_WB_AUTO; |
5894 | else | |
5895 | exifInfo->white_balance = EXIF_WB_MANUAL; | |
5896 | ||
5897 | //3 Scene Capture Type | |
5898 | switch (ctl->aa.sceneMode) { | |
5899 | case AA_SCENE_MODE_PORTRAIT: | |
5900 | exifInfo->scene_capture_type = EXIF_SCENE_PORTRAIT; | |
5901 | break; | |
5902 | case AA_SCENE_MODE_LANDSCAPE: | |
5903 | exifInfo->scene_capture_type = EXIF_SCENE_LANDSCAPE; | |
5904 | break; | |
5905 | case AA_SCENE_MODE_NIGHT_PORTRAIT: | |
5906 | exifInfo->scene_capture_type = EXIF_SCENE_NIGHT; | |
5907 | break; | |
5908 | default: | |
5909 | exifInfo->scene_capture_type = EXIF_SCENE_STANDARD; | |
5910 | break; | |
5911 | } | |
5912 | ||
5913 | //2 0th IFD GPS Info Tags | |
5914 | if (ctl->jpeg.gpsCoordinates[0] != 0 && ctl->jpeg.gpsCoordinates[1] != 0) { | |
5915 | ||
5916 | if (ctl->jpeg.gpsCoordinates[0] > 0) | |
5917 | strcpy((char *)exifInfo->gps_latitude_ref, "N"); | |
5918 | else | |
5919 | strcpy((char *)exifInfo->gps_latitude_ref, "S"); | |
5920 | ||
5921 | if (ctl->jpeg.gpsCoordinates[1] > 0) | |
5922 | strcpy((char *)exifInfo->gps_longitude_ref, "E"); | |
5923 | else | |
5924 | strcpy((char *)exifInfo->gps_longitude_ref, "W"); | |
5925 | ||
5926 | if (ctl->jpeg.gpsCoordinates[2] > 0) | |
5927 | exifInfo->gps_altitude_ref = 0; | |
5928 | else | |
5929 | exifInfo->gps_altitude_ref = 1; | |
5930 | ||
0066b2cb SK |
5931 | double latitude = fabs(ctl->jpeg.gpsCoordinates[0]); |
5932 | double longitude = fabs(ctl->jpeg.gpsCoordinates[1]); | |
5933 | double altitude = fabs(ctl->jpeg.gpsCoordinates[2]); | |
54f4971e SK |
5934 | |
5935 | exifInfo->gps_latitude[0].num = (uint32_t)latitude; | |
5936 | exifInfo->gps_latitude[0].den = 1; | |
5937 | exifInfo->gps_latitude[1].num = (uint32_t)((latitude - exifInfo->gps_latitude[0].num) * 60); | |
5938 | exifInfo->gps_latitude[1].den = 1; | |
3db6fe61 | 5939 | exifInfo->gps_latitude[2].num = (uint32_t)round((((latitude - exifInfo->gps_latitude[0].num) * 60) |
54f4971e SK |
5940 | - exifInfo->gps_latitude[1].num) * 60); |
5941 | exifInfo->gps_latitude[2].den = 1; | |
5942 | ||
5943 | exifInfo->gps_longitude[0].num = (uint32_t)longitude; | |
5944 | exifInfo->gps_longitude[0].den = 1; | |
5945 | exifInfo->gps_longitude[1].num = (uint32_t)((longitude - exifInfo->gps_longitude[0].num) * 60); | |
5946 | exifInfo->gps_longitude[1].den = 1; | |
3db6fe61 | 5947 | exifInfo->gps_longitude[2].num = (uint32_t)round((((longitude - exifInfo->gps_longitude[0].num) * 60) |
54f4971e SK |
5948 | - exifInfo->gps_longitude[1].num) * 60); |
5949 | exifInfo->gps_longitude[2].den = 1; | |
5950 | ||
3db6fe61 | 5951 | exifInfo->gps_altitude.num = (uint32_t)round(altitude); |
54f4971e SK |
5952 | exifInfo->gps_altitude.den = 1; |
5953 | ||
5954 | struct tm tm_data; | |
5955 | long timestamp; | |
5956 | timestamp = (long)ctl->jpeg.gpsTimestamp; | |
5957 | gmtime_r(×tamp, &tm_data); | |
5958 | exifInfo->gps_timestamp[0].num = tm_data.tm_hour; | |
5959 | exifInfo->gps_timestamp[0].den = 1; | |
5960 | exifInfo->gps_timestamp[1].num = tm_data.tm_min; | |
5961 | exifInfo->gps_timestamp[1].den = 1; | |
5962 | exifInfo->gps_timestamp[2].num = tm_data.tm_sec; | |
5963 | exifInfo->gps_timestamp[2].den = 1; | |
5964 | snprintf((char*)exifInfo->gps_datestamp, sizeof(exifInfo->gps_datestamp), | |
5965 | "%04d:%02d:%02d", tm_data.tm_year + 1900, tm_data.tm_mon + 1, tm_data.tm_mday); | |
5966 | ||
07ed0359 | 5967 | memset(exifInfo->gps_processing_method, 0, 100); |
0066b2cb | 5968 | memcpy(exifInfo->gps_processing_method, currentEntry->gpsProcessingMethod, 32); |
54f4971e SK |
5969 | exifInfo->enableGps = true; |
5970 | } else { | |
5971 | exifInfo->enableGps = false; | |
5972 | } | |
5973 | ||
5974 | //2 1th IFD TIFF Tags | |
5975 | exifInfo->widthThumb = ctl->jpeg.thumbnailSize[0]; | |
5976 | exifInfo->heightThumb = ctl->jpeg.thumbnailSize[1]; | |
5977 | } | |
5978 | ||
// Destructor: trace-only; thread teardown is driven by release().
ExynosCameraHWInterface2::MainThread::~MainThread()
{
    ALOGV("(%s):", __FUNCTION__);
}
5983 | ||
// Ask the main thread loop to exit by posting the release signal.
void ExynosCameraHWInterface2::MainThread::release()
{
    ALOGV("(%s):", __func__);
    SetSignal(SIGNAL_THREAD_RELEASE);
}
5989 | ||
// Destructor: trace-only; thread teardown is driven by release().
ExynosCameraHWInterface2::SensorThread::~SensorThread()
{
    ALOGV("(%s):", __FUNCTION__);
}
5994 | ||
// Ask the sensor thread loop to exit by posting the release signal.
void ExynosCameraHWInterface2::SensorThread::release()
{
    ALOGV("(%s):", __func__);
    SetSignal(SIGNAL_THREAD_RELEASE);
}
6000 | ||
// Destructor: trace-only; thread teardown is driven by release().
ExynosCameraHWInterface2::StreamThread::~StreamThread()
{
    ALOGV("(%s):", __FUNCTION__);
}
6005 | ||
// Copy the supplied stream configuration into this thread's parameter
// block. The caller retains ownership of new_parameters.
void ExynosCameraHWInterface2::StreamThread::setParameter(stream_parameters_t * new_parameters)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    memcpy(&m_parameters, new_parameters, sizeof(stream_parameters_t));
}
6011 | ||
// Ask this stream thread's loop to exit by posting the release signal.
void ExynosCameraHWInterface2::StreamThread::release()
{
    ALOGV("(%s):", __func__);
    SetSignal(SIGNAL_THREAD_RELEASE);
}
6017 | ||
6018 | int ExynosCameraHWInterface2::StreamThread::findBufferIndex(void * bufAddr) | |
6019 | { | |
6020 | int index; | |
6021 | for (index = 0 ; index < m_parameters.numSvcBuffers ; index++) { | |
6022 | if (m_parameters.svcBuffers[index].virt.extP[0] == bufAddr) | |
6023 | return index; | |
6024 | } | |
6025 | return -1; | |
c15a6b00 JS |
6026 | } |
6027 | ||
5506cebf SK |
6028 | int ExynosCameraHWInterface2::StreamThread::findBufferIndex(buffer_handle_t * bufHandle) |
6029 | { | |
6030 | int index; | |
6031 | for (index = 0 ; index < m_parameters.numSvcBuffers ; index++) { | |
6032 | if (m_parameters.svcBufHandle[index] == *bufHandle) | |
6033 | return index; | |
6034 | } | |
6035 | return -1; | |
6036 | } | |
6037 | ||
6038 | status_t ExynosCameraHWInterface2::StreamThread::attachSubStream(int stream_id, int priority) | |
9dd63e1f | 6039 | { |
5506cebf SK |
6040 | ALOGV("(%s): substream_id(%d)", __FUNCTION__, stream_id); |
6041 | int index, vacantIndex; | |
6042 | bool vacancy = false; | |
6043 | ||
6044 | for (index = 0 ; index < NUM_MAX_SUBSTREAM ; index++) { | |
6045 | if (!vacancy && m_attachedSubStreams[index].streamId == -1) { | |
6046 | vacancy = true; | |
6047 | vacantIndex = index; | |
6048 | } else if (m_attachedSubStreams[index].streamId == stream_id) { | |
6049 | return BAD_VALUE; | |
6050 | } | |
6051 | } | |
6052 | if (!vacancy) | |
6053 | return NO_MEMORY; | |
6054 | m_attachedSubStreams[vacantIndex].streamId = stream_id; | |
6055 | m_attachedSubStreams[vacantIndex].priority = priority; | |
6056 | m_numRegisteredStream++; | |
6057 | return NO_ERROR; | |
9dd63e1f SK |
6058 | } |
6059 | ||
5506cebf | 6060 | status_t ExynosCameraHWInterface2::StreamThread::detachSubStream(int stream_id) |
74d78ebe | 6061 | { |
5506cebf SK |
6062 | ALOGV("(%s): substream_id(%d)", __FUNCTION__, stream_id); |
6063 | int index; | |
6064 | bool found = false; | |
6065 | ||
6066 | for (index = 0 ; index < NUM_MAX_SUBSTREAM ; index++) { | |
6067 | if (m_attachedSubStreams[index].streamId == stream_id) { | |
6068 | found = true; | |
6069 | break; | |
6070 | } | |
6071 | } | |
6072 | if (!found) | |
6073 | return BAD_VALUE; | |
6074 | m_attachedSubStreams[index].streamId = -1; | |
6075 | m_attachedSubStreams[index].priority = 0; | |
6076 | m_numRegisteredStream--; | |
6077 | return NO_ERROR; | |
74d78ebe SK |
6078 | } |
6079 | ||
c15a6b00 JS |
// Lazily create an ION client. If the passed-in handle is 0 a new
// client is created; on creation failure 0 is returned so the caller
// can retry later. An already-valid handle is returned unchanged.
int ExynosCameraHWInterface2::createIonClient(ion_client ionClient)
{
    if (ionClient == 0) {
        ionClient = ion_client_create();
        if (ionClient < 0) {
            ALOGE("[%s]src ion client create failed, value = %d\n", __FUNCTION__, ionClient);
            return 0;
        }
    }
    return ionClient;
}
6091 | ||
6092 | int ExynosCameraHWInterface2::deleteIonClient(ion_client ionClient) | |
6093 | { | |
6094 | if (ionClient != 0) { | |
6095 | if (ionClient > 0) { | |
6096 | ion_client_destroy(ionClient); | |
6097 | } | |
6098 | ionClient = 0; | |
6099 | } | |
c15a6b00 JS |
6100 | return ionClient; |
6101 | } | |
6102 | ||
// Convenience overload: allocate with no cached planes (cacheFlag = 0).
int ExynosCameraHWInterface2::allocCameraMemory(ion_client ionClient, ExynosBuffer *buf, int iMemoryNum)
{
    return allocCameraMemory(ionClient, buf, iMemoryNum, 0);
}
6107 | ||
// Allocate up to iMemoryNum planes for buf from ION and map each into
// this process. Plane i is allocated cached when bit i of cacheFlag is
// set; a plane with size 0 terminates the loop early. On any failure
// every plane allocated so far is released via freeCameraMemory().
// Returns 0 on success, -1 on failure.
int ExynosCameraHWInterface2::allocCameraMemory(ion_client ionClient, ExynosBuffer *buf, int iMemoryNum, int cacheFlag)
{
    int ret = 0;
    int i = 0;
    int flag = 0;

    if (ionClient == 0) {
        ALOGE("[%s] ionClient is zero (%d)\n", __FUNCTION__, ionClient);
        return -1;
    }

    for (i = 0 ; i < iMemoryNum ; i++) {
        if (buf->size.extS[i] == 0) {
            // Zero-sized plane marks the end of the plane list.
            break;
        }
        // Per-plane cache attribute selected by the caller's bitmask.
        if (1 << i & cacheFlag)
            flag = ION_FLAG_CACHED | ION_FLAG_CACHED_NEEDS_SYNC;
        else
            flag = 0;
        buf->fd.extFd[i] = ion_alloc(ionClient, \
                buf->size.extS[i], 0, ION_HEAP_SYSTEM_MASK, flag);
        if ((buf->fd.extFd[i] == -1) ||(buf->fd.extFd[i] == 0)) {
            ALOGE("[%s]ion_alloc(%d) failed\n", __FUNCTION__, buf->size.extS[i]);
            buf->fd.extFd[i] = -1;
            // Unwind: release everything allocated so far.
            freeCameraMemory(buf, iMemoryNum);
            return -1;
        }

        buf->virt.extP[i] = (char *)ion_map(buf->fd.extFd[i], \
                buf->size.extS[i], 0);
        if ((buf->virt.extP[i] == (char *)MAP_FAILED) || (buf->virt.extP[i] == NULL)) {
            ALOGE("[%s]src ion map failed(%d)\n", __FUNCTION__, buf->size.extS[i]);
            // Normalize to MAP_FAILED so freeCameraMemory skips unmapping.
            buf->virt.extP[i] = (char *)MAP_FAILED;
            freeCameraMemory(buf, iMemoryNum);
            return -1;
        }
        ALOGV("allocCameraMem : [%d][0x%08x] size(%d) flag(%d)", i, (unsigned int)(buf->virt.extP[i]), buf->size.extS[i], flag);
    }

    return ret;
}
6149 | ||
// Unmap and free every allocated ION plane of buf, then reset each
// plane entry to its "empty" state (fd -1, virt MAP_FAILED, size 0).
// Safe to call on partially allocated buffers.
void ExynosCameraHWInterface2::freeCameraMemory(ExynosBuffer *buf, int iMemoryNum)
{

    int i = 0 ;
    int ret = 0;

    for (i=0;i<iMemoryNum;i++) {
        if (buf->fd.extFd[i] != -1) {
            // Only unmap planes that were successfully mapped.
            if (buf->virt.extP[i] != (char *)MAP_FAILED) {
                ret = ion_unmap(buf->virt.extP[i], buf->size.extS[i]);
                if (ret < 0)
                    ALOGE("ERR(%s)", __FUNCTION__);
            }
            ion_free(buf->fd.extFd[i]);
            ALOGV("freeCameraMemory : [%d][0x%08x] size(%d)", i, (unsigned int)(buf->virt.extP[i]), buf->size.extS[i]);
        }
        // Reset the slot unconditionally so the buffer is reusable.
        buf->fd.extFd[i] = -1;
        buf->virt.extP[i] = (char *)MAP_FAILED;
        buf->size.extS[i] = 0;
    }
}
6171 | ||
13d8c7b4 | 6172 | void ExynosCameraHWInterface2::initCameraMemory(ExynosBuffer *buf, int iMemoryNum) |
c15a6b00 JS |
6173 | { |
6174 | int i =0 ; | |
6175 | for (i=0;i<iMemoryNum;i++) { | |
13d8c7b4 SK |
6176 | buf->virt.extP[i] = (char *)MAP_FAILED; |
6177 | buf->fd.extFd[i] = -1; | |
6178 | buf->size.extS[i] = 0; | |
c15a6b00 JS |
6179 | } |
6180 | } | |
6181 | ||
6182 | ||
13d8c7b4 SK |
6183 | |
6184 | ||
// Per-process HAL bookkeeping. Only one camera2_device may be open at
// a time; g_camera_mutex serializes open/close against the ops below.
static camera2_device_t *g_cam2_device = NULL;
static bool g_camera_vaild = false;  // NOTE(review): historical typo for "valid"; used elsewhere, do not rename in isolation
static Mutex g_camera_mutex;
// Per-camera static info objects, created lazily in HAL2_getCameraInfo().
ExynosCamera2 * g_camera2[2] = { NULL, NULL };
c15a6b00 JS |
6189 | |
// hw_device_t close hook: destroys the C++ HAL instance, frees the
// device struct, and clears the process-wide open-device markers.
// Serialized against open via g_camera_mutex.
static int HAL2_camera_device_close(struct hw_device_t* device)
{
    Mutex::Autolock lock(g_camera_mutex);
    ALOGD("(%s): ENTER", __FUNCTION__);
    if (device) {

        camera2_device_t *cam_device = (camera2_device_t *)device;
        ALOGV("cam_device(0x%08x):", (unsigned int)cam_device);
        ALOGV("g_cam2_device(0x%08x):", (unsigned int)g_cam2_device);
        // Delete the HAL object first, then the containing device struct.
        delete static_cast<ExynosCameraHWInterface2 *>(cam_device->priv);
        free(cam_device);
        g_camera_vaild = false;
        g_cam2_device = NULL;
    }

    ALOGD("(%s): EXIT", __FUNCTION__);
    return 0;
}
6208 | ||
// Recover the C++ HAL object stowed in the device struct's priv field.
static inline ExynosCameraHWInterface2 *obj(const struct camera2_device *dev)
{
    return reinterpret_cast<ExynosCameraHWInterface2 *>(dev->priv);
}
6213 | ||
// ---- camera2_device_ops trampolines ------------------------------------
// Each HAL2_device_* function below recovers the C++ HAL object from
// dev->priv (via obj()) and forwards the call unchanged.

// Hand the framework's request-queue producer ops to the HAL.
static int HAL2_device_set_request_queue_src_ops(const struct camera2_device *dev,
            const camera2_request_queue_src_ops_t *request_src_ops)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->setRequestQueueSrcOps(request_src_ops);
}

// Framework signal: at least one request is waiting in the queue.
static int HAL2_device_notify_request_queue_not_empty(const struct camera2_device *dev)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->notifyRequestQueueNotEmpty();
}

// Hand the framework's output frame-queue ops to the HAL.
static int HAL2_device_set_frame_queue_dst_ops(const struct camera2_device *dev,
            const camera2_frame_queue_dst_ops_t *frame_dst_ops)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->setFrameQueueDstOps(frame_dst_ops);
}

// Number of capture requests currently in flight inside the HAL.
static int HAL2_device_get_in_progress_count(const struct camera2_device *dev)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->getInProgressCount();
}

// Abort all in-flight captures.
static int HAL2_device_flush_captures_in_progress(const struct camera2_device *dev)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->flushCapturesInProgress();
}

// Build a default capture-request metadata buffer for the template.
static int HAL2_device_construct_default_request(const struct camera2_device *dev,
            int request_template, camera_metadata_t **request)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->constructDefaultRequest(request_template, request);
}
6252 | ||
// Allocate an output stream; the HAL fills in the stream id, the
// actual pixel format it chose, the gralloc usage bits and the number
// of buffers it needs.
static int HAL2_device_allocate_stream(
        const struct camera2_device *dev,
        // inputs
        uint32_t width,
        uint32_t height,
        int format,
        const camera2_stream_ops_t *stream_ops,
        // outputs
        uint32_t *stream_id,
        uint32_t *format_actual,
        uint32_t *usage,
        uint32_t *max_buffers)
{
    ALOGV("(%s): ", __FUNCTION__);
    return obj(dev)->allocateStream(width, height, format, stream_ops,
                    stream_id, format_actual, usage, max_buffers);
}
6270 | ||
c15a6b00 JS |
// Register the framework-allocated gralloc buffers for a stream.
static int HAL2_device_register_stream_buffers(const struct camera2_device *dev,
            uint32_t stream_id,
            int num_buffers,
            buffer_handle_t *buffers)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->registerStreamBuffers(stream_id, num_buffers, buffers);
}

// Release a stream. Becomes a no-op once the device has been closed
// (g_camera_vaild cleared) so we never touch a destroyed HAL object.
static int HAL2_device_release_stream(
        const struct camera2_device *dev,
            uint32_t stream_id)
{
    ALOGV("DEBUG(%s)(id: %d):", __FUNCTION__, stream_id);
    if (!g_camera_vaild)
        return 0;
    return obj(dev)->releaseStream(stream_id);
}
6289 | ||
// Allocate a reprocess (input) stream with explicit dimensions/format.
static int HAL2_device_allocate_reprocess_stream(
           const struct camera2_device *dev,
            uint32_t width,
            uint32_t height,
            uint32_t format,
            const camera2_stream_in_ops_t *reprocess_stream_ops,
            // outputs
            uint32_t *stream_id,
            uint32_t *consumer_usage,
            uint32_t *max_buffers)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->allocateReprocessStream(width, height, format, reprocess_stream_ops,
            stream_id, consumer_usage, max_buffers);
}

// Allocate a reprocess stream that shares buffers with an existing
// output stream.
static int HAL2_device_allocate_reprocess_stream_from_stream(
           const struct camera2_device *dev,
            uint32_t output_stream_id,
            const camera2_stream_in_ops_t *reprocess_stream_ops,
            // outputs
            uint32_t *stream_id)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->allocateReprocessStreamFromStream(output_stream_id,
            reprocess_stream_ops, stream_id);
}
6317 | ||
c15a6b00 JS |
// Release a previously allocated reprocess stream.
static int HAL2_device_release_reprocess_stream(
        const struct camera2_device *dev,
            uint32_t stream_id)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->releaseReprocessStream(stream_id);
}

// Asynchronous trigger (AF / precapture metering, etc.).
// No-op once the device has been closed (g_camera_vaild cleared).
static int HAL2_device_trigger_action(const struct camera2_device *dev,
           uint32_t trigger_id,
            int ext1,
            int ext2)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    if (!g_camera_vaild)
        return 0;
    return obj(dev)->triggerAction(trigger_id, ext1, ext2);
}

// Install the framework's notification callback and cookie.
static int HAL2_device_set_notify_callback(const struct camera2_device *dev,
            camera2_notify_callback notify_cb,
            void *user)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->setNotifyCallback(notify_cb, user);
}

// Query vendor-specific metadata tag operations.
static int HAL2_device_get_metadata_vendor_tag_ops(const struct camera2_device*dev,
            vendor_tag_query_ops_t **ops)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->getMetadataVendorTagOps(ops);
}

// Dump HAL state to fd (dumpsys support).
static int HAL2_device_dump(const struct camera2_device *dev, int fd)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->dump(fd);
}
6357 | ||
6358 | ||
6359 | ||
6360 | ||
6361 | ||
6362 | static int HAL2_getNumberOfCameras() | |
6363 | { | |
9dd63e1f SK |
6364 | ALOGV("(%s): returning 2", __FUNCTION__); |
6365 | return 2; | |
c15a6b00 JS |
6366 | } |
6367 | ||
6368 | ||
c15a6b00 JS |
6369 | static int HAL2_getCameraInfo(int cameraId, struct camera_info *info) |
6370 | { | |
ad37861e | 6371 | ALOGV("DEBUG(%s): cameraID: %d", __FUNCTION__, cameraId); |
9dd63e1f | 6372 | static camera_metadata_t * mCameraInfo[2] = {NULL, NULL}; |
ad37861e | 6373 | |
c15a6b00 | 6374 | status_t res; |
13d8c7b4 | 6375 | |
daa1fcd6 | 6376 | if (cameraId == 0) { |
9dd63e1f | 6377 | info->facing = CAMERA_FACING_BACK; |
daa1fcd6 SK |
6378 | if (!g_camera2[0]) |
6379 | g_camera2[0] = new ExynosCamera2(0); | |
6380 | } | |
6381 | else if (cameraId == 1) { | |
9dd63e1f | 6382 | info->facing = CAMERA_FACING_FRONT; |
daa1fcd6 SK |
6383 | if (!g_camera2[1]) |
6384 | g_camera2[1] = new ExynosCamera2(1); | |
6385 | } | |
6386 | else | |
6387 | return BAD_VALUE; | |
6388 | ||
c15a6b00 JS |
6389 | info->orientation = 0; |
6390 | info->device_version = HARDWARE_DEVICE_API_VERSION(2, 0); | |
9dd63e1f | 6391 | if (mCameraInfo[cameraId] == NULL) { |
daa1fcd6 | 6392 | res = g_camera2[cameraId]->constructStaticInfo(&(mCameraInfo[cameraId]), cameraId, true); |
c15a6b00 JS |
6393 | if (res != OK) { |
6394 | ALOGE("%s: Unable to allocate static info: %s (%d)", | |
13d8c7b4 | 6395 | __FUNCTION__, strerror(-res), res); |
c15a6b00 JS |
6396 | return res; |
6397 | } | |
daa1fcd6 | 6398 | res = g_camera2[cameraId]->constructStaticInfo(&(mCameraInfo[cameraId]), cameraId, false); |
c15a6b00 JS |
6399 | if (res != OK) { |
6400 | ALOGE("%s: Unable to fill in static info: %s (%d)", | |
13d8c7b4 | 6401 | __FUNCTION__, strerror(-res), res); |
c15a6b00 JS |
6402 | return res; |
6403 | } | |
6404 | } | |
9dd63e1f | 6405 | info->static_camera_characteristics = mCameraInfo[cameraId]; |
13d8c7b4 | 6406 | return NO_ERROR; |
c15a6b00 JS |
6407 | } |
6408 | ||
// Expands to a GNU designated-style initializer binding each
// camera2_device_ops field to its HAL2_device_* wrapper above,
// e.g. SET_METHOD(dump) -> "dump : HAL2_device_dump".
#define SET_METHOD(m) m : HAL2_device_##m

// Dispatch table handed to the framework through camera2_device_t::ops;
// the field order must match camera2_device_ops_t in camera2.h.
static camera2_device_ops_t camera2_device_ops = {
        SET_METHOD(set_request_queue_src_ops),
        SET_METHOD(notify_request_queue_not_empty),
        SET_METHOD(set_frame_queue_dst_ops),
        SET_METHOD(get_in_progress_count),
        SET_METHOD(flush_captures_in_progress),
        SET_METHOD(construct_default_request),
        SET_METHOD(allocate_stream),
        SET_METHOD(register_stream_buffers),
        SET_METHOD(release_stream),
        SET_METHOD(allocate_reprocess_stream),
        SET_METHOD(allocate_reprocess_stream_from_stream),
        SET_METHOD(release_reprocess_stream),
        SET_METHOD(trigger_action),
        SET_METHOD(set_notify_callback),
        SET_METHOD(get_metadata_vendor_tag_ops),
        SET_METHOD(dump),
        get_instance_metadata : NULL    // per-instance metadata not supported by this HAL
};

#undef SET_METHOD
6432 | ||
6433 | ||
6434 | static int HAL2_camera_device_open(const struct hw_module_t* module, | |
6435 | const char *id, | |
6436 | struct hw_device_t** device) | |
6437 | { | |
c15a6b00 | 6438 | int cameraId = atoi(id); |
6044e509 | 6439 | int openInvalid = 0; |
9dd63e1f | 6440 | |
e43660b0 AR |
6441 | Mutex::Autolock lock(g_camera_mutex); |
6442 | if (g_camera_vaild) { | |
6443 | ALOGE("ERR(%s): Can't open, other camera is in use", __FUNCTION__); | |
a6302fad | 6444 | return -EUSERS; |
e43660b0 | 6445 | } |
b5237e6b | 6446 | g_camera_vaild = false; |
0eb27a9d | 6447 | ALOGD("\n\n>>> I'm Samsung's CameraHAL_2(ID:%d) <<<\n\n", cameraId); |
c15a6b00 | 6448 | if (cameraId < 0 || cameraId >= HAL2_getNumberOfCameras()) { |
13d8c7b4 | 6449 | ALOGE("ERR(%s):Invalid camera ID %s", __FUNCTION__, id); |
c15a6b00 JS |
6450 | return -EINVAL; |
6451 | } | |
6452 | ||
0eb27a9d | 6453 | ALOGD("g_cam2_device : 0x%08x", (unsigned int)g_cam2_device); |
c15a6b00 JS |
6454 | if (g_cam2_device) { |
6455 | if (obj(g_cam2_device)->getCameraId() == cameraId) { | |
0eb27a9d | 6456 | ALOGD("DEBUG(%s):returning existing camera ID %s", __FUNCTION__, id); |
c15a6b00 JS |
6457 | goto done; |
6458 | } else { | |
0eb27a9d | 6459 | ALOGD("(%s): START waiting for cam device free", __FUNCTION__); |
9dd63e1f | 6460 | while (g_cam2_device) |
041f38de | 6461 | usleep(SIG_WAITING_TICK); |
0eb27a9d | 6462 | ALOGD("(%s): END waiting for cam device free", __FUNCTION__); |
c15a6b00 JS |
6463 | } |
6464 | } | |
6465 | ||
6466 | g_cam2_device = (camera2_device_t *)malloc(sizeof(camera2_device_t)); | |
ad37861e | 6467 | ALOGV("g_cam2_device : 0x%08x", (unsigned int)g_cam2_device); |
9dd63e1f | 6468 | |
c15a6b00 JS |
6469 | if (!g_cam2_device) |
6470 | return -ENOMEM; | |
6471 | ||
6472 | g_cam2_device->common.tag = HARDWARE_DEVICE_TAG; | |
6473 | g_cam2_device->common.version = CAMERA_DEVICE_API_VERSION_2_0; | |
6474 | g_cam2_device->common.module = const_cast<hw_module_t *>(module); | |
6475 | g_cam2_device->common.close = HAL2_camera_device_close; | |
6476 | ||
6477 | g_cam2_device->ops = &camera2_device_ops; | |
6478 | ||
13d8c7b4 | 6479 | ALOGV("DEBUG(%s):open camera2 %s", __FUNCTION__, id); |
c15a6b00 | 6480 | |
6044e509 SK |
6481 | g_cam2_device->priv = new ExynosCameraHWInterface2(cameraId, g_cam2_device, g_camera2[cameraId], &openInvalid); |
6482 | if (!openInvalid) { | |
5506cebf | 6483 | ALOGE("DEBUG(%s): ExynosCameraHWInterface2 creation failed", __FUNCTION__); |
6f19b6cf | 6484 | return -ENODEV; |
6044e509 | 6485 | } |
c15a6b00 JS |
6486 | done: |
6487 | *device = (hw_device_t *)g_cam2_device; | |
13d8c7b4 | 6488 | ALOGV("DEBUG(%s):opened camera2 %s (%p)", __FUNCTION__, id, *device); |
b5237e6b | 6489 | g_camera_vaild = true; |
c15a6b00 JS |
6490 | |
6491 | return 0; | |
6492 | } | |
6493 | ||
6494 | ||
// Module method table: the framework opens camera devices through open().
static hw_module_methods_t camera_module_methods = {
            open : HAL2_camera_device_open
};
6498 | ||
extern "C" {
    // Module descriptor exported under the well-known symbol
    // HAL_MODULE_INFO_SYM; the HAL loader dlopen()s this library and
    // looks it up by name, so it must have C linkage.
    struct camera_module HAL_MODULE_INFO_SYM = {
      common : {
          tag                : HARDWARE_MODULE_TAG,
          module_api_version : CAMERA_MODULE_API_VERSION_2_0,
          hal_api_version    : HARDWARE_HAL_API_VERSION,
          id                 : CAMERA_HARDWARE_MODULE_ID,
          name               : "Exynos Camera HAL2",
          author             : "Samsung Corporation",
          methods            : &camera_module_methods,
          dso                : NULL,
          reserved           : {0},
      },
      get_number_of_cameras : HAL2_getNumberOfCameras,
      get_camera_info       : HAL2_getCameraInfo,
      // Optional module-level hooks not implemented by this HAL.
      set_callbacks         : NULL,
      get_vendor_tag_ops    : NULL,
      open_legacy           : NULL,
      reserved              : {0}
    };
}
6520 | ||
6521 | }; // namespace android |