Commit | Line | Data |
---|---|---|
c15a6b00 JS |
1 | /* |
2 | ** | |
3 | ** Copyright 2008, The Android Open Source Project | |
4 | ** Copyright 2012, Samsung Electronics Co. LTD | |
5 | ** | |
6 | ** Licensed under the Apache License, Version 2.0 (the "License"); | |
7 | ** you may not use this file except in compliance with the License. | |
8 | ** You may obtain a copy of the License at | |
9 | ** | |
10 | ** http://www.apache.org/licenses/LICENSE-2.0 | |
11 | ** | |
12 | ** Unless required by applicable law or agreed to in writing, software | |
13 | ** distributed under the License is distributed on an "AS IS" BASIS, | |
14 | ** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. | |
15 | ** See the License for the specific language governing permissions and | |
16 | ** limitations under the License. | |
17 | */ | |
18 | ||
19 | /*! | |
20 | * \file ExynosCameraHWInterface2.cpp | |
21 | * \brief source file for Android Camera API 2.0 HAL | |
22 | * \author Sungjoong Kang(sj3.kang@samsung.com) | |
13d8c7b4 | 23 | * \date 2012/07/10 |
c15a6b00 JS |
24 | * |
25 | * <b>Revision History: </b> | |
26 | * - 2012/05/31 : Sungjoong Kang(sj3.kang@samsung.com) \n | |
27 | * Initial Release | |
13d8c7b4 SK |
28 | * |
29 | * - 2012/07/10 : Sungjoong Kang(sj3.kang@samsung.com) \n | |
30 | * 2nd Release | |
31 | * | |
c15a6b00 JS |
32 | */ |
33 | ||
34 | //#define LOG_NDEBUG 0 | |
9dd63e1f | 35 | #define LOG_TAG "ExynosCameraHAL2" |
c15a6b00 JS |
36 | #include <utils/Log.h> |
37 | ||
38 | #include "ExynosCameraHWInterface2.h" | |
39 | #include "exynos_format.h" | |
40 | ||
c15a6b00 JS |
41 | namespace android { |
42 | ||
9dd63e1f SK |
43 | void m_savePostView(const char *fname, uint8_t *buf, uint32_t size) |
44 | { | |
45 | int nw; | |
46 | int cnt = 0; | |
47 | uint32_t written = 0; | |
48 | ||
ad37861e | 49 | ALOGV("opening file [%s], address[%x], size(%d)", fname, (unsigned int)buf, size); |
9dd63e1f SK |
50 | int fd = open(fname, O_RDWR | O_CREAT, 0644); |
51 | if (fd < 0) { | |
52 | ALOGE("failed to create file [%s]: %s", fname, strerror(errno)); | |
53 | return; | |
54 | } | |
55 | ||
ad37861e | 56 | ALOGV("writing %d bytes to file [%s]", size, fname); |
9dd63e1f SK |
57 | while (written < size) { |
58 | nw = ::write(fd, buf + written, size - written); | |
59 | if (nw < 0) { | |
60 | ALOGE("failed to write to file %d [%s]: %s",written,fname, strerror(errno)); | |
61 | break; | |
62 | } | |
63 | written += nw; | |
64 | cnt++; | |
65 | } | |
ad37861e | 66 | ALOGV("done writing %d bytes to file [%s] in %d passes",size, fname, cnt); |
9dd63e1f SK |
67 | ::close(fd); |
68 | } | |
69 | ||
c15a6b00 JS |
70 | int get_pixel_depth(uint32_t fmt) |
71 | { | |
72 | int depth = 0; | |
73 | ||
74 | switch (fmt) { | |
75 | case V4L2_PIX_FMT_JPEG: | |
76 | depth = 8; | |
77 | break; | |
78 | ||
79 | case V4L2_PIX_FMT_NV12: | |
80 | case V4L2_PIX_FMT_NV21: | |
81 | case V4L2_PIX_FMT_YUV420: | |
82 | case V4L2_PIX_FMT_YVU420M: | |
83 | case V4L2_PIX_FMT_NV12M: | |
84 | case V4L2_PIX_FMT_NV12MT: | |
85 | depth = 12; | |
86 | break; | |
87 | ||
88 | case V4L2_PIX_FMT_RGB565: | |
89 | case V4L2_PIX_FMT_YUYV: | |
90 | case V4L2_PIX_FMT_YVYU: | |
91 | case V4L2_PIX_FMT_UYVY: | |
92 | case V4L2_PIX_FMT_VYUY: | |
93 | case V4L2_PIX_FMT_NV16: | |
94 | case V4L2_PIX_FMT_NV61: | |
95 | case V4L2_PIX_FMT_YUV422P: | |
96 | case V4L2_PIX_FMT_SBGGR10: | |
97 | case V4L2_PIX_FMT_SBGGR12: | |
98 | case V4L2_PIX_FMT_SBGGR16: | |
99 | depth = 16; | |
100 | break; | |
101 | ||
102 | case V4L2_PIX_FMT_RGB32: | |
103 | depth = 32; | |
104 | break; | |
105 | default: | |
106 | ALOGE("Get depth failed(format : %d)", fmt); | |
107 | break; | |
108 | } | |
109 | ||
110 | return depth; | |
13d8c7b4 | 111 | } |
c15a6b00 JS |
112 | |
113 | int cam_int_s_fmt(node_info_t *node) | |
114 | { | |
115 | struct v4l2_format v4l2_fmt; | |
116 | unsigned int framesize; | |
117 | int ret; | |
118 | ||
119 | memset(&v4l2_fmt, 0, sizeof(struct v4l2_format)); | |
120 | ||
121 | v4l2_fmt.type = node->type; | |
122 | framesize = (node->width * node->height * get_pixel_depth(node->format)) / 8; | |
123 | ||
124 | if (node->planes >= 1) { | |
125 | v4l2_fmt.fmt.pix_mp.width = node->width; | |
126 | v4l2_fmt.fmt.pix_mp.height = node->height; | |
127 | v4l2_fmt.fmt.pix_mp.pixelformat = node->format; | |
128 | v4l2_fmt.fmt.pix_mp.field = V4L2_FIELD_ANY; | |
129 | } else { | |
13d8c7b4 | 130 | ALOGE("%s:S_FMT, Out of bound : Number of element plane",__FUNCTION__); |
c15a6b00 JS |
131 | } |
132 | ||
133 | /* Set up for capture */ | |
134 | ret = exynos_v4l2_s_fmt(node->fd, &v4l2_fmt); | |
135 | ||
136 | if (ret < 0) | |
13d8c7b4 | 137 | ALOGE("%s: exynos_v4l2_s_fmt fail (%d)",__FUNCTION__, ret); |
c15a6b00 | 138 | |
be494d19 | 139 | |
c15a6b00 JS |
140 | return ret; |
141 | } | |
142 | ||
143 | int cam_int_reqbufs(node_info_t *node) | |
144 | { | |
145 | struct v4l2_requestbuffers req; | |
146 | int ret; | |
147 | ||
148 | req.count = node->buffers; | |
149 | req.type = node->type; | |
150 | req.memory = node->memory; | |
151 | ||
152 | ret = exynos_v4l2_reqbufs(node->fd, &req); | |
153 | ||
154 | if (ret < 0) | |
13d8c7b4 | 155 | ALOGE("%s: VIDIOC_REQBUFS (fd:%d) failed (%d)",__FUNCTION__,node->fd, ret); |
c15a6b00 JS |
156 | |
157 | return req.count; | |
158 | } | |
159 | ||
160 | int cam_int_qbuf(node_info_t *node, int index) | |
161 | { | |
162 | struct v4l2_buffer v4l2_buf; | |
163 | struct v4l2_plane planes[VIDEO_MAX_PLANES]; | |
164 | int i; | |
165 | int ret = 0; | |
166 | ||
167 | v4l2_buf.m.planes = planes; | |
168 | v4l2_buf.type = node->type; | |
169 | v4l2_buf.memory = node->memory; | |
170 | v4l2_buf.index = index; | |
171 | v4l2_buf.length = node->planes; | |
172 | ||
173 | for(i = 0; i < node->planes; i++){ | |
13d8c7b4 SK |
174 | v4l2_buf.m.planes[i].m.fd = (int)(node->buffer[index].fd.extFd[i]); |
175 | v4l2_buf.m.planes[i].length = (unsigned long)(node->buffer[index].size.extS[i]); | |
c15a6b00 JS |
176 | } |
177 | ||
178 | ret = exynos_v4l2_qbuf(node->fd, &v4l2_buf); | |
179 | ||
180 | if (ret < 0) | |
13d8c7b4 | 181 | ALOGE("%s: cam_int_qbuf failed (index:%d)(ret:%d)",__FUNCTION__, index, ret); |
c15a6b00 JS |
182 | |
183 | return ret; | |
184 | } | |
185 | ||
186 | int cam_int_streamon(node_info_t *node) | |
187 | { | |
188 | enum v4l2_buf_type type = node->type; | |
189 | int ret; | |
190 | ||
be494d19 | 191 | |
c15a6b00 JS |
192 | ret = exynos_v4l2_streamon(node->fd, type); |
193 | ||
194 | if (ret < 0) | |
ad37861e | 195 | ALOGE("%s: VIDIOC_STREAMON failed [%d] (%d)",__FUNCTION__, node->fd,ret); |
c15a6b00 JS |
196 | |
197 | ALOGV("On streaming I/O... ... fd(%d)", node->fd); | |
198 | ||
199 | return ret; | |
200 | } | |
201 | ||
13d8c7b4 SK |
/*
 * Stop streaming I/O on a capture node via VIDIOC_STREAMOFF.
 *
 * NOTE(review): the buffer type is hard-coded to CAPTURE_MPLANE instead of
 * using node->type — presumably intentional since output nodes go through
 * isp_int_streamoff(), but confirm before reusing this for other node kinds.
 */
int cam_int_streamoff(node_info_t *node)
{
    enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
    int ret;

    ALOGV("Off streaming I/O... fd(%d)", node->fd);
    ret = exynos_v4l2_streamoff(node->fd, type);

    if (ret < 0)
        ALOGE("%s: VIDIOC_STREAMOFF failed (%d)",__FUNCTION__, ret);

    return ret;
}
216 | ||
9dd63e1f SK |
217 | int isp_int_streamoff(node_info_t *node) |
218 | { | |
ad37861e SK |
219 | enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; |
220 | int ret; | |
9dd63e1f | 221 | |
ad37861e SK |
222 | ALOGV("Off streaming I/O... fd(%d)", node->fd); |
223 | ret = exynos_v4l2_streamoff(node->fd, type); | |
9dd63e1f SK |
224 | |
225 | if (ret < 0) | |
226 | ALOGE("%s: VIDIOC_STREAMOFF failed (%d)",__FUNCTION__, ret); | |
227 | ||
ad37861e | 228 | return ret; |
9dd63e1f SK |
229 | } |
230 | ||
c15a6b00 JS |
231 | int cam_int_dqbuf(node_info_t *node) |
232 | { | |
233 | struct v4l2_buffer v4l2_buf; | |
234 | struct v4l2_plane planes[VIDEO_MAX_PLANES]; | |
235 | int ret; | |
236 | ||
237 | v4l2_buf.type = node->type; | |
238 | v4l2_buf.memory = node->memory; | |
239 | v4l2_buf.m.planes = planes; | |
240 | v4l2_buf.length = node->planes; | |
241 | ||
242 | ret = exynos_v4l2_dqbuf(node->fd, &v4l2_buf); | |
243 | if (ret < 0) | |
13d8c7b4 | 244 | ALOGE("%s: VIDIOC_DQBUF failed (%d)",__FUNCTION__, ret); |
c15a6b00 JS |
245 | |
246 | return v4l2_buf.index; | |
247 | } | |
248 | ||
feb7df4c SK |
249 | int cam_int_dqbuf(node_info_t *node, int num_plane) |
250 | { | |
251 | struct v4l2_buffer v4l2_buf; | |
252 | struct v4l2_plane planes[VIDEO_MAX_PLANES]; | |
253 | int ret; | |
254 | ||
255 | v4l2_buf.type = node->type; | |
256 | v4l2_buf.memory = node->memory; | |
257 | v4l2_buf.m.planes = planes; | |
258 | v4l2_buf.length = num_plane; | |
259 | ||
260 | ret = exynos_v4l2_dqbuf(node->fd, &v4l2_buf); | |
261 | if (ret < 0) | |
262 | ALOGE("%s: VIDIOC_DQBUF failed (%d)",__FUNCTION__, ret); | |
263 | ||
264 | return v4l2_buf.index; | |
265 | } | |
266 | ||
c15a6b00 JS |
267 | int cam_int_s_input(node_info_t *node, int index) |
268 | { | |
269 | int ret; | |
13d8c7b4 | 270 | |
c15a6b00 JS |
271 | ret = exynos_v4l2_s_input(node->fd, index); |
272 | if (ret < 0) | |
13d8c7b4 | 273 | ALOGE("%s: VIDIOC_S_INPUT failed (%d)",__FUNCTION__, ret); |
c15a6b00 JS |
274 | |
275 | return ret; | |
276 | } | |
277 | ||
278 | ||
279 | gralloc_module_t const* ExynosCameraHWInterface2::m_grallocHal; | |
280 | ||
/*
 * Construct the request manager bound to the HAL's main signal-driven
 * thread. All "last applied 3A mode" caches start at 0 (== "nothing
 * programmed yet") and the completed-frame tracker at -1 (== "no frame
 * completed yet").
 */
RequestManager::RequestManager(SignalDrivenThread* main_thread):
    m_lastAeMode(0),
    m_lastAaMode(0),
    m_lastAwbMode(0),
    m_vdisBubbleEn(false),
    m_lastAeComp(0),
    m_lastCompletedFrameCnt(-1)
{
    // Owned; deleted in the destructor.
    m_metadataConverter = new MetadataConverter;
    m_mainThread = main_thread;
    ResetEntry();
    m_sensorPipelineSkipCnt = 0;
    return;
}
295 | ||
296 | RequestManager::~RequestManager() | |
297 | { | |
15fd8231 SK |
298 | ALOGV("%s", __FUNCTION__); |
299 | if (m_metadataConverter != NULL) { | |
300 | delete m_metadataConverter; | |
301 | m_metadataConverter = NULL; | |
302 | } | |
303 | ||
52f54308 | 304 | releaseSensorQ(); |
c15a6b00 JS |
305 | return; |
306 | } | |
307 | ||
2adfa429 JS |
/*
 * Reset the circular request buffer to its pristine state: every entry is
 * zeroed and marked unused (frameCount == -1), and all indices/counters are
 * rewound. Takes the request mutex for the whole wipe.
 */
void RequestManager::ResetEntry()
{
    Mutex::Autolock lock(m_requestMutex);
    for (int i=0 ; i<NUM_MAX_REQUEST_MGR_ENTRY; i++) {
        memset(&(entries[i]), 0x00, sizeof(request_manager_entry_t));
        entries[i].internal_shot.shot.ctl.request.frameCount = -1;   // -1 == free slot
    }
    m_numOfEntries = 0;
    m_entryInsertionIndex = -1;
    m_entryProcessingIndex = -1;
    m_entryFrameOutputIndex = -1;
}
320 | ||
c15a6b00 JS |
/*
 * Number of requests currently held in the circular buffer.
 * NOTE(review): reads m_numOfEntries without taking m_requestMutex — fine
 * for an advisory snapshot, but confirm callers tolerate a stale value.
 */
int RequestManager::GetNumEntries()
{
    return m_numOfEntries;
}
325 | ||
9dd63e1f SK |
/*
 * Store the sensor crop X offset used as a default for later requests.
 */
void RequestManager::SetDefaultParameters(int cropX)
{
    m_cropX = cropX;
}
330 | ||
c15a6b00 JS |
331 | bool RequestManager::IsRequestQueueFull() |
332 | { | |
333 | Mutex::Autolock lock(m_requestMutex); | |
334 | if (m_numOfEntries>=NUM_MAX_REQUEST_MGR_ENTRY) | |
335 | return true; | |
336 | else | |
337 | return false; | |
338 | } | |
339 | ||
/*
 * Insert a new framework capture request into the next free slot of the
 * circular entry buffer. Converts the metadata to the internal shot
 * representation and pre-computes how many stream outputs (SCP preview /
 * SCC capture) must complete before the entry is considered done.
 */
void RequestManager::RegisterRequest(camera_metadata_t * new_request)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);

    Mutex::Autolock lock(m_requestMutex);

    request_manager_entry * newEntry = NULL;
    int newInsertionIndex = GetNextIndex(m_entryInsertionIndex);
    ALOGV("DEBUG(%s): got lock, new insertIndex(%d), cnt before reg(%d)", __FUNCTION__,newInsertionIndex,m_numOfEntries );

    newEntry = &(entries[newInsertionIndex]);

    if (newEntry->status!=EMPTY) {
        // The ring has wrapped onto a slot that is still in flight; drop the
        // request rather than clobber the live entry.
        ALOGV("DEBUG(%s): Circular buffer abnormal ", __FUNCTION__);
        return;
    }
    newEntry->status = REGISTERED;
    newEntry->original_request = new_request;
    memset(&(newEntry->internal_shot), 0, sizeof(struct camera2_shot_ext));
    m_metadataConverter->ToInternalShot(new_request, &(newEntry->internal_shot));
    // Each requested output path counts as one pending completion that
    // NotifyStreamOutput() will later decrement.
    newEntry->output_stream_count = 0;
    if (newEntry->internal_shot.shot.ctl.request.outputStreams[0] & MASK_OUTPUT_SCP)
        newEntry->output_stream_count++;

    if (newEntry->internal_shot.shot.ctl.request.outputStreams[0] & MASK_OUTPUT_SCC)
        newEntry->output_stream_count++;

    m_numOfEntries++;
    m_entryInsertionIndex = newInsertionIndex;

    ALOGV("## RegisterReq DONE num(%d), insert(%d), processing(%d), frame(%d), (frameCnt(%d))",
    m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex, newEntry->internal_shot.shot.ctl.request.frameCount);
}
375 | ||
376 | void RequestManager::DeregisterRequest(camera_metadata_t ** deregistered_request) | |
377 | { | |
13d8c7b4 | 378 | ALOGV("DEBUG(%s):", __FUNCTION__); |
ad37861e SK |
379 | int frame_index; |
380 | request_manager_entry * currentEntry; | |
c15a6b00 | 381 | |
ad37861e | 382 | Mutex::Autolock lock(m_requestMutex); |
13d8c7b4 | 383 | |
f9a06609 | 384 | frame_index = GetCompletedIndex(); |
ad37861e | 385 | currentEntry = &(entries[frame_index]); |
f9a06609 | 386 | if (currentEntry->status != COMPLETED) { |
5c88d1f2 C |
387 | CAM_LOGD("DBG(%s): Circular buffer abnormal. processing(%d), frame(%d), status(%d) ", __FUNCTION__, |
388 | m_entryProcessingIndex, frame_index,(int)(currentEntry->status)); | |
13d8c7b4 | 389 | return; |
c15a6b00 | 390 | } |
13d8c7b4 SK |
391 | if (deregistered_request) *deregistered_request = currentEntry->original_request; |
392 | ||
041f38de SK |
393 | m_lastCompletedFrameCnt = currentEntry->internal_shot.shot.ctl.request.frameCount; |
394 | ||
c15a6b00 JS |
395 | currentEntry->status = EMPTY; |
396 | currentEntry->original_request = NULL; | |
be494d19 SK |
397 | memset(&(currentEntry->internal_shot), 0, sizeof(struct camera2_shot_ext)); |
398 | currentEntry->internal_shot.shot.ctl.request.frameCount = -1; | |
c15a6b00 JS |
399 | currentEntry->output_stream_count = 0; |
400 | m_numOfEntries--; | |
401 | ALOGV("## DeRegistReq DONE num(%d), insert(%d), processing(%d), frame(%d)", | |
402 | m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex); | |
13d8c7b4 | 403 | |
041f38de | 404 | CheckCompleted(GetNextIndex(frame_index)); |
c15a6b00 | 405 | return; |
c15a6b00 JS |
406 | } |
407 | ||
13d8c7b4 | 408 | bool RequestManager::PrepareFrame(size_t* num_entries, size_t* frame_size, |
0f26b20f | 409 | camera_metadata_t ** prepared_frame, int afState) |
c15a6b00 | 410 | { |
13d8c7b4 | 411 | ALOGV("DEBUG(%s):", __FUNCTION__); |
c15a6b00 JS |
412 | Mutex::Autolock lock(m_requestMutex); |
413 | status_t res = NO_ERROR; | |
f9a06609 | 414 | int tempFrameOutputIndex = GetCompletedIndex(); |
13d8c7b4 SK |
415 | request_manager_entry * currentEntry = &(entries[tempFrameOutputIndex]); |
416 | ALOGV("DEBUG(%s): processing(%d), frameOut(%d), insert(%d) recentlycompleted(%d)", __FUNCTION__, | |
417 | m_entryProcessingIndex, m_entryFrameOutputIndex, m_entryInsertionIndex, m_completedIndex); | |
418 | ||
f9a06609 | 419 | if (currentEntry->status != COMPLETED) { |
ad37861e | 420 | ALOGV("DBG(%s): Circular buffer abnormal status(%d)", __FUNCTION__, (int)(currentEntry->status)); |
13d8c7b4 SK |
421 | |
422 | return false; | |
423 | } | |
424 | m_entryFrameOutputIndex = tempFrameOutputIndex; | |
a8be0011 | 425 | m_tempFrameMetadata = place_camera_metadata(m_tempFrameMetadataBuf, 2000, 25, 500); //estimated |
0f26b20f | 426 | add_camera_metadata_entry(m_tempFrameMetadata, ANDROID_CONTROL_AF_STATE, &afState, 1); |
13d8c7b4 | 427 | res = m_metadataConverter->ToDynamicMetadata(&(currentEntry->internal_shot), |
c15a6b00 JS |
428 | m_tempFrameMetadata); |
429 | if (res!=NO_ERROR) { | |
13d8c7b4 SK |
430 | ALOGE("ERROR(%s): ToDynamicMetadata (%d) ", __FUNCTION__, res); |
431 | return false; | |
c15a6b00 JS |
432 | } |
433 | *num_entries = get_camera_metadata_entry_count(m_tempFrameMetadata); | |
434 | *frame_size = get_camera_metadata_size(m_tempFrameMetadata); | |
435 | *prepared_frame = m_tempFrameMetadata; | |
5506cebf SK |
436 | ALOGV("## PrepareFrame DONE: frameOut(%d) frameCnt-req(%d) timestamp(%lld)", m_entryFrameOutputIndex, |
437 | currentEntry->internal_shot.shot.ctl.request.frameCount, currentEntry->internal_shot.shot.dm.sensor.timeStamp); | |
9dd63e1f | 438 | // Dump(); |
13d8c7b4 | 439 | return true; |
c15a6b00 JS |
440 | } |
441 | ||
0f26b20f | 442 | int RequestManager::MarkProcessingRequest(ExynosBuffer* buf, int *afMode) |
c15a6b00 | 443 | { |
13d8c7b4 | 444 | struct camera2_shot_ext * shot_ext; |
b56dcc00 | 445 | struct camera2_shot_ext * request_shot; |
13d8c7b4 | 446 | int targetStreamIndex = 0; |
ad37861e | 447 | request_manager_entry * newEntry = NULL; |
0f26b20f | 448 | static int count = 0; |
13d8c7b4 | 449 | |
52f54308 | 450 | Mutex::Autolock lock(m_requestMutex); |
13d8c7b4 | 451 | if (m_numOfEntries == 0) { |
4aa4d739 | 452 | CAM_LOGD("DEBUG(%s): Request Manager Empty ", __FUNCTION__); |
13d8c7b4 SK |
453 | return -1; |
454 | } | |
455 | ||
456 | if ((m_entryProcessingIndex == m_entryInsertionIndex) | |
be494d19 | 457 | && (entries[m_entryProcessingIndex].status == REQUESTED || entries[m_entryProcessingIndex].status == CAPTURED)) { |
da7ca692 | 458 | ALOGV("## MarkProcReq skipping(request underrun) - num(%d), insert(%d), processing(%d), frame(%d)", |
13d8c7b4 SK |
459 | m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex); |
460 | return -1; | |
461 | } | |
c15a6b00 | 462 | |
9dd63e1f | 463 | int newProcessingIndex = GetNextIndex(m_entryProcessingIndex); |
ad37861e | 464 | ALOGV("DEBUG(%s): index(%d)", __FUNCTION__, newProcessingIndex); |
c15a6b00 | 465 | |
c15a6b00 | 466 | newEntry = &(entries[newProcessingIndex]); |
ad37861e | 467 | request_shot = &(newEntry->internal_shot); |
0f26b20f | 468 | *afMode = (int)(newEntry->internal_shot.shot.ctl.aa.afMode); |
be494d19 | 469 | if (newEntry->status != REGISTERED) { |
5c88d1f2 C |
470 | CAM_LOGD("DEBUG(%s)(%d): Circular buffer abnormal, numOfEntries(%d), status(%d)", __FUNCTION__, newProcessingIndex, m_numOfEntries, newEntry->status); |
471 | for (int i = 0; i < NUM_MAX_REQUEST_MGR_ENTRY; i++) { | |
472 | CAM_LOGD("DBG: entrie[%d].stream output cnt = %d, framecnt(%d)", i, entries[i].output_stream_count, entries[i].internal_shot.shot.ctl.request.frameCount); | |
473 | } | |
13d8c7b4 | 474 | return -1; |
c15a6b00 | 475 | } |
ad37861e | 476 | |
be494d19 | 477 | newEntry->status = REQUESTED; |
c15a6b00 | 478 | |
ad37861e | 479 | shot_ext = (struct camera2_shot_ext *)buf->virt.extP[1]; |
13d8c7b4 | 480 | |
ad37861e SK |
481 | memset(shot_ext, 0x00, sizeof(struct camera2_shot_ext)); |
482 | shot_ext->shot.ctl.request.frameCount = request_shot->shot.ctl.request.frameCount; | |
13d8c7b4 | 483 | shot_ext->request_sensor = 1; |
9dd63e1f SK |
484 | shot_ext->dis_bypass = 1; |
485 | shot_ext->dnr_bypass = 1; | |
ad37861e SK |
486 | shot_ext->fd_bypass = 1; |
487 | shot_ext->setfile = 0; | |
488 | ||
5506cebf SK |
489 | targetStreamIndex = newEntry->internal_shot.shot.ctl.request.outputStreams[0]; |
490 | shot_ext->shot.ctl.request.outputStreams[0] = targetStreamIndex; | |
491 | if (targetStreamIndex & MASK_OUTPUT_SCP) | |
492 | shot_ext->request_scp = 1; | |
13d8c7b4 | 493 | |
5506cebf SK |
494 | if (targetStreamIndex & MASK_OUTPUT_SCC) |
495 | shot_ext->request_scc = 1; | |
496 | ||
497 | if (shot_ext->shot.ctl.stats.faceDetectMode != FACEDETECT_MODE_OFF) | |
498 | shot_ext->fd_bypass = 0; | |
13d8c7b4 | 499 | |
0f26b20f SK |
500 | if (count == 0){ |
501 | shot_ext->shot.ctl.aa.mode = AA_CONTROL_AUTO; | |
502 | } else | |
503 | shot_ext->shot.ctl.aa.mode = AA_CONTROL_NONE; | |
504 | ||
505 | count++; | |
ad37861e SK |
506 | shot_ext->shot.ctl.request.metadataMode = METADATA_MODE_FULL; |
507 | shot_ext->shot.ctl.stats.faceDetectMode = FACEDETECT_MODE_FULL; | |
508 | shot_ext->shot.magicNumber = 0x23456789; | |
509 | shot_ext->shot.ctl.sensor.exposureTime = 0; | |
510 | shot_ext->shot.ctl.sensor.frameDuration = 33*1000*1000; | |
511 | shot_ext->shot.ctl.sensor.sensitivity = 0; | |
512 | ||
e4657e32 SK |
513 | |
514 | shot_ext->shot.ctl.scaler.cropRegion[0] = newEntry->internal_shot.shot.ctl.scaler.cropRegion[0]; | |
515 | shot_ext->shot.ctl.scaler.cropRegion[1] = newEntry->internal_shot.shot.ctl.scaler.cropRegion[1]; | |
516 | shot_ext->shot.ctl.scaler.cropRegion[2] = newEntry->internal_shot.shot.ctl.scaler.cropRegion[2]; | |
13d8c7b4 | 517 | |
ad37861e SK |
518 | m_entryProcessingIndex = newProcessingIndex; |
519 | return newProcessingIndex; | |
c15a6b00 JS |
520 | } |
521 | ||
2adfa429 | 522 | void RequestManager::NotifyStreamOutput(int frameCnt) |
c15a6b00 | 523 | { |
9dd63e1f SK |
524 | int index; |
525 | ||
2adfa429 JS |
526 | Mutex::Autolock lock(m_requestMutex); |
527 | ALOGV("DEBUG(%s): frameCnt(%d)", __FUNCTION__, frameCnt); | |
9dd63e1f SK |
528 | |
529 | index = FindEntryIndexByFrameCnt(frameCnt); | |
530 | if (index == -1) { | |
531 | ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt); | |
532 | return; | |
533 | } | |
2adfa429 | 534 | ALOGV("DEBUG(%s): frameCnt(%d), last cnt (%d)", __FUNCTION__, frameCnt, entries[index].output_stream_count); |
9dd63e1f | 535 | |
be494d19 SK |
536 | entries[index].output_stream_count--; //TODO : match stream id also |
537 | CheckCompleted(index); | |
13d8c7b4 SK |
538 | } |
539 | ||
/*
 * Promote entry `index` to COMPLETED when both conditions hold: its dynamic
 * metadata has arrived (METADONE, or it was already COMPLETED) and every
 * pending stream output has been counted down. The main thread is signalled
 * only when this entry is the next frame in sequence, preserving in-order
 * result delivery.
 */
void RequestManager::CheckCompleted(int index)
{
    if ((entries[index].status == METADONE || entries[index].status == COMPLETED)
        && (entries[index].output_stream_count <= 0)){
        ALOGV("(%s): Completed(index:%d)(frameCnt:%d)", __FUNCTION__,
            index, entries[index].internal_shot.shot.ctl.request.frameCount );
        entries[index].status = COMPLETED;
        if (m_lastCompletedFrameCnt + 1 == entries[index].internal_shot.shot.ctl.request.frameCount)
            m_mainThread->SetSignal(SIGNAL_MAIN_STREAM_OUTPUT_DONE);
    }
}
9dd63e1f | 551 | |
f9a06609 | 552 | int RequestManager::GetCompletedIndex() |
ad37861e | 553 | { |
041f38de | 554 | return FindEntryIndexByFrameCnt(m_lastCompletedFrameCnt + 1); |
ad37861e SK |
555 | } |
556 | ||
52f54308 SK |
/*
 * Append a request-entry index to the sensor queue (FIFO consumed by
 * popSensorQ()).
 */
void RequestManager::pushSensorQ(int index)
{
    Mutex::Autolock lock(m_requestMutex);
    m_sensorQ.push_back(index);
}
562 | ||
563 | int RequestManager::popSensorQ() | |
564 | { | |
565 | List<int>::iterator sensor_token; | |
566 | int index; | |
567 | ||
568 | Mutex::Autolock lock(m_requestMutex); | |
569 | ||
570 | if(m_sensorQ.size() == 0) | |
571 | return -1; | |
572 | ||
573 | sensor_token = m_sensorQ.begin()++; | |
574 | index = *sensor_token; | |
575 | m_sensorQ.erase(sensor_token); | |
576 | ||
577 | return (index); | |
578 | } | |
579 | ||
580 | void RequestManager::releaseSensorQ() | |
581 | { | |
582 | List<int>::iterator r; | |
583 | ||
584 | Mutex::Autolock lock(m_requestMutex); | |
0eb27a9d | 585 | ALOGV("(%s)m_sensorQ.size : %d", __FUNCTION__, m_sensorQ.size()); |
52f54308 SK |
586 | |
587 | while(m_sensorQ.size() > 0){ | |
588 | r = m_sensorQ.begin()++; | |
589 | m_sensorQ.erase(r); | |
590 | } | |
591 | return; | |
592 | } | |
593 | ||
ad37861e | 594 | void RequestManager::ApplyDynamicMetadata(struct camera2_shot_ext *shot_ext) |
13d8c7b4 | 595 | { |
9dd63e1f | 596 | int index; |
b56dcc00 SK |
597 | struct camera2_shot_ext * request_shot; |
598 | nsecs_t timeStamp; | |
ad37861e | 599 | int i; |
13d8c7b4 | 600 | |
52f54308 | 601 | Mutex::Autolock lock(m_requestMutex); |
ad37861e | 602 | ALOGV("DEBUG(%s): frameCnt(%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount); |
9dd63e1f | 603 | |
ad37861e SK |
604 | for (i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) { |
605 | if((entries[i].internal_shot.shot.ctl.request.frameCount == shot_ext->shot.ctl.request.frameCount) | |
2adfa429 | 606 | && (entries[i].status == CAPTURED)){ |
f9a06609 | 607 | entries[i].status = METADONE; |
ad37861e | 608 | break; |
2adfa429 | 609 | } |
ad37861e SK |
610 | } |
611 | ||
612 | if (i == NUM_MAX_REQUEST_MGR_ENTRY){ | |
613 | ALOGE("[%s] no entry found(framecount:%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount); | |
9dd63e1f | 614 | return; |
13d8c7b4 | 615 | } |
9dd63e1f | 616 | |
ad37861e | 617 | request_manager_entry * newEntry = &(entries[i]); |
b56dcc00 | 618 | request_shot = &(newEntry->internal_shot); |
9dd63e1f | 619 | |
b56dcc00 | 620 | timeStamp = request_shot->shot.dm.sensor.timeStamp; |
ad37861e | 621 | memcpy(&(request_shot->shot.dm), &(shot_ext->shot.dm), sizeof(struct camera2_dm)); |
b56dcc00 | 622 | request_shot->shot.dm.sensor.timeStamp = timeStamp; |
5506cebf | 623 | m_lastTimeStamp = timeStamp; |
ad37861e | 624 | CheckCompleted(i); |
13d8c7b4 SK |
625 | } |
626 | ||
53f62ad9 | 627 | void RequestManager::UpdateIspParameters(struct camera2_shot_ext *shot_ext, int frameCnt, ctl_request_info_t *ctl_info) |
13d8c7b4 | 628 | { |
9dd63e1f | 629 | int index, targetStreamIndex; |
b56dcc00 | 630 | struct camera2_shot_ext * request_shot; |
9dd63e1f SK |
631 | |
632 | ALOGV("DEBUG(%s): updating info with frameCnt(%d)", __FUNCTION__, frameCnt); | |
633 | if (frameCnt < 0) | |
13d8c7b4 | 634 | return; |
9dd63e1f SK |
635 | |
636 | index = FindEntryIndexByFrameCnt(frameCnt); | |
637 | if (index == -1) { | |
638 | ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt); | |
639 | return; | |
640 | } | |
641 | ||
13d8c7b4 | 642 | request_manager_entry * newEntry = &(entries[index]); |
ad37861e | 643 | request_shot = &(newEntry->internal_shot); |
2bdec060 | 644 | memcpy(&(shot_ext->shot.ctl), &(request_shot->shot.ctl), sizeof(struct camera2_ctl)); |
2adfa429 | 645 | shot_ext->shot.ctl.request.frameCount = frameCnt; |
13d8c7b4 | 646 | shot_ext->request_sensor = 1; |
ad37861e SK |
647 | shot_ext->dis_bypass = 1; |
648 | shot_ext->dnr_bypass = 1; | |
649 | shot_ext->fd_bypass = 1; | |
650 | shot_ext->setfile = 0; | |
651 | ||
13d8c7b4 SK |
652 | shot_ext->request_scc = 0; |
653 | shot_ext->request_scp = 0; | |
ad37861e | 654 | |
5506cebf SK |
655 | shot_ext->isReprocessing = request_shot->isReprocessing; |
656 | shot_ext->reprocessInput = request_shot->reprocessInput; | |
9dd63e1f | 657 | shot_ext->shot.ctl.request.outputStreams[0] = 0; |
9dd63e1f | 658 | |
e4657e32 SK |
659 | shot_ext->shot.ctl.scaler.cropRegion[0] = request_shot->shot.ctl.scaler.cropRegion[0]; |
660 | shot_ext->shot.ctl.scaler.cropRegion[1] = request_shot->shot.ctl.scaler.cropRegion[1]; | |
661 | shot_ext->shot.ctl.scaler.cropRegion[2] = request_shot->shot.ctl.scaler.cropRegion[2]; | |
662 | ||
53f62ad9 YJ |
663 | // mapping flash UI mode from aeMode |
664 | if (request_shot->shot.ctl.aa.aeMode >= AA_AEMODE_ON) { | |
4a9565ae | 665 | if (request_shot->shot.ctl.aa.captureIntent == AA_CAPTURE_INTENT_PREVIEW) |
73f5ad60 | 666 | ctl_info->flash.i_flashMode = request_shot->shot.ctl.aa.aeMode; |
53f62ad9 YJ |
667 | request_shot->shot.ctl.aa.aeMode = AA_AEMODE_ON; |
668 | } | |
669 | // mapping awb UI mode form awbMode | |
4a9565ae YJ |
670 | if (request_shot->shot.ctl.aa.captureIntent == AA_CAPTURE_INTENT_PREVIEW) |
671 | ctl_info->awb.i_awbMode = request_shot->shot.ctl.aa.awbMode; | |
53f62ad9 YJ |
672 | |
673 | // Apply ae/awb lock or unlock | |
e117f756 YJ |
674 | if (request_shot->ae_lock == AEMODE_LOCK_ON) |
675 | request_shot->shot.ctl.aa.aeMode = AA_AEMODE_LOCKED; | |
676 | if (request_shot->awb_lock == AWBMODE_LOCK_ON) | |
677 | request_shot->shot.ctl.aa.awbMode = AA_AWBMODE_LOCKED; | |
678 | ||
2bdec060 SK |
679 | if (m_lastAaMode == request_shot->shot.ctl.aa.mode) { |
680 | shot_ext->shot.ctl.aa.mode = (enum aa_mode)(0); | |
681 | } | |
682 | else { | |
683 | shot_ext->shot.ctl.aa.mode = request_shot->shot.ctl.aa.mode; | |
684 | m_lastAaMode = (int)(shot_ext->shot.ctl.aa.mode); | |
685 | } | |
686 | if (m_lastAeMode == request_shot->shot.ctl.aa.aeMode) { | |
687 | shot_ext->shot.ctl.aa.aeMode = (enum aa_aemode)(0); | |
688 | } | |
689 | else { | |
690 | shot_ext->shot.ctl.aa.aeMode = request_shot->shot.ctl.aa.aeMode; | |
691 | m_lastAeMode = (int)(shot_ext->shot.ctl.aa.aeMode); | |
692 | } | |
693 | if (m_lastAwbMode == request_shot->shot.ctl.aa.awbMode) { | |
694 | shot_ext->shot.ctl.aa.awbMode = (enum aa_awbmode)(0); | |
695 | } | |
696 | else { | |
697 | shot_ext->shot.ctl.aa.awbMode = request_shot->shot.ctl.aa.awbMode; | |
698 | m_lastAwbMode = (int)(shot_ext->shot.ctl.aa.awbMode); | |
699 | } | |
700 | if (m_lastAeComp == request_shot->shot.ctl.aa.aeExpCompensation) { | |
701 | shot_ext->shot.ctl.aa.aeExpCompensation = 0; | |
702 | } | |
703 | else { | |
704 | shot_ext->shot.ctl.aa.aeExpCompensation = request_shot->shot.ctl.aa.aeExpCompensation; | |
705 | m_lastAeComp = (int)(shot_ext->shot.ctl.aa.aeExpCompensation); | |
706 | } | |
ef6f83ca | 707 | |
5c88d1f2 C |
708 | if (request_shot->shot.ctl.aa.videoStabilizationMode) { |
709 | m_vdisBubbleEn = true; | |
710 | shot_ext->dis_bypass = 0; | |
711 | } else { | |
712 | m_vdisBubbleEn = false; | |
713 | shot_ext->dis_bypass = 1; | |
714 | } | |
5c88d1f2 | 715 | |
ef6f83ca SK |
716 | shot_ext->shot.ctl.aa.afTrigger = 0; |
717 | ||
5506cebf SK |
718 | targetStreamIndex = newEntry->internal_shot.shot.ctl.request.outputStreams[0]; |
719 | shot_ext->shot.ctl.request.outputStreams[0] = targetStreamIndex; | |
720 | if (targetStreamIndex & MASK_OUTPUT_SCP) | |
721 | shot_ext->request_scp = 1; | |
13d8c7b4 | 722 | |
5506cebf SK |
723 | if (targetStreamIndex & MASK_OUTPUT_SCC) |
724 | shot_ext->request_scc = 1; | |
725 | ||
726 | if (shot_ext->shot.ctl.stats.faceDetectMode != FACEDETECT_MODE_OFF) | |
727 | shot_ext->fd_bypass = 0; | |
728 | ||
729 | if (targetStreamIndex & STREAM_MASK_RECORD) { | |
730 | shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 30; | |
731 | shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30; | |
13d8c7b4 | 732 | } |
5506cebf SK |
733 | |
734 | ALOGV("(%s): applied aa(%d) aemode(%d) expComp(%d), awb(%d) afmode(%d), ", __FUNCTION__, | |
735 | (int)(shot_ext->shot.ctl.aa.mode), (int)(shot_ext->shot.ctl.aa.aeMode), | |
736 | (int)(shot_ext->shot.ctl.aa.aeExpCompensation), (int)(shot_ext->shot.ctl.aa.awbMode), | |
737 | (int)(shot_ext->shot.ctl.aa.afMode)); | |
13d8c7b4 SK |
738 | } |
739 | ||
5c88d1f2 C |
/*
 * Whether video stabilization (DIS) was enabled by the most recent request
 * processed in UpdateIspParameters().
 */
bool RequestManager::IsVdisEnable(void)
{
    return m_vdisBubbleEn;
}
5c88d1f2 | 744 | |
9dd63e1f SK |
745 | int RequestManager::FindEntryIndexByFrameCnt(int frameCnt) |
746 | { | |
747 | for (int i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) { | |
be494d19 | 748 | if (entries[i].internal_shot.shot.ctl.request.frameCount == frameCnt) |
9dd63e1f SK |
749 | return i; |
750 | } | |
751 | return -1; | |
752 | } | |
753 | ||
/*
 * Attach the sensor timestamp to the entry for `frameCnt`. Reprocessing
 * entries keep the timestamp captured with the original shot.
 * NOTE(review): unlike GetTimestamp() this does not take m_requestMutex —
 * presumably the caller serializes access; confirm.
 */
void RequestManager::RegisterTimestamp(int frameCnt, nsecs_t * frameTime)
{
    int index = FindEntryIndexByFrameCnt(frameCnt);
    if (index == -1) {
        ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
        return;
    }

    request_manager_entry * currentEntry = &(entries[index]);
    if (currentEntry->internal_shot.isReprocessing == 1) {
        ALOGV("DEBUG(%s): REPROCESSING : preserving timestamp for reqIndex(%d) frameCnt(%d) (%lld)", __FUNCTION__,
        index, frameCnt, currentEntry->internal_shot.shot.dm.sensor.timeStamp);
    } else {
        currentEntry->internal_shot.shot.dm.sensor.timeStamp = *((uint64_t*)frameTime);
        ALOGV("DEBUG(%s): applied timestamp for reqIndex(%d) frameCnt(%d) (%lld)", __FUNCTION__,
        index, frameCnt, currentEntry->internal_shot.shot.dm.sensor.timeStamp);
    }
}
772 | ||
5506cebf SK |
773 | |
774 | nsecs_t RequestManager::GetTimestampByFrameCnt(int frameCnt) | |
775 | { | |
776 | int index = FindEntryIndexByFrameCnt(frameCnt); | |
777 | if (index == -1) { | |
778 | ALOGE("ERR(%s): Cannot find entry for frameCnt(%d) returning saved time(%lld)", __FUNCTION__, frameCnt, m_lastTimeStamp); | |
779 | return m_lastTimeStamp; | |
780 | } | |
781 | else | |
782 | return GetTimestamp(index); | |
783 | } | |
784 | ||
785 | nsecs_t RequestManager::GetTimestamp(int index) | |
13d8c7b4 | 786 | { |
5f643a75 | 787 | Mutex::Autolock lock(m_requestMutex); |
eed7ed1b SK |
788 | if (index < 0 || index >= NUM_MAX_REQUEST_MGR_ENTRY) { |
789 | ALOGE("ERR(%s): Request entry outside of bounds (%d)", __FUNCTION__, index); | |
9dd63e1f SK |
790 | return 0; |
791 | } | |
792 | ||
13d8c7b4 | 793 | request_manager_entry * currentEntry = &(entries[index]); |
5f643a75 | 794 | nsecs_t frameTime = currentEntry->internal_shot.shot.dm.sensor.timeStamp; |
5506cebf SK |
795 | if (frameTime == 0) { |
796 | ALOGV("DEBUG(%s): timestamp null, returning saved value", __FUNCTION__); | |
797 | frameTime = m_lastTimeStamp; | |
798 | } | |
9dd63e1f | 799 | ALOGV("DEBUG(%s): Returning timestamp for reqIndex(%d) (%lld)", __FUNCTION__, index, frameTime); |
13d8c7b4 SK |
800 | return frameTime; |
801 | } | |
802 | ||
2f4d175d SK |
803 | uint8_t RequestManager::GetOutputStreamByFrameCnt(int frameCnt) |
804 | { | |
805 | int index = FindEntryIndexByFrameCnt(frameCnt); | |
806 | if (index == -1) { | |
807 | ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt); | |
808 | return 0; | |
809 | } | |
810 | else | |
811 | return GetOutputStream(index); | |
812 | } | |
813 | ||
814 | uint8_t RequestManager::GetOutputStream(int index) | |
815 | { | |
816 | Mutex::Autolock lock(m_requestMutex); | |
817 | if (index < 0 || index >= NUM_MAX_REQUEST_MGR_ENTRY) { | |
818 | ALOGE("ERR(%s): Request entry outside of bounds (%d)", __FUNCTION__, index); | |
819 | return 0; | |
820 | } | |
821 | ||
822 | request_manager_entry * currentEntry = &(entries[index]); | |
823 | return currentEntry->internal_shot.shot.ctl.request.outputStreams[0]; | |
824 | } | |
825 | ||
9dd63e1f SK |
826 | int RequestManager::FindFrameCnt(struct camera2_shot_ext * shot_ext) |
827 | { | |
041f38de | 828 | Mutex::Autolock lock(m_requestMutex); |
ad37861e SK |
829 | int i; |
830 | ||
be494d19 | 831 | if (m_numOfEntries == 0) { |
5c88d1f2 | 832 | CAM_LOGD("DBG(%s): No Entry found", __FUNCTION__); |
be494d19 SK |
833 | return -1; |
834 | } | |
ad37861e | 835 | |
be494d19 | 836 | for (i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) { |
ad37861e | 837 | if(entries[i].internal_shot.shot.ctl.request.frameCount != shot_ext->shot.ctl.request.frameCount) |
be494d19 | 838 | continue; |
ad37861e SK |
839 | |
840 | if (entries[i].status == REQUESTED) { | |
841 | entries[i].status = CAPTURED; | |
842 | return entries[i].internal_shot.shot.ctl.request.frameCount; | |
be494d19 | 843 | } |
5c88d1f2 | 844 | CAM_LOGE("ERR(%s): frameCount(%d), index(%d), status(%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount, i, entries[i].status); |
ad37861e | 845 | |
be494d19 | 846 | } |
5c88d1f2 | 847 | CAM_LOGD("(%s): No Entry found frame count(%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount); |
ad37861e | 848 | |
be494d19 | 849 | return -1; |
9dd63e1f | 850 | } |
13d8c7b4 | 851 | |
b5237e6b SK |
852 | void RequestManager::SetInitialSkip(int count) |
853 | { | |
854 | ALOGV("(%s): Pipeline Restarting. setting cnt(%d) - current(%d)", __FUNCTION__, count, m_sensorPipelineSkipCnt); | |
855 | if (count > m_sensorPipelineSkipCnt) | |
856 | m_sensorPipelineSkipCnt = count; | |
857 | } | |
858 | ||
ad37861e SK |
859 | int RequestManager::GetSkipCnt() |
860 | { | |
861 | ALOGV("(%s): skip cnt(%d)", __FUNCTION__, m_sensorPipelineSkipCnt); | |
862 | if (m_sensorPipelineSkipCnt == 0) | |
863 | return m_sensorPipelineSkipCnt; | |
864 | else | |
865 | return --m_sensorPipelineSkipCnt; | |
866 | } | |
867 | ||
13d8c7b4 SK |
868 | void RequestManager::Dump(void) |
869 | { | |
13d8c7b4 SK |
870 | int i = 0; |
871 | request_manager_entry * currentEntry; | |
ad37861e | 872 | ALOGD("## Dump totalentry(%d), insert(%d), processing(%d), frame(%d)", |
13d8c7b4 SK |
873 | m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex); |
874 | ||
875 | for (i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) { | |
876 | currentEntry = &(entries[i]); | |
5506cebf | 877 | ALOGD("[%2d] status[%d] frameCnt[%3d] numOutput[%d] outstream[0]-%x ", i, |
be494d19 | 878 | currentEntry->status, currentEntry->internal_shot.shot.ctl.request.frameCount, |
ad37861e | 879 | currentEntry->output_stream_count, |
5506cebf | 880 | currentEntry->internal_shot.shot.ctl.request.outputStreams[0]); |
13d8c7b4 SK |
881 | } |
882 | } | |
c15a6b00 | 883 | |
9dd63e1f SK |
884 | int RequestManager::GetNextIndex(int index) |
885 | { | |
886 | index++; | |
887 | if (index >= NUM_MAX_REQUEST_MGR_ENTRY) | |
888 | index = 0; | |
889 | ||
890 | return index; | |
891 | } | |
892 | ||
f9a06609 SK |
893 | int RequestManager::GetPrevIndex(int index) |
894 | { | |
895 | index--; | |
896 | if (index < 0) | |
897 | index = NUM_MAX_REQUEST_MGR_ENTRY-1; | |
898 | ||
899 | return index; | |
900 | } | |
901 | ||
// Constructor for the camera2 HAL device instance.
//
// Initializes all bookkeeping members, loads the gralloc HAL module,
// creates an ION client, starts the main thread and request manager,
// and brings up the ISP chain. The init result is reported to the
// caller through *openInvalid (negative = failure); on failure the
// already-opened V4L2 nodes are closed again, on success the sensor
// thread, both CSC (color space converter) instances, EXIF defaults
// and the 3A (flash/awb/ae/af/scene) control state are set up.
//
// @param cameraId    0 selects the back sensor, otherwise the front
//                    (see InitializeISPChain sensor_id selection).
// @param dev         camera2 HAL device handle, kept in m_halDevice.
// @param camera      static camera info provider, kept in m_camera2.
// @param openInvalid out-parameter receiving InitializeISPChain()'s
//                    result; caller treats < 0 as open failure.
ExynosCameraHWInterface2::ExynosCameraHWInterface2(int cameraId, camera2_device_t *dev, ExynosCamera2 * camera, int *openInvalid):
    m_requestQueueOps(NULL),
    m_frameQueueOps(NULL),
    m_callbackCookie(NULL),
    m_numOfRemainingReqInSvc(0),
    m_isRequestQueuePending(false),
    m_isRequestQueueNull(true),
    m_isIspStarted(false),
    m_ionCameraClient(0),
    m_zoomRatio(1),
    m_scp_closing(false),
    m_scp_closed(false),
    m_afState(HAL_AFSTATE_INACTIVE),
    m_afMode(NO_CHANGE),
    m_afMode2(NO_CHANGE),
    m_vdisBubbleCnt(0),
    m_vdisDupFrame(0),
    m_IsAfModeUpdateRequired(false),
    m_IsAfTriggerRequired(false),
    m_IsAfLockRequired(false),
    m_sccLocalBufferValid(false),
    m_wideAspect(false),
    m_scpOutputSignalCnt(0),
    m_scpOutputImageCnt(0),
    m_afTriggerId(0),
    m_afPendingTriggerId(0),
    m_afModeWaitingCnt(0),
    m_scpForceSuspended(false),
    m_halDevice(dev),
    m_nightCaptureCnt(0),
    m_nightCaptureFrameCnt(0),
    m_cameraId(cameraId),
    m_thumbNailW(160),
    m_thumbNailH(120)
{
    ALOGD("(%s): ENTER", __FUNCTION__);
    int ret = 0;
    int res = 0;

    m_exynosPictureCSC = NULL;
    m_exynosVideoCSC = NULL;

    // Gralloc module is shared across instances; load it only once.
    if (!m_grallocHal) {
        ret = hw_get_module(GRALLOC_HARDWARE_MODULE_ID, (const hw_module_t **)&m_grallocHal);
        if (ret)
            ALOGE("ERR(%s):Fail on loading gralloc HAL", __FUNCTION__);
    }

    m_camera2 = camera;
    m_ionCameraClient = createIonClient(m_ionCameraClient);
    if(m_ionCameraClient == 0)
        ALOGE("ERR(%s):Fail on ion_client_create", __FUNCTION__);


    m_BayerManager = new BayerBufManager();
    m_mainThread    = new MainThread(this);
    m_requestManager = new RequestManager((SignalDrivenThread*)(m_mainThread.get()));
    *openInvalid = InitializeISPChain();
    if (*openInvalid < 0) {
        ALOGD("(%s): ISP chain init failed. exiting", __FUNCTION__);
        // clean process
        // 1. close video nodes
        // SCP
        res = exynos_v4l2_close(m_camera_info.scp.fd);
        if (res != NO_ERROR ) {
            ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
        }
        // SCC
        res = exynos_v4l2_close(m_camera_info.capture.fd);
        if (res != NO_ERROR ) {
            ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
        }
        // Sensor
        res = exynos_v4l2_close(m_camera_info.sensor.fd);
        if (res != NO_ERROR ) {
            ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
        }
        // ISP
        res = exynos_v4l2_close(m_camera_info.isp.fd);
        if (res != NO_ERROR ) {
            ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
        }
    } else {
        // ISP chain is up: start worker threads and allocate the two
        // hardware CSC contexts (still picture and video paths).
        m_sensorThread = new SensorThread(this);
        m_mainThread->Start("MainThread", PRIORITY_DEFAULT, 0);
        m_sensorThread->Start("SensorThread", PRIORITY_DEFAULT, 0);
        ALOGV("DEBUG(%s): created sensorthread ", __FUNCTION__);

        for (int i = 0 ; i < STREAM_ID_LAST+1 ; i++)
            m_subStreams[i].type = SUBSTREAM_TYPE_NONE;
        CSC_METHOD cscMethod = CSC_METHOD_HW;
        m_exynosPictureCSC = csc_init(cscMethod);
        if (m_exynosPictureCSC == NULL)
            ALOGE("ERR(%s): csc_init() fail", __FUNCTION__);
        csc_set_hw_property(m_exynosPictureCSC, CSC_HW_PROPERTY_FIXED_NODE, PICTURE_GSC_NODE_NUM);

        m_exynosVideoCSC = csc_init(cscMethod);
        if (m_exynosVideoCSC == NULL)
            ALOGE("ERR(%s): csc_init() fail", __FUNCTION__);
        csc_set_hw_property(m_exynosVideoCSC, CSC_HW_PROPERTY_FIXED_NODE, VIDEO_GSC_NODE_NUM);

        m_setExifFixedAttribute();

        // contol information clear
        // flash
        m_ctlInfo.flash.i_flashMode = AA_AEMODE_ON;
        m_ctlInfo.flash.m_afFlashDoneFlg= false;
        m_ctlInfo.flash.m_flashEnableFlg = false;
        m_ctlInfo.flash.m_flashFrameCount = 0;
        m_ctlInfo.flash.m_flashCnt = 0;
        m_ctlInfo.flash.m_flashTimeOut = 0;
        m_ctlInfo.flash.m_flashDecisionResult = false;
        m_ctlInfo.flash.m_flashTorchMode = false;
        m_ctlInfo.flash.m_precaptureState = 0;
        m_ctlInfo.flash.m_precaptureTriggerId = 0;
        // awb
        m_ctlInfo.awb.i_awbMode = AA_AWBMODE_OFF;
        // ae
        m_ctlInfo.ae.aeStateNoti = AE_STATE_INACTIVE;
        // af
        m_ctlInfo.af.m_afTriggerTimeOut = 0;
        // scene
        m_ctlInfo.scene.prevSceneMode = AA_SCENE_MODE_MAX;
    }
    ALOGD("(%s): EXIT", __FUNCTION__);
}
1028 | ||
1029 | ExynosCameraHWInterface2::~ExynosCameraHWInterface2() | |
1030 | { | |
ed4ad5fe | 1031 | ALOGD("(%s): ENTER", __FUNCTION__); |
c15a6b00 | 1032 | this->release(); |
ed4ad5fe | 1033 | ALOGD("(%s): EXIT", __FUNCTION__); |
c15a6b00 JS |
1034 | } |
1035 | ||
// Full teardown of the HAL instance. The ordering is deliberate:
//  1. signal stream/sensor/main threads to terminate,
//  2. deinit the CSC contexts,
//  3. busy-wait until each thread reports IsTerminated(),
//  4. delete the request/bayer managers,
//  5. free sensor and SCC buffers,
//  6. close the V4L2 nodes and the ION client.
// Reordering these steps risks freeing buffers or closing nodes while
// a thread still uses them.
void ExynosCameraHWInterface2::release()
{
    int i, res;
    ALOGD("(HAL2::release): ENTER");

    // Phase 1: ask every thread to stop (non-blocking).
    if (m_streamThreads[1] != NULL) {
        m_streamThreads[1]->release();
        m_streamThreads[1]->SetSignal(SIGNAL_THREAD_TERMINATE);
    }

    if (m_streamThreads[0] != NULL) {
        m_streamThreads[0]->release();
        m_streamThreads[0]->SetSignal(SIGNAL_THREAD_TERMINATE);
    }

    if (m_sensorThread != NULL) {
        m_sensorThread->release();
    }

    if (m_mainThread != NULL) {
        m_mainThread->release();
    }

    if (m_exynosPictureCSC)
        csc_deinit(m_exynosPictureCSC);
    m_exynosPictureCSC = NULL;

    if (m_exynosVideoCSC)
        csc_deinit(m_exynosVideoCSC);
    m_exynosVideoCSC = NULL;

    // Phase 2: wait for each thread to actually terminate before
    // releasing resources they may still touch.
    if (m_streamThreads[1] != NULL) {
        ALOGD("(HAL2::release): START Waiting for (indirect) stream thread 1 termination");
        while (!m_streamThreads[1]->IsTerminated())
            usleep(SIG_WAITING_TICK);
        ALOGD("(HAL2::release): END Waiting for (indirect) stream thread 1 termination");
        m_streamThreads[1] = NULL;
    }

    if (m_streamThreads[0] != NULL) {
        ALOGD("(HAL2::release): START Waiting for (indirect) stream thread 0 termination");
        while (!m_streamThreads[0]->IsTerminated())
            usleep(SIG_WAITING_TICK);
        ALOGD("(HAL2::release): END Waiting for (indirect) stream thread 0 termination");
        m_streamThreads[0] = NULL;
    }

    if (m_sensorThread != NULL) {
        ALOGD("(HAL2::release): START Waiting for (indirect) sensor thread termination");
        while (!m_sensorThread->IsTerminated())
            usleep(SIG_WAITING_TICK);
        ALOGD("(HAL2::release): END Waiting for (indirect) sensor thread termination");
        m_sensorThread = NULL;
    }

    if (m_mainThread != NULL) {
        ALOGD("(HAL2::release): START Waiting for (indirect) main thread termination");
        while (!m_mainThread->IsTerminated())
            usleep(SIG_WAITING_TICK);
        ALOGD("(HAL2::release): END Waiting for (indirect) main thread termination");
        m_mainThread = NULL;
    }

    if (m_requestManager != NULL) {
        delete m_requestManager;
        m_requestManager = NULL;
    }

    if (m_BayerManager != NULL) {
        delete m_BayerManager;
        m_BayerManager = NULL;
    }
    // Phase 3: free buffers. SCC buffers live either in the local
    // cache (m_sccLocalBuffer, plane count fixed by ENABLE_FRAME_SYNC)
    // or in the capture node's buffer array.
    for (i = 0; i < NUM_BAYER_BUFFERS; i++)
        freeCameraMemory(&m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes);

    if (m_sccLocalBufferValid) {
        for (i = 0; i < NUM_SCC_BUFFERS; i++)
#ifdef ENABLE_FRAME_SYNC
            freeCameraMemory(&m_sccLocalBuffer[i], 2);
#else
            freeCameraMemory(&m_sccLocalBuffer[i], 1);
#endif
    }
    else {
        for (i = 0; i < NUM_SCC_BUFFERS; i++)
            freeCameraMemory(&m_camera_info.capture.buffer[i], m_camera_info.capture.planes);
    }

    // Phase 4: close all V4L2 video nodes and the ION client.
    ALOGV("DEBUG(%s): calling exynos_v4l2_close - sensor", __FUNCTION__);
    res = exynos_v4l2_close(m_camera_info.sensor.fd);
    if (res != NO_ERROR ) {
        ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
    }

    ALOGV("DEBUG(%s): calling exynos_v4l2_close - isp", __FUNCTION__);
    res = exynos_v4l2_close(m_camera_info.isp.fd);
    if (res != NO_ERROR ) {
        ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
    }

    ALOGV("DEBUG(%s): calling exynos_v4l2_close - capture", __FUNCTION__);
    res = exynos_v4l2_close(m_camera_info.capture.fd);
    if (res != NO_ERROR ) {
        ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
    }

    ALOGV("DEBUG(%s): calling exynos_v4l2_close - scp", __FUNCTION__);
    res = exynos_v4l2_close(m_camera_info.scp.fd);
    if (res != NO_ERROR ) {
        ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
    }
    ALOGV("DEBUG(%s): calling deleteIonClient", __FUNCTION__);
    deleteIonClient(m_ionCameraClient);

    ALOGD("(HAL2::release): EXIT");
}
1152 | ||
6044e509 | 1153 | int ExynosCameraHWInterface2::InitializeISPChain() |
ad37861e SK |
1154 | { |
1155 | char node_name[30]; | |
1156 | int fd = 0; | |
1157 | int i; | |
6044e509 | 1158 | int ret = 0; |
ad37861e SK |
1159 | |
1160 | /* Open Sensor */ | |
1161 | memset(&node_name, 0x00, sizeof(char[30])); | |
1162 | sprintf(node_name, "%s%d", NODE_PREFIX, 40); | |
1163 | fd = exynos_v4l2_open(node_name, O_RDWR, 0); | |
1164 | ||
1165 | if (fd < 0) { | |
1166 | ALOGE("ERR(%s): failed to open sensor video node (%s) fd (%d)", __FUNCTION__,node_name, fd); | |
1167 | } | |
1168 | else { | |
1169 | ALOGV("DEBUG(%s): sensor video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd); | |
1170 | } | |
1171 | m_camera_info.sensor.fd = fd; | |
1172 | ||
1173 | /* Open ISP */ | |
1174 | memset(&node_name, 0x00, sizeof(char[30])); | |
1175 | sprintf(node_name, "%s%d", NODE_PREFIX, 41); | |
1176 | fd = exynos_v4l2_open(node_name, O_RDWR, 0); | |
1177 | ||
1178 | if (fd < 0) { | |
1179 | ALOGE("ERR(%s): failed to open isp video node (%s) fd (%d)", __FUNCTION__,node_name, fd); | |
1180 | } | |
1181 | else { | |
1182 | ALOGV("DEBUG(%s): isp video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd); | |
1183 | } | |
1184 | m_camera_info.isp.fd = fd; | |
1185 | ||
1186 | /* Open ScalerC */ | |
1187 | memset(&node_name, 0x00, sizeof(char[30])); | |
1188 | sprintf(node_name, "%s%d", NODE_PREFIX, 42); | |
1189 | fd = exynos_v4l2_open(node_name, O_RDWR, 0); | |
1190 | ||
1191 | if (fd < 0) { | |
1192 | ALOGE("ERR(%s): failed to open capture video node (%s) fd (%d)", __FUNCTION__,node_name, fd); | |
1193 | } | |
1194 | else { | |
1195 | ALOGV("DEBUG(%s): capture video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd); | |
1196 | } | |
1197 | m_camera_info.capture.fd = fd; | |
1198 | ||
1199 | /* Open ScalerP */ | |
1200 | memset(&node_name, 0x00, sizeof(char[30])); | |
1201 | sprintf(node_name, "%s%d", NODE_PREFIX, 44); | |
1202 | fd = exynos_v4l2_open(node_name, O_RDWR, 0); | |
1203 | if (fd < 0) { | |
1204 | ALOGE("DEBUG(%s): failed to open preview video node (%s) fd (%d)", __FUNCTION__,node_name, fd); | |
1205 | } | |
1206 | else { | |
1207 | ALOGV("DEBUG(%s): preview video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd); | |
1208 | } | |
5506cebf | 1209 | m_camera_info.scp.fd = fd; |
ad37861e SK |
1210 | |
1211 | if(m_cameraId == 0) | |
1212 | m_camera_info.sensor_id = SENSOR_NAME_S5K4E5; | |
1213 | else | |
1214 | m_camera_info.sensor_id = SENSOR_NAME_S5K6A3; | |
1215 | ||
1216 | memset(&m_camera_info.dummy_shot, 0x00, sizeof(struct camera2_shot_ext)); | |
1217 | m_camera_info.dummy_shot.shot.ctl.request.metadataMode = METADATA_MODE_FULL; | |
1218 | m_camera_info.dummy_shot.shot.magicNumber = 0x23456789; | |
1219 | ||
1220 | m_camera_info.dummy_shot.dis_bypass = 1; | |
1221 | m_camera_info.dummy_shot.dnr_bypass = 1; | |
1222 | m_camera_info.dummy_shot.fd_bypass = 1; | |
1223 | ||
1224 | /*sensor setting*/ | |
1225 | m_camera_info.dummy_shot.shot.ctl.sensor.exposureTime = 0; | |
1226 | m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 0; | |
1227 | m_camera_info.dummy_shot.shot.ctl.sensor.sensitivity = 0; | |
1228 | ||
1229 | m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[0] = 0; | |
1230 | m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[1] = 0; | |
1231 | ||
1232 | /*request setting*/ | |
1233 | m_camera_info.dummy_shot.request_sensor = 1; | |
1234 | m_camera_info.dummy_shot.request_scc = 0; | |
1235 | m_camera_info.dummy_shot.request_scp = 0; | |
1236 | m_camera_info.dummy_shot.shot.ctl.request.outputStreams[0] = 0; | |
ad37861e SK |
1237 | |
1238 | m_camera_info.sensor.width = m_camera2->getSensorRawW(); | |
1239 | m_camera_info.sensor.height = m_camera2->getSensorRawH(); | |
1240 | ||
1241 | m_camera_info.sensor.format = V4L2_PIX_FMT_SBGGR16; | |
1242 | m_camera_info.sensor.planes = 2; | |
1243 | m_camera_info.sensor.buffers = NUM_BAYER_BUFFERS; | |
1244 | m_camera_info.sensor.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; | |
1245 | m_camera_info.sensor.memory = V4L2_MEMORY_DMABUF; | |
ad37861e SK |
1246 | |
1247 | for(i = 0; i < m_camera_info.sensor.buffers; i++){ | |
1248 | initCameraMemory(&m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes); | |
1249 | m_camera_info.sensor.buffer[i].size.extS[0] = m_camera_info.sensor.width*m_camera_info.sensor.height*2; | |
1250 | m_camera_info.sensor.buffer[i].size.extS[1] = 8*1024; // HACK, driver use 8*1024, should be use predefined value | |
5506cebf | 1251 | allocCameraMemory(m_ionCameraClient, &m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes, 1<<1); |
ad37861e SK |
1252 | } |
1253 | ||
1254 | m_camera_info.isp.width = m_camera_info.sensor.width; | |
1255 | m_camera_info.isp.height = m_camera_info.sensor.height; | |
1256 | m_camera_info.isp.format = m_camera_info.sensor.format; | |
1257 | m_camera_info.isp.planes = m_camera_info.sensor.planes; | |
1258 | m_camera_info.isp.buffers = m_camera_info.sensor.buffers; | |
1259 | m_camera_info.isp.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE; | |
1260 | m_camera_info.isp.memory = V4L2_MEMORY_DMABUF; | |
ad37861e SK |
1261 | |
1262 | for(i = 0; i < m_camera_info.isp.buffers; i++){ | |
1263 | initCameraMemory(&m_camera_info.isp.buffer[i], m_camera_info.isp.planes); | |
1264 | m_camera_info.isp.buffer[i].size.extS[0] = m_camera_info.sensor.buffer[i].size.extS[0]; | |
1265 | m_camera_info.isp.buffer[i].size.extS[1] = m_camera_info.sensor.buffer[i].size.extS[1]; | |
1266 | m_camera_info.isp.buffer[i].fd.extFd[0] = m_camera_info.sensor.buffer[i].fd.extFd[0]; | |
1267 | m_camera_info.isp.buffer[i].fd.extFd[1] = m_camera_info.sensor.buffer[i].fd.extFd[1]; | |
1268 | m_camera_info.isp.buffer[i].virt.extP[0] = m_camera_info.sensor.buffer[i].virt.extP[0]; | |
1269 | m_camera_info.isp.buffer[i].virt.extP[1] = m_camera_info.sensor.buffer[i].virt.extP[1]; | |
1270 | }; | |
1271 | ||
1272 | /* init ISP */ | |
6044e509 SK |
1273 | ret = cam_int_s_input(&(m_camera_info.isp), m_camera_info.sensor_id); |
1274 | if (ret < 0) { | |
1275 | ALOGE("ERR(%s): cam_int_s_input(%d) failed!!!! ", __FUNCTION__, m_camera_info.sensor_id); | |
1276 | return false; | |
1277 | } | |
ad37861e SK |
1278 | cam_int_s_fmt(&(m_camera_info.isp)); |
1279 | ALOGV("DEBUG(%s): isp calling reqbuf", __FUNCTION__); | |
1280 | cam_int_reqbufs(&(m_camera_info.isp)); | |
1281 | ALOGV("DEBUG(%s): isp calling querybuf", __FUNCTION__); | |
1282 | ALOGV("DEBUG(%s): isp mem alloc done", __FUNCTION__); | |
1283 | ||
1284 | /* init Sensor */ | |
1285 | cam_int_s_input(&(m_camera_info.sensor), m_camera_info.sensor_id); | |
1286 | ALOGV("DEBUG(%s): sensor s_input done", __FUNCTION__); | |
1287 | if (cam_int_s_fmt(&(m_camera_info.sensor))< 0) { | |
1288 | ALOGE("ERR(%s): sensor s_fmt fail", __FUNCTION__); | |
1289 | } | |
1290 | ALOGV("DEBUG(%s): sensor s_fmt done", __FUNCTION__); | |
1291 | cam_int_reqbufs(&(m_camera_info.sensor)); | |
1292 | ALOGV("DEBUG(%s): sensor reqbuf done", __FUNCTION__); | |
1293 | for (i = 0; i < m_camera_info.sensor.buffers; i++) { | |
1294 | ALOGV("DEBUG(%s): sensor initial QBUF [%d]", __FUNCTION__, i); | |
ad37861e SK |
1295 | m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 33*1000*1000; // apply from frame #1 |
1296 | m_camera_info.dummy_shot.shot.ctl.request.frameCount = -1; | |
52f54308 SK |
1297 | memcpy( m_camera_info.sensor.buffer[i].virt.extP[1], &(m_camera_info.dummy_shot), |
1298 | sizeof(struct camera2_shot_ext)); | |
ad37861e | 1299 | } |
52f54308 SK |
1300 | |
1301 | for (i = 0; i < NUM_MIN_SENSOR_QBUF; i++) | |
1302 | cam_int_qbuf(&(m_camera_info.sensor), i); | |
1303 | ||
1304 | for (i = NUM_MIN_SENSOR_QBUF; i < m_camera_info.sensor.buffers; i++) | |
1305 | m_requestManager->pushSensorQ(i); | |
1306 | ||
5506cebf | 1307 | ALOGV("== stream_on :: sensor"); |
ad37861e | 1308 | cam_int_streamon(&(m_camera_info.sensor)); |
5506cebf | 1309 | m_camera_info.sensor.status = true; |
ad37861e SK |
1310 | |
1311 | /* init Capture */ | |
1312 | m_camera_info.capture.width = m_camera2->getSensorW(); | |
1313 | m_camera_info.capture.height = m_camera2->getSensorH(); | |
1314 | m_camera_info.capture.format = V4L2_PIX_FMT_YUYV; | |
feb7df4c SK |
1315 | #ifdef ENABLE_FRAME_SYNC |
1316 | m_camera_info.capture.planes = 2; | |
1317 | #else | |
ad37861e | 1318 | m_camera_info.capture.planes = 1; |
feb7df4c | 1319 | #endif |
ac8c2060 | 1320 | m_camera_info.capture.buffers = NUM_SCC_BUFFERS; |
ad37861e SK |
1321 | m_camera_info.capture.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; |
1322 | m_camera_info.capture.memory = V4L2_MEMORY_DMABUF; | |
ad37861e | 1323 | |
5506cebf SK |
1324 | m_camera_info.capture.status = false; |
1325 | ||
1326 | return true; | |
1327 | } | |
1328 | ||
// Starts (or restarts) stream thread 1 driving the SCC (ScalerC /
// capture) node in indirect mode.
//
// @param threadExists false on first start: the StreamThread is
//        created, initialized, and SCC buffers are ION-allocated into
//        the local cache (m_sccLocalBuffer). true on restart: the
//        existing thread is reused and the cached buffers are copied
//        back into the capture node.
// After buffer setup the capture node is configured (s_input, s_fmt,
// reqbufs), all buffers are queued to the driver, streaming is turned
// on, and the thread is marked active.
void ExynosCameraHWInterface2::StartSCCThread(bool threadExists)
{
    ALOGV("(%s)", __FUNCTION__);
    StreamThread *AllocatedStream;
    stream_parameters_t newParameters;
    uint32_t format_actual;


    if (!threadExists) {
        m_streamThreads[1]  = new StreamThread(this, 1);
    }
    AllocatedStream = (StreamThread*)(m_streamThreads[1].get());
    if (!threadExists) {
        AllocatedStream->Start("StreamThread", PRIORITY_DEFAULT, 0);
        m_streamThreadInitialize((SignalDrivenThread*)AllocatedStream);
        AllocatedStream->m_numRegisteredStream = 1;
    }
    AllocatedStream->m_index        = 1;

    format_actual                   = HAL_PIXEL_FORMAT_YCbCr_422_I; // YUYV

    newParameters.width             = m_camera2->getSensorW();
    newParameters.height            = m_camera2->getSensorH();
    newParameters.format            = format_actual;
    newParameters.streamOps         = NULL;
    newParameters.numHwBuffers      = NUM_SCC_BUFFERS;
#ifdef ENABLE_FRAME_SYNC
    newParameters.planes            = 2;
#else
    newParameters.planes            = 1;
#endif

    newParameters.numSvcBufsInHal   = 0;

    newParameters.node              = &m_camera_info.capture;

    AllocatedStream->streamType     = STREAM_TYPE_INDIRECT;
    ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, AllocatedStream->m_numRegisteredStream);

    if (!threadExists) {
        // First start: allocate SCC buffers once and cache them so a
        // later restart can reuse them without reallocating.
        if (!m_sccLocalBufferValid) {
            for (int i = 0; i < m_camera_info.capture.buffers; i++){
                initCameraMemory(&m_camera_info.capture.buffer[i], newParameters.node->planes);
                m_camera_info.capture.buffer[i].size.extS[0] = m_camera_info.capture.width*m_camera_info.capture.height*2;
#ifdef ENABLE_FRAME_SYNC
                m_camera_info.capture.buffer[i].size.extS[1] = 4*1024; // HACK, driver use 4*1024, should be use predefined value
                allocCameraMemory(m_ionCameraClient, &m_camera_info.capture.buffer[i], m_camera_info.capture.planes, 1<<1);
#else
                allocCameraMemory(m_ionCameraClient, &m_camera_info.capture.buffer[i], m_camera_info.capture.planes);
#endif
                m_sccLocalBuffer[i] = m_camera_info.capture.buffer[i];
            }
            m_sccLocalBufferValid = true;
        }
    } else {
        // Restart: restore the cached buffers into the capture node.
        if (m_sccLocalBufferValid) {
             for (int i = 0; i < m_camera_info.capture.buffers; i++)
                 m_camera_info.capture.buffer[i] = m_sccLocalBuffer[i];
        } else {
            ALOGE("(%s): SCC Thread starting with no buffer", __FUNCTION__);
        }
    }
    cam_int_s_input(newParameters.node, m_camera_info.sensor_id);
    m_camera_info.capture.buffers = NUM_SCC_BUFFERS;
    cam_int_s_fmt(newParameters.node);
    ALOGV("DEBUG(%s): capture calling reqbuf", __FUNCTION__);
    cam_int_reqbufs(newParameters.node);
    ALOGV("DEBUG(%s): capture calling querybuf", __FUNCTION__);

    // Queue every buffer to the driver before stream-on.
    for (int i = 0; i < newParameters.node->buffers; i++) {
        ALOGV("DEBUG(%s): capture initial QBUF [%d]",  __FUNCTION__, i);
        cam_int_qbuf(newParameters.node, i);
        newParameters.svcBufStatus[i] = ON_DRIVER;
    }

    ALOGV("== stream_on :: capture");
    if (cam_int_streamon(newParameters.node) < 0) {
        ALOGE("ERR(%s): capture stream on fail", __FUNCTION__);
    } else {
        m_camera_info.capture.status = true;
    }

    AllocatedStream->setParameter(&newParameters);
    AllocatedStream->m_activated    = true;
    AllocatedStream->m_isBufferInit = true;
}
1415 | ||
// Turns on streaming for the ISP output node, then enables the
// IS (imaging subsystem) stream via a control on the sensor node.
// Order matters: the ISP must be streaming before the sensor-side
// stream enable is issued.
void ExynosCameraHWInterface2::StartISP()
{
    ALOGV("== stream_on :: isp");
    cam_int_streamon(&(m_camera_info.isp));
    exynos_v4l2_s_ctrl(m_camera_info.sensor.fd, V4L2_CID_IS_S_STREAM, IS_ENABLE_STREAM);
}
1422 | ||
c15a6b00 JS |
1423 | int ExynosCameraHWInterface2::getCameraId() const |
1424 | { | |
9dd63e1f | 1425 | return m_cameraId; |
c15a6b00 | 1426 | } |
c15a6b00 JS |
1427 | |
1428 | int ExynosCameraHWInterface2::setRequestQueueSrcOps(const camera2_request_queue_src_ops_t *request_src_ops) | |
1429 | { | |
13d8c7b4 | 1430 | ALOGV("DEBUG(%s):", __FUNCTION__); |
c15a6b00 JS |
1431 | if ((NULL != request_src_ops) && (NULL != request_src_ops->dequeue_request) |
1432 | && (NULL != request_src_ops->free_request) && (NULL != request_src_ops->request_count)) { | |
1433 | m_requestQueueOps = (camera2_request_queue_src_ops_t*)request_src_ops; | |
1434 | return 0; | |
1435 | } | |
1436 | else { | |
13d8c7b4 | 1437 | ALOGE("DEBUG(%s):setRequestQueueSrcOps : NULL arguments", __FUNCTION__); |
c15a6b00 JS |
1438 | return 1; |
1439 | } | |
1440 | } | |
1441 | ||
1442 | int ExynosCameraHWInterface2::notifyRequestQueueNotEmpty() | |
1443 | { | |
5506cebf SK |
1444 | int i = 0; |
1445 | ||
b5237e6b | 1446 | ALOGV("DEBUG(%s):setting [SIGNAL_MAIN_REQ_Q_NOT_EMPTY] current(%d)", __FUNCTION__, m_requestManager->GetNumEntries()); |
c15a6b00 | 1447 | if ((NULL==m_frameQueueOps)|| (NULL==m_requestQueueOps)) { |
13d8c7b4 | 1448 | ALOGE("DEBUG(%s):queue ops NULL. ignoring request", __FUNCTION__); |
c15a6b00 JS |
1449 | return 0; |
1450 | } | |
13d8c7b4 | 1451 | m_isRequestQueueNull = false; |
0f26b20f SK |
1452 | if (m_requestManager->GetNumEntries() == 0) |
1453 | m_requestManager->SetInitialSkip(5); | |
5506cebf SK |
1454 | |
1455 | if (m_isIspStarted == false) { | |
1456 | /* isp */ | |
1457 | m_camera_info.sensor.buffers = NUM_BAYER_BUFFERS; | |
1458 | m_camera_info.isp.buffers = m_camera_info.sensor.buffers; | |
1459 | cam_int_s_fmt(&(m_camera_info.isp)); | |
1460 | cam_int_reqbufs(&(m_camera_info.isp)); | |
1461 | ||
1462 | /* sensor */ | |
1463 | if (m_camera_info.sensor.status == false) { | |
1464 | cam_int_s_fmt(&(m_camera_info.sensor)); | |
1465 | cam_int_reqbufs(&(m_camera_info.sensor)); | |
1466 | ||
1467 | for (i = 0; i < m_camera_info.sensor.buffers; i++) { | |
1468 | ALOGV("DEBUG(%s): sensor initial QBUF [%d]", __FUNCTION__, i); | |
1469 | m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 33*1000*1000; // apply from frame #1 | |
1470 | m_camera_info.dummy_shot.shot.ctl.request.frameCount = -1; | |
1471 | memcpy( m_camera_info.sensor.buffer[i].virt.extP[1], &(m_camera_info.dummy_shot), | |
1472 | sizeof(struct camera2_shot_ext)); | |
1473 | } | |
1474 | for (i = 0; i < NUM_MIN_SENSOR_QBUF; i++) | |
1475 | cam_int_qbuf(&(m_camera_info.sensor), i); | |
1476 | ||
1477 | for (i = NUM_MIN_SENSOR_QBUF; i < m_camera_info.sensor.buffers; i++) | |
1478 | m_requestManager->pushSensorQ(i); | |
1479 | ALOGV("DEBUG(%s): calling sensor streamon", __FUNCTION__); | |
1480 | cam_int_streamon(&(m_camera_info.sensor)); | |
1481 | m_camera_info.sensor.status = true; | |
1482 | } | |
1483 | } | |
1484 | if (!(m_streamThreads[1].get())) { | |
1485 | ALOGV("DEBUG(%s): stream thread 1 not exist. starting without stream", __FUNCTION__); | |
1486 | StartSCCThread(false); | |
1487 | } else { | |
1488 | if (m_streamThreads[1]->m_activated == false) { | |
1489 | ALOGV("DEBUG(%s): stream thread 1 suspended. restarting", __FUNCTION__); | |
1490 | StartSCCThread(true); | |
1491 | } else { | |
1492 | if (m_camera_info.capture.status == false) { | |
ac8c2060 | 1493 | m_camera_info.capture.buffers = NUM_SCC_BUFFERS; |
5506cebf SK |
1494 | cam_int_s_fmt(&(m_camera_info.capture)); |
1495 | ALOGV("DEBUG(%s): capture calling reqbuf", __FUNCTION__); | |
1496 | cam_int_reqbufs(&(m_camera_info.capture)); | |
1497 | ALOGV("DEBUG(%s): capture calling querybuf", __FUNCTION__); | |
1498 | ||
b8d41ae2 SK |
1499 | if (m_streamThreads[1]->streamType == STREAM_TYPE_DIRECT) { |
1500 | StreamThread * targetStream = m_streamThreads[1].get(); | |
1501 | stream_parameters_t *targetStreamParms = &(targetStream->m_parameters); | |
1502 | node_info_t *currentNode = targetStreamParms->node; | |
1503 | ||
1504 | struct v4l2_buffer v4l2_buf; | |
1505 | struct v4l2_plane planes[VIDEO_MAX_PLANES]; | |
1506 | ||
1507 | for (i = 0 ; i < targetStreamParms->numSvcBuffers ; i++) { | |
1508 | v4l2_buf.m.planes = planes; | |
1509 | v4l2_buf.type = currentNode->type; | |
1510 | v4l2_buf.memory = currentNode->memory; | |
1511 | ||
1512 | v4l2_buf.length = currentNode->planes; | |
1513 | v4l2_buf.index = i; | |
1514 | ExynosBuffer metaBuf = targetStreamParms->metaBuffers[i]; | |
1515 | ||
1516 | if (i < currentNode->buffers) { | |
1517 | #ifdef ENABLE_FRAME_SYNC | |
1518 | v4l2_buf.m.planes[0].m.fd = targetStreamParms->svcBuffers[i].fd.extFd[0]; | |
1519 | v4l2_buf.m.planes[2].m.fd = targetStreamParms->svcBuffers[i].fd.extFd[1]; | |
1520 | v4l2_buf.m.planes[1].m.fd = targetStreamParms->svcBuffers[i].fd.extFd[2]; | |
1521 | v4l2_buf.length += targetStreamParms->metaPlanes; | |
1522 | v4l2_buf.m.planes[v4l2_buf.length-1].m.fd = metaBuf.fd.extFd[0]; | |
1523 | v4l2_buf.m.planes[v4l2_buf.length-1].length = metaBuf.size.extS[0]; | |
1524 | ||
1525 | ALOGV("Qbuf metaBuf: fd(%d), length(%d) plane(%d)", metaBuf.fd.extFd[0], metaBuf.size.extS[0], v4l2_buf.length); | |
1526 | #endif | |
1527 | if (exynos_v4l2_qbuf(currentNode->fd, &v4l2_buf) < 0) { | |
1528 | ALOGE("ERR(%s): exynos_v4l2_qbuf() fail fd(%d)", __FUNCTION__, currentNode->fd); | |
1529 | } | |
1530 | ALOGV("DEBUG(%s): exynos_v4l2_qbuf() success fd(%d)", __FUNCTION__, currentNode->fd); | |
1531 | targetStreamParms->svcBufStatus[i] = REQUIRES_DQ_FROM_SVC; | |
1532 | } | |
1533 | else { | |
1534 | targetStreamParms->svcBufStatus[i] = ON_SERVICE; | |
1535 | } | |
1536 | ||
1537 | } | |
1538 | ||
1539 | } else { | |
1540 | for (int i = 0; i < m_camera_info.capture.buffers; i++) { | |
1541 | ALOGV("DEBUG(%s): capture initial QBUF [%d]", __FUNCTION__, i); | |
1542 | cam_int_qbuf(&(m_camera_info.capture), i); | |
1543 | } | |
5506cebf SK |
1544 | } |
1545 | ALOGV("== stream_on :: capture"); | |
1546 | if (cam_int_streamon(&(m_camera_info.capture)) < 0) { | |
1547 | ALOGE("ERR(%s): capture stream on fail", __FUNCTION__); | |
1548 | } else { | |
1549 | m_camera_info.capture.status = true; | |
1550 | } | |
1551 | } | |
f9a06609 SK |
1552 | if (m_scpForceSuspended) { |
1553 | m_scpForceSuspended = false; | |
1554 | } | |
5506cebf SK |
1555 | } |
1556 | } | |
1557 | if (m_isIspStarted == false) { | |
1558 | StartISP(); | |
1559 | ALOGV("DEBUG(%s):starting sensor thread", __FUNCTION__); | |
1560 | m_requestManager->SetInitialSkip(5); | |
1561 | m_sensorThread->Start("SensorThread", PRIORITY_DEFAULT, 0); | |
1562 | m_isIspStarted = true; | |
1563 | } | |
c15a6b00 JS |
1564 | m_mainThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY); |
1565 | return 0; | |
1566 | } | |
1567 | ||
1568 | int ExynosCameraHWInterface2::setFrameQueueDstOps(const camera2_frame_queue_dst_ops_t *frame_dst_ops) | |
1569 | { | |
13d8c7b4 | 1570 | ALOGV("DEBUG(%s):", __FUNCTION__); |
c15a6b00 JS |
1571 | if ((NULL != frame_dst_ops) && (NULL != frame_dst_ops->dequeue_frame) |
1572 | && (NULL != frame_dst_ops->cancel_frame) && (NULL !=frame_dst_ops->enqueue_frame)) { | |
1573 | m_frameQueueOps = (camera2_frame_queue_dst_ops_t *)frame_dst_ops; | |
1574 | return 0; | |
1575 | } | |
1576 | else { | |
13d8c7b4 | 1577 | ALOGE("DEBUG(%s):setFrameQueueDstOps : NULL arguments", __FUNCTION__); |
c15a6b00 JS |
1578 | return 1; |
1579 | } | |
1580 | } | |
1581 | ||
1582 | int ExynosCameraHWInterface2::getInProgressCount() | |
1583 | { | |
1584 | int inProgressCount = m_requestManager->GetNumEntries(); | |
13d8c7b4 | 1585 | ALOGV("DEBUG(%s): # of dequeued req (%d)", __FUNCTION__, inProgressCount); |
c15a6b00 JS |
1586 | return inProgressCount; |
1587 | } | |
1588 | ||
1589 | int ExynosCameraHWInterface2::flushCapturesInProgress() | |
1590 | { | |
1591 | return 0; | |
1592 | } | |
1593 | ||
c15a6b00 JS |
1594 | int ExynosCameraHWInterface2::constructDefaultRequest(int request_template, camera_metadata_t **request) |
1595 | { | |
13d8c7b4 | 1596 | ALOGV("DEBUG(%s): making template (%d) ", __FUNCTION__, request_template); |
c15a6b00 JS |
1597 | |
1598 | if (request == NULL) return BAD_VALUE; | |
1599 | if (request_template < 0 || request_template >= CAMERA2_TEMPLATE_COUNT) { | |
1600 | return BAD_VALUE; | |
1601 | } | |
1602 | status_t res; | |
1603 | // Pass 1, calculate size and allocate | |
daa1fcd6 | 1604 | res = m_camera2->constructDefaultRequest(request_template, |
c15a6b00 JS |
1605 | request, |
1606 | true); | |
1607 | if (res != OK) { | |
1608 | return res; | |
1609 | } | |
1610 | // Pass 2, build request | |
daa1fcd6 | 1611 | res = m_camera2->constructDefaultRequest(request_template, |
c15a6b00 JS |
1612 | request, |
1613 | false); | |
1614 | if (res != OK) { | |
1615 | ALOGE("Unable to populate new request for template %d", | |
1616 | request_template); | |
1617 | } | |
1618 | ||
1619 | return res; | |
1620 | } | |
1621 | ||
1622 | int ExynosCameraHWInterface2::allocateStream(uint32_t width, uint32_t height, int format, const camera2_stream_ops_t *stream_ops, | |
1623 | uint32_t *stream_id, uint32_t *format_actual, uint32_t *usage, uint32_t *max_buffers) | |
1624 | { | |
ed4ad5fe | 1625 | ALOGD("(%s): stream width(%d) height(%d) format(%x)", __FUNCTION__, width, height, format); |
5506cebf | 1626 | bool useDirectOutput = false; |
13d8c7b4 SK |
1627 | StreamThread *AllocatedStream; |
1628 | stream_parameters_t newParameters; | |
5506cebf SK |
1629 | substream_parameters_t *subParameters; |
1630 | StreamThread *parentStream; | |
1631 | status_t res; | |
1632 | int allocCase = 0; | |
c15a6b00 | 1633 | |
5506cebf SK |
1634 | if ((format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED || format == CAMERA2_HAL_PIXEL_FORMAT_OPAQUE) && |
1635 | m_camera2->isSupportedResolution(width, height)) { | |
9dd63e1f SK |
1636 | if (!(m_streamThreads[0].get())) { |
1637 | ALOGV("DEBUG(%s): stream 0 not exist", __FUNCTION__); | |
1638 | allocCase = 0; | |
13d8c7b4 SK |
1639 | } |
1640 | else { | |
6bbb593a | 1641 | if ((m_streamThreads[0].get())->m_activated == true) { |
9dd63e1f SK |
1642 | ALOGV("DEBUG(%s): stream 0 exists and activated.", __FUNCTION__); |
1643 | allocCase = 1; | |
1644 | } | |
1645 | else { | |
1646 | ALOGV("DEBUG(%s): stream 0 exists and deactivated.", __FUNCTION__); | |
1647 | allocCase = 2; | |
1648 | } | |
13d8c7b4 | 1649 | } |
5506cebf SK |
1650 | |
1651 | // TODO : instead of that, use calculate aspect ratio and selection with calculated ratio. | |
1652 | if ((width == 1920 && height == 1080) || (width == 1280 && height == 720) | |
1653 | || (width == 720 && height == 480) || (width == 1440 && height == 960) | |
1654 | || (width == 1344 && height == 896)) { | |
15fd8231 | 1655 | m_wideAspect = true; |
5506cebf | 1656 | } else { |
15fd8231 SK |
1657 | m_wideAspect = false; |
1658 | } | |
1659 | ALOGV("DEBUG(%s): m_wideAspect (%d)", __FUNCTION__, m_wideAspect); | |
1660 | ||
9dd63e1f | 1661 | if (allocCase == 0 || allocCase == 2) { |
5506cebf | 1662 | *stream_id = STREAM_ID_PREVIEW; |
9dd63e1f | 1663 | |
5506cebf | 1664 | m_streamThreads[0] = new StreamThread(this, *stream_id); |
9dd63e1f | 1665 | |
5506cebf | 1666 | AllocatedStream = (StreamThread*)(m_streamThreads[0].get()); |
053d38cf | 1667 | AllocatedStream->Start("StreamThread", PRIORITY_DEFAULT, 0); |
5506cebf SK |
1668 | m_streamThreadInitialize((SignalDrivenThread*)AllocatedStream); |
1669 | ||
1670 | *format_actual = HAL_PIXEL_FORMAT_EXYNOS_YV12; | |
1671 | *usage = GRALLOC_USAGE_SW_WRITE_OFTEN; | |
1672 | *max_buffers = 6; | |
1673 | ||
1674 | newParameters.width = width; | |
1675 | newParameters.height = height; | |
1676 | newParameters.format = *format_actual; | |
1677 | newParameters.streamOps = stream_ops; | |
1678 | newParameters.usage = *usage; | |
ac8c2060 | 1679 | newParameters.numHwBuffers = NUM_SCP_BUFFERS; |
5506cebf SK |
1680 | newParameters.numOwnSvcBuffers = *max_buffers; |
1681 | newParameters.planes = NUM_PLANES(*format_actual); | |
1682 | newParameters.metaPlanes = 1; | |
1683 | newParameters.numSvcBufsInHal = 0; | |
a85ec381 | 1684 | newParameters.minUndequedBuffer = 3; |
5506cebf SK |
1685 | |
1686 | newParameters.node = &m_camera_info.scp; | |
1687 | newParameters.node->type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; | |
1688 | newParameters.node->memory = V4L2_MEMORY_DMABUF; | |
1689 | ||
1690 | AllocatedStream->streamType = STREAM_TYPE_DIRECT; | |
1691 | AllocatedStream->m_index = 0; | |
9dd63e1f SK |
1692 | AllocatedStream->setParameter(&newParameters); |
1693 | AllocatedStream->m_activated = true; | |
5506cebf SK |
1694 | AllocatedStream->m_numRegisteredStream = 1; |
1695 | ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, AllocatedStream->m_numRegisteredStream); | |
ad37861e SK |
1696 | m_requestManager->SetDefaultParameters(m_camera2->getSensorW()); |
1697 | m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[2] = m_camera2->getSensorW(); | |
5506cebf SK |
1698 | if (m_subStreams[STREAM_ID_RECORD].type != SUBSTREAM_TYPE_NONE) |
1699 | AllocatedStream->attachSubStream(STREAM_ID_RECORD, 10); | |
1700 | if (m_subStreams[STREAM_ID_PRVCB].type != SUBSTREAM_TYPE_NONE) | |
1701 | AllocatedStream->attachSubStream(STREAM_ID_PRVCB, 70); | |
9dd63e1f | 1702 | return 0; |
5506cebf SK |
1703 | } else if (allocCase == 1) { |
1704 | *stream_id = STREAM_ID_RECORD; | |
1705 | ||
1706 | subParameters = &m_subStreams[STREAM_ID_RECORD]; | |
1707 | memset(subParameters, 0, sizeof(substream_parameters_t)); | |
1708 | ||
9dd63e1f SK |
1709 | parentStream = (StreamThread*)(m_streamThreads[0].get()); |
1710 | if (!parentStream) { | |
1711 | return 1; | |
9dd63e1f | 1712 | } |
9dd63e1f | 1713 | |
804236a7 | 1714 | *format_actual = HAL_PIXEL_FORMAT_YCbCr_420_SP; // NV12M |
6bbb593a | 1715 | *usage = GRALLOC_USAGE_SW_WRITE_OFTEN; |
2630679a | 1716 | *max_buffers = 6; |
9dd63e1f | 1717 | |
5506cebf SK |
1718 | subParameters->type = SUBSTREAM_TYPE_RECORD; |
1719 | subParameters->width = width; | |
1720 | subParameters->height = height; | |
1721 | subParameters->format = *format_actual; | |
1722 | subParameters->svcPlanes = NUM_PLANES(*format_actual); | |
1723 | subParameters->streamOps = stream_ops; | |
1724 | subParameters->usage = *usage; | |
1725 | subParameters->numOwnSvcBuffers = *max_buffers; | |
1726 | subParameters->numSvcBufsInHal = 0; | |
1727 | subParameters->needBufferInit = false; | |
1728 | subParameters->minUndequedBuffer = 2; | |
1729 | ||
1730 | res = parentStream->attachSubStream(STREAM_ID_RECORD, 20); | |
1731 | if (res != NO_ERROR) { | |
1732 | ALOGE("(%s): substream attach failed. res(%d)", __FUNCTION__, res); | |
1733 | return 1; | |
1734 | } | |
1735 | ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, parentStream->m_numRegisteredStream); | |
1736 | ALOGV("(%s): Enabling Record", __FUNCTION__); | |
9dd63e1f SK |
1737 | return 0; |
1738 | } | |
13d8c7b4 | 1739 | } |
b8d41ae2 | 1740 | else if ((format == CAMERA2_HAL_PIXEL_FORMAT_ZSL) |
5506cebf | 1741 | && (width == m_camera2->getSensorW()) && (height == m_camera2->getSensorH())) { |
13d8c7b4 | 1742 | |
5506cebf SK |
1743 | if (!(m_streamThreads[1].get())) { |
1744 | ALOGV("DEBUG(%s): stream thread 1 not exist", __FUNCTION__); | |
1745 | useDirectOutput = true; | |
15fd8231 SK |
1746 | } |
1747 | else { | |
5506cebf | 1748 | ALOGV("DEBUG(%s): stream thread 1 exists and deactivated.", __FUNCTION__); |
90e439c1 | 1749 | useDirectOutput = false; |
5506cebf SK |
1750 | } |
1751 | if (useDirectOutput) { | |
1752 | *stream_id = STREAM_ID_ZSL; | |
1753 | ||
053d38cf | 1754 | m_streamThreads[1] = new StreamThread(this, *stream_id); |
5506cebf | 1755 | AllocatedStream = (StreamThread*)(m_streamThreads[1].get()); |
053d38cf | 1756 | AllocatedStream->Start("StreamThread", PRIORITY_DEFAULT, 0); |
5506cebf SK |
1757 | m_streamThreadInitialize((SignalDrivenThread*)AllocatedStream); |
1758 | ||
1759 | *format_actual = HAL_PIXEL_FORMAT_EXYNOS_YV12; | |
1760 | *max_buffers = 6; | |
1761 | ||
1762 | *format_actual = HAL_PIXEL_FORMAT_YCbCr_422_I; // YUYV | |
1763 | *usage = GRALLOC_USAGE_SW_WRITE_OFTEN; | |
1764 | *max_buffers = 6; | |
1765 | ||
1766 | newParameters.width = width; | |
1767 | newParameters.height = height; | |
1768 | newParameters.format = *format_actual; | |
1769 | newParameters.streamOps = stream_ops; | |
1770 | newParameters.usage = *usage; | |
ac8c2060 | 1771 | newParameters.numHwBuffers = NUM_SCC_BUFFERS; |
5506cebf SK |
1772 | newParameters.numOwnSvcBuffers = *max_buffers; |
1773 | newParameters.planes = NUM_PLANES(*format_actual); | |
1774 | newParameters.metaPlanes = 1; | |
1775 | ||
1776 | newParameters.numSvcBufsInHal = 0; | |
a85ec381 | 1777 | newParameters.minUndequedBuffer = 2; |
5506cebf SK |
1778 | |
1779 | newParameters.node = &m_camera_info.capture; | |
1780 | newParameters.node->type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; | |
1781 | newParameters.node->memory = V4L2_MEMORY_DMABUF; | |
1782 | ||
1783 | AllocatedStream->streamType = STREAM_TYPE_DIRECT; | |
1784 | AllocatedStream->m_index = 1; | |
1785 | AllocatedStream->setParameter(&newParameters); | |
1786 | AllocatedStream->m_activated = true; | |
b8d41ae2 | 1787 | AllocatedStream->m_numRegisteredStream = 1; |
5506cebf SK |
1788 | ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, AllocatedStream->m_numRegisteredStream); |
1789 | return 0; | |
90e439c1 SK |
1790 | } else { |
1791 | bool bJpegExists = false; | |
1792 | AllocatedStream = (StreamThread*)(m_streamThreads[1].get()); | |
1793 | subParameters = &m_subStreams[STREAM_ID_JPEG]; | |
1794 | if (subParameters->type == SUBSTREAM_TYPE_JPEG) { | |
1795 | ALOGD("(%s): jpeg stream exists", __FUNCTION__); | |
1796 | bJpegExists = true; | |
1797 | AllocatedStream->detachSubStream(STREAM_ID_JPEG); | |
1798 | } | |
1799 | AllocatedStream->m_releasing = true; | |
1800 | ALOGD("START stream thread 1 release %d", __LINE__); | |
1801 | do { | |
1802 | AllocatedStream->release(); | |
041f38de | 1803 | usleep(SIG_WAITING_TICK); |
90e439c1 SK |
1804 | } while (AllocatedStream->m_releasing); |
1805 | ALOGD("END stream thread 1 release %d", __LINE__); | |
1806 | ||
1807 | *stream_id = STREAM_ID_ZSL; | |
1808 | ||
1809 | m_streamThreadInitialize((SignalDrivenThread*)AllocatedStream); | |
1810 | ||
1811 | *format_actual = HAL_PIXEL_FORMAT_EXYNOS_YV12; | |
1812 | *max_buffers = 6; | |
1813 | ||
1814 | *format_actual = HAL_PIXEL_FORMAT_YCbCr_422_I; // YUYV | |
1815 | *usage = GRALLOC_USAGE_SW_WRITE_OFTEN; | |
1816 | *max_buffers = 6; | |
1817 | ||
1818 | newParameters.width = width; | |
1819 | newParameters.height = height; | |
1820 | newParameters.format = *format_actual; | |
1821 | newParameters.streamOps = stream_ops; | |
1822 | newParameters.usage = *usage; | |
1823 | newParameters.numHwBuffers = NUM_SCC_BUFFERS; | |
1824 | newParameters.numOwnSvcBuffers = *max_buffers; | |
1825 | newParameters.planes = NUM_PLANES(*format_actual); | |
1826 | newParameters.metaPlanes = 1; | |
1827 | ||
1828 | newParameters.numSvcBufsInHal = 0; | |
1829 | newParameters.minUndequedBuffer = 4; | |
1830 | ||
1831 | newParameters.node = &m_camera_info.capture; | |
1832 | newParameters.node->type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE; | |
1833 | newParameters.node->memory = V4L2_MEMORY_DMABUF; | |
1834 | ||
1835 | AllocatedStream->streamType = STREAM_TYPE_DIRECT; | |
1836 | AllocatedStream->m_index = 1; | |
1837 | AllocatedStream->setParameter(&newParameters); | |
1838 | AllocatedStream->m_activated = true; | |
1839 | AllocatedStream->m_numRegisteredStream = 1; | |
1840 | if (bJpegExists) { | |
1841 | AllocatedStream->attachSubStream(STREAM_ID_JPEG, 10); | |
1842 | } | |
1843 | ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, AllocatedStream->m_numRegisteredStream); | |
1844 | return 0; | |
1845 | ||
15fd8231 | 1846 | } |
5506cebf SK |
1847 | } |
1848 | else if (format == HAL_PIXEL_FORMAT_BLOB | |
1849 | && m_camera2->isSupportedJpegResolution(width, height)) { | |
1850 | *stream_id = STREAM_ID_JPEG; | |
15fd8231 | 1851 | |
5506cebf SK |
1852 | subParameters = &m_subStreams[*stream_id]; |
1853 | memset(subParameters, 0, sizeof(substream_parameters_t)); | |
9dd63e1f | 1854 | |
5506cebf SK |
1855 | if (!(m_streamThreads[1].get())) { |
1856 | ALOGV("DEBUG(%s): stream thread 1 not exist", __FUNCTION__); | |
1857 | StartSCCThread(false); | |
1858 | } | |
1859 | else if (m_streamThreads[1]->m_activated == false) { | |
1860 | ALOGV("DEBUG(%s): stream thread 1 suspended. restarting", __FUNCTION__); | |
1861 | StartSCCThread(true); | |
1862 | } | |
1863 | parentStream = (StreamThread*)(m_streamThreads[1].get()); | |
13d8c7b4 SK |
1864 | |
1865 | *format_actual = HAL_PIXEL_FORMAT_BLOB; | |
13d8c7b4 | 1866 | *usage = GRALLOC_USAGE_SW_WRITE_OFTEN; |
37e122d5 | 1867 | *max_buffers = 4; |
13d8c7b4 | 1868 | |
5506cebf SK |
1869 | subParameters->type = SUBSTREAM_TYPE_JPEG; |
1870 | subParameters->width = width; | |
1871 | subParameters->height = height; | |
1872 | subParameters->format = *format_actual; | |
1873 | subParameters->svcPlanes = 1; | |
1874 | subParameters->streamOps = stream_ops; | |
1875 | subParameters->usage = *usage; | |
1876 | subParameters->numOwnSvcBuffers = *max_buffers; | |
1877 | subParameters->numSvcBufsInHal = 0; | |
1878 | subParameters->needBufferInit = false; | |
1879 | subParameters->minUndequedBuffer = 2; | |
1880 | ||
1881 | res = parentStream->attachSubStream(STREAM_ID_JPEG, 10); | |
1882 | if (res != NO_ERROR) { | |
1883 | ALOGE("(%s): substream attach failed. res(%d)", __FUNCTION__, res); | |
1884 | return 1; | |
1885 | } | |
1886 | ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, parentStream->m_numRegisteredStream); | |
1887 | ALOGV("(%s): Enabling Jpeg", __FUNCTION__); | |
13d8c7b4 SK |
1888 | return 0; |
1889 | } | |
74d78ebe | 1890 | else if (format == HAL_PIXEL_FORMAT_YCrCb_420_SP || format == HAL_PIXEL_FORMAT_YV12) { |
5506cebf SK |
1891 | *stream_id = STREAM_ID_PRVCB; |
1892 | ||
1893 | subParameters = &m_subStreams[STREAM_ID_PRVCB]; | |
1894 | memset(subParameters, 0, sizeof(substream_parameters_t)); | |
1895 | ||
74d78ebe SK |
1896 | parentStream = (StreamThread*)(m_streamThreads[0].get()); |
1897 | if (!parentStream) { | |
74d78ebe SK |
1898 | return 1; |
1899 | } | |
74d78ebe SK |
1900 | |
1901 | *format_actual = format; | |
1902 | *usage = GRALLOC_USAGE_SW_WRITE_OFTEN; | |
5506cebf SK |
1903 | *max_buffers = 6; |
1904 | ||
1905 | subParameters->type = SUBSTREAM_TYPE_PRVCB; | |
1906 | subParameters->width = width; | |
1907 | subParameters->height = height; | |
1908 | subParameters->format = *format_actual; | |
1909 | subParameters->svcPlanes = NUM_PLANES(*format_actual); | |
1910 | subParameters->streamOps = stream_ops; | |
1911 | subParameters->usage = *usage; | |
1912 | subParameters->numOwnSvcBuffers = *max_buffers; | |
1913 | subParameters->numSvcBufsInHal = 0; | |
1914 | subParameters->needBufferInit = false; | |
1915 | subParameters->minUndequedBuffer = 2; | |
1916 | ||
1917 | if (format == HAL_PIXEL_FORMAT_YCrCb_420_SP) { | |
1918 | subParameters->internalFormat = HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP; | |
1919 | subParameters->internalPlanes = NUM_PLANES(HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP); | |
1920 | } | |
1921 | else { | |
1922 | subParameters->internalFormat = HAL_PIXEL_FORMAT_EXYNOS_YV12; | |
1923 | subParameters->internalPlanes = NUM_PLANES(HAL_PIXEL_FORMAT_EXYNOS_YV12); | |
1924 | } | |
74d78ebe | 1925 | |
5506cebf SK |
1926 | res = parentStream->attachSubStream(STREAM_ID_PRVCB, 20); |
1927 | if (res != NO_ERROR) { | |
1928 | ALOGE("(%s): substream attach failed. res(%d)", __FUNCTION__, res); | |
1929 | return 1; | |
74d78ebe | 1930 | } |
5506cebf SK |
1931 | ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, parentStream->m_numRegisteredStream); |
1932 | ALOGV("(%s): Enabling previewcb", __FUNCTION__); | |
74d78ebe SK |
1933 | return 0; |
1934 | } | |
ed4ad5fe | 1935 | ALOGE("(%s): Unsupported Pixel Format", __FUNCTION__); |
5506cebf | 1936 | return 1; |
c15a6b00 JS |
1937 | } |
1938 | ||
13d8c7b4 SK |
1939 | int ExynosCameraHWInterface2::registerStreamBuffers(uint32_t stream_id, |
1940 | int num_buffers, buffer_handle_t *registeringBuffers) | |
c15a6b00 | 1941 | { |
13d8c7b4 SK |
1942 | int i,j; |
1943 | void *virtAddr[3]; | |
5506cebf SK |
1944 | int plane_index = 0; |
1945 | StreamThread * targetStream; | |
13d8c7b4 SK |
1946 | stream_parameters_t *targetStreamParms; |
1947 | node_info_t *currentNode; | |
1948 | ||
c15a6b00 JS |
1949 | struct v4l2_buffer v4l2_buf; |
1950 | struct v4l2_plane planes[VIDEO_MAX_PLANES]; | |
13d8c7b4 | 1951 | |
ed4ad5fe | 1952 | ALOGD("(%s): stream_id(%d), num_buff(%d), handle(%x) ", __FUNCTION__, |
13d8c7b4 SK |
1953 | stream_id, num_buffers, (uint32_t)registeringBuffers); |
1954 | ||
5506cebf SK |
1955 | if (stream_id == STREAM_ID_PREVIEW && m_streamThreads[0].get()) { |
1956 | targetStream = m_streamThreads[0].get(); | |
13d8c7b4 | 1957 | targetStreamParms = &(m_streamThreads[0]->m_parameters); |
5c88d1f2 | 1958 | |
13d8c7b4 | 1959 | } |
5506cebf SK |
1960 | else if (stream_id == STREAM_ID_JPEG || stream_id == STREAM_ID_RECORD || stream_id == STREAM_ID_PRVCB) { |
1961 | substream_parameters_t *targetParms; | |
1962 | targetParms = &m_subStreams[stream_id]; | |
9dd63e1f | 1963 | |
5506cebf | 1964 | targetParms->numSvcBuffers = num_buffers; |
9dd63e1f | 1965 | |
5506cebf SK |
1966 | for (i = 0 ; i < targetParms->numSvcBuffers ; i++) { |
1967 | ALOGV("(%s): registering substream(%d) Buffers[%d] (%x) ", __FUNCTION__, | |
1968 | i, stream_id, (uint32_t)(registeringBuffers[i])); | |
9dd63e1f SK |
1969 | if (m_grallocHal) { |
1970 | if (m_grallocHal->lock(m_grallocHal, registeringBuffers[i], | |
5506cebf SK |
1971 | targetParms->usage, 0, 0, |
1972 | targetParms->width, targetParms->height, virtAddr) != 0) { | |
9dd63e1f SK |
1973 | ALOGE("ERR(%s): could not obtain gralloc buffer", __FUNCTION__); |
1974 | } | |
1975 | else { | |
1976 | ExynosBuffer currentBuf; | |
1977 | const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(registeringBuffers[i]); | |
5506cebf SK |
1978 | if (targetParms->svcPlanes == 1) { |
1979 | currentBuf.fd.extFd[0] = priv_handle->fd; | |
1980 | currentBuf.size.extS[0] = priv_handle->size; | |
1981 | currentBuf.size.extS[1] = 0; | |
1982 | currentBuf.size.extS[2] = 0; | |
1983 | } else if (targetParms->svcPlanes == 2) { | |
1984 | currentBuf.fd.extFd[0] = priv_handle->fd; | |
1985 | currentBuf.fd.extFd[1] = priv_handle->fd1; | |
1986 | ||
1987 | } else if (targetParms->svcPlanes == 3) { | |
1988 | currentBuf.fd.extFd[0] = priv_handle->fd; | |
1989 | currentBuf.fd.extFd[1] = priv_handle->fd1; | |
1990 | currentBuf.fd.extFd[2] = priv_handle->fd2; | |
1991 | } | |
1992 | for (plane_index = 0 ; plane_index < targetParms->svcPlanes ; plane_index++) { | |
9dd63e1f | 1993 | currentBuf.virt.extP[plane_index] = (char *)virtAddr[plane_index]; |
0d220b42 | 1994 | CAM_LOGV("DEBUG(%s): plane(%d): fd(%d) addr(%x) size(%d)", |
804236a7 | 1995 | __FUNCTION__, plane_index, currentBuf.fd.extFd[plane_index], |
5506cebf | 1996 | (unsigned int)currentBuf.virt.extP[plane_index], currentBuf.size.extS[plane_index]); |
9dd63e1f | 1997 | } |
5506cebf SK |
1998 | targetParms->svcBufStatus[i] = ON_SERVICE; |
1999 | targetParms->svcBuffers[i] = currentBuf; | |
2000 | targetParms->svcBufHandle[i] = registeringBuffers[i]; | |
9dd63e1f SK |
2001 | } |
2002 | } | |
2003 | } | |
5506cebf | 2004 | targetParms->needBufferInit = true; |
9dd63e1f SK |
2005 | return 0; |
2006 | } | |
5506cebf SK |
2007 | else if (stream_id == STREAM_ID_ZSL && m_streamThreads[1].get()) { |
2008 | targetStream = m_streamThreads[1].get(); | |
2009 | targetStreamParms = &(m_streamThreads[1]->m_parameters); | |
74d78ebe | 2010 | } |
13d8c7b4 | 2011 | else { |
ed4ad5fe | 2012 | ALOGE("(%s): unregistered stream id (%d)", __FUNCTION__, stream_id); |
be494d19 | 2013 | return 1; |
13d8c7b4 | 2014 | } |
c15a6b00 | 2015 | |
5506cebf | 2016 | if (targetStream->streamType == STREAM_TYPE_DIRECT) { |
13d8c7b4 SK |
2017 | if (num_buffers < targetStreamParms->numHwBuffers) { |
2018 | ALOGE("ERR(%s) registering insufficient num of buffers (%d) < (%d)", | |
2019 | __FUNCTION__, num_buffers, targetStreamParms->numHwBuffers); | |
be494d19 | 2020 | return 1; |
13d8c7b4 SK |
2021 | } |
2022 | } | |
0d220b42 | 2023 | CAM_LOGV("DEBUG(%s): format(%x) width(%d), height(%d) svcPlanes(%d)", |
5506cebf SK |
2024 | __FUNCTION__, targetStreamParms->format, targetStreamParms->width, |
2025 | targetStreamParms->height, targetStreamParms->planes); | |
13d8c7b4 | 2026 | targetStreamParms->numSvcBuffers = num_buffers; |
5506cebf SK |
2027 | currentNode = targetStreamParms->node; |
2028 | currentNode->width = targetStreamParms->width; | |
2029 | currentNode->height = targetStreamParms->height; | |
2030 | currentNode->format = HAL_PIXEL_FORMAT_2_V4L2_PIX(targetStreamParms->format); | |
2031 | currentNode->planes = targetStreamParms->planes; | |
13d8c7b4 | 2032 | currentNode->buffers = targetStreamParms->numHwBuffers; |
5506cebf SK |
2033 | cam_int_s_input(currentNode, m_camera_info.sensor_id); |
2034 | cam_int_s_fmt(currentNode); | |
2035 | cam_int_reqbufs(currentNode); | |
2036 | for (i = 0 ; i < targetStreamParms->numSvcBuffers ; i++) { | |
13d8c7b4 SK |
2037 | ALOGV("DEBUG(%s): registering Stream Buffers[%d] (%x) ", __FUNCTION__, |
2038 | i, (uint32_t)(registeringBuffers[i])); | |
13d8c7b4 SK |
2039 | v4l2_buf.m.planes = planes; |
2040 | v4l2_buf.type = currentNode->type; | |
2041 | v4l2_buf.memory = currentNode->memory; | |
2042 | v4l2_buf.index = i; | |
2043 | v4l2_buf.length = currentNode->planes; | |
c15a6b00 | 2044 | |
13d8c7b4 | 2045 | ExynosBuffer currentBuf; |
feb7df4c | 2046 | ExynosBuffer metaBuf; |
13d8c7b4 SK |
2047 | const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(registeringBuffers[i]); |
2048 | ||
2049 | m_getAlignedYUVSize(currentNode->format, | |
2050 | currentNode->width, currentNode->height, ¤tBuf); | |
24231221 | 2051 | |
37e122d5 SK |
2052 | ALOGV("DEBUG(%s): ion_size(%d), stride(%d), ", __FUNCTION__, priv_handle->size, priv_handle->stride); |
2053 | if (currentNode->planes == 1) { | |
74d78ebe SK |
2054 | v4l2_buf.m.planes[0].m.fd = priv_handle->fd; |
2055 | currentBuf.fd.extFd[0] = priv_handle->fd; | |
37e122d5 SK |
2056 | currentBuf.size.extS[0] = priv_handle->size; |
2057 | currentBuf.size.extS[1] = 0; | |
2058 | currentBuf.size.extS[2] = 0; | |
74d78ebe SK |
2059 | } else if (currentNode->planes == 2) { |
2060 | v4l2_buf.m.planes[0].m.fd = priv_handle->fd; | |
2061 | v4l2_buf.m.planes[1].m.fd = priv_handle->fd1; | |
2062 | currentBuf.fd.extFd[0] = priv_handle->fd; | |
2063 | currentBuf.fd.extFd[1] = priv_handle->fd1; | |
2064 | ||
2065 | } else if (currentNode->planes == 3) { | |
2066 | v4l2_buf.m.planes[0].m.fd = priv_handle->fd; | |
2067 | v4l2_buf.m.planes[2].m.fd = priv_handle->fd1; | |
2068 | v4l2_buf.m.planes[1].m.fd = priv_handle->fd2; | |
2069 | currentBuf.fd.extFd[0] = priv_handle->fd; | |
2070 | currentBuf.fd.extFd[2] = priv_handle->fd1; | |
2071 | currentBuf.fd.extFd[1] = priv_handle->fd2; | |
37e122d5 | 2072 | } |
0d220b42 | 2073 | |
5506cebf | 2074 | for (plane_index = 0 ; plane_index < (int)v4l2_buf.length ; plane_index++) { |
0d220b42 | 2075 | currentBuf.virt.extP[plane_index] = (char *)ion_map(currentBuf.fd.extFd[plane_index], currentBuf.size.extS[plane_index], 0); |
13d8c7b4 | 2076 | v4l2_buf.m.planes[plane_index].length = currentBuf.size.extS[plane_index]; |
0d220b42 | 2077 | CAM_LOGV("DEBUG(%s): plane(%d): fd(%d) addr(%x), length(%d)", |
13d8c7b4 SK |
2078 | __FUNCTION__, plane_index, v4l2_buf.m.planes[plane_index].m.fd, |
2079 | (unsigned int)currentBuf.virt.extP[plane_index], | |
2080 | v4l2_buf.m.planes[plane_index].length); | |
2081 | } | |
c15a6b00 | 2082 | |
5506cebf | 2083 | if (i < currentNode->buffers) { |
feb7df4c SK |
2084 | |
2085 | ||
2086 | #ifdef ENABLE_FRAME_SYNC | |
5506cebf SK |
2087 | /* add plane for metadata*/ |
2088 | metaBuf.size.extS[0] = 4*1024; | |
2089 | allocCameraMemory(m_ionCameraClient , &metaBuf, 1, 1<<0); | |
feb7df4c | 2090 | |
5506cebf SK |
2091 | v4l2_buf.length += targetStreamParms->metaPlanes; |
2092 | v4l2_buf.m.planes[v4l2_buf.length-1].m.fd = metaBuf.fd.extFd[0]; | |
2093 | v4l2_buf.m.planes[v4l2_buf.length-1].length = metaBuf.size.extS[0]; | |
feb7df4c | 2094 | |
5506cebf | 2095 | ALOGV("Qbuf metaBuf: fd(%d), length(%d) plane(%d)", metaBuf.fd.extFd[0], metaBuf.size.extS[0], v4l2_buf.length); |
feb7df4c | 2096 | #endif |
5506cebf SK |
2097 | if (exynos_v4l2_qbuf(currentNode->fd, &v4l2_buf) < 0) { |
2098 | ALOGE("ERR(%s): stream id(%d) exynos_v4l2_qbuf() fail fd(%d)", | |
2099 | __FUNCTION__, stream_id, currentNode->fd); | |
13d8c7b4 | 2100 | } |
5506cebf SK |
2101 | ALOGV("DEBUG(%s): stream id(%d) exynos_v4l2_qbuf() success fd(%d)", |
2102 | __FUNCTION__, stream_id, currentNode->fd); | |
2103 | targetStreamParms->svcBufStatus[i] = REQUIRES_DQ_FROM_SVC; | |
13d8c7b4 | 2104 | } |
5506cebf | 2105 | else { |
13d8c7b4 | 2106 | targetStreamParms->svcBufStatus[i] = ON_SERVICE; |
c15a6b00 | 2107 | } |
5506cebf | 2108 | |
13d8c7b4 | 2109 | targetStreamParms->svcBuffers[i] = currentBuf; |
feb7df4c | 2110 | targetStreamParms->metaBuffers[i] = metaBuf; |
13d8c7b4 SK |
2111 | targetStreamParms->svcBufHandle[i] = registeringBuffers[i]; |
2112 | } | |
15fd8231 | 2113 | |
5506cebf SK |
2114 | ALOGV("DEBUG(%s): calling streamon stream id = %d", __FUNCTION__, stream_id); |
2115 | cam_int_streamon(targetStreamParms->node); | |
ad37861e | 2116 | ALOGV("DEBUG(%s): calling streamon END", __FUNCTION__); |
5506cebf | 2117 | currentNode->status = true; |
13d8c7b4 | 2118 | ALOGV("DEBUG(%s): END registerStreamBuffers", __FUNCTION__); |
ad37861e | 2119 | |
c15a6b00 JS |
2120 | return 0; |
2121 | } | |
2122 | ||
2123 | int ExynosCameraHWInterface2::releaseStream(uint32_t stream_id) | |
2124 | { | |
13d8c7b4 | 2125 | StreamThread *targetStream; |
5506cebf | 2126 | status_t res = NO_ERROR; |
ed4ad5fe | 2127 | ALOGD("(%s): stream_id(%d)", __FUNCTION__, stream_id); |
5506cebf | 2128 | bool releasingScpMain = false; |
13d8c7b4 | 2129 | |
5506cebf | 2130 | if (stream_id == STREAM_ID_PREVIEW) { |
13d8c7b4 | 2131 | targetStream = (StreamThread*)(m_streamThreads[0].get()); |
5506cebf SK |
2132 | targetStream->m_numRegisteredStream--; |
2133 | ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, targetStream->m_numRegisteredStream); | |
2134 | releasingScpMain = true; | |
0d220b42 C |
2135 | for (int i = 0; i < targetStream->m_parameters.numSvcBuffers; i++) { |
2136 | for (int j = 0; j < targetStream->m_parameters.planes; j++) { | |
2137 | ion_unmap(targetStream->m_parameters.svcBuffers[i].virt.extP[j], | |
2138 | targetStream->m_parameters.svcBuffers[i].size.extS[j]); | |
2139 | CAM_LOGD("DBG(%s) ummap stream buffer[%d], plane(%d), fd %d vaddr %x", __FUNCTION__, i, j, | |
2140 | targetStream->m_parameters.svcBuffers[i].fd.extFd[j], targetStream->m_parameters.svcBuffers[i].virt.extP[j]); | |
2141 | } | |
2142 | } | |
5506cebf | 2143 | } else if (stream_id == STREAM_ID_JPEG) { |
13d8c7b4 | 2144 | targetStream = (StreamThread*)(m_streamThreads[1].get()); |
5506cebf SK |
2145 | memset(&m_subStreams[stream_id], 0, sizeof(substream_parameters_t)); |
2146 | if (m_resizeBuf.size.s != 0) { | |
2147 | freeCameraMemory(&m_resizeBuf, 1); | |
2148 | } | |
2149 | if (targetStream) | |
2150 | res = targetStream->detachSubStream(stream_id); | |
2151 | if (res != NO_ERROR) { | |
2152 | ALOGE("(%s): substream detach failed. res(%d)", __FUNCTION__, res); | |
2153 | return 1; | |
2154 | } | |
2155 | ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, targetStream->m_numRegisteredStream); | |
74d78ebe | 2156 | return 0; |
5506cebf SK |
2157 | } else if (stream_id == STREAM_ID_RECORD) { |
2158 | targetStream = (StreamThread*)(m_streamThreads[0].get()); | |
2159 | memset(&m_subStreams[stream_id], 0, sizeof(substream_parameters_t)); | |
2160 | if (targetStream) | |
2161 | res = targetStream->detachSubStream(stream_id); | |
2162 | else | |
2163 | return 0; | |
2164 | } else if (stream_id == STREAM_ID_PRVCB) { | |
2165 | targetStream = (StreamThread*)(m_streamThreads[0].get()); | |
2166 | if (m_resizeBuf.size.s != 0) { | |
2167 | freeCameraMemory(&m_previewCbBuf, m_subStreams[stream_id].internalPlanes); | |
2168 | } | |
2169 | memset(&m_subStreams[stream_id], 0, sizeof(substream_parameters_t)); | |
2170 | if (targetStream) | |
2171 | res = targetStream->detachSubStream(stream_id); | |
2172 | else | |
2173 | return 0; | |
2174 | } else if (stream_id == STREAM_ID_ZSL) { | |
2175 | targetStream = (StreamThread*)(m_streamThreads[1].get()); | |
2176 | targetStream->m_numRegisteredStream--; | |
2177 | ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, targetStream->m_numRegisteredStream); | |
2178 | } else { | |
13d8c7b4 | 2179 | ALOGE("ERR:(%s): wrong stream id (%d)", __FUNCTION__, stream_id); |
be494d19 | 2180 | return 1; |
13d8c7b4 SK |
2181 | } |
2182 | ||
2d5655e1 SK |
2183 | if (m_sensorThread != NULL) { |
2184 | m_sensorThread->release(); | |
2185 | ALOGD("(%s): START Waiting for (indirect) sensor thread termination", __FUNCTION__); | |
2186 | while (!m_sensorThread->IsTerminated()) | |
2187 | usleep(10000); | |
2188 | ALOGD("(%s): END Waiting for (indirect) sensor thread termination", __FUNCTION__); | |
2189 | } | |
2190 | else { | |
2191 | ALOGE("+++++++ sensor thread is NULL %d", __LINE__); | |
2192 | } | |
2193 | ||
5506cebf SK |
2194 | if (m_streamThreads[1]->m_numRegisteredStream == 0 && m_streamThreads[1]->m_activated) { |
2195 | ALOGV("(%s): deactivating stream thread 1 ", __FUNCTION__); | |
2196 | targetStream = (StreamThread*)(m_streamThreads[1].get()); | |
2197 | targetStream->m_releasing = true; | |
0eb27a9d | 2198 | ALOGD("START stream thread release %d", __LINE__); |
5506cebf | 2199 | do { |
5506cebf | 2200 | targetStream->release(); |
041f38de | 2201 | usleep(SIG_WAITING_TICK); |
5506cebf | 2202 | } while (targetStream->m_releasing); |
a8be0011 | 2203 | m_camera_info.capture.status = false; |
0eb27a9d | 2204 | ALOGD("END stream thread release %d", __LINE__); |
5506cebf SK |
2205 | } |
2206 | ||
a8be0011 | 2207 | if (releasingScpMain || (m_streamThreads[0].get() != NULL && m_streamThreads[0]->m_numRegisteredStream == 0 && m_streamThreads[0]->m_activated)) { |
5506cebf SK |
2208 | ALOGV("(%s): deactivating stream thread 0", __FUNCTION__); |
2209 | targetStream = (StreamThread*)(m_streamThreads[0].get()); | |
2210 | targetStream->m_releasing = true; | |
0eb27a9d | 2211 | ALOGD("(%s): START Waiting for (indirect) stream thread release - line(%d)", __FUNCTION__, __LINE__); |
5506cebf | 2212 | do { |
5506cebf | 2213 | targetStream->release(); |
041f38de | 2214 | usleep(SIG_WAITING_TICK); |
5506cebf | 2215 | } while (targetStream->m_releasing); |
0eb27a9d | 2216 | ALOGD("(%s): END Waiting for (indirect) stream thread release - line(%d)", __FUNCTION__, __LINE__); |
5506cebf SK |
2217 | targetStream->SetSignal(SIGNAL_THREAD_TERMINATE); |
2218 | ||
2219 | if (targetStream != NULL) { | |
0eb27a9d SK |
2220 | ALOGD("(%s): START Waiting for (indirect) stream thread termination", __FUNCTION__); |
2221 | while (!targetStream->IsTerminated()) | |
041f38de | 2222 | usleep(SIG_WAITING_TICK); |
0eb27a9d | 2223 | ALOGD("(%s): END Waiting for (indirect) stream thread termination", __FUNCTION__); |
5506cebf SK |
2224 | m_streamThreads[0] = NULL; |
2225 | } | |
5506cebf | 2226 | if (m_camera_info.capture.status == true) { |
f9a06609 | 2227 | m_scpForceSuspended = true; |
5506cebf SK |
2228 | } |
2229 | m_isIspStarted = false; | |
2230 | } | |
2231 | ALOGV("(%s): END", __FUNCTION__); | |
c15a6b00 JS |
2232 | return 0; |
2233 | } | |
2234 | ||
/*
 * camera2 HAL entry point for allocating a standalone reprocess stream.
 * Not implemented on this HAL: the body only logs and reports success
 * without touching any of the output parameters.  Reprocessing is instead
 * set up via allocateReprocessStreamFromStream().
 */
int ExynosCameraHWInterface2::allocateReprocessStream(
    uint32_t width, uint32_t height, uint32_t format,
    const camera2_stream_in_ops_t *reprocess_stream_ops,
    uint32_t *stream_id, uint32_t *consumer_usage, uint32_t *max_buffers)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return 0;
}
2243 | ||
5506cebf SK |
2244 | int ExynosCameraHWInterface2::allocateReprocessStreamFromStream( |
2245 | uint32_t output_stream_id, | |
2246 | const camera2_stream_in_ops_t *reprocess_stream_ops, | |
2247 | // outputs | |
2248 | uint32_t *stream_id) | |
2249 | { | |
ed4ad5fe | 2250 | ALOGD("(%s): output_stream_id(%d)", __FUNCTION__, output_stream_id); |
5506cebf SK |
2251 | *stream_id = STREAM_ID_JPEG_REPROCESS; |
2252 | ||
2253 | m_reprocessStreamId = *stream_id; | |
2254 | m_reprocessOps = reprocess_stream_ops; | |
2255 | m_reprocessOutputStreamId = output_stream_id; | |
2256 | return 0; | |
2257 | } | |
2258 | ||
c15a6b00 JS |
2259 | int ExynosCameraHWInterface2::releaseReprocessStream(uint32_t stream_id) |
2260 | { | |
ed4ad5fe | 2261 | ALOGD("(%s): stream_id(%d)", __FUNCTION__, stream_id); |
5506cebf SK |
2262 | if (stream_id == STREAM_ID_JPEG_REPROCESS) { |
2263 | m_reprocessStreamId = 0; | |
2264 | m_reprocessOps = NULL; | |
2265 | m_reprocessOutputStreamId = 0; | |
2266 | return 0; | |
2267 | } | |
2268 | return 1; | |
c15a6b00 JS |
2269 | } |
2270 | ||
2271 | int ExynosCameraHWInterface2::triggerAction(uint32_t trigger_id, int ext1, int ext2) | |
2272 | { | |
0f26b20f SK |
2273 | ALOGV("DEBUG(%s): id(%x), %d, %d", __FUNCTION__, trigger_id, ext1, ext2); |
2274 | ||
2275 | switch (trigger_id) { | |
2276 | case CAMERA2_TRIGGER_AUTOFOCUS: | |
2277 | ALOGV("DEBUG(%s):TRIGGER_AUTOFOCUS id(%d)", __FUNCTION__, ext1); | |
8e2c2fdb | 2278 | OnAfTriggerStart(ext1); |
0f26b20f SK |
2279 | break; |
2280 | ||
2281 | case CAMERA2_TRIGGER_CANCEL_AUTOFOCUS: | |
2282 | ALOGV("DEBUG(%s):CANCEL_AUTOFOCUS id(%d)", __FUNCTION__, ext1); | |
2283 | OnAfCancel(ext1); | |
2284 | break; | |
e117f756 YJ |
2285 | case CAMERA2_TRIGGER_PRECAPTURE_METERING: |
2286 | ALOGV("DEBUG(%s):CAMERA2_TRIGGER_PRECAPTURE_METERING id(%d)", __FUNCTION__, ext1); | |
2287 | OnPrecaptureMeteringTriggerStart(ext1); | |
2288 | break; | |
0f26b20f SK |
2289 | default: |
2290 | break; | |
2291 | } | |
c15a6b00 JS |
2292 | return 0; |
2293 | } | |
2294 | ||
/*
 * Stores the framework's notification callback and its opaque cookie for
 * later use when delivering notify events.
 * NOTE(review): the (unsigned int) cast of the function pointer in the log
 * truncates on 64-bit builds — log-only, but worth confirming if ported.
 */
int ExynosCameraHWInterface2::setNotifyCallback(camera2_notify_callback notify_cb, void *user)
{
    ALOGV("DEBUG(%s): cb_addr(%x)", __FUNCTION__, (unsigned int)notify_cb);
    m_notifyCb = notify_cb;
    m_callbackCookie = user;
    return 0;
}
2302 | ||
/*
 * camera2 HAL hook for exporting vendor metadata tag operations.
 * This HAL defines no vendor tags: the body only logs and returns success
 * without writing *ops.
 */
int ExynosCameraHWInterface2::getMetadataVendorTagOps(vendor_tag_query_ops_t **ops)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return 0;
}
2308 | ||
/*
 * camera2 HAL dump hook (e.g. for dumpsys).  Not implemented: nothing is
 * written to fd; the call just logs and reports success.
 */
int ExynosCameraHWInterface2::dump(int fd)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return 0;
}
2314 | ||
13d8c7b4 SK |
2315 | void ExynosCameraHWInterface2::m_getAlignedYUVSize(int colorFormat, int w, int h, ExynosBuffer *buf) |
2316 | { | |
2317 | switch (colorFormat) { | |
2318 | // 1p | |
2319 | case V4L2_PIX_FMT_RGB565 : | |
2320 | case V4L2_PIX_FMT_YUYV : | |
2321 | case V4L2_PIX_FMT_UYVY : | |
2322 | case V4L2_PIX_FMT_VYUY : | |
2323 | case V4L2_PIX_FMT_YVYU : | |
2324 | buf->size.extS[0] = FRAME_SIZE(V4L2_PIX_2_HAL_PIXEL_FORMAT(colorFormat), w, h); | |
2325 | buf->size.extS[1] = 0; | |
2326 | buf->size.extS[2] = 0; | |
2327 | break; | |
2328 | // 2p | |
2329 | case V4L2_PIX_FMT_NV12 : | |
2330 | case V4L2_PIX_FMT_NV12T : | |
2331 | case V4L2_PIX_FMT_NV21 : | |
2332 | buf->size.extS[0] = ALIGN(w, 16) * ALIGN(h, 16); | |
2333 | buf->size.extS[1] = ALIGN(w/2, 16) * ALIGN(h/2, 16); | |
2334 | buf->size.extS[2] = 0; | |
2335 | break; | |
2336 | case V4L2_PIX_FMT_NV12M : | |
2337 | case V4L2_PIX_FMT_NV12MT_16X16 : | |
9dd63e1f | 2338 | case V4L2_PIX_FMT_NV21M: |
13d8c7b4 SK |
2339 | buf->size.extS[0] = ALIGN(w, 16) * ALIGN(h, 16); |
2340 | buf->size.extS[1] = ALIGN(buf->size.extS[0] / 2, 256); | |
2341 | buf->size.extS[2] = 0; | |
2342 | break; | |
2343 | case V4L2_PIX_FMT_NV16 : | |
2344 | case V4L2_PIX_FMT_NV61 : | |
2345 | buf->size.extS[0] = ALIGN(w, 16) * ALIGN(h, 16); | |
2346 | buf->size.extS[1] = ALIGN(w, 16) * ALIGN(h, 16); | |
2347 | buf->size.extS[2] = 0; | |
2348 | break; | |
2349 | // 3p | |
2350 | case V4L2_PIX_FMT_YUV420 : | |
2351 | case V4L2_PIX_FMT_YVU420 : | |
2352 | buf->size.extS[0] = (w * h); | |
2353 | buf->size.extS[1] = (w * h) >> 2; | |
2354 | buf->size.extS[2] = (w * h) >> 2; | |
2355 | break; | |
2356 | case V4L2_PIX_FMT_YUV420M: | |
2357 | case V4L2_PIX_FMT_YVU420M : | |
2358 | case V4L2_PIX_FMT_YUV422P : | |
0d220b42 | 2359 | buf->size.extS[0] = ALIGN(w, 16) * ALIGN(h, 16); |
13d8c7b4 SK |
2360 | buf->size.extS[1] = ALIGN(w/2, 16) * ALIGN(h/2, 8); |
2361 | buf->size.extS[2] = ALIGN(w/2, 16) * ALIGN(h/2, 8); | |
2362 | break; | |
2363 | default: | |
2364 | ALOGE("ERR(%s):unmatched colorFormat(%d)", __FUNCTION__, colorFormat); | |
2365 | return; | |
2366 | break; | |
2367 | } | |
2368 | } | |
c15a6b00 | 2369 | |
13d8c7b4 SK |
2370 | bool ExynosCameraHWInterface2::m_getRatioSize(int src_w, int src_h, |
2371 | int dst_w, int dst_h, | |
2372 | int *crop_x, int *crop_y, | |
2373 | int *crop_w, int *crop_h, | |
2374 | int zoom) | |
c15a6b00 | 2375 | { |
13d8c7b4 SK |
2376 | *crop_w = src_w; |
2377 | *crop_h = src_h; | |
2378 | ||
2379 | if ( src_w != dst_w | |
2380 | || src_h != dst_h) { | |
2381 | float src_ratio = 1.0f; | |
2382 | float dst_ratio = 1.0f; | |
2383 | ||
2384 | // ex : 1024 / 768 | |
2385 | src_ratio = (float)src_w / (float)src_h; | |
2386 | ||
2387 | // ex : 352 / 288 | |
2388 | dst_ratio = (float)dst_w / (float)dst_h; | |
2389 | ||
2390 | if (dst_w * dst_h < src_w * src_h) { | |
2391 | if (dst_ratio <= src_ratio) { | |
2392 | // shrink w | |
2393 | *crop_w = src_h * dst_ratio; | |
2394 | *crop_h = src_h; | |
2395 | } else { | |
2396 | // shrink h | |
2397 | *crop_w = src_w; | |
2398 | *crop_h = src_w / dst_ratio; | |
c15a6b00 | 2399 | } |
13d8c7b4 SK |
2400 | } else { |
2401 | if (dst_ratio <= src_ratio) { | |
2402 | // shrink w | |
2403 | *crop_w = src_h * dst_ratio; | |
2404 | *crop_h = src_h; | |
2405 | } else { | |
2406 | // shrink h | |
2407 | *crop_w = src_w; | |
2408 | *crop_h = src_w / dst_ratio; | |
c15a6b00 JS |
2409 | } |
2410 | } | |
c15a6b00 JS |
2411 | } |
2412 | ||
13d8c7b4 SK |
2413 | if (zoom != 0) { |
2414 | float zoomLevel = ((float)zoom + 10.0) / 10.0; | |
2415 | *crop_w = (int)((float)*crop_w / zoomLevel); | |
2416 | *crop_h = (int)((float)*crop_h / zoomLevel); | |
2417 | } | |
2418 | ||
2419 | #define CAMERA_CROP_WIDTH_RESTRAIN_NUM (0x2) | |
2420 | unsigned int w_align = (*crop_w & (CAMERA_CROP_WIDTH_RESTRAIN_NUM - 1)); | |
2421 | if (w_align != 0) { | |
2422 | if ( (CAMERA_CROP_WIDTH_RESTRAIN_NUM >> 1) <= w_align | |
2423 | && *crop_w + (CAMERA_CROP_WIDTH_RESTRAIN_NUM - w_align) <= dst_w) { | |
2424 | *crop_w += (CAMERA_CROP_WIDTH_RESTRAIN_NUM - w_align); | |
2425 | } | |
2426 | else | |
2427 | *crop_w -= w_align; | |
2428 | } | |
2429 | ||
2430 | #define CAMERA_CROP_HEIGHT_RESTRAIN_NUM (0x2) | |
2431 | unsigned int h_align = (*crop_h & (CAMERA_CROP_HEIGHT_RESTRAIN_NUM - 1)); | |
2432 | if (h_align != 0) { | |
2433 | if ( (CAMERA_CROP_HEIGHT_RESTRAIN_NUM >> 1) <= h_align | |
2434 | && *crop_h + (CAMERA_CROP_HEIGHT_RESTRAIN_NUM - h_align) <= dst_h) { | |
2435 | *crop_h += (CAMERA_CROP_HEIGHT_RESTRAIN_NUM - h_align); | |
2436 | } | |
2437 | else | |
2438 | *crop_h -= h_align; | |
2439 | } | |
2440 | ||
2441 | *crop_x = (src_w - *crop_w) >> 1; | |
2442 | *crop_y = (src_h - *crop_h) >> 1; | |
2443 | ||
2444 | if (*crop_x & (CAMERA_CROP_WIDTH_RESTRAIN_NUM >> 1)) | |
2445 | *crop_x -= 1; | |
2446 | ||
2447 | if (*crop_y & (CAMERA_CROP_HEIGHT_RESTRAIN_NUM >> 1)) | |
2448 | *crop_y -= 1; | |
2449 | ||
2450 | return true; | |
2451 | } | |
2452 | ||
9dd63e1f | 2453 | BayerBufManager::BayerBufManager() |
13d8c7b4 | 2454 | { |
9dd63e1f SK |
2455 | ALOGV("DEBUG(%s): ", __FUNCTION__); |
2456 | for (int i = 0; i < NUM_BAYER_BUFFERS ; i++) { | |
2457 | entries[i].status = BAYER_ON_HAL_EMPTY; | |
2458 | entries[i].reqFrameCnt = 0; | |
13d8c7b4 | 2459 | } |
9dd63e1f SK |
2460 | sensorEnqueueHead = 0; |
2461 | sensorDequeueHead = 0; | |
2462 | ispEnqueueHead = 0; | |
2463 | ispDequeueHead = 0; | |
2464 | numOnSensor = 0; | |
2465 | numOnIsp = 0; | |
2466 | numOnHalFilled = 0; | |
2467 | numOnHalEmpty = NUM_BAYER_BUFFERS; | |
13d8c7b4 SK |
2468 | } |
2469 | ||
15fd8231 SK |
// Destructor: no resources to release (entries are a plain array); only
// logs for trace purposes.
BayerBufManager::~BayerBufManager()
{
    ALOGV("%s", __FUNCTION__);
}
2474 | ||
9dd63e1f | 2475 | int BayerBufManager::GetIndexForSensorEnqueue() |
13d8c7b4 | 2476 | { |
9dd63e1f SK |
2477 | int ret = 0; |
2478 | if (numOnHalEmpty == 0) | |
2479 | ret = -1; | |
2480 | else | |
2481 | ret = sensorEnqueueHead; | |
2482 | ALOGV("DEBUG(%s): returning (%d)", __FUNCTION__, ret); | |
2483 | return ret; | |
13d8c7b4 SK |
2484 | } |
2485 | ||
9dd63e1f | 2486 | int BayerBufManager::MarkSensorEnqueue(int index) |
13d8c7b4 | 2487 | { |
9dd63e1f SK |
2488 | ALOGV("DEBUG(%s) : BayerIndex[%d] ", __FUNCTION__, index); |
2489 | ||
2490 | // sanity check | |
2491 | if (index != sensorEnqueueHead) { | |
2492 | ALOGV("DEBUG(%s) : Abnormal BayerIndex[%d] - expected[%d]", __FUNCTION__, index, sensorEnqueueHead); | |
2493 | return -1; | |
2494 | } | |
2495 | if (entries[index].status != BAYER_ON_HAL_EMPTY) { | |
2496 | ALOGV("DEBUG(%s) : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__, | |
2497 | index, entries[index].status, BAYER_ON_HAL_EMPTY); | |
2498 | return -1; | |
13d8c7b4 | 2499 | } |
13d8c7b4 | 2500 | |
9dd63e1f SK |
2501 | entries[index].status = BAYER_ON_SENSOR; |
2502 | entries[index].reqFrameCnt = 0; | |
2503 | numOnHalEmpty--; | |
2504 | numOnSensor++; | |
2505 | sensorEnqueueHead = GetNextIndex(index); | |
2506 | ALOGV("DEBUG(%s) END: HAL-e(%d) HAL-f(%d) Sensor(%d) ISP(%d) ", | |
2507 | __FUNCTION__, numOnHalEmpty, numOnHalFilled, numOnSensor, numOnIsp); | |
2508 | return 0; | |
2509 | } | |
13d8c7b4 | 2510 | |
9dd63e1f | 2511 | int BayerBufManager::MarkSensorDequeue(int index, int reqFrameCnt, nsecs_t *timeStamp) |
13d8c7b4 | 2512 | { |
9dd63e1f SK |
2513 | ALOGV("DEBUG(%s) : BayerIndex[%d] reqFrameCnt(%d)", __FUNCTION__, index, reqFrameCnt); |
2514 | ||
9dd63e1f | 2515 | if (entries[index].status != BAYER_ON_SENSOR) { |
ad37861e | 2516 | ALOGE("DEBUG(%s) : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__, |
9dd63e1f SK |
2517 | index, entries[index].status, BAYER_ON_SENSOR); |
2518 | return -1; | |
13d8c7b4 | 2519 | } |
13d8c7b4 | 2520 | |
9dd63e1f | 2521 | entries[index].status = BAYER_ON_HAL_FILLED; |
9dd63e1f SK |
2522 | numOnHalFilled++; |
2523 | numOnSensor--; | |
ad37861e | 2524 | |
9dd63e1f SK |
2525 | return 0; |
2526 | } | |
2527 | ||
2528 | int BayerBufManager::GetIndexForIspEnqueue(int *reqFrameCnt) | |
2529 | { | |
2530 | int ret = 0; | |
2531 | if (numOnHalFilled == 0) | |
2532 | ret = -1; | |
2533 | else { | |
2534 | *reqFrameCnt = entries[ispEnqueueHead].reqFrameCnt; | |
2535 | ret = ispEnqueueHead; | |
13d8c7b4 | 2536 | } |
9dd63e1f | 2537 | ALOGV("DEBUG(%s): returning BayerIndex[%d]", __FUNCTION__, ret); |
13d8c7b4 SK |
2538 | return ret; |
2539 | } | |
2540 | ||
9dd63e1f SK |
2541 | int BayerBufManager::GetIndexForIspDequeue(int *reqFrameCnt) |
2542 | { | |
2543 | int ret = 0; | |
2544 | if (numOnIsp == 0) | |
2545 | ret = -1; | |
2546 | else { | |
2547 | *reqFrameCnt = entries[ispDequeueHead].reqFrameCnt; | |
2548 | ret = ispDequeueHead; | |
2549 | } | |
2550 | ALOGV("DEBUG(%s): returning BayerIndex[%d]", __FUNCTION__, ret); | |
2551 | return ret; | |
2552 | } | |
13d8c7b4 | 2553 | |
9dd63e1f | 2554 | int BayerBufManager::MarkIspEnqueue(int index) |
13d8c7b4 | 2555 | { |
9dd63e1f SK |
2556 | ALOGV("DEBUG(%s) : BayerIndex[%d] ", __FUNCTION__, index); |
2557 | ||
2558 | // sanity check | |
2559 | if (index != ispEnqueueHead) { | |
2560 | ALOGV("DEBUG(%s) : Abnormal BayerIndex[%d] - expected[%d]", __FUNCTION__, index, ispEnqueueHead); | |
13d8c7b4 SK |
2561 | return -1; |
2562 | } | |
9dd63e1f SK |
2563 | if (entries[index].status != BAYER_ON_HAL_FILLED) { |
2564 | ALOGV("DEBUG(%s) : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__, | |
2565 | index, entries[index].status, BAYER_ON_HAL_FILLED); | |
2566 | return -1; | |
13d8c7b4 SK |
2567 | } |
2568 | ||
9dd63e1f SK |
2569 | entries[index].status = BAYER_ON_ISP; |
2570 | numOnHalFilled--; | |
2571 | numOnIsp++; | |
2572 | ispEnqueueHead = GetNextIndex(index); | |
2573 | ALOGV("DEBUG(%s) END: HAL-e(%d) HAL-f(%d) Sensor(%d) ISP(%d) ", | |
2574 | __FUNCTION__, numOnHalEmpty, numOnHalFilled, numOnSensor, numOnIsp); | |
2575 | return 0; | |
2576 | } | |
2577 | ||
2578 | int BayerBufManager::MarkIspDequeue(int index) | |
2579 | { | |
2580 | ALOGV("DEBUG(%s) : BayerIndex[%d]", __FUNCTION__, index); | |
2581 | ||
2582 | // sanity check | |
2583 | if (index != ispDequeueHead) { | |
2584 | ALOGV("DEBUG(%s) : Abnormal BayerIndex[%d] - expected[%d]", __FUNCTION__, index, ispDequeueHead); | |
2585 | return -1; | |
13d8c7b4 | 2586 | } |
9dd63e1f SK |
2587 | if (entries[index].status != BAYER_ON_ISP) { |
2588 | ALOGV("DEBUG(%s) : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__, | |
2589 | index, entries[index].status, BAYER_ON_ISP); | |
13d8c7b4 SK |
2590 | return -1; |
2591 | } | |
2592 | ||
9dd63e1f SK |
2593 | entries[index].status = BAYER_ON_HAL_EMPTY; |
2594 | entries[index].reqFrameCnt = 0; | |
2595 | numOnHalEmpty++; | |
2596 | numOnIsp--; | |
2597 | ispDequeueHead = GetNextIndex(index); | |
2598 | ALOGV("DEBUG(%s) END: HAL-e(%d) HAL-f(%d) Sensor(%d) ISP(%d) ", | |
2599 | __FUNCTION__, numOnHalEmpty, numOnHalFilled, numOnSensor, numOnIsp); | |
2600 | return 0; | |
2601 | } | |
13d8c7b4 | 2602 | |
9dd63e1f SK |
// Number of bayer buffers currently queued on the sensor.
int BayerBufManager::GetNumOnSensor()
{
    return numOnSensor;
}
2607 | ||
9dd63e1f | 2608 | int BayerBufManager::GetNumOnHalFilled() |
13d8c7b4 | 2609 | { |
9dd63e1f SK |
2610 | return numOnHalFilled; |
2611 | } | |
2612 | ||
// Number of bayer buffers currently queued on the ISP.
int BayerBufManager::GetNumOnIsp()
{
    return numOnIsp;
}
2617 | ||
2618 | int BayerBufManager::GetNextIndex(int index) | |
2619 | { | |
2620 | index++; | |
2621 | if (index >= NUM_BAYER_BUFFERS) | |
2622 | index = 0; | |
2623 | ||
2624 | return index; | |
2625 | } | |
2626 | ||
/*
 * Main-thread body, invoked by the SignalDrivenThread framework each time
 * a signal is posted.  Handles three signals:
 *  - SIGNAL_THREAD_RELEASE: acknowledge and request thread termination.
 *  - SIGNAL_MAIN_REQ_Q_NOT_EMPTY: pull capture requests from the service
 *    queue into the request manager and kick the sensor thread.
 *  - SIGNAL_MAIN_STREAM_OUTPUT_DONE: assemble the finished frame's
 *    metadata and hand it back to the service frame queue.
 */
void ExynosCameraHWInterface2::m_mainThreadFunc(SignalDrivenThread * self)
{
    camera_metadata_t *currentRequest = NULL;
    camera_metadata_t *currentFrame = NULL;
    size_t numEntries = 0;
    size_t frameSize = 0;
    camera_metadata_t * preparedFrame = NULL;
    camera_metadata_t *deregisteredRequest = NULL;
    uint32_t currentSignal = self->GetProcessingSignal();
    MainThread * selfThread = ((MainThread*)self);
    int res = 0;

    int ret;

    ALOGV("DEBUG(%s): m_mainThreadFunc (%x)", __FUNCTION__, currentSignal);

    if (currentSignal & SIGNAL_THREAD_RELEASE) {
        ALOGV("DEBUG(%s): processing SIGNAL_THREAD_RELEASE", __FUNCTION__);

        ALOGV("DEBUG(%s): processing SIGNAL_THREAD_RELEASE DONE", __FUNCTION__);
        selfThread->SetSignal(SIGNAL_THREAD_TERMINATE);
        return;
    }

    if (currentSignal & SIGNAL_MAIN_REQ_Q_NOT_EMPTY) {
        ALOGV("DEBUG(%s): MainThread processing SIGNAL_MAIN_REQ_Q_NOT_EMPTY", __FUNCTION__);
        // Only ingest a new request while the request manager has room.
        if (m_requestManager->IsRequestQueueFull()==false) {
            m_requestQueueOps->dequeue_request(m_requestQueueOps, &currentRequest);
            if (NULL == currentRequest) {
                // Service queue drained; remember that so STREAM_OUTPUT_DONE
                // does not re-signal an empty queue.
                ALOGD("DEBUG(%s)(0x%x): dequeue_request returned NULL ", __FUNCTION__, currentSignal);
                m_isRequestQueueNull = true;
                if (m_requestManager->IsVdisEnable())
                    m_vdisBubbleCnt = 1;
            }
            else {
                m_requestManager->RegisterRequest(currentRequest);

                m_numOfRemainingReqInSvc = m_requestQueueOps->request_count(m_requestQueueOps);
                ALOGV("DEBUG(%s): remaining req cnt (%d)", __FUNCTION__, m_numOfRemainingReqInSvc);
                if (m_requestManager->IsRequestQueueFull()==false)
                    selfThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY); // dequeue repeatedly

                m_sensorThread->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING);
            }
        }
        else {
            // Request manager full: defer until a slot frees up.
            m_isRequestQueuePending = true;
        }
    }

    if (currentSignal & SIGNAL_MAIN_STREAM_OUTPUT_DONE) {
        ALOGV("DEBUG(%s): MainThread processing SIGNAL_MAIN_STREAM_OUTPUT_DONE", __FUNCTION__);
        /*while (1)*/ {
            // Build the result metadata for the completed request, then
            // retire the request and return it to the service.
            ret = m_requestManager->PrepareFrame(&numEntries, &frameSize, &preparedFrame, GetAfStateForService());
            if (ret == false)
                CAM_LOGE("ERR(%s): PrepareFrame ret = %d", __FUNCTION__, ret);

            m_requestManager->DeregisterRequest(&deregisteredRequest);

            ret = m_requestQueueOps->free_request(m_requestQueueOps, deregisteredRequest);
            if (ret < 0)
                CAM_LOGE("ERR(%s): free_request ret = %d", __FUNCTION__, ret);

            // Obtain an output frame buffer from the service and append the
            // prepared metadata into it.
            ret = m_frameQueueOps->dequeue_frame(m_frameQueueOps, numEntries, frameSize, &currentFrame);
            if (ret < 0)
                CAM_LOGE("ERR(%s): dequeue_frame ret = %d", __FUNCTION__, ret);

            if (currentFrame==NULL) {
                ALOGV("DBG(%s): frame dequeue returned NULL",__FUNCTION__ );
            }
            else {
                ALOGV("DEBUG(%s): frame dequeue done. numEntries(%d) frameSize(%d)",__FUNCTION__ , numEntries, frameSize);
            }
            res = append_camera_metadata(currentFrame, preparedFrame);
            if (res==0) {
                ALOGV("DEBUG(%s): frame metadata append success",__FUNCTION__);
                m_frameQueueOps->enqueue_frame(m_frameQueueOps, currentFrame);
            }
            else {
                ALOGE("ERR(%s): frame metadata append fail (%d)",__FUNCTION__, res);
            }
        }
        // Keep the pipeline moving: fetch more requests and, if work is
        // still in flight, poke the sensor thread again.
        if (!m_isRequestQueueNull) {
            selfThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY);
        }

        if (getInProgressCount()>0) {
            ALOGV("DEBUG(%s): STREAM_OUTPUT_DONE and signalling REQ_PROCESSING",__FUNCTION__);
            m_sensorThread->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING);
        }
    }
    ALOGV("DEBUG(%s): MainThread Exit", __FUNCTION__);
    return;
}
c15a6b00 | 2721 | |
13d8c7b4 SK |
/*
 * Debug helper: logs the interesting fields of a camera2 shot — the magic
 * number, the control (ctl) section the HAL requested, the output-stream
 * request flags, and the dynamic metadata (dm) section reported back by
 * the ISP.  Read-only; purely for logcat inspection.
 */
void ExynosCameraHWInterface2::DumpInfoWithShot(struct camera2_shot_ext * shot_ext)
{
    ALOGD("#### common Section");
    ALOGD("#### magic(%x) ",
        shot_ext->shot.magicNumber);
    ALOGD("#### ctl Section");
    ALOGD("#### meta(%d) aper(%f) exp(%lld) duration(%lld) ISO(%d) AWB(%d)",
        shot_ext->shot.ctl.request.metadataMode,
        shot_ext->shot.ctl.lens.aperture,
        shot_ext->shot.ctl.sensor.exposureTime,
        shot_ext->shot.ctl.sensor.frameDuration,
        shot_ext->shot.ctl.sensor.sensitivity,
        shot_ext->shot.ctl.aa.awbMode);

    ALOGD("#### OutputStream Sensor(%d) SCP(%d) SCC(%d) streams(%x)",
        shot_ext->request_sensor, shot_ext->request_scp, shot_ext->request_scc,
        shot_ext->shot.ctl.request.outputStreams[0]);

    ALOGD("#### DM Section");
    ALOGD("#### meta(%d) aper(%f) exp(%lld) duration(%lld) ISO(%d) timestamp(%lld) AWB(%d) cnt(%d)",
        shot_ext->shot.dm.request.metadataMode,
        shot_ext->shot.dm.lens.aperture,
        shot_ext->shot.dm.sensor.exposureTime,
        shot_ext->shot.dm.sensor.frameDuration,
        shot_ext->shot.dm.sensor.sensitivity,
        shot_ext->shot.dm.sensor.timeStamp,
        shot_ext->shot.dm.aa.awbMode,
        shot_ext->shot.dm.request.frameCount );
}
2751 | ||
e117f756 | 2752 | void ExynosCameraHWInterface2::m_preCaptureSetter(struct camera2_shot_ext * shot_ext) |
6f19b6cf | 2753 | { |
e117f756 YJ |
2754 | // Flash |
2755 | switch (m_ctlInfo.flash.m_flashCnt) { | |
2756 | case IS_FLASH_STATE_ON: | |
73f5ad60 | 2757 | ALOGV("(%s): [Flash] Flash ON for Capture", __FUNCTION__); |
4a9565ae YJ |
2758 | // check AF locked |
2759 | if (m_ctlInfo.flash.m_precaptureTriggerId > 0) { | |
2760 | if (m_ctlInfo.flash.m_flashTimeOut == 0) { | |
2761 | if (m_ctlInfo.flash.i_flashMode == AA_AEMODE_ON_ALWAYS_FLASH) { | |
2762 | shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_ON_ALWAYS; | |
2763 | m_ctlInfo.flash.m_flashTimeOut = 5; | |
2764 | } else | |
2765 | shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_ON; | |
2766 | m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON_WAIT; | |
2767 | } else { | |
2768 | m_ctlInfo.flash.m_flashTimeOut--; | |
2769 | } | |
2770 | } else { | |
2771 | if (m_ctlInfo.flash.i_flashMode == AA_AEMODE_ON_ALWAYS_FLASH) { | |
2772 | shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_ON_ALWAYS; | |
2773 | m_ctlInfo.flash.m_flashTimeOut = 5; | |
2774 | } else | |
2775 | shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_ON; | |
2776 | m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON_WAIT; | |
2777 | } | |
9a710a45 | 2778 | break; |
e117f756 | 2779 | case IS_FLASH_STATE_ON_WAIT: |
6f19b6cf | 2780 | break; |
e117f756 YJ |
2781 | case IS_FLASH_STATE_ON_DONE: |
2782 | if (!m_ctlInfo.flash.m_afFlashDoneFlg) | |
2783 | // auto transition at pre-capture trigger | |
2784 | m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_AE_AWB_LOCK; | |
6f19b6cf | 2785 | break; |
e117f756 | 2786 | case IS_FLASH_STATE_AUTO_AE_AWB_LOCK: |
73f5ad60 | 2787 | ALOGV("(%s): [Flash] IS_FLASH_AF_AUTO_AE_AWB_LOCK - %d", __FUNCTION__, shot_ext->shot.dm.flash.decision); |
e117f756 YJ |
2788 | shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_AUTO; |
2789 | //shot_ext->shot.ctl.aa.aeMode = AA_AEMODE_LOCKED; | |
2790 | shot_ext->shot.ctl.aa.awbMode = AA_AWBMODE_LOCKED; | |
2791 | m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AE_AWB_LOCK_WAIT; | |
2792 | break; | |
2793 | case IS_FLASH_STATE_AE_AWB_LOCK_WAIT: | |
2794 | case IS_FLASH_STATE_AUTO_WAIT: | |
2795 | shot_ext->shot.ctl.aa.aeMode =(enum aa_aemode)0; | |
2796 | shot_ext->shot.ctl.aa.awbMode = (enum aa_awbmode)0; | |
2797 | break; | |
2798 | case IS_FLASH_STATE_AUTO_DONE: | |
73f5ad60 | 2799 | ALOGV("(%s): [Flash] IS_FLASH_AF_AUTO DONE", __FUNCTION__); |
d91c0269 | 2800 | shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_OFF; |
e117f756 YJ |
2801 | break; |
2802 | case IS_FLASH_STATE_AUTO_OFF: | |
73f5ad60 | 2803 | ALOGV("(%s): [Flash] IS_FLASH_AF_AUTO Clear", __FUNCTION__); |
e117f756 YJ |
2804 | shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_OFF; |
2805 | m_ctlInfo.flash.m_afFlashDoneFlg = false; | |
2806 | m_ctlInfo.flash.m_flashEnableFlg = false; | |
2807 | break; | |
2808 | case IS_FLASH_STATE_CAPTURE: | |
73f5ad60 | 2809 | ALOGV("(%s): [Flash] IS_FLASH_CAPTURE", __FUNCTION__); |
e117f756 YJ |
2810 | m_ctlInfo.flash.m_flashTimeOut = FLASH_STABLE_WAIT_TIMEOUT; |
2811 | shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_CAPTURE; | |
2812 | shot_ext->request_scc = 0; | |
2813 | shot_ext->request_scp = 0; | |
2814 | m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE_WAIT; // auto transition | |
2815 | break; | |
2816 | case IS_FLASH_STATE_CAPTURE_WAIT: | |
2817 | shot_ext->request_scc = 0; | |
2818 | shot_ext->request_scp = 0; | |
2819 | break; | |
2820 | case IS_FLASH_STATE_CAPTURE_JPEG: | |
73f5ad60 | 2821 | ALOGV("(%s): [Flash] Flash Capture (%d)!!!!!", __FUNCTION__, (FLASH_STABLE_WAIT_TIMEOUT -m_ctlInfo.flash.m_flashTimeOut)); |
e117f756 YJ |
2822 | shot_ext->request_scc = 1; |
2823 | shot_ext->request_scp = 1; | |
2824 | m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE_END; // auto transition | |
2825 | break; | |
2826 | case IS_FLASH_STATE_CAPTURE_END: | |
73f5ad60 | 2827 | ALOGV("(%s): [Flash] Flash Capture END", __FUNCTION__); |
e117f756 YJ |
2828 | shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_OFF; |
2829 | shot_ext->request_scc = 0; | |
2830 | shot_ext->request_scp = 0; | |
2831 | m_ctlInfo.flash.m_flashEnableFlg = false; | |
2832 | m_ctlInfo.flash.m_flashCnt = 0; | |
2833 | m_ctlInfo.flash.m_afFlashDoneFlg= false; | |
2834 | break; | |
2835 | default: | |
2836 | ALOGE("(%s): [Flash] flash state error!! (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt); | |
6f19b6cf YJ |
2837 | } |
2838 | } | |
2839 | ||
e117f756 | 2840 | void ExynosCameraHWInterface2::m_preCaptureListenerSensor(struct camera2_shot_ext * shot_ext) |
6f19b6cf | 2841 | { |
e117f756 YJ |
2842 | // Flash |
2843 | switch (m_ctlInfo.flash.m_flashCnt) { | |
2844 | case IS_FLASH_STATE_AUTO_WAIT: | |
2845 | if (m_ctlInfo.flash.m_flashDecisionResult) { | |
2846 | if (shot_ext->shot.dm.flash.flashMode == CAM2_FLASH_MODE_OFF) { | |
2847 | m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_DONE; | |
73f5ad60 | 2848 | ALOGV("(%s): [Flash] Lis : AUTO -> OFF (%d)", __FUNCTION__, shot_ext->shot.dm.flash.flashMode); |
9257e29e | 2849 | } else { |
73f5ad60 | 2850 | ALOGV("(%s): [Flash] Waiting : AUTO -> OFF", __FUNCTION__); |
9257e29e | 2851 | } |
e117f756 YJ |
2852 | } else { |
2853 | //If flash isn't activated at flash auto mode, skip flash auto control | |
2854 | m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_DONE; | |
73f5ad60 | 2855 | ALOGV("(%s): [Flash] Skip : AUTO -> OFF", __FUNCTION__); |
9257e29e | 2856 | } |
e117f756 | 2857 | break; |
9257e29e | 2858 | } |
9257e29e YJ |
2859 | } |
2860 | ||
/*
 * Flash pre-capture listener for the ISP output path.
 * Inspects per-frame dynamic metadata (shot_ext->shot.dm) and advances the
 * flash state machine in m_ctlInfo.flash. m_flashTimeOut acts as a
 * frame-count watchdog so a wait state cannot stall forever.
 */
void ExynosCameraHWInterface2::m_preCaptureListenerISP(struct camera2_shot_ext * shot_ext)
{
    // Flash
    switch (m_ctlInfo.flash.m_flashCnt) {
    case IS_FLASH_STATE_ON_WAIT:
        // Waiting for the driver's auto-flash decision (dm.flash.decision).
        if (shot_ext->shot.dm.flash.decision > 0) {
            // store decision result to skip capture sequenece
            // decision == 2 means "flash not required"; anything else
            // (> 0) means flash will fire for the capture.
            ALOGV("(%s): [Flash] IS_FLASH_ON, decision - %d", __FUNCTION__, shot_ext->shot.dm.flash.decision);
            if (shot_ext->shot.dm.flash.decision == 2)
                m_ctlInfo.flash.m_flashDecisionResult = false;
            else
                m_ctlInfo.flash.m_flashDecisionResult = true;
            m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON_DONE;
        } else {
            // No decision yet: count down the watchdog; on expiry give up
            // and treat the decision as "no flash".
            if (m_ctlInfo.flash.m_flashTimeOut == 0) {
                ALOGV("(%s): [Flash] Timeout IS_FLASH_ON, decision is false setting", __FUNCTION__);
                m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON_DONE;
                m_ctlInfo.flash.m_flashDecisionResult = false;
            } else {
                m_ctlInfo.flash.m_flashTimeOut--;
            }
        }
        break;
    case IS_FLASH_STATE_AE_AWB_LOCK_WAIT:
        // Waiting for AWB to report LOCKED before the auto-wait phase.
        if (shot_ext->shot.dm.aa.awbMode == AA_AWBMODE_LOCKED) {
            ALOGV("(%s): [Flash] FLASH_AUTO_AE_AWB_LOCK_WAIT - %d", __FUNCTION__, shot_ext->shot.dm.aa.awbMode);
            m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_WAIT;
        } else {
            ALOGV("(%s): [Flash] Waiting : AA_AWBMODE_LOCKED", __FUNCTION__);
        }
        break;
    case IS_FLASH_STATE_CAPTURE_WAIT:
        // Waiting for the flash output to stabilize before taking the JPEG.
        if (m_ctlInfo.flash.m_flashDecisionResult) {
            if (shot_ext->shot.dm.flash.firingStable) {
                m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE_JPEG;
            } else {
                // Not stable yet: watchdog countdown; on expiry proceed to
                // JPEG capture anyway rather than hanging the sequence.
                if (m_ctlInfo.flash.m_flashTimeOut == 0) {
                    ALOGE("(%s): [Flash] Wait firingStable time-out!!", __FUNCTION__);
                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE_JPEG;
                } else {
                    ALOGV("(%s): [Flash] Wait firingStable - %d", __FUNCTION__, m_ctlInfo.flash.m_flashTimeOut);
                    m_ctlInfo.flash.m_flashTimeOut--;
                }
            }
        } else {
            // Flash not needed: skip straight to JPEG capture.
            m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE_JPEG;
        }
        break;
    }
}
2911 | ||
4a9565ae YJ |
2912 | void ExynosCameraHWInterface2::m_updateAfRegion(struct camera2_shot_ext * shot_ext) |
2913 | { | |
2914 | if (0 == shot_ext->shot.ctl.aa.afRegions[0] && 0 == shot_ext->shot.ctl.aa.afRegions[1] | |
2915 | && 0 == shot_ext->shot.ctl.aa.afRegions[2] && 0 == shot_ext->shot.ctl.aa.afRegions[3]) { | |
2916 | ALOGV("(%s): AF region resetting", __FUNCTION__); | |
2917 | lastAfRegion[0] = 0; | |
2918 | lastAfRegion[1] = 0; | |
2919 | lastAfRegion[2] = 0; | |
2920 | lastAfRegion[3] = 0; | |
2921 | } else { | |
275c9744 YJ |
2922 | // clear region infos in case of CAF mode |
2923 | if (m_afMode == AA_AFMODE_CONTINUOUS_VIDEO || m_afMode == AA_AFMODE_CONTINUOUS_PICTURE) { | |
2924 | shot_ext->shot.ctl.aa.afRegions[0] = lastAfRegion[0] = 0; | |
2925 | shot_ext->shot.ctl.aa.afRegions[1] = lastAfRegion[1] = 0; | |
2926 | shot_ext->shot.ctl.aa.afRegions[2] = lastAfRegion[2] = 0; | |
2927 | shot_ext->shot.ctl.aa.afRegions[3] = lastAfRegion[3] = 0; | |
2928 | } else if (!(lastAfRegion[0] == shot_ext->shot.ctl.aa.afRegions[0] && lastAfRegion[1] == shot_ext->shot.ctl.aa.afRegions[1] | |
4a9565ae YJ |
2929 | && lastAfRegion[2] == shot_ext->shot.ctl.aa.afRegions[2] && lastAfRegion[3] == shot_ext->shot.ctl.aa.afRegions[3])) { |
2930 | ALOGD("(%s): AF region changed : triggering", __FUNCTION__); | |
2931 | shot_ext->shot.ctl.aa.afTrigger = 1; | |
2932 | shot_ext->shot.ctl.aa.afMode = m_afMode; | |
2933 | m_afState = HAL_AFSTATE_STARTED; | |
2934 | lastAfRegion[0] = shot_ext->shot.ctl.aa.afRegions[0]; | |
2935 | lastAfRegion[1] = shot_ext->shot.ctl.aa.afRegions[1]; | |
2936 | lastAfRegion[2] = shot_ext->shot.ctl.aa.afRegions[2]; | |
2937 | lastAfRegion[3] = shot_ext->shot.ctl.aa.afRegions[3]; | |
2938 | m_IsAfTriggerRequired = false; | |
2939 | } | |
4a9565ae YJ |
2940 | } |
2941 | } | |
2942 | ||
2943 | void ExynosCameraHWInterface2::m_afTrigger(struct camera2_shot_ext * shot_ext) | |
2944 | { | |
2945 | if (m_afState == HAL_AFSTATE_SCANNING) { | |
2946 | ALOGD("(%s): restarting trigger ", __FUNCTION__); | |
2947 | } else { | |
2948 | if (m_afState != HAL_AFSTATE_NEEDS_COMMAND) | |
2949 | ALOGD("(%s): wrong trigger state %d", __FUNCTION__, m_afState); | |
2950 | else | |
2951 | m_afState = HAL_AFSTATE_STARTED; | |
2952 | } | |
2953 | ALOGD("### AF Triggering with mode (%d)", m_afMode); | |
2954 | shot_ext->shot.ctl.aa.afTrigger = 1; | |
2955 | shot_ext->shot.ctl.aa.afMode = m_afMode; | |
2956 | m_IsAfTriggerRequired = false; | |
2957 | } | |
2958 | ||
275c9744 YJ |
2959 | void ExynosCameraHWInterface2::m_sceneModeFaceSetter(struct camera2_shot_ext * shot_ext, int mode) |
2960 | { | |
2961 | switch (mode) { | |
2962 | case 0: | |
2963 | // af face setting based on scene mode | |
2964 | if (shot_ext->shot.ctl.aa.sceneMode == AA_SCENE_MODE_FACE_PRIORITY) { | |
2965 | if(m_afMode == AA_AFMODE_CONTINUOUS_PICTURE) { | |
2966 | ALOGV("(%s): AA_AFMODE_CONTINUOUS_PICTURE_FACE", __FUNCTION__); | |
2967 | m_afState = HAL_AFSTATE_STARTED; | |
2968 | shot_ext->shot.ctl.aa.afTrigger = 1; | |
2969 | shot_ext->shot.ctl.aa.afMode = AA_AFMODE_CONTINUOUS_PICTURE_FACE; | |
2970 | } else if (m_afMode == AA_AFMODE_CONTINUOUS_VIDEO) { | |
2971 | ALOGV("(%s): AA_AFMODE_CONTINUOUS_VIDEO_FACE", __FUNCTION__); | |
2972 | m_afState = HAL_AFSTATE_STARTED; | |
2973 | shot_ext->shot.ctl.aa.afTrigger = 1; | |
2974 | shot_ext->shot.ctl.aa.afMode = AA_AFMODE_CONTINUOUS_VIDEO_FACE; | |
2975 | ||
2976 | } | |
2977 | } else { | |
2978 | if(m_afMode == AA_AFMODE_CONTINUOUS_PICTURE) { | |
2979 | ALOGV("(%s): AA_AFMODE_CONTINUOUS_PICTURE", __FUNCTION__); | |
2980 | m_afState = HAL_AFSTATE_STARTED; | |
2981 | shot_ext->shot.ctl.aa.afTrigger = 1; | |
2982 | shot_ext->shot.ctl.aa.afMode = AA_AFMODE_CONTINUOUS_PICTURE; | |
2983 | } else if (m_afMode == AA_AFMODE_CONTINUOUS_VIDEO) { | |
2984 | ALOGV("(%s): AA_AFMODE_CONTINUOUS_VIDEO", __FUNCTION__); | |
2985 | m_afState = HAL_AFSTATE_STARTED; | |
2986 | shot_ext->shot.ctl.aa.afTrigger = 1; | |
2987 | shot_ext->shot.ctl.aa.afMode = AA_AFMODE_CONTINUOUS_VIDEO; | |
2988 | ||
2989 | } | |
2990 | } | |
2991 | break; | |
2992 | case 1: | |
2993 | // face af re-setting after single AF | |
2994 | if (shot_ext->shot.ctl.aa.sceneMode == AA_SCENE_MODE_FACE_PRIORITY) { | |
2995 | ALOGV("(%s): Face af setting", __FUNCTION__); | |
2996 | if(m_afMode == AA_AFMODE_CONTINUOUS_PICTURE) | |
2997 | shot_ext->shot.ctl.aa.afMode = AA_AFMODE_CONTINUOUS_PICTURE_FACE; | |
2998 | else if (m_afMode == AA_AFMODE_CONTINUOUS_VIDEO) | |
2999 | shot_ext->shot.ctl.aa.afMode = AA_AFMODE_CONTINUOUS_VIDEO_FACE; | |
3000 | } | |
3001 | break; | |
3002 | default: | |
3003 | break; | |
3004 | } | |
3005 | } | |
3006 | ||
13d8c7b4 SK |
3007 | void ExynosCameraHWInterface2::m_sensorThreadFunc(SignalDrivenThread * self) |
3008 | { | |
3009 | uint32_t currentSignal = self->GetProcessingSignal(); | |
3010 | SensorThread * selfThread = ((SensorThread*)self); | |
3011 | int index; | |
ad37861e | 3012 | int index_isp; |
13d8c7b4 SK |
3013 | status_t res; |
3014 | nsecs_t frameTime; | |
3015 | int bayersOnSensor = 0, bayersOnIsp = 0; | |
ad37861e SK |
3016 | int j = 0; |
3017 | bool isCapture = false; | |
13d8c7b4 SK |
3018 | ALOGV("DEBUG(%s): m_sensorThreadFunc (%x)", __FUNCTION__, currentSignal); |
3019 | ||
3020 | if (currentSignal & SIGNAL_THREAD_RELEASE) { | |
2c872806 | 3021 | CAM_LOGD("(%s): ENTER processing SIGNAL_THREAD_RELEASE", __FUNCTION__); |
13d8c7b4 | 3022 | |
9dd63e1f | 3023 | ALOGV("(%s): calling sensor streamoff", __FUNCTION__); |
13d8c7b4 | 3024 | cam_int_streamoff(&(m_camera_info.sensor)); |
9dd63e1f | 3025 | ALOGV("(%s): calling sensor streamoff done", __FUNCTION__); |
b5237e6b SK |
3026 | |
3027 | m_camera_info.sensor.buffers = 0; | |
3028 | ALOGV("DEBUG(%s): sensor calling reqbuf 0 ", __FUNCTION__); | |
3029 | cam_int_reqbufs(&(m_camera_info.sensor)); | |
3030 | ALOGV("DEBUG(%s): sensor calling reqbuf 0 done", __FUNCTION__); | |
5506cebf | 3031 | m_camera_info.sensor.status = false; |
ad37861e | 3032 | |
9dd63e1f SK |
3033 | ALOGV("(%s): calling ISP streamoff", __FUNCTION__); |
3034 | isp_int_streamoff(&(m_camera_info.isp)); | |
3035 | ALOGV("(%s): calling ISP streamoff done", __FUNCTION__); | |
ad37861e | 3036 | |
b5237e6b SK |
3037 | m_camera_info.isp.buffers = 0; |
3038 | ALOGV("DEBUG(%s): isp calling reqbuf 0 ", __FUNCTION__); | |
3039 | cam_int_reqbufs(&(m_camera_info.isp)); | |
3040 | ALOGV("DEBUG(%s): isp calling reqbuf 0 done", __FUNCTION__); | |
3041 | ||
13d8c7b4 | 3042 | exynos_v4l2_s_ctrl(m_camera_info.sensor.fd, V4L2_CID_IS_S_STREAM, IS_DISABLE_STREAM); |
13d8c7b4 | 3043 | |
52f54308 | 3044 | m_requestManager->releaseSensorQ(); |
2adfa429 | 3045 | m_requestManager->ResetEntry(); |
ad37861e | 3046 | ALOGV("(%s): EXIT processing SIGNAL_THREAD_RELEASE", __FUNCTION__); |
13d8c7b4 SK |
3047 | selfThread->SetSignal(SIGNAL_THREAD_TERMINATE); |
3048 | return; | |
3049 | } | |
3050 | ||
3051 | if (currentSignal & SIGNAL_SENSOR_START_REQ_PROCESSING) | |
3052 | { | |
3053 | ALOGV("DEBUG(%s): SensorThread processing SIGNAL_SENSOR_START_REQ_PROCESSING", __FUNCTION__); | |
9dd63e1f | 3054 | int targetStreamIndex = 0, i=0; |
ad37861e | 3055 | int matchedFrameCnt = -1, processingReqIndex; |
13d8c7b4 | 3056 | struct camera2_shot_ext *shot_ext; |
ad37861e | 3057 | struct camera2_shot_ext *shot_ext_capture; |
0f26b20f SK |
3058 | bool triggered = false; |
3059 | int afMode; | |
9dd63e1f | 3060 | |
ad37861e | 3061 | /* dqbuf from sensor */ |
5506cebf | 3062 | ALOGV("Sensor DQbuf start"); |
13d8c7b4 | 3063 | index = cam_int_dqbuf(&(m_camera_info.sensor)); |
52f54308 SK |
3064 | m_requestManager->pushSensorQ(index); |
3065 | ALOGV("Sensor DQbuf done(%d)", index); | |
9dd63e1f | 3066 | shot_ext = (struct camera2_shot_ext *)(m_camera_info.sensor.buffer[index].virt.extP[1]); |
ad37861e | 3067 | |
a15b4e3f SK |
3068 | if (m_nightCaptureCnt != 0) { |
3069 | matchedFrameCnt = m_nightCaptureFrameCnt; | |
e117f756 | 3070 | } else if (m_ctlInfo.flash.m_flashCnt >= IS_FLASH_STATE_CAPTURE) { |
9a710a45 | 3071 | matchedFrameCnt = m_ctlInfo.flash.m_flashFrameCount; |
caea49e6 | 3072 | ALOGV("Skip frame, request is fixed at %d", matchedFrameCnt); |
6f19b6cf | 3073 | } else { |
a15b4e3f SK |
3074 | matchedFrameCnt = m_requestManager->FindFrameCnt(shot_ext); |
3075 | } | |
ad37861e | 3076 | |
5c88d1f2 C |
3077 | if (matchedFrameCnt == -1 && m_vdisBubbleCnt > 0) { |
3078 | matchedFrameCnt = m_vdisDupFrame; | |
3079 | } | |
5c88d1f2 | 3080 | |
a07cbd98 | 3081 | if (matchedFrameCnt != -1) { |
5c88d1f2 C |
3082 | if (m_vdisBubbleCnt == 0) { |
3083 | frameTime = systemTime(); | |
3084 | m_requestManager->RegisterTimestamp(matchedFrameCnt, &frameTime); | |
3085 | m_requestManager->UpdateIspParameters(shot_ext, matchedFrameCnt, &m_ctlInfo); | |
3086 | } | |
9a710a45 | 3087 | |
8e2c2fdb SK |
3088 | if (m_afModeWaitingCnt != 0) { |
3089 | ALOGV("### Af Trigger pulled, waiting for mode change cnt(%d) ", m_afModeWaitingCnt); | |
3090 | m_afModeWaitingCnt --; | |
3091 | if (m_afModeWaitingCnt == 1) { | |
cd13bb78 | 3092 | m_afModeWaitingCnt = 0; |
8e2c2fdb SK |
3093 | OnAfTrigger(m_afPendingTriggerId); |
3094 | } | |
3095 | } | |
308291de | 3096 | m_zoomRatio = (float)m_camera2->getSensorW() / (float)shot_ext->shot.ctl.scaler.cropRegion[2]; |
e4657e32 SK |
3097 | float zoomLeft, zoomTop, zoomWidth, zoomHeight; |
3098 | int crop_x = 0, crop_y = 0, crop_w = 0, crop_h = 0; | |
3099 | ||
3100 | m_getRatioSize(m_camera2->getSensorW(), m_camera2->getSensorH(), | |
5506cebf | 3101 | m_streamThreads[0]->m_parameters.width, m_streamThreads[0]->m_parameters.height, |
e4657e32 SK |
3102 | &crop_x, &crop_y, |
3103 | &crop_w, &crop_h, | |
3104 | 0); | |
3105 | ||
5506cebf | 3106 | if (m_streamThreads[0]->m_parameters.width >= m_streamThreads[0]->m_parameters.height) { |
308291de | 3107 | zoomWidth = m_camera2->getSensorW() / m_zoomRatio; |
e4657e32 | 3108 | zoomHeight = zoomWidth * |
5506cebf | 3109 | m_streamThreads[0]->m_parameters.height / m_streamThreads[0]->m_parameters.width; |
e4657e32 | 3110 | } else { |
308291de | 3111 | zoomHeight = m_camera2->getSensorH() / m_zoomRatio; |
e4657e32 | 3112 | zoomWidth = zoomHeight * |
5506cebf | 3113 | m_streamThreads[0]->m_parameters.width / m_streamThreads[0]->m_parameters.height; |
e4657e32 SK |
3114 | } |
3115 | zoomLeft = (crop_w - zoomWidth) / 2; | |
3116 | zoomTop = (crop_h - zoomHeight) / 2; | |
3117 | ||
3118 | int32_t new_cropRegion[3] = { zoomLeft, zoomTop, zoomWidth }; | |
3119 | ||
5506cebf | 3120 | if (new_cropRegion[0] * 2 + new_cropRegion[2] > (int32_t)m_camera2->getSensorW()) |
308291de | 3121 | new_cropRegion[2]--; |
5506cebf | 3122 | else if (new_cropRegion[0] * 2 + new_cropRegion[2] < (int32_t)m_camera2->getSensorW()) |
308291de C |
3123 | new_cropRegion[2]++; |
3124 | ||
e4657e32 SK |
3125 | shot_ext->shot.ctl.scaler.cropRegion[0] = new_cropRegion[0]; |
3126 | shot_ext->shot.ctl.scaler.cropRegion[1] = new_cropRegion[1]; | |
3127 | shot_ext->shot.ctl.scaler.cropRegion[2] = new_cropRegion[2]; | |
0f26b20f | 3128 | if (m_IsAfModeUpdateRequired) { |
ed4ad5fe | 3129 | ALOGD("### AF Mode change(Mode %d) ", m_afMode); |
0f26b20f SK |
3130 | shot_ext->shot.ctl.aa.afMode = m_afMode; |
3131 | if (m_afMode == AA_AFMODE_CONTINUOUS_VIDEO || m_afMode == AA_AFMODE_CONTINUOUS_PICTURE) { | |
ed4ad5fe | 3132 | ALOGD("### With Automatic triger for continuous modes"); |
0f26b20f SK |
3133 | m_afState = HAL_AFSTATE_STARTED; |
3134 | shot_ext->shot.ctl.aa.afTrigger = 1; | |
3135 | triggered = true; | |
3136 | } | |
3137 | m_IsAfModeUpdateRequired = false; | |
311d52eb | 3138 | // support inifinity focus mode |
fdbaf5d2 | 3139 | if ((m_afMode == AA_AFMODE_MANUAL) && ( shot_ext->shot.ctl.lens.focusDistance == 0)) { |
311d52eb YJ |
3140 | shot_ext->shot.ctl.aa.afMode = AA_AFMODE_INFINITY; |
3141 | shot_ext->shot.ctl.aa.afTrigger = 1; | |
3142 | triggered = true; | |
3143 | } | |
0f26b20f SK |
3144 | if (m_afMode2 != NO_CHANGE) { |
3145 | enum aa_afmode tempAfMode = m_afMode2; | |
3146 | m_afMode2 = NO_CHANGE; | |
3147 | SetAfMode(tempAfMode); | |
3148 | } | |
3149 | } | |
3150 | else { | |
3151 | shot_ext->shot.ctl.aa.afMode = NO_CHANGE; | |
3152 | } | |
3153 | if (m_IsAfTriggerRequired) { | |
e117f756 | 3154 | if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) { |
4a9565ae | 3155 | // flash case |
e117f756 | 3156 | if (m_ctlInfo.flash.m_flashCnt == IS_FLASH_STATE_ON_DONE) { |
4a9565ae YJ |
3157 | if ((m_afMode != AA_AFMODE_AUTO) && (m_afMode != AA_AFMODE_MACRO)) { |
3158 | // Flash is enabled and start AF | |
3159 | m_afTrigger(shot_ext); | |
6f19b6cf | 3160 | } else { |
4a9565ae YJ |
3161 | if (m_ctlInfo.af.m_afTriggerTimeOut == 0) |
3162 | m_afTrigger(shot_ext); | |
6f19b6cf | 3163 | else |
4a9565ae | 3164 | m_ctlInfo.af.m_afTriggerTimeOut--; |
6f19b6cf | 3165 | } |
6f19b6cf YJ |
3166 | } |
3167 | } else { | |
4a9565ae YJ |
3168 | // non-flash case |
3169 | if ((m_afMode != AA_AFMODE_AUTO) && (m_afMode != AA_AFMODE_MACRO)) { | |
3170 | m_afTrigger(shot_ext); | |
6f19b6cf | 3171 | } else { |
4a9565ae YJ |
3172 | if (m_ctlInfo.af.m_afTriggerTimeOut == 0) |
3173 | m_afTrigger(shot_ext); | |
6f19b6cf | 3174 | else |
4a9565ae | 3175 | m_ctlInfo.af.m_afTriggerTimeOut--; |
6f19b6cf | 3176 | } |
36c106c9 | 3177 | } |
4a9565ae | 3178 | } else { |
8e2c2fdb | 3179 | shot_ext->shot.ctl.aa.afTrigger = 0; |
0f26b20f | 3180 | } |
5506cebf SK |
3181 | |
3182 | if (m_wideAspect) { | |
3183 | shot_ext->setfile = ISS_SUB_SCENARIO_VIDEO; | |
3184 | shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 30; | |
15fd8231 | 3185 | shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30; |
5506cebf SK |
3186 | } else { |
3187 | shot_ext->setfile = ISS_SUB_SCENARIO_STILL; | |
15fd8231 | 3188 | } |
0f26b20f SK |
3189 | if (triggered) |
3190 | shot_ext->shot.ctl.aa.afTrigger = 1; | |
3191 | ||
3192 | // TODO : check collision with AFMode Update | |
3193 | if (m_IsAfLockRequired) { | |
3194 | shot_ext->shot.ctl.aa.afMode = AA_AFMODE_OFF; | |
3195 | m_IsAfLockRequired = false; | |
3196 | } | |
4ed2f103 | 3197 | ALOGV("### Isp Qbuf start(%d) count (%d), SCP(%d) SCC(%d) DIS(%d) shot_size(%d)", |
ad37861e SK |
3198 | index, |
3199 | shot_ext->shot.ctl.request.frameCount, | |
3200 | shot_ext->request_scp, | |
3201 | shot_ext->request_scc, | |
3202 | shot_ext->dis_bypass, sizeof(camera2_shot)); | |
4a9565ae YJ |
3203 | |
3204 | // update AF region | |
3205 | m_updateAfRegion(shot_ext); | |
3206 | ||
6bf36b60 SK |
3207 | if (shot_ext->shot.ctl.aa.sceneMode == AA_SCENE_MODE_NIGHT |
3208 | && shot_ext->shot.ctl.aa.aeMode == AA_AEMODE_LOCKED) | |
3209 | shot_ext->shot.ctl.aa.aeMode = AA_AEMODE_ON; | |
a15b4e3f | 3210 | if (m_nightCaptureCnt == 0) { |
5506cebf | 3211 | if (shot_ext->shot.ctl.aa.captureIntent == AA_CAPTURE_INTENT_STILL_CAPTURE |
a15b4e3f SK |
3212 | && shot_ext->shot.ctl.aa.sceneMode == AA_SCENE_MODE_NIGHT) { |
3213 | shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_NIGHT_CAPTURE; | |
1c5e692d | 3214 | shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 2; |
a15b4e3f SK |
3215 | shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30; |
3216 | m_nightCaptureCnt = 4; | |
3217 | m_nightCaptureFrameCnt = matchedFrameCnt; | |
3218 | shot_ext->request_scc = 0; | |
3219 | } | |
3220 | } | |
3221 | else if (m_nightCaptureCnt == 1) { | |
3222 | shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_NIGHT_CAPTURE; | |
1c5e692d SK |
3223 | shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 30; |
3224 | shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30; | |
a15b4e3f | 3225 | m_nightCaptureCnt--; |
2f4d175d | 3226 | m_nightCaptureFrameCnt = 0; |
a15b4e3f SK |
3227 | shot_ext->request_scc = 1; |
3228 | } | |
6bf36b60 SK |
3229 | else if (m_nightCaptureCnt == 2) { |
3230 | shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_NIGHT_CAPTURE; | |
1c5e692d SK |
3231 | shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 2; |
3232 | shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30; | |
6bf36b60 SK |
3233 | m_nightCaptureCnt--; |
3234 | shot_ext->request_scc = 0; | |
3235 | } | |
1c5e692d | 3236 | else if (m_nightCaptureCnt == 3) { |
a15b4e3f | 3237 | shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_NIGHT_CAPTURE; |
1c5e692d SK |
3238 | shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 2; |
3239 | shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30; | |
3240 | m_nightCaptureCnt--; | |
3241 | shot_ext->request_scc = 0; | |
3242 | } | |
3243 | else if (m_nightCaptureCnt == 4) { | |
3244 | shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_NIGHT_CAPTURE; | |
3245 | shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 2; | |
3246 | shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30; | |
a15b4e3f SK |
3247 | m_nightCaptureCnt--; |
3248 | shot_ext->request_scc = 0; | |
3249 | } | |
6f19b6cf YJ |
3250 | |
3251 | // Flash mode | |
3252 | // Keep and Skip request_scc = 1 at flash enable mode to operate flash sequence | |
e117f756 YJ |
3253 | if ((m_ctlInfo.flash.i_flashMode >= AA_AEMODE_ON_AUTO_FLASH) |
3254 | && (shot_ext->shot.ctl.aa.captureIntent == AA_CAPTURE_INTENT_STILL_CAPTURE) | |
6f19b6cf | 3255 | && (m_cameraId == 0)) { |
e117f756 YJ |
3256 | if (!m_ctlInfo.flash.m_flashDecisionResult) { |
3257 | m_ctlInfo.flash.m_flashEnableFlg = false; | |
3258 | m_ctlInfo.flash.m_afFlashDoneFlg = false; | |
3259 | m_ctlInfo.flash.m_flashCnt = 0; | |
3260 | } else if ((m_ctlInfo.flash.m_flashCnt == IS_FLASH_STATE_AUTO_DONE) || (m_ctlInfo.flash.m_flashCnt == IS_FLASH_STATE_AUTO_OFF)) { | |
6f19b6cf YJ |
3261 | ALOGE("(%s): [Flash] Flash capture start : skip request scc 1#####", __FUNCTION__); |
3262 | shot_ext->request_scc = 0; | |
9a710a45 YJ |
3263 | m_ctlInfo.flash.m_flashFrameCount = matchedFrameCnt; |
3264 | m_ctlInfo.flash.m_flashEnableFlg = true; | |
e117f756 YJ |
3265 | m_ctlInfo.flash.m_afFlashDoneFlg = false; |
3266 | m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE; | |
73f5ad60 | 3267 | } else if (m_ctlInfo.flash.m_flashCnt < IS_FLASH_STATE_AUTO_DONE) { |
4a9565ae | 3268 | ALOGE("(%s): [Flash] Flash capture Error!!!!!!", __FUNCTION__); |
caea49e6 | 3269 | } |
4a9565ae YJ |
3270 | } else if (shot_ext->shot.ctl.aa.captureIntent == AA_CAPTURE_INTENT_STILL_CAPTURE) { |
3271 | m_ctlInfo.flash.m_flashDecisionResult = false; | |
caea49e6 YJ |
3272 | } |
3273 | ||
3274 | // TODO : set torch mode for video recording. need to find proper position. | |
3275 | // m_wideAspect is will be changed to recording hint | |
3276 | if ((shot_ext->shot.ctl.flash.flashMode == CAM2_FLASH_MODE_SINGLE) && m_wideAspect) { | |
3277 | shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_TORCH; | |
3278 | shot_ext->shot.ctl.flash.firingPower = 10; | |
3279 | m_ctlInfo.flash.m_flashTorchMode = true; | |
3280 | } else if (m_wideAspect){ | |
3281 | shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_OFF; | |
3282 | shot_ext->shot.ctl.flash.firingPower = 0; | |
3283 | m_ctlInfo.flash.m_flashTorchMode = false; | |
3284 | } else { | |
3285 | if (m_ctlInfo.flash.m_flashTorchMode) { | |
3286 | shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_OFF; | |
3287 | shot_ext->shot.ctl.flash.firingPower = 0; | |
3288 | m_ctlInfo.flash.m_flashTorchMode = false; | |
3289 | } else { | |
3290 | shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_NOP; | |
6f19b6cf YJ |
3291 | } |
3292 | } | |
3293 | ||
e117f756 YJ |
3294 | if (m_ctlInfo.flash.m_flashEnableFlg) { |
3295 | m_preCaptureListenerSensor(shot_ext); | |
3296 | m_preCaptureSetter(shot_ext); | |
3297 | } | |
3298 | ||
5506cebf SK |
3299 | if (shot_ext->isReprocessing) { |
3300 | ALOGE("(%s): Reprocess request ", __FUNCTION__); | |
3301 | m_currentReprocessOutStreams = shot_ext->shot.ctl.request.outputStreams[0]; | |
3302 | shot_ext->request_scp = 0; | |
3303 | shot_ext->request_scc = 0; | |
3304 | m_reprocessingFrameCnt = shot_ext->shot.ctl.request.frameCount; | |
3305 | memcpy(&m_jpegMetadata, &shot_ext->shot, sizeof(struct camera2_shot)); | |
3306 | m_streamThreads[1]->SetSignal(SIGNAL_STREAM_REPROCESSING_START); | |
3307 | } | |
fdbaf5d2 | 3308 | |
275c9744 YJ |
3309 | // face af mode setting in case of face priority scene mode |
3310 | if (m_ctlInfo.scene.prevSceneMode != shot_ext->shot.ctl.aa.sceneMode) { | |
3311 | ALOGV("(%s): Scene mode changed", __FUNCTION__); | |
3312 | m_ctlInfo.scene.prevSceneMode = shot_ext->shot.ctl.aa.sceneMode; | |
3313 | m_sceneModeFaceSetter(shot_ext, 0); | |
3314 | } else if (triggered) { | |
3315 | // re-setting after single AF | |
3316 | m_sceneModeFaceSetter(shot_ext, 1); | |
3317 | } | |
3318 | ||
8e2c2fdb SK |
3319 | ALOGV("(%s): queued aa(%d) aemode(%d) awb(%d) afmode(%d) trigger(%d)", __FUNCTION__, |
3320 | (int)(shot_ext->shot.ctl.aa.mode), (int)(shot_ext->shot.ctl.aa.aeMode), | |
3321 | (int)(shot_ext->shot.ctl.aa.awbMode), (int)(shot_ext->shot.ctl.aa.afMode), | |
3322 | (int)(shot_ext->shot.ctl.aa.afTrigger)); | |
7d0efb59 | 3323 | |
5c88d1f2 C |
3324 | if (m_vdisBubbleCnt > 0 && m_vdisDupFrame == matchedFrameCnt) { |
3325 | shot_ext->dis_bypass = 1; | |
3326 | shot_ext->request_scp = 0; | |
3327 | shot_ext->request_scc = 0; | |
3328 | m_vdisBubbleCnt--; | |
3329 | matchedFrameCnt = -1; | |
3330 | } else { | |
3331 | m_vdisDupFrame = matchedFrameCnt; | |
3332 | } | |
f9a06609 SK |
3333 | if (m_scpForceSuspended) |
3334 | shot_ext->request_scc = 0; | |
5c88d1f2 | 3335 | |
5506cebf | 3336 | uint32_t current_scp = shot_ext->request_scp; |
a85ec381 | 3337 | uint32_t current_scc = shot_ext->request_scc; |
7d0efb59 | 3338 | |
c0b6e17e | 3339 | if (shot_ext->shot.dm.request.frameCount == 0) { |
4aa4d739 | 3340 | CAM_LOGE("ERR(%s): dm.request.frameCount = %d", __FUNCTION__, shot_ext->shot.dm.request.frameCount); |
c0b6e17e C |
3341 | } |
3342 | ||
ad37861e | 3343 | cam_int_qbuf(&(m_camera_info.isp), index); |
13d8c7b4 | 3344 | |
ad37861e SK |
3345 | ALOGV("### isp DQBUF start"); |
3346 | index_isp = cam_int_dqbuf(&(m_camera_info.isp)); | |
ef6f83ca | 3347 | |
ad37861e | 3348 | shot_ext = (struct camera2_shot_ext *)(m_camera_info.isp.buffer[index_isp].virt.extP[1]); |
13d8c7b4 | 3349 | |
e117f756 YJ |
3350 | if (m_ctlInfo.flash.m_flashEnableFlg) |
3351 | m_preCaptureListenerISP(shot_ext); | |
9a710a45 | 3352 | |
5506cebf | 3353 | ALOGV("### Isp DQbuf done(%d) count (%d), SCP(%d) SCC(%d) dis_bypass(%d) shot_size(%d)", |
ad37861e SK |
3354 | index, |
3355 | shot_ext->shot.ctl.request.frameCount, | |
3356 | shot_ext->request_scp, | |
3357 | shot_ext->request_scc, | |
3358 | shot_ext->dis_bypass, sizeof(camera2_shot)); | |
fdbaf5d2 | 3359 | |
ef6f83ca | 3360 | ALOGV("(%s): DM aa(%d) aemode(%d) awb(%d) afmode(%d)", __FUNCTION__, |
8e2c2fdb SK |
3361 | (int)(shot_ext->shot.dm.aa.mode), (int)(shot_ext->shot.dm.aa.aeMode), |
3362 | (int)(shot_ext->shot.dm.aa.awbMode), | |
3363 | (int)(shot_ext->shot.dm.aa.afMode)); | |
13d8c7b4 | 3364 | |
2f4d175d | 3365 | #ifndef ENABLE_FRAME_SYNC |
5506cebf | 3366 | m_currentOutputStreams = shot_ext->shot.ctl.request.outputStreams[0]; |
2f4d175d | 3367 | #endif |
2adfa429 | 3368 | |
a85ec381 SK |
3369 | if (current_scc != shot_ext->request_scc) { |
3370 | ALOGD("(%s): scc frame drop1 request_scc(%d to %d)", | |
3371 | __FUNCTION__, current_scc, shot_ext->request_scc); | |
3372 | m_requestManager->NotifyStreamOutput(shot_ext->shot.ctl.request.frameCount); | |
3373 | } | |
1c5e692d SK |
3374 | if (shot_ext->request_scc) { |
3375 | ALOGV("send SIGNAL_STREAM_DATA_COMING (SCC)"); | |
3376 | memcpy(&m_jpegMetadata, &shot_ext->shot, sizeof(struct camera2_shot)); | |
3377 | m_streamThreads[1]->SetSignal(SIGNAL_STREAM_DATA_COMING); | |
3378 | } | |
7d0efb59 | 3379 | if (current_scp != shot_ext->request_scp) { |
a85ec381 | 3380 | ALOGD("(%s): scp frame drop1 request_scp(%d to %d)", |
7d0efb59 | 3381 | __FUNCTION__, current_scp, shot_ext->request_scp); |
a85ec381 SK |
3382 | m_requestManager->NotifyStreamOutput(shot_ext->shot.ctl.request.frameCount); |
3383 | } | |
3384 | if (shot_ext->request_scp) { | |
3385 | ALOGV("send SIGNAL_STREAM_DATA_COMING (SCP)"); | |
3386 | m_streamThreads[0]->SetSignal(SIGNAL_STREAM_DATA_COMING); | |
7d0efb59 | 3387 | } |
c15a6b00 | 3388 | |
ad37861e SK |
3389 | ALOGV("(%s): SCP_CLOSING check sensor(%d) scc(%d) scp(%d) ", __FUNCTION__, |
3390 | shot_ext->request_sensor, shot_ext->request_scc, shot_ext->request_scp); | |
3391 | if (shot_ext->request_scc + shot_ext->request_scp + shot_ext->request_sensor == 0) { | |
3392 | ALOGV("(%s): SCP_CLOSING check OK ", __FUNCTION__); | |
3393 | m_scp_closed = true; | |
3394 | } | |
3395 | else | |
3396 | m_scp_closed = false; | |
c15a6b00 | 3397 | |
fd2d78a2 SK |
3398 | if (!shot_ext->fd_bypass) { |
3399 | /* FD orientation axis transformation */ | |
3400 | for (int i=0; i < CAMERA2_MAX_FACES; i++) { | |
3401 | if (shot_ext->shot.dm.stats.faceRectangles[i][0] > 0) | |
3402 | shot_ext->shot.dm.stats.faceRectangles[i][0] = (m_camera2->m_curCameraInfo->sensorW | |
ef6f83ca | 3403 | * shot_ext->shot.dm.stats.faceRectangles[i][0]) |
5506cebf | 3404 | / m_streamThreads[0].get()->m_parameters.width; |
fd2d78a2 SK |
3405 | if (shot_ext->shot.dm.stats.faceRectangles[i][1] > 0) |
3406 | shot_ext->shot.dm.stats.faceRectangles[i][1] = (m_camera2->m_curCameraInfo->sensorH | |
ef6f83ca | 3407 | * shot_ext->shot.dm.stats.faceRectangles[i][1]) |
5506cebf | 3408 | / m_streamThreads[0].get()->m_parameters.height; |
fd2d78a2 SK |
3409 | if (shot_ext->shot.dm.stats.faceRectangles[i][2] > 0) |
3410 | shot_ext->shot.dm.stats.faceRectangles[i][2] = (m_camera2->m_curCameraInfo->sensorW | |
ef6f83ca | 3411 | * shot_ext->shot.dm.stats.faceRectangles[i][2]) |
5506cebf | 3412 | / m_streamThreads[0].get()->m_parameters.width; |
fd2d78a2 SK |
3413 | if (shot_ext->shot.dm.stats.faceRectangles[i][3] > 0) |
3414 | shot_ext->shot.dm.stats.faceRectangles[i][3] = (m_camera2->m_curCameraInfo->sensorH | |
ef6f83ca | 3415 | * shot_ext->shot.dm.stats.faceRectangles[i][3]) |
5506cebf | 3416 | / m_streamThreads[0].get()->m_parameters.height; |
fd2d78a2 SK |
3417 | } |
3418 | } | |
275c9744 | 3419 | // At flash off mode, capture can be done as zsl capture |
47d3a1ea YJ |
3420 | if (m_ctlInfo.flash.i_flashMode == AA_AEMODE_ON) |
3421 | shot_ext->shot.dm.aa.aeState = AE_STATE_CONVERGED; | |
3422 | ||
275c9744 YJ |
3423 | // At scene mode face priority |
3424 | if (shot_ext->shot.ctl.aa.sceneMode == AA_SCENE_MODE_FACE_PRIORITY) { | |
3425 | if (shot_ext->shot.dm.aa.afMode == AA_AFMODE_CONTINUOUS_PICTURE_FACE) | |
3426 | shot_ext->shot.dm.aa.afMode == AA_AFMODE_CONTINUOUS_PICTURE; | |
3427 | else if (shot_ext->shot.dm.aa.afMode == AA_AFMODE_CONTINUOUS_VIDEO_FACE) | |
3428 | shot_ext->shot.dm.aa.afMode == AA_AFMODE_CONTINUOUS_PICTURE; | |
3429 | } | |
3430 | ||
e117f756 | 3431 | if (m_nightCaptureCnt == 0 && (m_ctlInfo.flash.m_flashCnt < IS_FLASH_STATE_CAPTURE)) { |
a15b4e3f SK |
3432 | m_requestManager->ApplyDynamicMetadata(shot_ext); |
3433 | } | |
0f26b20f | 3434 | OnAfNotification(shot_ext->shot.dm.aa.afState); |
73f5ad60 | 3435 | OnPrecaptureMeteringNotificationISP(); |
ad37861e | 3436 | } |
13d8c7b4 | 3437 | |
52f54308 SK |
3438 | index = m_requestManager->popSensorQ(); |
3439 | if(index < 0){ | |
3440 | ALOGE("sensorQ is empty"); | |
3441 | return; | |
3442 | } | |
3443 | ||
0f26b20f | 3444 | processingReqIndex = m_requestManager->MarkProcessingRequest(&(m_camera_info.sensor.buffer[index]), &afMode); |
ef6f83ca | 3445 | if (processingReqIndex != -1) |
0f26b20f | 3446 | SetAfMode((enum aa_afmode)afMode); |
ef6f83ca | 3447 | |
9dd63e1f | 3448 | |
ad37861e SK |
3449 | shot_ext = (struct camera2_shot_ext *)(m_camera_info.sensor.buffer[index].virt.extP[1]); |
3450 | if (m_scp_closing || m_scp_closed) { | |
3451 | ALOGD("(%s): SCP_CLOSING(%d) SCP_CLOSED(%d)", __FUNCTION__, m_scp_closing, m_scp_closed); | |
3452 | shot_ext->request_scc = 0; | |
3453 | shot_ext->request_scp = 0; | |
3454 | shot_ext->request_sensor = 0; | |
3455 | } | |
ad37861e | 3456 | cam_int_qbuf(&(m_camera_info.sensor), index); |
52f54308 | 3457 | ALOGV("Sensor Qbuf done(%d)", index); |
c15a6b00 | 3458 | |
ef6f83ca SK |
3459 | if (!m_scp_closing |
3460 | && ((matchedFrameCnt == -1) || (processingReqIndex == -1))){ | |
da7ca692 | 3461 | ALOGV("make bubble shot: matchedFramcnt(%d) processingReqIndex(%d)", |
ef6f83ca | 3462 | matchedFrameCnt, processingReqIndex); |
ad37861e SK |
3463 | selfThread->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING); |
3464 | } | |
c15a6b00 | 3465 | } |
ad37861e SK |
3466 | return; |
3467 | } | |
9dd63e1f | 3468 | |
86646da4 SK |
3469 | void ExynosCameraHWInterface2::m_streamBufferInit(SignalDrivenThread *self) |
3470 | { | |
3471 | uint32_t currentSignal = self->GetProcessingSignal(); | |
3472 | StreamThread * selfThread = ((StreamThread*)self); | |
3473 | stream_parameters_t *selfStreamParms = &(selfThread->m_parameters); | |
5506cebf SK |
3474 | node_info_t *currentNode = selfStreamParms->node; |
3475 | substream_parameters_t *subParms; | |
86646da4 SK |
3476 | buffer_handle_t * buf = NULL; |
3477 | status_t res; | |
3478 | void *virtAddr[3]; | |
3479 | int i, j; | |
3480 | int index; | |
3481 | nsecs_t timestamp; | |
3482 | ||
3483 | if (!(selfThread->m_isBufferInit)) | |
3484 | { | |
3485 | for ( i=0 ; i < selfStreamParms->numSvcBuffers; i++) { | |
3486 | res = selfStreamParms->streamOps->dequeue_buffer(selfStreamParms->streamOps, &buf); | |
3487 | if (res != NO_ERROR || buf == NULL) { | |
3488 | ALOGE("ERR(%s): Init: unable to dequeue buffer : %d",__FUNCTION__ , res); | |
3489 | return; | |
3490 | } | |
3491 | ALOGV("DEBUG(%s): got buf(%x) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf), | |
3492 | ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts); | |
3493 | ||
5506cebf | 3494 | index = selfThread->findBufferIndex(buf); |
86646da4 SK |
3495 | if (index == -1) { |
3496 | ALOGE("ERR(%s): could not find buffer index", __FUNCTION__); | |
3497 | } | |
3498 | else { | |
3499 | ALOGV("DEBUG(%s): found buffer index[%d] - status(%d)", | |
3500 | __FUNCTION__, index, selfStreamParms->svcBufStatus[index]); | |
3501 | if (selfStreamParms->svcBufStatus[index]== REQUIRES_DQ_FROM_SVC) | |
3502 | selfStreamParms->svcBufStatus[index] = ON_DRIVER; | |
3503 | else if (selfStreamParms->svcBufStatus[index]== ON_SERVICE) | |
3504 | selfStreamParms->svcBufStatus[index] = ON_HAL; | |
3505 | else { | |
3506 | ALOGV("DBG(%s): buffer status abnormal (%d) " | |
3507 | , __FUNCTION__, selfStreamParms->svcBufStatus[index]); | |
3508 | } | |
3509 | selfStreamParms->numSvcBufsInHal++; | |
86646da4 | 3510 | } |
5506cebf | 3511 | selfStreamParms->bufIndex = 0; |
86646da4 SK |
3512 | } |
3513 | selfThread->m_isBufferInit = true; | |
3514 | } | |
5506cebf SK |
3515 | for (int i = 0 ; i < NUM_MAX_SUBSTREAM ; i++) { |
3516 | if (selfThread->m_attachedSubStreams[i].streamId == -1) | |
3517 | continue; | |
86646da4 | 3518 | |
5506cebf SK |
3519 | subParms = &m_subStreams[selfThread->m_attachedSubStreams[i].streamId]; |
3520 | if (subParms->type && subParms->needBufferInit) { | |
3521 | ALOGV("(%s): [subStream] (id:%d) Buffer Initialization numsvcbuf(%d)", | |
3522 | __FUNCTION__, selfThread->m_attachedSubStreams[i].streamId, subParms->numSvcBuffers); | |
74d78ebe SK |
3523 | int checkingIndex = 0; |
3524 | bool found = false; | |
5506cebf SK |
3525 | for ( i = 0 ; i < subParms->numSvcBuffers; i++) { |
3526 | res = subParms->streamOps->dequeue_buffer(subParms->streamOps, &buf); | |
74d78ebe SK |
3527 | if (res != NO_ERROR || buf == NULL) { |
3528 | ALOGE("ERR(%s): Init: unable to dequeue buffer : %d",__FUNCTION__ , res); | |
3529 | return; | |
3530 | } | |
5506cebf SK |
3531 | subParms->numSvcBufsInHal++; |
3532 | ALOGV("DEBUG(%s): [subStream] got buf(%x) bufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf), | |
3533 | subParms->numSvcBufsInHal, ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts); | |
74d78ebe SK |
3534 | |
3535 | if (m_grallocHal->lock(m_grallocHal, *buf, | |
5506cebf SK |
3536 | subParms->usage, 0, 0, |
3537 | subParms->width, subParms->height, virtAddr) != 0) { | |
74d78ebe SK |
3538 | ALOGE("ERR(%s): could not obtain gralloc buffer", __FUNCTION__); |
3539 | } | |
3540 | else { | |
5506cebf | 3541 | ALOGV("DEBUG(%s): [subStream] locked img buf plane0(%x) plane1(%x) plane2(%x)", |
74d78ebe SK |
3542 | __FUNCTION__, (unsigned int)virtAddr[0], (unsigned int)virtAddr[1], (unsigned int)virtAddr[2]); |
3543 | } | |
3544 | found = false; | |
5506cebf SK |
3545 | for (checkingIndex = 0; checkingIndex < subParms->numSvcBuffers ; checkingIndex++) { |
3546 | if (subParms->svcBufHandle[checkingIndex] == *buf ) { | |
74d78ebe SK |
3547 | found = true; |
3548 | break; | |
3549 | } | |
3550 | } | |
5506cebf | 3551 | ALOGV("DEBUG(%s): [subStream] found(%d) - index[%d]", __FUNCTION__, found, checkingIndex); |
74d78ebe SK |
3552 | if (!found) break; |
3553 | ||
3554 | index = checkingIndex; | |
3555 | ||
3556 | if (index == -1) { | |
3557 | ALOGV("ERR(%s): could not find buffer index", __FUNCTION__); | |
3558 | } | |
3559 | else { | |
3560 | ALOGV("DEBUG(%s): found buffer index[%d] - status(%d)", | |
5506cebf SK |
3561 | __FUNCTION__, index, subParms->svcBufStatus[index]); |
3562 | if (subParms->svcBufStatus[index]== ON_SERVICE) | |
3563 | subParms->svcBufStatus[index] = ON_HAL; | |
74d78ebe SK |
3564 | else { |
3565 | ALOGV("DBG(%s): buffer status abnormal (%d) " | |
5506cebf | 3566 | , __FUNCTION__, subParms->svcBufStatus[index]); |
74d78ebe | 3567 | } |
5506cebf | 3568 | if (*buf != subParms->svcBufHandle[index]) |
74d78ebe SK |
3569 | ALOGV("DBG(%s): different buf_handle index ", __FUNCTION__); |
3570 | else | |
3571 | ALOGV("DEBUG(%s): same buf_handle index", __FUNCTION__); | |
3572 | } | |
5506cebf | 3573 | subParms->svcBufIndex = 0; |
74d78ebe | 3574 | } |
5506cebf SK |
3575 | if (subParms->type == SUBSTREAM_TYPE_JPEG) { |
3576 | m_resizeBuf.size.extS[0] = ALIGN(subParms->width, 16) * ALIGN(subParms->height, 16) * 2; | |
3577 | m_resizeBuf.size.extS[1] = 0; | |
3578 | m_resizeBuf.size.extS[2] = 0; | |
3579 | ||
3580 | if (allocCameraMemory(m_ionCameraClient, &m_resizeBuf, 1) == -1) { | |
3581 | ALOGE("ERR(%s): Failed to allocate resize buf", __FUNCTION__); | |
3582 | } | |
3583 | } | |
3584 | if (subParms->type == SUBSTREAM_TYPE_PRVCB) { | |
3585 | m_getAlignedYUVSize(HAL_PIXEL_FORMAT_2_V4L2_PIX(subParms->internalFormat), subParms->width, | |
3586 | subParms->height, &m_previewCbBuf); | |
86646da4 | 3587 | |
5506cebf SK |
3588 | if (allocCameraMemory(m_ionCameraClient, &m_previewCbBuf, subParms->internalPlanes) == -1) { |
3589 | ALOGE("ERR(%s): Failed to allocate prvcb buf", __FUNCTION__); | |
3590 | } | |
3591 | } | |
3592 | subParms->needBufferInit= false; | |
3593 | } | |
3594 | } | |
86646da4 SK |
3595 | } |
3596 | ||
c15a6b00 JS |
3597 | void ExynosCameraHWInterface2::m_streamThreadInitialize(SignalDrivenThread * self) |
3598 | { | |
13d8c7b4 SK |
3599 | StreamThread * selfThread = ((StreamThread*)self); |
3600 | ALOGV("DEBUG(%s): ", __FUNCTION__ ); | |
3601 | memset(&(selfThread->m_parameters), 0, sizeof(stream_parameters_t)); | |
3602 | selfThread->m_isBufferInit = false; | |
5506cebf SK |
3603 | for (int i = 0 ; i < NUM_MAX_SUBSTREAM ; i++) { |
3604 | selfThread->m_attachedSubStreams[i].streamId = -1; | |
3605 | selfThread->m_attachedSubStreams[i].priority = 0; | |
3606 | } | |
c15a6b00 JS |
3607 | return; |
3608 | } | |
3609 | ||
5506cebf SK |
3610 | int ExynosCameraHWInterface2::m_runSubStreamFunc(StreamThread *selfThread, ExynosBuffer *srcImageBuf, |
3611 | int stream_id, nsecs_t frameTimeStamp) | |
c15a6b00 | 3612 | { |
5506cebf SK |
3613 | substream_parameters_t *subParms = &m_subStreams[stream_id]; |
3614 | ||
3615 | switch (stream_id) { | |
c15a6b00 | 3616 | |
5506cebf SK |
3617 | case STREAM_ID_JPEG: |
3618 | return m_jpegCreator(selfThread, srcImageBuf, frameTimeStamp); | |
c15a6b00 | 3619 | |
5506cebf SK |
3620 | case STREAM_ID_RECORD: |
3621 | return m_recordCreator(selfThread, srcImageBuf, frameTimeStamp); | |
3622 | ||
3623 | case STREAM_ID_PRVCB: | |
3624 | return m_prvcbCreator(selfThread, srcImageBuf, frameTimeStamp); | |
3625 | ||
3626 | default: | |
3627 | return 0; | |
c15a6b00 | 3628 | } |
5506cebf SK |
3629 | } |
3630 | void ExynosCameraHWInterface2::m_streamFunc_direct(SignalDrivenThread *self) | |
3631 | { | |
3632 | uint32_t currentSignal = self->GetProcessingSignal(); | |
3633 | StreamThread * selfThread = ((StreamThread*)self); | |
3634 | stream_parameters_t *selfStreamParms = &(selfThread->m_parameters); | |
3635 | node_info_t *currentNode = selfStreamParms->node; | |
3636 | int i = 0; | |
3637 | nsecs_t frameTimeStamp; | |
13d8c7b4 | 3638 | |
b55ed664 | 3639 | if (currentSignal & SIGNAL_THREAD_RELEASE) { |
5506cebf | 3640 | CAM_LOGD("(%s): [%d] START SIGNAL_THREAD_RELEASE", __FUNCTION__, selfThread->m_index); |
b55ed664 SK |
3641 | |
3642 | if (selfThread->m_isBufferInit) { | |
a8be0011 SK |
3643 | if (!(currentNode->fd == m_camera_info.capture.fd && m_camera_info.capture.status == false)) { |
3644 | ALOGV("(%s): [%d] calling streamoff (fd:%d)", __FUNCTION__, | |
5506cebf | 3645 | selfThread->m_index, currentNode->fd); |
a8be0011 SK |
3646 | if (cam_int_streamoff(currentNode) < 0 ) { |
3647 | ALOGE("ERR(%s): stream off fail", __FUNCTION__); | |
3648 | } | |
3649 | ALOGV("(%s): [%d] streamoff done and calling reqbuf 0 (fd:%d)", __FUNCTION__, | |
3650 | selfThread->m_index, currentNode->fd); | |
3651 | currentNode->buffers = 0; | |
3652 | cam_int_reqbufs(currentNode); | |
3653 | ALOGV("(%s): [%d] reqbuf 0 DONE (fd:%d)", __FUNCTION__, | |
3654 | selfThread->m_index, currentNode->fd); | |
3655 | } | |
b55ed664 SK |
3656 | } |
3657 | #ifdef ENABLE_FRAME_SYNC | |
3658 | // free metabuffers | |
5506cebf SK |
3659 | for (i = 0; i < NUM_MAX_CAMERA_BUFFERS; i++) |
3660 | if (selfStreamParms->metaBuffers[i].fd.extFd[0] != 0) { | |
b55ed664 SK |
3661 | freeCameraMemory(&(selfStreamParms->metaBuffers[i]), 1); |
3662 | selfStreamParms->metaBuffers[i].fd.extFd[0] = 0; | |
3663 | selfStreamParms->metaBuffers[i].size.extS[0] = 0; | |
3664 | } | |
3665 | #endif | |
3666 | selfThread->m_isBufferInit = false; | |
b55ed664 | 3667 | selfThread->m_releasing = false; |
5506cebf SK |
3668 | selfThread->m_activated = false; |
3669 | ALOGV("(%s): [%d] END SIGNAL_THREAD_RELEASE", __FUNCTION__, selfThread->m_index); | |
3670 | return; | |
3671 | } | |
3672 | if (currentSignal & SIGNAL_STREAM_REPROCESSING_START) { | |
3673 | status_t res; | |
3674 | buffer_handle_t * buf = NULL; | |
3675 | bool found = false; | |
3676 | ALOGV("(%s): streamthread[%d] START SIGNAL_STREAM_REPROCESSING_START", | |
3677 | __FUNCTION__, selfThread->m_index); | |
3678 | res = m_reprocessOps->acquire_buffer(m_reprocessOps, &buf); | |
3679 | if (res != NO_ERROR || buf == NULL) { | |
3680 | ALOGE("ERR(%s): [reprocess] unable to acquire_buffer : %d",__FUNCTION__ , res); | |
3681 | return; | |
3682 | } | |
3683 | const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf); | |
3684 | int checkingIndex = 0; | |
3685 | for (checkingIndex = 0; checkingIndex < selfStreamParms->numSvcBuffers ; checkingIndex++) { | |
3686 | if (priv_handle->fd == selfStreamParms->svcBuffers[checkingIndex].fd.extFd[0] ) { | |
3687 | found = true; | |
3688 | break; | |
3689 | } | |
3690 | } | |
3691 | ALOGV("DEBUG(%s): dequeued buf %x => found(%d) index(%d) ", | |
3692 | __FUNCTION__, (unsigned int)buf, found, checkingIndex); | |
b55ed664 | 3693 | |
5506cebf SK |
3694 | if (!found) return; |
3695 | ||
3696 | for (int i = 0 ; i < NUM_MAX_SUBSTREAM ; i++) { | |
3697 | if (selfThread->m_attachedSubStreams[i].streamId == -1) | |
3698 | continue; | |
3699 | ||
3700 | #ifdef ENABLE_FRAME_SYNC | |
a8be0011 | 3701 | frameTimeStamp = m_requestManager->GetTimestampByFrameCnt(m_reprocessingFrameCnt); |
5506cebf SK |
3702 | m_requestManager->NotifyStreamOutput(m_reprocessingFrameCnt); |
3703 | #else | |
3704 | frameTimeStamp = m_requestManager->GetTimestamp(m_requestManager->GetFrameIndex()); | |
3705 | #endif | |
3706 | if (m_currentReprocessOutStreams & (1<<selfThread->m_attachedSubStreams[i].streamId)) | |
3707 | m_runSubStreamFunc(selfThread, &(selfStreamParms->svcBuffers[checkingIndex]), | |
3708 | selfThread->m_attachedSubStreams[i].streamId, frameTimeStamp); | |
3709 | } | |
3710 | ||
3711 | res = m_reprocessOps->release_buffer(m_reprocessOps, buf); | |
3712 | if (res != NO_ERROR) { | |
3713 | ALOGE("ERR(%s): [reprocess] unable to release_buffer : %d",__FUNCTION__ , res); | |
3714 | return; | |
3715 | } | |
3716 | ALOGV("(%s): streamthread[%d] END SIGNAL_STREAM_REPROCESSING_START", | |
3717 | __FUNCTION__,selfThread->m_index); | |
b55ed664 SK |
3718 | |
3719 | return; | |
3720 | } | |
13d8c7b4 | 3721 | if (currentSignal & SIGNAL_STREAM_DATA_COMING) { |
c15a6b00 | 3722 | buffer_handle_t * buf = NULL; |
5506cebf | 3723 | status_t res = 0; |
c15a6b00 JS |
3724 | int i, j; |
3725 | int index; | |
ad37861e | 3726 | nsecs_t timestamp; |
5506cebf | 3727 | #ifdef ENABLE_FRAME_SYNC |
feb7df4c | 3728 | camera2_stream *frame; |
2f4d175d | 3729 | uint8_t currentOutputStreams; |
a85ec381 | 3730 | bool directOutputEnabled = false; |
5506cebf | 3731 | #endif |
c0b6e17e | 3732 | int numOfUndqbuf = 0; |
c0b6e17e | 3733 | |
5506cebf | 3734 | ALOGV("(%s): streamthread[%d] START SIGNAL_STREAM_DATA_COMING", __FUNCTION__,selfThread->m_index); |
ad37861e | 3735 | |
86646da4 | 3736 | m_streamBufferInit(self); |
c15a6b00 | 3737 | |
b5237e6b | 3738 | do { |
5506cebf SK |
3739 | ALOGV("DEBUG(%s): streamthread[%d] type(%d) DQBUF START ",__FUNCTION__, |
3740 | selfThread->m_index, selfThread->streamType); | |
b5237e6b | 3741 | |
feb7df4c | 3742 | #ifdef ENABLE_FRAME_SYNC |
5506cebf SK |
3743 | selfStreamParms->bufIndex = cam_int_dqbuf(currentNode, selfStreamParms->planes + selfStreamParms->metaPlanes); |
3744 | frame = (struct camera2_stream *)(selfStreamParms->metaBuffers[selfStreamParms->bufIndex].virt.extP[0]); | |
5506cebf | 3745 | frameTimeStamp = m_requestManager->GetTimestampByFrameCnt(frame->rcount); |
2f4d175d SK |
3746 | currentOutputStreams = m_requestManager->GetOutputStreamByFrameCnt(frame->rcount); |
3747 | ALOGV("frame count streamthread[%d] : %d, outputStream(%x)", selfThread->m_index, frame->rcount, currentOutputStreams); | |
a85ec381 SK |
3748 | if (((currentOutputStreams & STREAM_MASK_PREVIEW) && selfThread->m_index == 0)|| |
3749 | ((currentOutputStreams & STREAM_MASK_ZSL) && selfThread->m_index == 1)) { | |
3750 | directOutputEnabled = true; | |
3751 | } | |
3752 | if (!directOutputEnabled) { | |
3753 | if (!m_nightCaptureFrameCnt) | |
3754 | m_requestManager->NotifyStreamOutput(frame->rcount); | |
3755 | } | |
feb7df4c | 3756 | #else |
5506cebf SK |
3757 | selfStreamParms->bufIndex = cam_int_dqbuf(currentNode); |
3758 | frameTimeStamp = m_requestManager->GetTimestamp(m_requestManager->GetFrameIndex()) | |
feb7df4c | 3759 | #endif |
5506cebf SK |
3760 | ALOGV("DEBUG(%s): streamthread[%d] DQBUF done index(%d) sigcnt(%d)",__FUNCTION__, |
3761 | selfThread->m_index, selfStreamParms->bufIndex, m_scpOutputSignalCnt); | |
804236a7 | 3762 | |
5506cebf | 3763 | if (selfStreamParms->svcBufStatus[selfStreamParms->bufIndex] != ON_DRIVER) |
86646da4 | 3764 | ALOGV("DBG(%s): DQed buffer status abnormal (%d) ", |
5506cebf SK |
3765 | __FUNCTION__, selfStreamParms->svcBufStatus[selfStreamParms->bufIndex]); |
3766 | selfStreamParms->svcBufStatus[selfStreamParms->bufIndex] = ON_HAL; | |
b5237e6b | 3767 | |
5506cebf SK |
3768 | for (int i = 0 ; i < NUM_MAX_SUBSTREAM ; i++) { |
3769 | if (selfThread->m_attachedSubStreams[i].streamId == -1) | |
3770 | continue; | |
5506cebf | 3771 | #ifdef ENABLE_FRAME_SYNC |
2f4d175d | 3772 | if (currentOutputStreams & (1<<selfThread->m_attachedSubStreams[i].streamId)) { |
5506cebf SK |
3773 | m_runSubStreamFunc(selfThread, &(selfStreamParms->svcBuffers[selfStreamParms->bufIndex]), |
3774 | selfThread->m_attachedSubStreams[i].streamId, frameTimeStamp); | |
b5237e6b | 3775 | } |
2f4d175d SK |
3776 | #else |
3777 | if (m_currentOutputStreams & (1<<selfThread->m_attachedSubStreams[i].streamId)) { | |
3778 | m_runSubStreamFunc(selfThread, &(selfStreamParms->svcBuffers[selfStreamParms->bufIndex]), | |
3779 | selfThread->m_attachedSubStreams[i].streamId, frameTimeStamp); | |
3780 | } | |
3781 | #endif | |
86646da4 | 3782 | } |
c0b6e17e | 3783 | |
5506cebf | 3784 | if (m_requestManager->GetSkipCnt() <= 0) { |
5506cebf | 3785 | #ifdef ENABLE_FRAME_SYNC |
2f4d175d | 3786 | if ((currentOutputStreams & STREAM_MASK_PREVIEW) && selfThread->m_index == 0) { |
5506cebf | 3787 | ALOGV("** Display Preview(frameCnt:%d)", frame->rcount); |
2f4d175d SK |
3788 | res = selfStreamParms->streamOps->enqueue_buffer(selfStreamParms->streamOps, |
3789 | frameTimeStamp, | |
3790 | &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex])); | |
3791 | } | |
3792 | else if ((currentOutputStreams & STREAM_MASK_ZSL) && selfThread->m_index == 1) { | |
3793 | ALOGV("** SCC output (frameCnt:%d), last(%d)", frame->rcount); | |
3794 | res = selfStreamParms->streamOps->enqueue_buffer(selfStreamParms->streamOps, | |
3795 | frameTimeStamp, | |
3796 | &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex])); | |
3797 | } | |
a85ec381 SK |
3798 | else { |
3799 | res = selfStreamParms->streamOps->cancel_buffer(selfStreamParms->streamOps, | |
3800 | &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex])); | |
3801 | ALOGV("DEBUG(%s): streamthread[%d] cancel_buffer to svc done res(%d)", __FUNCTION__, selfThread->m_index, res); | |
3802 | } | |
5506cebf | 3803 | #else |
2f4d175d | 3804 | if ((m_currentOutputStreams & STREAM_MASK_PREVIEW) && selfThread->m_index == 0) { |
5506cebf | 3805 | ALOGV("** Display Preview(frameCnt:%d)", m_requestManager->GetFrameIndex()); |
5506cebf SK |
3806 | res = selfStreamParms->streamOps->enqueue_buffer(selfStreamParms->streamOps, |
3807 | frameTimeStamp, | |
3808 | &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex])); | |
74d78ebe | 3809 | } |
5506cebf | 3810 | else if ((m_currentOutputStreams & STREAM_MASK_ZSL) && selfThread->m_index == 1) { |
5506cebf | 3811 | ALOGV("** SCC output (frameCnt:%d), last(%d)", m_requestManager->GetFrameIndex()); |
5506cebf SK |
3812 | res = selfStreamParms->streamOps->enqueue_buffer(selfStreamParms->streamOps, |
3813 | frameTimeStamp, | |
3814 | &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex])); | |
74d78ebe | 3815 | } |
2f4d175d | 3816 | #endif |
5506cebf | 3817 | ALOGV("DEBUG(%s): streamthread[%d] enqueue_buffer to svc done res(%d)", __FUNCTION__, selfThread->m_index, res); |
86646da4 SK |
3818 | } |
3819 | else { | |
3820 | res = selfStreamParms->streamOps->cancel_buffer(selfStreamParms->streamOps, | |
5506cebf SK |
3821 | &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex])); |
3822 | ALOGV("DEBUG(%s): streamthread[%d] cancel_buffer to svc done res(%d)", __FUNCTION__, selfThread->m_index, res); | |
86646da4 | 3823 | } |
2f4d175d | 3824 | #ifdef ENABLE_FRAME_SYNC |
a85ec381 SK |
3825 | if (directOutputEnabled) { |
3826 | if (!m_nightCaptureFrameCnt) | |
3827 | m_requestManager->NotifyStreamOutput(frame->rcount); | |
3828 | } | |
2f4d175d | 3829 | #endif |
86646da4 | 3830 | if (res == 0) { |
5506cebf | 3831 | selfStreamParms->svcBufStatus[selfStreamParms->bufIndex] = ON_SERVICE; |
86646da4 SK |
3832 | selfStreamParms->numSvcBufsInHal--; |
3833 | } | |
3834 | else { | |
5506cebf | 3835 | selfStreamParms->svcBufStatus[selfStreamParms->bufIndex] = ON_HAL; |
b5237e6b | 3836 | } |
86646da4 | 3837 | |
c15a6b00 | 3838 | } |
ce8e830a | 3839 | while(0); |
b5237e6b | 3840 | |
a85ec381 SK |
3841 | while ((selfStreamParms->numSvcBufsInHal - (selfStreamParms->numSvcBuffers - NUM_SCP_BUFFERS)) |
3842 | < selfStreamParms->minUndequedBuffer) { | |
86646da4 SK |
3843 | res = selfStreamParms->streamOps->dequeue_buffer(selfStreamParms->streamOps, &buf); |
3844 | if (res != NO_ERROR || buf == NULL) { | |
a85ec381 | 3845 | ALOGV("DEBUG(%s): streamthread[%d] dequeue_buffer fail res(%d) numInHal(%d)",__FUNCTION__ , selfThread->m_index, res, selfStreamParms->numSvcBufsInHal); |
86646da4 SK |
3846 | break; |
3847 | } | |
3848 | selfStreamParms->numSvcBufsInHal++; | |
5506cebf | 3849 | ALOGV("DEBUG(%s): streamthread[%d] got buf(%x) numInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, |
86646da4 SK |
3850 | selfThread->m_index, (uint32_t)(*buf), selfStreamParms->numSvcBufsInHal, |
3851 | ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts); | |
3852 | const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf); | |
3853 | ||
3854 | bool found = false; | |
3855 | int checkingIndex = 0; | |
3856 | for (checkingIndex = 0; checkingIndex < selfStreamParms->numSvcBuffers ; checkingIndex++) { | |
3857 | if (priv_handle->fd == selfStreamParms->svcBuffers[checkingIndex].fd.extFd[0] ) { | |
3858 | found = true; | |
9dd63e1f SK |
3859 | break; |
3860 | } | |
86646da4 | 3861 | } |
86646da4 | 3862 | if (!found) break; |
5506cebf SK |
3863 | selfStreamParms->bufIndex = checkingIndex; |
3864 | if (selfStreamParms->bufIndex < selfStreamParms->numHwBuffers) { | |
86646da4 | 3865 | uint32_t plane_index = 0; |
5506cebf | 3866 | ExynosBuffer* currentBuf = &(selfStreamParms->svcBuffers[selfStreamParms->bufIndex]); |
86646da4 SK |
3867 | struct v4l2_buffer v4l2_buf; |
3868 | struct v4l2_plane planes[VIDEO_MAX_PLANES]; | |
13d8c7b4 | 3869 | |
86646da4 SK |
3870 | v4l2_buf.m.planes = planes; |
3871 | v4l2_buf.type = currentNode->type; | |
3872 | v4l2_buf.memory = currentNode->memory; | |
5506cebf | 3873 | v4l2_buf.index = selfStreamParms->bufIndex; |
86646da4 SK |
3874 | v4l2_buf.length = currentNode->planes; |
3875 | ||
5506cebf SK |
3876 | v4l2_buf.m.planes[0].m.fd = priv_handle->fd; |
3877 | v4l2_buf.m.planes[2].m.fd = priv_handle->fd1; | |
3878 | v4l2_buf.m.planes[1].m.fd = priv_handle->fd2; | |
3879 | for (plane_index=0 ; plane_index < v4l2_buf.length ; plane_index++) { | |
3880 | v4l2_buf.m.planes[plane_index].length = currentBuf->size.extS[plane_index]; | |
3881 | } | |
3882 | #ifdef ENABLE_FRAME_SYNC | |
3883 | /* add plane for metadata*/ | |
3884 | v4l2_buf.length += selfStreamParms->metaPlanes; | |
3885 | v4l2_buf.m.planes[v4l2_buf.length-1].m.fd = selfStreamParms->metaBuffers[selfStreamParms->bufIndex].fd.extFd[0]; | |
3886 | v4l2_buf.m.planes[v4l2_buf.length-1].length = selfStreamParms->metaBuffers[selfStreamParms->bufIndex].size.extS[0]; | |
3887 | #endif | |
3888 | if (exynos_v4l2_qbuf(currentNode->fd, &v4l2_buf) < 0) { | |
3889 | ALOGE("ERR(%s): streamthread[%d] exynos_v4l2_qbuf() fail", | |
3890 | __FUNCTION__, selfThread->m_index); | |
3891 | return; | |
3892 | } | |
3893 | selfStreamParms->svcBufStatus[selfStreamParms->bufIndex] = ON_DRIVER; | |
3894 | ALOGV("DEBUG(%s): streamthread[%d] QBUF done index(%d)", | |
3895 | __FUNCTION__, selfThread->m_index, selfStreamParms->bufIndex); | |
3896 | } | |
3897 | } | |
3898 | ||
3899 | ALOGV("(%s): streamthread[%d] END SIGNAL_STREAM_DATA_COMING", __FUNCTION__,selfThread->m_index); | |
3900 | } | |
3901 | return; | |
3902 | } | |
3903 | ||
/*
 * Signal handler body for "indirect" stream threads (e.g. the SCC capture
 * node, whose frames are consumed only by substreams — JPEG, record,
 * preview-callback — rather than enqueued straight to a service window).
 *
 * SIGNAL_THREAD_RELEASE: streams off the V4L2 node (tracking the capture
 * node's status flag so it is not stopped twice), frees the driver queue
 * with reqbufs(0), and clears the thread's activation flags.
 *
 * SIGNAL_STREAM_DATA_COMING: DQBUFs one frame, resolves its timestamp and
 * requested output streams (via the frame-sync metadata plane when
 * ENABLE_FRAME_SYNC is set), runs the matching substream producers, then
 * immediately QBUFs the frame back to the driver.
 */
void ExynosCameraHWInterface2::m_streamFunc_indirect(SignalDrivenThread *self)
{
    uint32_t currentSignal = self->GetProcessingSignal();
    StreamThread * selfThread = ((StreamThread*)self);
    stream_parameters_t *selfStreamParms = &(selfThread->m_parameters);
    node_info_t *currentNode = selfStreamParms->node;


    if (currentSignal & SIGNAL_THREAD_RELEASE) {
        CAM_LOGV("(%s): [%d] START SIGNAL_THREAD_RELEASE", __FUNCTION__, selfThread->m_index);

        if (selfThread->m_isBufferInit) {
            /* The capture node's streaming state is tracked in m_camera_info so
             * it is only streamed off once; other nodes are streamed off
             * unconditionally. */
            if (currentNode->fd == m_camera_info.capture.fd) {
                if (m_camera_info.capture.status == true) {
                    ALOGV("DEBUG(%s): calling streamthread[%d] streamoff (fd:%d)", __FUNCTION__,
                        selfThread->m_index, currentNode->fd);
                    if (cam_int_streamoff(currentNode) < 0 ){
                        ALOGE("ERR(%s): stream off fail", __FUNCTION__);
                    } else {
                        m_camera_info.capture.status = false;
                    }
                }
            } else {
                ALOGV("DEBUG(%s): calling streamthread[%d] streamoff (fd:%d)", __FUNCTION__,
                    selfThread->m_index, currentNode->fd);
                if (cam_int_streamoff(currentNode) < 0 ){
                    ALOGE("ERR(%s): stream off fail", __FUNCTION__);
                }
            }
            ALOGV("DEBUG(%s): calling streamthread[%d] streamoff done", __FUNCTION__, selfThread->m_index);
            ALOGV("DEBUG(%s): calling streamthread[%d] reqbuf 0 (fd:%d)", __FUNCTION__,
                selfThread->m_index, currentNode->fd);
            /* reqbufs(0) releases the driver-side buffer queue. */
            currentNode->buffers = 0;
            cam_int_reqbufs(currentNode);
            ALOGV("DEBUG(%s): calling streamthread[%d] reqbuf 0 DONE(fd:%d)", __FUNCTION__,
                selfThread->m_index, currentNode->fd);
        }

        selfThread->m_isBufferInit = false;
        selfThread->m_releasing = false;
        selfThread->m_activated = false;
        ALOGV("(%s): [%d] END SIGNAL_THREAD_RELEASE", __FUNCTION__, selfThread->m_index);
        return;
    }

    if (currentSignal & SIGNAL_STREAM_DATA_COMING) {
#ifdef ENABLE_FRAME_SYNC
        camera2_stream *frame;
        uint8_t currentOutputStreams;
#endif
        nsecs_t frameTimeStamp;

        ALOGV("DEBUG(%s): streamthread[%d] processing SIGNAL_STREAM_DATA_COMING",
            __FUNCTION__,selfThread->m_index);

        /* Lazily register service buffers on first frame. */
        m_streamBufferInit(self);

        ALOGV("DEBUG(%s): streamthread[%d] DQBUF START", __FUNCTION__, selfThread->m_index);
        selfStreamParms->bufIndex = cam_int_dqbuf(currentNode);
        ALOGV("DEBUG(%s): streamthread[%d] DQBUF done index(%d)",__FUNCTION__,
            selfThread->m_index, selfStreamParms->bufIndex);

#ifdef ENABLE_FRAME_SYNC
        /* The last plane of the dequeued buffer carries the camera2_stream
         * metadata used to match this frame to its originating request. */
        frame = (struct camera2_stream *)(currentNode->buffer[selfStreamParms->bufIndex].virt.extP[selfStreamParms->planes -1]);
        frameTimeStamp = m_requestManager->GetTimestampByFrameCnt(frame->rcount);
        currentOutputStreams = m_requestManager->GetOutputStreamByFrameCnt(frame->rcount);
        ALOGV("frame count(SCC) : %d outputStream(%x)", frame->rcount, currentOutputStreams);
#else
        frameTimeStamp = m_requestManager->GetTimestamp(m_requestManager->GetFrameIndex());
#endif

        /* Fan the frame out to every attached substream the request selected. */
        for (int i = 0 ; i < NUM_MAX_SUBSTREAM ; i++) {
            if (selfThread->m_attachedSubStreams[i].streamId == -1)
                continue;
#ifdef ENABLE_FRAME_SYNC
            if (currentOutputStreams & (1<<selfThread->m_attachedSubStreams[i].streamId)) {
                /* NOTE(review): NotifyStreamOutput fires once per matched
                 * substream, so a frame feeding several substreams notifies
                 * multiple times — confirm this is intended by the request
                 * manager's accounting. */
                m_requestManager->NotifyStreamOutput(frame->rcount);
                m_runSubStreamFunc(selfThread, &(currentNode->buffer[selfStreamParms->bufIndex]),
                    selfThread->m_attachedSubStreams[i].streamId, frameTimeStamp);
            }
#else
            if (m_currentOutputStreams & (1<<selfThread->m_attachedSubStreams[i].streamId)) {
                m_runSubStreamFunc(selfThread, &(currentNode->buffer[selfStreamParms->bufIndex]),
                    selfThread->m_attachedSubStreams[i].streamId, frameTimeStamp);
            }
#endif
        }
        /* Return the buffer to the driver immediately; indirect streams never
         * hand buffers to the service window. */
        cam_int_qbuf(currentNode, selfStreamParms->bufIndex);
        ALOGV("DEBUG(%s): streamthread[%d] QBUF DONE", __FUNCTION__, selfThread->m_index);



        ALOGV("DEBUG(%s): streamthread[%d] processing SIGNAL_STREAM_DATA_COMING DONE",
            __FUNCTION__, selfThread->m_index);
    }


    return;
}
4003 | ||
5506cebf | 4004 | void ExynosCameraHWInterface2::m_streamThreadFunc(SignalDrivenThread * self) |
86646da4 SK |
4005 | { |
4006 | uint32_t currentSignal = self->GetProcessingSignal(); | |
4007 | StreamThread * selfThread = ((StreamThread*)self); | |
4008 | stream_parameters_t *selfStreamParms = &(selfThread->m_parameters); | |
5506cebf SK |
4009 | node_info_t *currentNode = selfStreamParms->node; |
4010 | ||
4011 | ALOGV("DEBUG(%s): m_streamThreadFunc[%d] (%x)", __FUNCTION__, selfThread->m_index, currentSignal); | |
86646da4 | 4012 | |
5506cebf SK |
4013 | // Do something in Child thread handler |
4014 | // Should change function to class that inherited StreamThread class to support dynamic stream allocation | |
4015 | if (selfThread->streamType == STREAM_TYPE_DIRECT) { | |
4016 | m_streamFunc_direct(self); | |
4017 | } else if (selfThread->streamType == STREAM_TYPE_INDIRECT) { | |
4018 | m_streamFunc_indirect(self); | |
4019 | } | |
86646da4 | 4020 | |
5506cebf SK |
4021 | return; |
4022 | } | |
4023 | int ExynosCameraHWInterface2::m_jpegCreator(StreamThread *selfThread, ExynosBuffer *srcImageBuf, nsecs_t frameTimeStamp) | |
4024 | { | |
4025 | stream_parameters_t *selfStreamParms = &(selfThread->m_parameters); | |
4026 | substream_parameters_t *subParms = &m_subStreams[STREAM_ID_JPEG]; | |
4027 | status_t res; | |
4028 | ExynosRect jpegRect; | |
4029 | bool found = false; | |
de48e362 | 4030 | int srcW, srcH, srcCropX, srcCropY; |
5506cebf SK |
4031 | int pictureW, pictureH, pictureFramesize = 0; |
4032 | int pictureFormat; | |
4033 | int cropX, cropY, cropW, cropH = 0; | |
4034 | ExynosBuffer resizeBufInfo; | |
4035 | ExynosRect m_jpegPictureRect; | |
4036 | buffer_handle_t * buf = NULL; | |
86646da4 | 4037 | |
5506cebf SK |
4038 | ALOGV("DEBUG(%s): index(%d)",__FUNCTION__, subParms->svcBufIndex); |
4039 | for (int i = 0 ; subParms->numSvcBuffers ; i++) { | |
4040 | if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_HAL) { | |
4041 | found = true; | |
4042 | break; | |
86646da4 | 4043 | } |
5506cebf SK |
4044 | subParms->svcBufIndex++; |
4045 | if (subParms->svcBufIndex >= subParms->numSvcBuffers) | |
4046 | subParms->svcBufIndex = 0; | |
4047 | } | |
4048 | if (!found) { | |
4049 | ALOGE("(%s): cannot find free svc buffer", __FUNCTION__); | |
4050 | subParms->svcBufIndex++; | |
4051 | return 1; | |
86646da4 SK |
4052 | } |
4053 | ||
de48e362 SK |
4054 | m_getRatioSize(selfStreamParms->width, selfStreamParms->height, |
4055 | m_streamThreads[0]->m_parameters.width, m_streamThreads[0]->m_parameters.height, | |
4056 | &srcCropX, &srcCropY, | |
4057 | &srcW, &srcH, | |
4058 | 0); | |
4059 | ||
5506cebf SK |
4060 | m_jpegPictureRect.w = subParms->width; |
4061 | m_jpegPictureRect.h = subParms->height; | |
7d0efb59 | 4062 | |
5506cebf SK |
4063 | ALOGV("DEBUG(%s):w = %d, h = %d, w = %d, h = %d", |
4064 | __FUNCTION__, selfStreamParms->width, selfStreamParms->height, | |
4065 | m_jpegPictureRect.w, m_jpegPictureRect.h); | |
7d0efb59 | 4066 | |
de48e362 | 4067 | m_getRatioSize(srcW, srcH, |
5506cebf SK |
4068 | m_jpegPictureRect.w, m_jpegPictureRect.h, |
4069 | &cropX, &cropY, | |
4070 | &pictureW, &pictureH, | |
4071 | 0); | |
4072 | pictureFormat = V4L2_PIX_FMT_YUYV; | |
4073 | pictureFramesize = FRAME_SIZE(V4L2_PIX_2_HAL_PIXEL_FORMAT(pictureFormat), pictureW, pictureH); | |
4074 | ||
4075 | if (m_exynosPictureCSC) { | |
4076 | float zoom_w = 0, zoom_h = 0; | |
4077 | if (m_zoomRatio == 0) | |
4078 | m_zoomRatio = 1; | |
4079 | ||
4080 | if (m_jpegPictureRect.w >= m_jpegPictureRect.h) { | |
4081 | zoom_w = pictureW / m_zoomRatio; | |
4082 | zoom_h = zoom_w * m_jpegPictureRect.h / m_jpegPictureRect.w; | |
4083 | } else { | |
4084 | zoom_h = pictureH / m_zoomRatio; | |
4085 | zoom_w = zoom_h * m_jpegPictureRect.w / m_jpegPictureRect.h; | |
7d0efb59 | 4086 | } |
de48e362 SK |
4087 | cropX = (srcW - zoom_w) / 2; |
4088 | cropY = (srcH - zoom_h) / 2; | |
5506cebf SK |
4089 | cropW = zoom_w; |
4090 | cropH = zoom_h; | |
4091 | ||
4092 | ALOGV("DEBUG(%s):cropX = %d, cropY = %d, cropW = %d, cropH = %d", | |
4093 | __FUNCTION__, cropX, cropY, cropW, cropH); | |
4094 | ||
4095 | csc_set_src_format(m_exynosPictureCSC, | |
de48e362 | 4096 | ALIGN(srcW, 16), ALIGN(srcH, 16), |
5506cebf SK |
4097 | cropX, cropY, cropW, cropH, |
4098 | V4L2_PIX_2_HAL_PIXEL_FORMAT(pictureFormat), | |
4099 | 0); | |
7d0efb59 | 4100 | |
5506cebf SK |
4101 | csc_set_dst_format(m_exynosPictureCSC, |
4102 | m_jpegPictureRect.w, m_jpegPictureRect.h, | |
4103 | 0, 0, m_jpegPictureRect.w, m_jpegPictureRect.h, | |
4104 | V4L2_PIX_2_HAL_PIXEL_FORMAT(V4L2_PIX_FMT_NV16), | |
4105 | 0); | |
4106 | for (int i = 0 ; i < 3 ; i++) | |
4107 | ALOGV("DEBUG(%s): m_pictureBuf.fd.extFd[%d]=%d ", | |
4108 | __FUNCTION__, i, srcImageBuf->fd.extFd[i]); | |
4109 | csc_set_src_buffer(m_exynosPictureCSC, | |
4110 | (void **)&srcImageBuf->fd.fd); | |
7d0efb59 | 4111 | |
5506cebf SK |
4112 | csc_set_dst_buffer(m_exynosPictureCSC, |
4113 | (void **)&m_resizeBuf.fd.fd); | |
4114 | for (int i = 0 ; i < 3 ; i++) | |
4115 | ALOGV("DEBUG(%s): m_resizeBuf.virt.extP[%d]=%d m_resizeBuf.size.extS[%d]=%d", | |
4116 | __FUNCTION__, i, m_resizeBuf.fd.extFd[i], i, m_resizeBuf.size.extS[i]); | |
7d0efb59 | 4117 | |
5506cebf SK |
4118 | if (csc_convert(m_exynosPictureCSC) != 0) |
4119 | ALOGE("ERR(%s): csc_convert() fail", __FUNCTION__); | |
4120 | ||
4121 | } | |
4122 | else { | |
4123 | ALOGE("ERR(%s): m_exynosPictureCSC == NULL", __FUNCTION__); | |
7d0efb59 C |
4124 | } |
4125 | ||
5506cebf | 4126 | resizeBufInfo = m_resizeBuf; |
86646da4 | 4127 | |
5506cebf | 4128 | m_getAlignedYUVSize(V4L2_PIX_FMT_NV16, m_jpegPictureRect.w, m_jpegPictureRect.h, &m_resizeBuf); |
86646da4 | 4129 | |
5506cebf SK |
4130 | for (int i = 1; i < 3; i++) { |
4131 | if (m_resizeBuf.size.extS[i] != 0) | |
4132 | m_resizeBuf.fd.extFd[i] = m_resizeBuf.fd.extFd[i-1] + m_resizeBuf.size.extS[i-1]; | |
86646da4 | 4133 | |
5506cebf SK |
4134 | ALOGV("(%s): m_resizeBuf.size.extS[%d] = %d", __FUNCTION__, i, m_resizeBuf.size.extS[i]); |
4135 | } | |
2adfa429 | 4136 | |
5506cebf SK |
4137 | jpegRect.w = m_jpegPictureRect.w; |
4138 | jpegRect.h = m_jpegPictureRect.h; | |
4139 | jpegRect.colorFormat = V4L2_PIX_FMT_NV16; | |
86646da4 | 4140 | |
5506cebf SK |
4141 | for (int j = 0 ; j < 3 ; j++) |
4142 | ALOGV("DEBUG(%s): dest buf node fd.extFd[%d]=%d size=%d virt=%x ", | |
4143 | __FUNCTION__, j, subParms->svcBuffers[subParms->svcBufIndex].fd.extFd[j], | |
4144 | (unsigned int)subParms->svcBuffers[subParms->svcBufIndex].size.extS[j], | |
4145 | (unsigned int)subParms->svcBuffers[subParms->svcBufIndex].virt.extP[j]); | |
86646da4 | 4146 | |
5506cebf SK |
4147 | if (yuv2Jpeg(&m_resizeBuf, &subParms->svcBuffers[subParms->svcBufIndex], &jpegRect) == false) |
4148 | ALOGE("ERR(%s):yuv2Jpeg() fail", __FUNCTION__); | |
2adfa429 | 4149 | |
5506cebf | 4150 | m_resizeBuf = resizeBufInfo; |
2adfa429 | 4151 | |
5506cebf | 4152 | res = subParms->streamOps->enqueue_buffer(subParms->streamOps, frameTimeStamp, &(subParms->svcBufHandle[subParms->svcBufIndex])); |
be494d19 | 4153 | |
5506cebf SK |
4154 | ALOGV("DEBUG(%s): streamthread[%d] enqueue_buffer index(%d) to svc done res(%d)", |
4155 | __FUNCTION__, selfThread->m_index, subParms->svcBufIndex, res); | |
4156 | if (res == 0) { | |
4157 | subParms->svcBufStatus[subParms->svcBufIndex] = ON_SERVICE; | |
4158 | subParms->numSvcBufsInHal--; | |
4159 | } | |
4160 | else { | |
4161 | subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL; | |
4162 | } | |
86646da4 | 4163 | |
5506cebf SK |
4164 | while (subParms->numSvcBufsInHal <= subParms->minUndequedBuffer) |
4165 | { | |
4166 | bool found = false; | |
4167 | int checkingIndex = 0; | |
86646da4 | 4168 | |
5506cebf SK |
4169 | ALOGV("DEBUG(%s): jpeg currentBuf#(%d)", __FUNCTION__ , subParms->numSvcBufsInHal); |
4170 | ||
4171 | res = subParms->streamOps->dequeue_buffer(subParms->streamOps, &buf); | |
4172 | if (res != NO_ERROR || buf == NULL) { | |
4173 | ALOGV("DEBUG(%s): jpeg stream(%d) dequeue_buffer fail res(%d)",__FUNCTION__ , selfThread->m_index, res); | |
4174 | break; | |
4175 | } | |
4176 | const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf); | |
4177 | subParms->numSvcBufsInHal ++; | |
4178 | ALOGV("DEBUG(%s): jpeg got buf(%x) numBufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf), | |
4179 | subParms->numSvcBufsInHal, ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts); | |
4180 | ||
4181 | ||
4182 | for (checkingIndex = 0; checkingIndex < subParms->numSvcBuffers ; checkingIndex++) { | |
4183 | if (priv_handle->fd == subParms->svcBuffers[checkingIndex].fd.extFd[0] ) { | |
4184 | found = true; | |
4185 | break; | |
86646da4 | 4186 | } |
5506cebf SK |
4187 | } |
4188 | ALOGV("DEBUG(%s): jpeg dequeueed_buffer found index(%d)", __FUNCTION__, found); | |
be494d19 | 4189 | |
5506cebf SK |
4190 | if (!found) { |
4191 | break; | |
4192 | } | |
4193 | ||
4194 | subParms->svcBufIndex = checkingIndex; | |
4195 | if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_SERVICE) { | |
4196 | subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL; | |
4197 | } | |
4198 | else { | |
4199 | ALOGV("DEBUG(%s): jpeg bufstatus abnormal [%d] status = %d", __FUNCTION__, | |
4200 | subParms->svcBufIndex, subParms->svcBufStatus[subParms->svcBufIndex]); | |
4201 | } | |
4202 | } | |
4203 | return 0; | |
4204 | } | |
86646da4 | 4205 | |
5506cebf SK |
4206 | int ExynosCameraHWInterface2::m_recordCreator(StreamThread *selfThread, ExynosBuffer *srcImageBuf, nsecs_t frameTimeStamp) |
4207 | { | |
4208 | stream_parameters_t *selfStreamParms = &(selfThread->m_parameters); | |
4209 | substream_parameters_t *subParms = &m_subStreams[STREAM_ID_RECORD]; | |
4210 | status_t res; | |
4211 | ExynosRect jpegRect; | |
4212 | bool found = false; | |
4213 | int cropX, cropY, cropW, cropH = 0; | |
4214 | buffer_handle_t * buf = NULL; | |
86646da4 | 4215 | |
5506cebf SK |
4216 | ALOGV("DEBUG(%s): index(%d)",__FUNCTION__, subParms->svcBufIndex); |
4217 | for (int i = 0 ; subParms->numSvcBuffers ; i++) { | |
4218 | if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_HAL) { | |
4219 | found = true; | |
4220 | break; | |
4221 | } | |
4222 | subParms->svcBufIndex++; | |
4223 | if (subParms->svcBufIndex >= subParms->numSvcBuffers) | |
4224 | subParms->svcBufIndex = 0; | |
4225 | } | |
4226 | if (!found) { | |
4227 | ALOGE("(%s): cannot find free svc buffer", __FUNCTION__); | |
4228 | subParms->svcBufIndex++; | |
4229 | return 1; | |
4230 | } | |
86646da4 | 4231 | |
5506cebf SK |
4232 | if (m_exynosVideoCSC) { |
4233 | int videoW = subParms->width, videoH = subParms->height; | |
4234 | int cropX, cropY, cropW, cropH = 0; | |
4235 | int previewW = selfStreamParms->width, previewH = selfStreamParms->height; | |
4236 | m_getRatioSize(previewW, previewH, | |
4237 | videoW, videoH, | |
4238 | &cropX, &cropY, | |
4239 | &cropW, &cropH, | |
4240 | 0); | |
86646da4 | 4241 | |
5506cebf SK |
4242 | ALOGV("DEBUG(%s):cropX = %d, cropY = %d, cropW = %d, cropH = %d", |
4243 | __FUNCTION__, cropX, cropY, cropW, cropH); | |
86646da4 | 4244 | |
5506cebf SK |
4245 | csc_set_src_format(m_exynosVideoCSC, |
4246 | previewW, previewH, | |
4247 | cropX, cropY, cropW, cropH, | |
4248 | selfStreamParms->format, | |
4249 | 0); | |
86646da4 | 4250 | |
5506cebf SK |
4251 | csc_set_dst_format(m_exynosVideoCSC, |
4252 | videoW, videoH, | |
4253 | 0, 0, videoW, videoH, | |
4254 | subParms->format, | |
4255 | 1); | |
86646da4 | 4256 | |
5506cebf SK |
4257 | csc_set_src_buffer(m_exynosVideoCSC, |
4258 | (void **)&srcImageBuf->fd.fd); | |
86646da4 | 4259 | |
5506cebf SK |
4260 | csc_set_dst_buffer(m_exynosVideoCSC, |
4261 | (void **)(&(subParms->svcBuffers[subParms->svcBufIndex].fd.fd))); | |
4262 | ||
4263 | if (csc_convert(m_exynosVideoCSC) != 0) { | |
4264 | ALOGE("ERR(%s):csc_convert() fail", __FUNCTION__); | |
4265 | } | |
4266 | else { | |
4267 | ALOGV("(%s):csc_convert() SUCCESS", __FUNCTION__); | |
86646da4 | 4268 | } |
5506cebf SK |
4269 | } |
4270 | else { | |
4271 | ALOGE("ERR(%s):m_exynosVideoCSC == NULL", __FUNCTION__); | |
4272 | } | |
86646da4 | 4273 | |
5506cebf | 4274 | res = subParms->streamOps->enqueue_buffer(subParms->streamOps, frameTimeStamp, &(subParms->svcBufHandle[subParms->svcBufIndex])); |
86646da4 | 4275 | |
5506cebf SK |
4276 | ALOGV("DEBUG(%s): streamthread[%d] enqueue_buffer index(%d) to svc done res(%d)", |
4277 | __FUNCTION__, selfThread->m_index, subParms->svcBufIndex, res); | |
4278 | if (res == 0) { | |
4279 | subParms->svcBufStatus[subParms->svcBufIndex] = ON_SERVICE; | |
4280 | subParms->numSvcBufsInHal--; | |
4281 | } | |
4282 | else { | |
4283 | subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL; | |
4284 | } | |
86646da4 | 4285 | |
5506cebf SK |
4286 | while (subParms->numSvcBufsInHal <= subParms->minUndequedBuffer) |
4287 | { | |
4288 | bool found = false; | |
4289 | int checkingIndex = 0; | |
86646da4 | 4290 | |
5506cebf SK |
4291 | ALOGV("DEBUG(%s): record currentBuf#(%d)", __FUNCTION__ , subParms->numSvcBufsInHal); |
4292 | ||
4293 | res = subParms->streamOps->dequeue_buffer(subParms->streamOps, &buf); | |
4294 | if (res != NO_ERROR || buf == NULL) { | |
4295 | ALOGV("DEBUG(%s): record stream(%d) dequeue_buffer fail res(%d)",__FUNCTION__ , selfThread->m_index, res); | |
4296 | break; | |
4297 | } | |
4298 | const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf); | |
4299 | subParms->numSvcBufsInHal ++; | |
4300 | ALOGV("DEBUG(%s): record got buf(%x) numBufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf), | |
4301 | subParms->numSvcBufsInHal, ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts); | |
4302 | ||
4303 | for (checkingIndex = 0; checkingIndex < subParms->numSvcBuffers ; checkingIndex++) { | |
4304 | if (priv_handle->fd == subParms->svcBuffers[checkingIndex].fd.extFd[0] ) { | |
4305 | found = true; | |
4306 | break; | |
86646da4 | 4307 | } |
13d8c7b4 | 4308 | } |
5506cebf | 4309 | ALOGV("DEBUG(%s): record dequeueed_buffer found(%d) index = %d", __FUNCTION__, found, checkingIndex); |
86646da4 | 4310 | |
5506cebf SK |
4311 | if (!found) { |
4312 | break; | |
4313 | } | |
86646da4 | 4314 | |
5506cebf SK |
4315 | subParms->svcBufIndex = checkingIndex; |
4316 | if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_SERVICE) { | |
4317 | subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL; | |
4318 | } | |
4319 | else { | |
4320 | ALOGV("DEBUG(%s): record bufstatus abnormal [%d] status = %d", __FUNCTION__, | |
4321 | subParms->svcBufIndex, subParms->svcBufStatus[subParms->svcBufIndex]); | |
4322 | } | |
4323 | } | |
4324 | return 0; | |
86646da4 SK |
4325 | } |
4326 | ||
5506cebf | 4327 | int ExynosCameraHWInterface2::m_prvcbCreator(StreamThread *selfThread, ExynosBuffer *srcImageBuf, nsecs_t frameTimeStamp) |
86646da4 | 4328 | { |
5506cebf SK |
4329 | stream_parameters_t *selfStreamParms = &(selfThread->m_parameters); |
4330 | substream_parameters_t *subParms = &m_subStreams[STREAM_ID_PRVCB]; | |
4331 | status_t res; | |
4332 | bool found = false; | |
4333 | int cropX, cropY, cropW, cropH = 0; | |
4334 | buffer_handle_t * buf = NULL; | |
86646da4 | 4335 | |
5506cebf SK |
4336 | ALOGV("DEBUG(%s): index(%d)",__FUNCTION__, subParms->svcBufIndex); |
4337 | for (int i = 0 ; subParms->numSvcBuffers ; i++) { | |
4338 | if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_HAL) { | |
4339 | found = true; | |
4340 | break; | |
4341 | } | |
4342 | subParms->svcBufIndex++; | |
4343 | if (subParms->svcBufIndex >= subParms->numSvcBuffers) | |
4344 | subParms->svcBufIndex = 0; | |
4345 | } | |
4346 | if (!found) { | |
4347 | ALOGE("(%s): cannot find free svc buffer", __FUNCTION__); | |
4348 | subParms->svcBufIndex++; | |
4349 | return 1; | |
4350 | } | |
86646da4 | 4351 | |
5506cebf SK |
4352 | if (subParms->format == HAL_PIXEL_FORMAT_YCrCb_420_SP) { |
4353 | if (m_exynosVideoCSC) { | |
4354 | int previewCbW = subParms->width, previewCbH = subParms->height; | |
4355 | int cropX, cropY, cropW, cropH = 0; | |
4356 | int previewW = selfStreamParms->width, previewH = selfStreamParms->height; | |
4357 | m_getRatioSize(previewW, previewH, | |
4358 | previewCbW, previewCbH, | |
4359 | &cropX, &cropY, | |
4360 | &cropW, &cropH, | |
4361 | 0); | |
86646da4 | 4362 | |
5506cebf SK |
4363 | ALOGV("DEBUG(%s):cropX = %d, cropY = %d, cropW = %d, cropH = %d", |
4364 | __FUNCTION__, cropX, cropY, cropW, cropH); | |
4365 | csc_set_src_format(m_exynosVideoCSC, | |
4366 | previewW, previewH, | |
4367 | cropX, cropY, cropW, cropH, | |
4368 | selfStreamParms->format, | |
4369 | 0); | |
86646da4 | 4370 | |
5506cebf SK |
4371 | csc_set_dst_format(m_exynosVideoCSC, |
4372 | previewCbW, previewCbH, | |
4373 | 0, 0, previewCbW, previewCbH, | |
4374 | subParms->internalFormat, | |
4375 | 1); | |
86646da4 | 4376 | |
5506cebf SK |
4377 | csc_set_src_buffer(m_exynosVideoCSC, |
4378 | (void **)&srcImageBuf->fd.fd); | |
4379 | ||
4380 | csc_set_dst_buffer(m_exynosVideoCSC, | |
4381 | (void **)(&(m_previewCbBuf.fd.fd))); | |
4382 | ||
4383 | if (csc_convert(m_exynosVideoCSC) != 0) { | |
4384 | ALOGE("ERR(%s):previewcb csc_convert() fail", __FUNCTION__); | |
4385 | } | |
4386 | else { | |
4387 | ALOGV("(%s):previewcb csc_convert() SUCCESS", __FUNCTION__); | |
4388 | } | |
4389 | if (previewCbW == ALIGN(previewCbW, 16)) { | |
4390 | memcpy(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0], | |
4391 | m_previewCbBuf.virt.extP[0], previewCbW * previewCbH); | |
4392 | memcpy(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0] + previewCbW * previewCbH, | |
4393 | m_previewCbBuf.virt.extP[1], previewCbW * previewCbH / 2 ); | |
4394 | } | |
4395 | else { | |
4396 | // TODO : copy line by line ? | |
4397 | } | |
4398 | } | |
4399 | else { | |
4400 | ALOGE("ERR(%s):m_exynosVideoCSC == NULL", __FUNCTION__); | |
4401 | } | |
86646da4 | 4402 | } |
5506cebf SK |
4403 | else if (subParms->format == HAL_PIXEL_FORMAT_YV12) { |
4404 | int previewCbW = subParms->width, previewCbH = subParms->height; | |
4405 | int stride = ALIGN(previewCbW, 16); | |
0d220b42 | 4406 | int c_stride = ALIGN(stride / 2, 16); |
5506cebf SK |
4407 | memcpy(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0], |
4408 | srcImageBuf->virt.extP[0], stride * previewCbH); | |
4409 | memcpy(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0] + stride * previewCbH, | |
4410 | srcImageBuf->virt.extP[1], c_stride * previewCbH / 2 ); | |
4411 | memcpy(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0] + (stride * previewCbH) + (c_stride * previewCbH / 2), | |
4412 | srcImageBuf->virt.extP[2], c_stride * previewCbH / 2 ); | |
4413 | } | |
4414 | res = subParms->streamOps->enqueue_buffer(subParms->streamOps, frameTimeStamp, &(subParms->svcBufHandle[subParms->svcBufIndex])); | |
4415 | ||
4416 | ALOGV("DEBUG(%s): streamthread[%d] enqueue_buffer index(%d) to svc done res(%d)", | |
4417 | __FUNCTION__, selfThread->m_index, subParms->svcBufIndex, res); | |
4418 | if (res == 0) { | |
4419 | subParms->svcBufStatus[subParms->svcBufIndex] = ON_SERVICE; | |
4420 | subParms->numSvcBufsInHal--; | |
4421 | } | |
4422 | else { | |
4423 | subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL; | |
86646da4 SK |
4424 | } |
4425 | ||
5506cebf SK |
4426 | while (subParms->numSvcBufsInHal <= subParms->minUndequedBuffer) |
4427 | { | |
4428 | bool found = false; | |
4429 | int checkingIndex = 0; | |
86646da4 | 4430 | |
5506cebf | 4431 | ALOGV("DEBUG(%s): prvcb currentBuf#(%d)", __FUNCTION__ , subParms->numSvcBufsInHal); |
86646da4 | 4432 | |
5506cebf SK |
4433 | res = subParms->streamOps->dequeue_buffer(subParms->streamOps, &buf); |
4434 | if (res != NO_ERROR || buf == NULL) { | |
4435 | ALOGV("DEBUG(%s): prvcb stream(%d) dequeue_buffer fail res(%d)",__FUNCTION__ , selfThread->m_index, res); | |
4436 | break; | |
4437 | } | |
4438 | const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf); | |
4439 | subParms->numSvcBufsInHal ++; | |
4440 | ALOGV("DEBUG(%s): prvcb got buf(%x) numBufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf), | |
4441 | subParms->numSvcBufsInHal, ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts); | |
86646da4 | 4442 | |
86646da4 | 4443 | |
5506cebf SK |
4444 | for (checkingIndex = 0; checkingIndex < subParms->numSvcBuffers ; checkingIndex++) { |
4445 | if (priv_handle->fd == subParms->svcBuffers[checkingIndex].fd.extFd[0] ) { | |
4446 | found = true; | |
4447 | break; | |
4448 | } | |
4449 | } | |
4450 | ALOGV("DEBUG(%s): prvcb dequeueed_buffer found(%d) index = %d", __FUNCTION__, found, checkingIndex); | |
4451 | ||
4452 | if (!found) { | |
4453 | break; | |
4454 | } | |
4455 | ||
4456 | subParms->svcBufIndex = checkingIndex; | |
4457 | if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_SERVICE) { | |
4458 | subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL; | |
4459 | } | |
4460 | else { | |
4461 | ALOGV("DEBUG(%s): prvcb bufstatus abnormal [%d] status = %d", __FUNCTION__, | |
4462 | subParms->svcBufIndex, subParms->svcBufStatus[subParms->svcBufIndex]); | |
4463 | } | |
4464 | } | |
4465 | return 0; | |
c15a6b00 JS |
4466 | } |
4467 | ||
2d5e6ec2 SK |
4468 | bool ExynosCameraHWInterface2::m_checkThumbnailSize(int w, int h) |
4469 | { | |
4470 | int sizeOfSupportList; | |
4471 | ||
4472 | //REAR Camera | |
4473 | if(this->getCameraId() == 0) { | |
4474 | sizeOfSupportList = sizeof(SUPPORT_THUMBNAIL_REAR_SIZE) / (sizeof(int)*2); | |
4475 | ||
4476 | for(int i = 0; i < sizeOfSupportList; i++) { | |
4477 | if((SUPPORT_THUMBNAIL_REAR_SIZE[i][0] == w) &&(SUPPORT_THUMBNAIL_REAR_SIZE[i][1] == h)) | |
4478 | return true; | |
4479 | } | |
4480 | ||
4481 | } | |
4482 | else { | |
4483 | sizeOfSupportList = sizeof(SUPPORT_THUMBNAIL_FRONT_SIZE) / (sizeof(int)*2); | |
4484 | ||
4485 | for(int i = 0; i < sizeOfSupportList; i++) { | |
4486 | if((SUPPORT_THUMBNAIL_FRONT_SIZE[i][0] == w) &&(SUPPORT_THUMBNAIL_FRONT_SIZE[i][1] == h)) | |
4487 | return true; | |
4488 | } | |
4489 | } | |
4490 | ||
4491 | return false; | |
4492 | } | |
13d8c7b4 SK |
4493 | bool ExynosCameraHWInterface2::yuv2Jpeg(ExynosBuffer *yuvBuf, |
4494 | ExynosBuffer *jpegBuf, | |
4495 | ExynosRect *rect) | |
4496 | { | |
4497 | unsigned char *addr; | |
4498 | ||
4499 | ExynosJpegEncoderForCamera jpegEnc; | |
4500 | bool ret = false; | |
4501 | int res = 0; | |
4502 | ||
4503 | unsigned int *yuvSize = yuvBuf->size.extS; | |
4504 | ||
4505 | if (jpegEnc.create()) { | |
9dd63e1f | 4506 | ALOGE("ERR(%s):jpegEnc.create() fail", __FUNCTION__); |
13d8c7b4 SK |
4507 | goto jpeg_encode_done; |
4508 | } | |
4509 | ||
4510 | if (jpegEnc.setQuality(100)) { | |
9dd63e1f | 4511 | ALOGE("ERR(%s):jpegEnc.setQuality() fail", __FUNCTION__); |
13d8c7b4 SK |
4512 | goto jpeg_encode_done; |
4513 | } | |
4514 | ||
4515 | if (jpegEnc.setSize(rect->w, rect->h)) { | |
9dd63e1f | 4516 | ALOGE("ERR(%s):jpegEnc.setSize() fail", __FUNCTION__); |
13d8c7b4 SK |
4517 | goto jpeg_encode_done; |
4518 | } | |
4519 | ALOGV("%s : width = %d , height = %d\n", __FUNCTION__, rect->w, rect->h); | |
4520 | ||
4521 | if (jpegEnc.setColorFormat(rect->colorFormat)) { | |
9dd63e1f | 4522 | ALOGE("ERR(%s):jpegEnc.setColorFormat() fail", __FUNCTION__); |
13d8c7b4 SK |
4523 | goto jpeg_encode_done; |
4524 | } | |
13d8c7b4 SK |
4525 | |
4526 | if (jpegEnc.setJpegFormat(V4L2_PIX_FMT_JPEG_422)) { | |
9dd63e1f | 4527 | ALOGE("ERR(%s):jpegEnc.setJpegFormat() fail", __FUNCTION__); |
13d8c7b4 SK |
4528 | goto jpeg_encode_done; |
4529 | } | |
13d8c7b4 | 4530 | |
2d5e6ec2 SK |
4531 | if((m_jpegMetadata.ctl.jpeg.thumbnailSize[0] != 0) && (m_jpegMetadata.ctl.jpeg.thumbnailSize[1] != 0)) { |
4532 | mExifInfo.enableThumb = true; | |
4533 | if(!m_checkThumbnailSize(m_jpegMetadata.ctl.jpeg.thumbnailSize[0], m_jpegMetadata.ctl.jpeg.thumbnailSize[1])) { | |
4017b08c SK |
4534 | // in the case of unsupported parameter, disable thumbnail |
4535 | mExifInfo.enableThumb = false; | |
2d5e6ec2 SK |
4536 | } else { |
4537 | m_thumbNailW = m_jpegMetadata.ctl.jpeg.thumbnailSize[0]; | |
4538 | m_thumbNailH = m_jpegMetadata.ctl.jpeg.thumbnailSize[1]; | |
4539 | } | |
4540 | ||
4541 | ALOGV("(%s) m_thumbNailW = %d, m_thumbNailH = %d", __FUNCTION__, m_thumbNailW, m_thumbNailH); | |
4542 | ||
4543 | } else { | |
4544 | mExifInfo.enableThumb = false; | |
4545 | } | |
eed7ed1b | 4546 | |
2d5e6ec2 SK |
4547 | if (jpegEnc.setThumbnailSize(m_thumbNailW, m_thumbNailH)) { |
4548 | ALOGE("ERR(%s):jpegEnc.setThumbnailSize(%d, %d) fail", __FUNCTION__, m_thumbNailH, m_thumbNailH); | |
eed7ed1b SK |
4549 | goto jpeg_encode_done; |
4550 | } | |
4551 | ||
2d5e6ec2 | 4552 | ALOGV("(%s):jpegEnc.setThumbnailSize(%d, %d) ", __FUNCTION__, m_thumbNailW, m_thumbNailW); |
eed7ed1b SK |
4553 | if (jpegEnc.setThumbnailQuality(50)) { |
4554 | ALOGE("ERR(%s):jpegEnc.setThumbnailQuality fail", __FUNCTION__); | |
4555 | goto jpeg_encode_done; | |
4556 | } | |
13d8c7b4 | 4557 | |
eed7ed1b | 4558 | m_setExifChangedAttribute(&mExifInfo, rect, &m_jpegMetadata); |
9dd63e1f | 4559 | ALOGV("DEBUG(%s):calling jpegEnc.setInBuf() yuvSize(%d)", __FUNCTION__, *yuvSize); |
eed7ed1b | 4560 | if (jpegEnc.setInBuf((int *)&(yuvBuf->fd.fd), &(yuvBuf->virt.p), (int *)yuvSize)) { |
9dd63e1f | 4561 | ALOGE("ERR(%s):jpegEnc.setInBuf() fail", __FUNCTION__); |
13d8c7b4 SK |
4562 | goto jpeg_encode_done; |
4563 | } | |
eed7ed1b | 4564 | if (jpegEnc.setOutBuf(jpegBuf->fd.fd, jpegBuf->virt.p, jpegBuf->size.extS[0] + jpegBuf->size.extS[1] + jpegBuf->size.extS[2])) { |
9dd63e1f | 4565 | ALOGE("ERR(%s):jpegEnc.setOutBuf() fail", __FUNCTION__); |
13d8c7b4 SK |
4566 | goto jpeg_encode_done; |
4567 | } | |
13d8c7b4 SK |
4568 | |
4569 | if (jpegEnc.updateConfig()) { | |
9dd63e1f | 4570 | ALOGE("ERR(%s):jpegEnc.updateConfig() fail", __FUNCTION__); |
13d8c7b4 SK |
4571 | goto jpeg_encode_done; |
4572 | } | |
4573 | ||
eed7ed1b | 4574 | if (res = jpegEnc.encode((int *)&jpegBuf->size.s, &mExifInfo)) { |
9dd63e1f | 4575 | ALOGE("ERR(%s):jpegEnc.encode() fail ret(%d)", __FUNCTION__, res); |
13d8c7b4 SK |
4576 | goto jpeg_encode_done; |
4577 | } | |
4578 | ||
4579 | ret = true; | |
4580 | ||
4581 | jpeg_encode_done: | |
4582 | ||
4583 | if (jpegEnc.flagCreate() == true) | |
4584 | jpegEnc.destroy(); | |
4585 | ||
4586 | return ret; | |
4587 | } | |
4588 | ||
e117f756 YJ |
/*
 * Handles a framework precapture-metering trigger.
 * Records the trigger id, resets the AE notification state, and — when
 * the AE mode requires flash on the rear camera — arms a full flash
 * sequence unless an AF-assist flash already ran (AUTO_DONE/AUTO_OFF).
 * For the non-flash path the precapture is effectively skipped and the
 * sensor-side notification is sent immediately.
 */
void ExynosCameraHWInterface2::OnPrecaptureMeteringTriggerStart(int id)
{
    m_ctlInfo.flash.m_precaptureTriggerId = id;
    m_ctlInfo.ae.aeStateNoti = AE_STATE_INACTIVE;
    if ((m_ctlInfo.flash.i_flashMode >= AA_AEMODE_ON_AUTO_FLASH) && (m_cameraId == 0)) {
        // flash is required
        switch (m_ctlInfo.flash.m_flashCnt) {
        case IS_FLASH_STATE_AUTO_DONE:
        case IS_FLASH_STATE_AUTO_OFF:
            // Flash capture sequence, AF flash was executed before
            break;
        default:
            // Full flash sequence
            m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON;
            m_ctlInfo.flash.m_flashEnableFlg = true;
            m_ctlInfo.flash.m_flashTimeOut = 3;   // retry budget for the flash sequence — TODO confirm unit (frames)
        }
    } else {
        // Skip pre-capture in case of non-flash.
        ALOGV("[PreCap] Flash OFF mode ");
        m_ctlInfo.flash.m_flashEnableFlg = false;
        m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_NONE;
    }
    ALOGV("[PreCap] OnPrecaptureMeteringTriggerStart (ID %d) (flag : %d) (cnt : %d)", id, m_ctlInfo.flash.m_flashEnableFlg, m_ctlInfo.flash.m_flashCnt);
    OnPrecaptureMeteringNotificationSensor();
}
8e2c2fdb SK |
/*
 * Records a pending AF trigger; the trigger itself is deferred via
 * m_afPendingTriggerId until the requested AF mode change settles
 * (polled with m_afModeWaitingCnt elsewhere in this class).
 */
void ExynosCameraHWInterface2::OnAfTriggerStart(int id)
{
    m_afPendingTriggerId = id;
    m_afModeWaitingCnt = 6;   // wait budget before firing the trigger — presumably frames; TODO confirm
}
13d8c7b4 | 4620 | |
0f26b20f SK |
/*
 * Dispatches a framework AF trigger to the handler for the active AF
 * mode. For the one-shot modes (AUTO/MACRO/OFF) an AF-assist flash
 * sequence is started first when the AE mode requires flash on the
 * rear camera and no flash sequence is already running.
 */
void ExynosCameraHWInterface2::OnAfTrigger(int id)
{
    m_afTriggerId = id;

    switch (m_afMode) {
    case AA_AFMODE_AUTO:
    case AA_AFMODE_MACRO:
    case AA_AFMODE_OFF:
        ALOGV("[AF] OnAfTrigger - AUTO,MACRO,OFF (Mode %d) ", m_afMode);
        // If flash is enable, Flash operation is executed before triggering AF
        if ((m_ctlInfo.flash.i_flashMode >= AA_AEMODE_ON_AUTO_FLASH)
                && (m_ctlInfo.flash.m_flashEnableFlg == false)
                && (m_cameraId == 0)) {
            ALOGV("[Flash] AF Flash start with Mode (%d)", m_afMode);
            m_ctlInfo.flash.m_flashEnableFlg = true;
            m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON;
            m_ctlInfo.flash.m_flashDecisionResult = false;
            m_ctlInfo.flash.m_afFlashDoneFlg = true;   // mark this as an AF-assist flash
        }
        OnAfTriggerAutoMacro(id);
        break;
    case AA_AFMODE_CONTINUOUS_VIDEO:
        ALOGV("[AF] OnAfTrigger - AA_AFMODE_CONTINUOUS_VIDEO (Mode %d) ", m_afMode);
        OnAfTriggerCAFVideo(id);
        break;
    case AA_AFMODE_CONTINUOUS_PICTURE:
        ALOGV("[AF] OnAfTrigger - AA_AFMODE_CONTINUOUS_PICTURE (Mode %d) ", m_afMode);
        OnAfTriggerCAFPicture(id);
        break;

    default:
        // Unknown/unsupported AF mode: trigger is recorded but not acted on.
        break;
    }
}
4655 | ||
/*
 * AF trigger handler for one-shot modes (AUTO/MACRO/OFF).
 * Advances the HAL-side AF state machine: states that need a fresh scan
 * transition to HAL_AFSTATE_NEEDS_COMMAND and arm m_IsAfTriggerRequired
 * so the main loop issues the actual AF command; in-flight states
 * ignore the extra trigger.
 */
void ExynosCameraHWInterface2::OnAfTriggerAutoMacro(int id)
{
    int nextState = NO_TRANSITION;

    switch (m_afState) {
    case HAL_AFSTATE_INACTIVE:
        // First trigger: request a scan and arm the trigger timeout.
        nextState = HAL_AFSTATE_NEEDS_COMMAND;
        m_IsAfTriggerRequired = true;
        m_ctlInfo.af.m_afTriggerTimeOut = 4;
        break;
    case HAL_AFSTATE_NEEDS_COMMAND:
        nextState = NO_TRANSITION;
        break;
    case HAL_AFSTATE_STARTED:
        nextState = NO_TRANSITION;
        break;
    case HAL_AFSTATE_SCANNING:
        // A scan is already running; ignore the extra trigger.
        nextState = NO_TRANSITION;
        break;
    case HAL_AFSTATE_LOCKED:
        // Restart focusing from a locked result (no new timeout armed here).
        nextState = HAL_AFSTATE_NEEDS_COMMAND;
        m_IsAfTriggerRequired = true;
        break;
    case HAL_AFSTATE_FAILED:
        // Retry after a failed scan, re-arming the timeout.
        nextState = HAL_AFSTATE_NEEDS_COMMAND;
        m_IsAfTriggerRequired = true;
        m_ctlInfo.af.m_afTriggerTimeOut = 4;
        break;
    default:
        break;
    }
    ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
    if (nextState != NO_TRANSITION)
        m_afState = nextState;
}
4691 | ||
/*
 * AF trigger handler for continuous-picture AF mode. The trigger asks
 * the HAL to lock in the result of the ongoing continuous scan:
 * a finished passive scan locks immediately with its last result,
 * in-flight scans defer the decision (HAL_AFSTATE_NEEDS_DETERMINATION),
 * and an inactive state reports an immediate not-focused lock. An
 * AF-assist flash is started mid-scan when the AE mode requires it.
 */
void ExynosCameraHWInterface2::OnAfTriggerCAFPicture(int id)
{
    int nextState = NO_TRANSITION;

    switch (m_afState) {
    case HAL_AFSTATE_INACTIVE:
        // No scan has run yet: answer immediately with a failed lock.
        nextState = HAL_AFSTATE_FAILED;
        SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
        break;
    case HAL_AFSTATE_NEEDS_COMMAND:
        // not used
        break;
    case HAL_AFSTATE_STARTED:
        nextState = HAL_AFSTATE_NEEDS_DETERMINATION;
        m_AfHwStateFailed = false;
        break;
    case HAL_AFSTATE_SCANNING:
        nextState = HAL_AFSTATE_NEEDS_DETERMINATION;
        m_AfHwStateFailed = false;
        // If flash is enable, Flash operation is executed before triggering AF
        if ((m_ctlInfo.flash.i_flashMode >= AA_AEMODE_ON_AUTO_FLASH)
                && (m_ctlInfo.flash.m_flashEnableFlg == false)
                && (m_cameraId == 0)) {
            ALOGV("[AF Flash] AF Flash start with Mode (%d) state (%d) id (%d)", m_afMode, m_afState, id);
            m_ctlInfo.flash.m_flashEnableFlg = true;
            m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON;
            m_ctlInfo.flash.m_flashDecisionResult = false;
            m_ctlInfo.flash.m_afFlashDoneFlg = true;
        }
        break;
    case HAL_AFSTATE_NEEDS_DETERMINATION:
        nextState = NO_TRANSITION;
        break;
    case HAL_AFSTATE_PASSIVE_FOCUSED:
        // Passive scan already finished: lock the lens and report its result.
        m_IsAfLockRequired = true;
        if (m_AfHwStateFailed) {
            ALOGE("(%s): [CAF] LAST : fail", __FUNCTION__);
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
            nextState = HAL_AFSTATE_FAILED;
        }
        else {
            ALOGV("(%s): [CAF] LAST : success", __FUNCTION__);
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
            nextState = HAL_AFSTATE_LOCKED;
        }
        m_AfHwStateFailed = false;
        break;
    case HAL_AFSTATE_LOCKED:
        nextState = NO_TRANSITION;
        break;
    case HAL_AFSTATE_FAILED:
        nextState = NO_TRANSITION;
        break;
    default:
        break;
    }
    ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
    if (nextState != NO_TRANSITION)
        m_afState = nextState;
}
4752 | ||
4753 | ||
4754 | void ExynosCameraHWInterface2::OnAfTriggerCAFVideo(int id) | |
4755 | { | |
4756 | int nextState = NO_TRANSITION; | |
0f26b20f SK |
4757 | |
4758 | switch (m_afState) { | |
4759 | case HAL_AFSTATE_INACTIVE: | |
4760 | nextState = HAL_AFSTATE_FAILED; | |
4761 | SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED); | |
4762 | break; | |
4763 | case HAL_AFSTATE_NEEDS_COMMAND: | |
4764 | // not used | |
4765 | break; | |
4766 | case HAL_AFSTATE_STARTED: | |
4767 | m_IsAfLockRequired = true; | |
4768 | nextState = HAL_AFSTATE_FAILED; | |
4769 | SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED); | |
4770 | break; | |
4771 | case HAL_AFSTATE_SCANNING: | |
4772 | m_IsAfLockRequired = true; | |
4773 | nextState = HAL_AFSTATE_FAILED; | |
4774 | SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED); | |
4775 | break; | |
4776 | case HAL_AFSTATE_NEEDS_DETERMINATION: | |
4777 | // not used | |
4778 | break; | |
4779 | case HAL_AFSTATE_PASSIVE_FOCUSED: | |
4780 | m_IsAfLockRequired = true; | |
4781 | SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED); | |
4782 | nextState = HAL_AFSTATE_LOCKED; | |
4783 | break; | |
4784 | case HAL_AFSTATE_LOCKED: | |
4785 | nextState = NO_TRANSITION; | |
4786 | break; | |
4787 | case HAL_AFSTATE_FAILED: | |
4788 | nextState = NO_TRANSITION; | |
4789 | break; | |
4790 | default: | |
4791 | break; | |
4792 | } | |
4793 | ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState); | |
4794 | if (nextState != NO_TRANSITION) | |
4795 | m_afState = nextState; | |
4796 | } | |
4797 | ||
73f5ad60 YJ |
4798 | void ExynosCameraHWInterface2::OnPrecaptureMeteringNotificationSensor() |
4799 | { | |
4800 | if (m_ctlInfo.flash.m_precaptureTriggerId > 0) { | |
4801 | // Just noti of pre-capture start | |
4802 | if (m_ctlInfo.ae.aeStateNoti != AE_STATE_PRECAPTURE) { | |
4803 | m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE, | |
4804 | ANDROID_CONTROL_AE_STATE_PRECAPTURE, | |
4805 | m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie); | |
4806 | ALOGV("(%s) ANDROID_CONTROL_AE_STATE_PRECAPTURE (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt); | |
4807 | m_notifyCb(CAMERA2_MSG_AUTOWB, | |
4808 | ANDROID_CONTROL_AWB_STATE_CONVERGED, | |
4809 | m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie); | |
4810 | m_ctlInfo.ae.aeStateNoti = AE_STATE_PRECAPTURE; | |
4811 | } | |
4812 | } | |
4813 | } | |
4814 | ||
/*
 * ISP-side precapture notification, driven while a precapture trigger
 * is outstanding.
 * Flash path: once the flash sequence reaches AUTO_DONE/AUTO_OFF, send
 * the final AE/AWB LOCKED notifications (only after a PRECAPTURE
 * notification was already observed) and clear the trigger; if
 * PRECAPTURE was not yet sent, send it now and finish on a later call.
 * Capture-phase flash states here are unexpected and are recovered by
 * forcing AUTO_DONE and notifying the locked states.
 * Non-flash path: convert a previously sent PRECAPTURE notification
 * into LOCKED and clear the trigger.
 */
void ExynosCameraHWInterface2::OnPrecaptureMeteringNotificationISP()
{
    if (m_ctlInfo.flash.m_precaptureTriggerId > 0) {
        if (m_ctlInfo.flash.m_flashEnableFlg) {
            // flash case
            switch (m_ctlInfo.flash.m_flashCnt) {
            case IS_FLASH_STATE_AUTO_DONE:
            case IS_FLASH_STATE_AUTO_OFF:
                if (m_ctlInfo.ae.aeStateNoti == AE_STATE_PRECAPTURE) {
                    // End notification
                    m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE,
                            ANDROID_CONTROL_AE_STATE_LOCKED,
                            m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
                    ALOGV("(%s) ANDROID_CONTROL_AE_STATE_LOCKED (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt);
                    m_notifyCb(CAMERA2_MSG_AUTOWB,
                            ANDROID_CONTROL_AWB_STATE_LOCKED,
                            m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
                    m_ctlInfo.flash.m_precaptureTriggerId = 0;   // trigger fully handled
                } else {
                    // PRECAPTURE not announced yet: announce it now and let a
                    // later invocation send the LOCKED notifications.
                    m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE,
                            ANDROID_CONTROL_AE_STATE_PRECAPTURE,
                            m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
                    ALOGV("(%s) ANDROID_CONTROL_AE_STATE_LOCKED (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt);
                    m_notifyCb(CAMERA2_MSG_AUTOWB,
                            ANDROID_CONTROL_AWB_STATE_CONVERGED,
                            m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
                    m_ctlInfo.ae.aeStateNoti = AE_STATE_PRECAPTURE;
                }
                break;
            case IS_FLASH_STATE_CAPTURE:
            case IS_FLASH_STATE_CAPTURE_WAIT:
            case IS_FLASH_STATE_CAPTURE_JPEG:
            case IS_FLASH_STATE_CAPTURE_END:
                // Capture states should not be reached during precapture:
                // recover by forcing AUTO_DONE and notifying locked states.
                ALOGV("(%s)  INVALID flash state count. (%d)", __FUNCTION__, (int)m_ctlInfo.flash.m_flashCnt);
                m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_DONE;
                m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE,
                        ANDROID_CONTROL_AE_STATE_LOCKED,
                        m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
                m_notifyCb(CAMERA2_MSG_AUTOWB,
                        ANDROID_CONTROL_AWB_STATE_LOCKED,
                        m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
                m_ctlInfo.flash.m_precaptureTriggerId = 0;
                break;
            }
        } else {
            // non-flash case
            if (m_ctlInfo.ae.aeStateNoti == AE_STATE_PRECAPTURE) {
                m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE,
                        ANDROID_CONTROL_AE_STATE_LOCKED,
                        m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
                ALOGV("(%s) ANDROID_CONTROL_AE_STATE_LOCKED (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt);
                m_notifyCb(CAMERA2_MSG_AUTOWB,
                        ANDROID_CONTROL_AWB_STATE_LOCKED,
                        m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
                m_ctlInfo.flash.m_precaptureTriggerId = 0;
            }
        }
    }
}
4874 | ||
0f26b20f SK |
4875 | void ExynosCameraHWInterface2::OnAfNotification(enum aa_afstate noti) |
4876 | { | |
4877 | switch (m_afMode) { | |
4878 | case AA_AFMODE_AUTO: | |
4879 | case AA_AFMODE_MACRO: | |
4880 | OnAfNotificationAutoMacro(noti); | |
4881 | break; | |
4882 | case AA_AFMODE_CONTINUOUS_VIDEO: | |
4883 | OnAfNotificationCAFVideo(noti); | |
4884 | break; | |
4885 | case AA_AFMODE_CONTINUOUS_PICTURE: | |
4886 | OnAfNotificationCAFPicture(noti); | |
4887 | break; | |
4888 | case AA_AFMODE_OFF: | |
4889 | default: | |
4890 | break; | |
4891 | } | |
4892 | } | |
4893 | ||
/*
 * AF state-machine update for single-shot AUTO/MACRO focus modes.
 *
 * Maps a hardware AF notification (noti) onto the HAL-side AF state
 * (m_afState); transitions visible to the framework are reported via
 * SetAfStateForService(). Combinations that should not occur are logged
 * and ignored (bWrongTransition). When AF pre-flash is active, locking
 * is deferred until the flash FSM reaches IS_FLASH_STATE_AUTO_DONE.
 */
void ExynosCameraHWInterface2::OnAfNotificationAutoMacro(enum aa_afstate noti)
{
    int nextState = NO_TRANSITION;
    bool bWrongTransition = false;

    if (m_afState == HAL_AFSTATE_INACTIVE || m_afState == HAL_AFSTATE_NEEDS_COMMAND) {
        // AF not started yet: every hardware notification is ignored.
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
        case AA_AFSTATE_ACTIVE_SCAN:
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
        case AA_AFSTATE_AF_FAILED_FOCUS:
        default:
            nextState = NO_TRANSITION;
            break;
        }
    }
    else if (m_afState == HAL_AFSTATE_STARTED) {
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
            nextState = NO_TRANSITION;
            break;
        case AA_AFSTATE_ACTIVE_SCAN:
            // Hardware began scanning: report ACTIVE_SCAN to the service.
            nextState = HAL_AFSTATE_SCANNING;
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN);
            break;
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
            nextState = NO_TRANSITION;
            break;
        case AA_AFSTATE_AF_FAILED_FOCUS:
            nextState = NO_TRANSITION;
            break;
        default:
            bWrongTransition = true;
            break;
        }
    }
    else if (m_afState == HAL_AFSTATE_SCANNING) {
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
            bWrongTransition = true;
            break;
        case AA_AFSTATE_ACTIVE_SCAN:
            nextState = NO_TRANSITION;
            break;
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
            // If Flash mode is enable, after AF execute pre-capture metering
            if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
                switch (m_ctlInfo.flash.m_flashCnt) {
                case IS_FLASH_STATE_ON_DONE:
                    // Flash just lit: run AE/AWB lock first; defer AF state.
                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_AE_AWB_LOCK;
                    nextState = NO_TRANSITION;
                    break;
                case IS_FLASH_STATE_AUTO_DONE:
                    // Flash metering finished: lock focus now.
                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF;
                    nextState = HAL_AFSTATE_LOCKED;
                    SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
                    break;
                default:
                    nextState = NO_TRANSITION;
                }
            } else {
                nextState = HAL_AFSTATE_LOCKED;
                SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
            }
            break;
        case AA_AFSTATE_AF_FAILED_FOCUS:
            // If Flash mode is enable, after AF execute pre-capture metering
            if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
                switch (m_ctlInfo.flash.m_flashCnt) {
                case IS_FLASH_STATE_ON_DONE:
                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_AE_AWB_LOCK;
                    nextState = NO_TRANSITION;
                    break;
                case IS_FLASH_STATE_AUTO_DONE:
                    // Flash metering finished but focus failed: report failure.
                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF;
                    nextState = HAL_AFSTATE_FAILED;
                    SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
                    break;
                default:
                    nextState = NO_TRANSITION;
                }
            } else {
                nextState = HAL_AFSTATE_FAILED;
                SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
            }
            break;
        default:
            bWrongTransition = true;
            break;
        }
    }
    else if (m_afState == HAL_AFSTATE_LOCKED) {
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
        case AA_AFSTATE_ACTIVE_SCAN:
            bWrongTransition = true;
            break;
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
            // Hardware repeating "focused" while locked is benign.
            nextState = NO_TRANSITION;
            break;
        case AA_AFSTATE_AF_FAILED_FOCUS:
        default:
            bWrongTransition = true;
            break;
        }
    }
    else if (m_afState == HAL_AFSTATE_FAILED) {
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
        case AA_AFSTATE_ACTIVE_SCAN:
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
            bWrongTransition = true;
            break;
        case AA_AFSTATE_AF_FAILED_FOCUS:
            nextState = NO_TRANSITION;
            break;
        default:
            bWrongTransition = true;
            break;
        }
    }
    if (bWrongTransition) {
        ALOGV("(%s): Wrong Transition state(%d) noti(%d)", __FUNCTION__, m_afState, noti);
        return;
    }
    ALOGV("(%s): State (%d) -> (%d) by (%d)", __FUNCTION__, m_afState, nextState, noti);
    if (nextState != NO_TRANSITION)
        m_afState = nextState;
}
5023 | ||
/*
 * AF state-machine update for CONTINUOUS_PICTURE mode.
 *
 * Unlike single-shot AF, a passive scan failure is deliberately reported
 * as PASSIVE_FOCUSED with m_AfHwStateFailed latched, so the failure is
 * only surfaced when the framework later asks for a determination.
 * In HAL_AFSTATE_NEEDS_DETERMINATION with AF pre-flash active,
 * notifications are swallowed until the flash FSM reaches
 * IS_FLASH_STATE_ON_DONE.
 */
void ExynosCameraHWInterface2::OnAfNotificationCAFPicture(enum aa_afstate noti)
{
    int nextState = NO_TRANSITION;
    bool bWrongTransition = false;

    if (m_afState == HAL_AFSTATE_INACTIVE) {
        // CAF not running: ignore everything.
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
        case AA_AFSTATE_ACTIVE_SCAN:
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
        case AA_AFSTATE_AF_FAILED_FOCUS:
        default:
            nextState = NO_TRANSITION;
            break;
        }
    }
    else if (m_afState == HAL_AFSTATE_STARTED) {
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
            nextState = NO_TRANSITION;
            break;
        case AA_AFSTATE_ACTIVE_SCAN:
            nextState = HAL_AFSTATE_SCANNING;
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN);
            break;
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
            nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
            break;
        case AA_AFSTATE_AF_FAILED_FOCUS:
            // Intentionally not surfaced as a failure in CAF-picture mode
            // (kept from an earlier revision):
            //nextState = HAL_AFSTATE_FAILED;
            //SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
            nextState = NO_TRANSITION;
            break;
        default:
            bWrongTransition = true;
            break;
        }
    }
    else if (m_afState == HAL_AFSTATE_SCANNING) {
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
            nextState = NO_TRANSITION;
            break;
        case AA_AFSTATE_ACTIVE_SCAN:
            nextState = NO_TRANSITION;
            m_AfHwStateFailed = false;
            break;
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
            nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
            m_AfHwStateFailed = false;
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
            break;
        case AA_AFSTATE_AF_FAILED_FOCUS:
            // Report PASSIVE_FOCUSED anyway but remember the HW failure.
            nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
            m_AfHwStateFailed = true;
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
            break;
        default:
            bWrongTransition = true;
            break;
        }
    }
    else if (m_afState == HAL_AFSTATE_PASSIVE_FOCUSED) {
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
            nextState = NO_TRANSITION;
            break;
        case AA_AFSTATE_ACTIVE_SCAN:
            // Scene changed: hardware re-scans.
            nextState = HAL_AFSTATE_SCANNING;
            m_AfHwStateFailed = false;
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN);
            break;
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
            nextState = NO_TRANSITION;
            m_AfHwStateFailed = false;
            break;
        case AA_AFSTATE_AF_FAILED_FOCUS:
            nextState = NO_TRANSITION;
            m_AfHwStateFailed = true;
            break;
        default:
            bWrongTransition = true;
            break;
        }
    }
    else if (m_afState == HAL_AFSTATE_NEEDS_DETERMINATION) {
        //Skip notification in case of flash, wait the end of flash on
        if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
            if (m_ctlInfo.flash.m_flashCnt < IS_FLASH_STATE_ON_DONE)
                return;
        }
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
            nextState = NO_TRANSITION;
            break;
        case AA_AFSTATE_ACTIVE_SCAN:
            nextState = NO_TRANSITION;
            break;
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
            // If Flash mode is enable, after AF execute pre-capture metering
            if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
                switch (m_ctlInfo.flash.m_flashCnt) {
                case IS_FLASH_STATE_ON_DONE:
                    ALOGV("[AF Flash] AUTO start with Mode (%d) state (%d) noti (%d)", m_afMode, m_afState, (int)noti);
                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_AE_AWB_LOCK;
                    nextState = NO_TRANSITION;
                    break;
                case IS_FLASH_STATE_AUTO_DONE:
                    ALOGV("[AF Flash] AUTO end with Mode (%d) state (%d) noti (%d)", m_afMode, m_afState, (int)noti);
                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF;
                    // Request lens lock before reporting the final state.
                    m_IsAfLockRequired = true;
                    nextState = HAL_AFSTATE_LOCKED;
                    SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
                    break;
                default:
                    nextState = NO_TRANSITION;
                }
            } else {
                m_IsAfLockRequired = true;
                nextState = HAL_AFSTATE_LOCKED;
                SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
            }
            break;
        case AA_AFSTATE_AF_FAILED_FOCUS:
            // If Flash mode is enable, after AF execute pre-capture metering
            if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
                switch (m_ctlInfo.flash.m_flashCnt) {
                case IS_FLASH_STATE_ON_DONE:
                    ALOGV("[AF Flash] AUTO start with Mode (%d) state (%d) noti (%d)", m_afMode, m_afState, (int)noti);
                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_AE_AWB_LOCK;
                    nextState = NO_TRANSITION;
                    break;
                case IS_FLASH_STATE_AUTO_DONE:
                    ALOGV("[AF Flash] AUTO end with Mode (%d) state (%d) noti (%d)", m_afMode, m_afState, (int)noti);
                    m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF;
                    m_IsAfLockRequired = true;
                    nextState = HAL_AFSTATE_FAILED;
                    SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
                    break;
                default:
                    nextState = NO_TRANSITION;
                }
            } else {
                m_IsAfLockRequired = true;
                nextState = HAL_AFSTATE_FAILED;
                SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
            }
            break;
        default:
            bWrongTransition = true;
            break;
        }
    }
    else if (m_afState == HAL_AFSTATE_LOCKED) {
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
            nextState = NO_TRANSITION;
            break;
        case AA_AFSTATE_ACTIVE_SCAN:
            bWrongTransition = true;
            break;
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
            nextState = NO_TRANSITION;
            break;
        case AA_AFSTATE_AF_FAILED_FOCUS:
        default:
            bWrongTransition = true;
            break;
        }
    }
    else if (m_afState == HAL_AFSTATE_FAILED) {
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
            bWrongTransition = true;
            break;
        case AA_AFSTATE_ACTIVE_SCAN:
            // Hardware retrying on its own: fall back to scanning.
            nextState = HAL_AFSTATE_SCANNING;
            break;
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
            bWrongTransition = true;
            break;
        case AA_AFSTATE_AF_FAILED_FOCUS:
            nextState = NO_TRANSITION;
            break;
        default:
            bWrongTransition = true;
            break;
        }
    }
    if (bWrongTransition) {
        ALOGV("(%s): Wrong Transition state(%d) noti(%d)", __FUNCTION__, m_afState, noti);
        return;
    }
    ALOGV("(%s): State (%d) -> (%d) by (%d)", __FUNCTION__, m_afState, nextState, noti);
    if (nextState != NO_TRANSITION)
        m_afState = nextState;
}
5222 | ||
/*
 * AF state-machine update for CONTINUOUS_VIDEO mode.
 *
 * Similar to CAF-picture handling but stricter: a hardware focus failure
 * is reported immediately as NOT_FOCUSED_LOCKED instead of being latched,
 * and no flash interaction is involved.
 */
void ExynosCameraHWInterface2::OnAfNotificationCAFVideo(enum aa_afstate noti)
{
    int nextState = NO_TRANSITION;
    bool bWrongTransition = false;

    if (m_afState == HAL_AFSTATE_INACTIVE) {
        // CAF not running: ignore everything.
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
        case AA_AFSTATE_ACTIVE_SCAN:
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
        case AA_AFSTATE_AF_FAILED_FOCUS:
        default:
            nextState = NO_TRANSITION;
            break;
        }
    }
    else if (m_afState == HAL_AFSTATE_STARTED) {
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
            nextState = NO_TRANSITION;
            break;
        case AA_AFSTATE_ACTIVE_SCAN:
            nextState = HAL_AFSTATE_SCANNING;
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN);
            break;
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
            nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
            break;
        case AA_AFSTATE_AF_FAILED_FOCUS:
            nextState = HAL_AFSTATE_FAILED;
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
            break;
        default:
            bWrongTransition = true;
            break;
        }
    }
    else if (m_afState == HAL_AFSTATE_SCANNING) {
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
            bWrongTransition = true;
            break;
        case AA_AFSTATE_ACTIVE_SCAN:
            nextState = NO_TRANSITION;
            break;
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
            nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
            break;
        case AA_AFSTATE_AF_FAILED_FOCUS:
            nextState = NO_TRANSITION;
            break;
        default:
            bWrongTransition = true;
            break;
        }
    }
    else if (m_afState == HAL_AFSTATE_PASSIVE_FOCUSED) {
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
            bWrongTransition = true;
            break;
        case AA_AFSTATE_ACTIVE_SCAN:
            // Scene changed: hardware re-scans.
            nextState = HAL_AFSTATE_SCANNING;
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN);
            break;
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
            nextState = NO_TRANSITION;
            break;
        case AA_AFSTATE_AF_FAILED_FOCUS:
            nextState = HAL_AFSTATE_FAILED;
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
            // TODO : needs NO_TRANSITION ?
            break;
        default:
            bWrongTransition = true;
            break;
        }
    }
    else if (m_afState == HAL_AFSTATE_NEEDS_DETERMINATION) {
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
            bWrongTransition = true;
            break;
        case AA_AFSTATE_ACTIVE_SCAN:
            nextState = NO_TRANSITION;
            break;
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
            // Request lens lock before reporting the final state.
            m_IsAfLockRequired = true;
            nextState = HAL_AFSTATE_LOCKED;
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
            break;
        case AA_AFSTATE_AF_FAILED_FOCUS:
            nextState = HAL_AFSTATE_FAILED;
            SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
            break;
        default:
            bWrongTransition = true;
            break;
        }
    }
    else if (m_afState == HAL_AFSTATE_LOCKED) {
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
            nextState = NO_TRANSITION;
            break;
        case AA_AFSTATE_ACTIVE_SCAN:
            bWrongTransition = true;
            break;
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
            nextState = NO_TRANSITION;
            break;
        case AA_AFSTATE_AF_FAILED_FOCUS:
        default:
            bWrongTransition = true;
            break;
        }
    }
    else if (m_afState == HAL_AFSTATE_FAILED) {
        switch (noti) {
        case AA_AFSTATE_INACTIVE:
        case AA_AFSTATE_ACTIVE_SCAN:
        case AA_AFSTATE_AF_ACQUIRED_FOCUS:
            bWrongTransition = true;
            break;
        case AA_AFSTATE_AF_FAILED_FOCUS:
            nextState = NO_TRANSITION;
            break;
        default:
            bWrongTransition = true;
            break;
        }
    }
    if (bWrongTransition) {
        ALOGV("(%s): Wrong Transition state(%d) noti(%d)", __FUNCTION__, m_afState, noti);
        return;
    }
    ALOGV("(%s): State (%d) -> (%d) by (%d)", __FUNCTION__, m_afState, nextState, noti);
    if (nextState != NO_TRANSITION)
        m_afState = nextState;
}
5365 | ||
5366 | void ExynosCameraHWInterface2::OnAfCancel(int id) | |
5367 | { | |
8e2c2fdb SK |
5368 | m_afTriggerId = id; |
5369 | ||
0f26b20f SK |
5370 | switch (m_afMode) { |
5371 | case AA_AFMODE_AUTO: | |
5372 | case AA_AFMODE_MACRO: | |
8e2c2fdb | 5373 | case AA_AFMODE_OFF: |
0f26b20f SK |
5374 | OnAfCancelAutoMacro(id); |
5375 | break; | |
5376 | case AA_AFMODE_CONTINUOUS_VIDEO: | |
5377 | OnAfCancelCAFVideo(id); | |
5378 | break; | |
5379 | case AA_AFMODE_CONTINUOUS_PICTURE: | |
5380 | OnAfCancelCAFPicture(id); | |
5381 | break; | |
0f26b20f SK |
5382 | default: |
5383 | break; | |
5384 | } | |
5385 | } | |
5386 | ||
5387 | void ExynosCameraHWInterface2::OnAfCancelAutoMacro(int id) | |
5388 | { | |
5389 | int nextState = NO_TRANSITION; | |
5390 | m_afTriggerId = id; | |
5391 | ||
e117f756 YJ |
5392 | if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) { |
5393 | m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF; | |
6f19b6cf | 5394 | } |
0f26b20f SK |
5395 | switch (m_afState) { |
5396 | case HAL_AFSTATE_INACTIVE: | |
5397 | nextState = NO_TRANSITION; | |
cd13bb78 | 5398 | SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE); |
0f26b20f SK |
5399 | break; |
5400 | case HAL_AFSTATE_NEEDS_COMMAND: | |
5401 | case HAL_AFSTATE_STARTED: | |
5402 | case HAL_AFSTATE_SCANNING: | |
5403 | case HAL_AFSTATE_LOCKED: | |
5404 | case HAL_AFSTATE_FAILED: | |
5405 | SetAfMode(AA_AFMODE_OFF); | |
5406 | SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE); | |
5407 | nextState = HAL_AFSTATE_INACTIVE; | |
5408 | break; | |
5409 | default: | |
5410 | break; | |
5411 | } | |
5412 | ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState); | |
5413 | if (nextState != NO_TRANSITION) | |
5414 | m_afState = nextState; | |
5415 | } | |
5416 | ||
5417 | void ExynosCameraHWInterface2::OnAfCancelCAFPicture(int id) | |
5418 | { | |
5419 | int nextState = NO_TRANSITION; | |
5420 | m_afTriggerId = id; | |
5421 | ||
5422 | switch (m_afState) { | |
5423 | case HAL_AFSTATE_INACTIVE: | |
5424 | nextState = NO_TRANSITION; | |
5425 | break; | |
5426 | case HAL_AFSTATE_NEEDS_COMMAND: | |
5427 | case HAL_AFSTATE_STARTED: | |
5428 | case HAL_AFSTATE_SCANNING: | |
5429 | case HAL_AFSTATE_LOCKED: | |
5430 | case HAL_AFSTATE_FAILED: | |
5431 | case HAL_AFSTATE_NEEDS_DETERMINATION: | |
5432 | case HAL_AFSTATE_PASSIVE_FOCUSED: | |
5433 | SetAfMode(AA_AFMODE_OFF); | |
5434 | SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE); | |
5435 | SetAfMode(AA_AFMODE_CONTINUOUS_PICTURE); | |
5436 | nextState = HAL_AFSTATE_INACTIVE; | |
5437 | break; | |
5438 | default: | |
5439 | break; | |
5440 | } | |
5441 | ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState); | |
5442 | if (nextState != NO_TRANSITION) | |
5443 | m_afState = nextState; | |
5444 | } | |
5445 | ||
5446 | void ExynosCameraHWInterface2::OnAfCancelCAFVideo(int id) | |
5447 | { | |
5448 | int nextState = NO_TRANSITION; | |
5449 | m_afTriggerId = id; | |
5450 | ||
5451 | switch (m_afState) { | |
5452 | case HAL_AFSTATE_INACTIVE: | |
5453 | nextState = NO_TRANSITION; | |
5454 | break; | |
5455 | case HAL_AFSTATE_NEEDS_COMMAND: | |
5456 | case HAL_AFSTATE_STARTED: | |
5457 | case HAL_AFSTATE_SCANNING: | |
5458 | case HAL_AFSTATE_LOCKED: | |
5459 | case HAL_AFSTATE_FAILED: | |
5460 | case HAL_AFSTATE_NEEDS_DETERMINATION: | |
5461 | case HAL_AFSTATE_PASSIVE_FOCUSED: | |
5462 | SetAfMode(AA_AFMODE_OFF); | |
5463 | SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE); | |
5464 | SetAfMode(AA_AFMODE_CONTINUOUS_VIDEO); | |
5465 | nextState = HAL_AFSTATE_INACTIVE; | |
5466 | break; | |
5467 | default: | |
5468 | break; | |
5469 | } | |
5470 | ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState); | |
5471 | if (nextState != NO_TRANSITION) | |
5472 | m_afState = nextState; | |
5473 | } | |
5474 | ||
5475 | void ExynosCameraHWInterface2::SetAfStateForService(int newState) | |
5476 | { | |
8e2c2fdb SK |
5477 | if (m_serviceAfState != newState || newState == 0) |
5478 | m_notifyCb(CAMERA2_MSG_AUTOFOCUS, newState, m_afTriggerId, 0, m_callbackCookie); | |
0f26b20f | 5479 | m_serviceAfState = newState; |
0f26b20f SK |
5480 | } |
5481 | ||
5482 | int ExynosCameraHWInterface2::GetAfStateForService() | |
5483 | { | |
5484 | return m_serviceAfState; | |
5485 | } | |
5486 | ||
5487 | void ExynosCameraHWInterface2::SetAfMode(enum aa_afmode afMode) | |
5488 | { | |
5489 | if (m_afMode != afMode) { | |
5490 | if (m_IsAfModeUpdateRequired) { | |
5491 | m_afMode2 = afMode; | |
5492 | ALOGV("(%s): pending(%d) and new(%d)", __FUNCTION__, m_afMode, afMode); | |
5493 | } | |
5494 | else { | |
5495 | ALOGV("(%s): current(%d) new(%d)", __FUNCTION__, m_afMode, afMode); | |
5496 | m_IsAfModeUpdateRequired = true; | |
5497 | m_afMode = afMode; | |
cd13bb78 SK |
5498 | if (m_afModeWaitingCnt != 0) { |
5499 | m_afModeWaitingCnt = 0; | |
7d0efb59 | 5500 | m_afState = HAL_AFSTATE_INACTIVE; |
cd13bb78 SK |
5501 | OnAfTrigger(m_afPendingTriggerId); |
5502 | } | |
8e2c2fdb | 5503 | } |
0f26b20f SK |
5504 | } |
5505 | } | |
5506 | ||
eed7ed1b SK |
/*
 * Populate the EXIF fields that do not change from shot to shot:
 * YCbCr positioning, F-number/aperture, focal length, default comments,
 * color space, GPS version, and thumbnail-IFD resolution defaults.
 * Per-capture fields are filled in by m_setExifChangedAttribute().
 */
void ExynosCameraHWInterface2::m_setExifFixedAttribute(void)
{
    // 'property' is only consumed by the #if 0 block below; it stays
    // declared so that block can be re-enabled without edits.
    char property[PROPERTY_VALUE_MAX];

    //2 0th IFD TIFF Tags
#if 0 // STOPSHIP TODO(aray): remove before launch, but for now don't leak product data
    //3 Maker
    property_get("ro.product.brand", property, EXIF_DEF_MAKER);
    strncpy((char *)mExifInfo.maker, property,
            sizeof(mExifInfo.maker) - 1);
    mExifInfo.maker[sizeof(mExifInfo.maker) - 1] = '\0';
    //3 Model
    property_get("ro.product.model", property, EXIF_DEF_MODEL);
    strncpy((char *)mExifInfo.model, property,
            sizeof(mExifInfo.model) - 1);
    mExifInfo.model[sizeof(mExifInfo.model) - 1] = '\0';
    //3 Software
    property_get("ro.build.id", property, EXIF_DEF_SOFTWARE);
    strncpy((char *)mExifInfo.software, property,
            sizeof(mExifInfo.software) - 1);
    mExifInfo.software[sizeof(mExifInfo.software) - 1] = '\0';
#endif

    //3 YCbCr Positioning
    mExifInfo.ycbcr_positioning = EXIF_DEF_YCBCR_POSITIONING;

    //2 0th IFD Exif Private Tags
    //3 F Number (stored as a rational: fnumber * den / den)
    mExifInfo.fnumber.num = (uint32_t)(m_camera2->m_curCameraInfo->fnumber * EXIF_DEF_FNUMBER_DEN);
    mExifInfo.fnumber.den = EXIF_DEF_FNUMBER_DEN;
    //3 Exposure Program
    mExifInfo.exposure_program = EXIF_DEF_EXPOSURE_PROGRAM;
    //3 Exif Version
    memcpy(mExifInfo.exif_version, EXIF_DEF_EXIF_VERSION, sizeof(mExifInfo.exif_version));
    //3 Aperture (APEX value derived from the F-number)
    double av = APEX_FNUM_TO_APERTURE((double)mExifInfo.fnumber.num/mExifInfo.fnumber.den);
    mExifInfo.aperture.num = (uint32_t)(av*EXIF_DEF_APEX_DEN);
    mExifInfo.aperture.den = EXIF_DEF_APEX_DEN;
    //3 Maximum lens aperture (fixed lens: same as current aperture)
    mExifInfo.max_aperture.num = mExifInfo.aperture.num;
    mExifInfo.max_aperture.den = mExifInfo.aperture.den;
    //3 Lens Focal Length (focalLength is in mm; x100 to match the
    //  EXIF_DEF_FOCAL_LEN_DEN denominator)
    mExifInfo.focal_length.num = (uint32_t)(m_camera2->m_curCameraInfo->focalLength * 100);

    mExifInfo.focal_length.den = EXIF_DEF_FOCAL_LEN_DEN;
    //3 User Comments
    strcpy((char *)mExifInfo.user_comment, EXIF_DEF_USERCOMMENTS);
    //3 Color Space information
    mExifInfo.color_space = EXIF_DEF_COLOR_SPACE;
    //3 Exposure Mode
    mExifInfo.exposure_mode = EXIF_DEF_EXPOSURE_MODE;

    //2 0th IFD GPS Info Tags
    // GPSVersionID 2.2.0.0 per the EXIF specification.
    unsigned char gps_version[4] = { 0x02, 0x02, 0x00, 0x00 };
    memcpy(mExifInfo.gps_version_id, gps_version, sizeof(gps_version));

    //2 1th IFD TIFF Tags (thumbnail IFD)
    mExifInfo.compression_scheme = EXIF_DEF_COMPRESSION;
    mExifInfo.x_resolution.num = EXIF_DEF_RESOLUTION_NUM;
    mExifInfo.x_resolution.den = EXIF_DEF_RESOLUTION_DEN;
    mExifInfo.y_resolution.num = EXIF_DEF_RESOLUTION_NUM;
    mExifInfo.y_resolution.den = EXIF_DEF_RESOLUTION_DEN;
    mExifInfo.resolution_unit = EXIF_DEF_RESOLUTION_UNIT;
}
5571 | ||
5572 | void ExynosCameraHWInterface2::m_setExifChangedAttribute(exif_attribute_t *exifInfo, ExynosRect *rect, | |
5573 | camera2_shot *currentEntry) | |
5574 | { | |
5575 | camera2_dm *dm = &(currentEntry->dm); | |
5576 | camera2_ctl *ctl = &(currentEntry->ctl); | |
5577 | ||
5578 | ALOGV("(%s): framecnt(%d) exp(%lld) iso(%d)", __FUNCTION__, ctl->request.frameCount, dm->sensor.exposureTime,dm->aa.isoValue ); | |
5579 | if (!ctl->request.frameCount) | |
5580 | return; | |
5581 | //2 0th IFD TIFF Tags | |
5582 | //3 Width | |
5583 | exifInfo->width = rect->w; | |
5584 | //3 Height | |
5585 | exifInfo->height = rect->h; | |
5586 | //3 Orientation | |
5587 | switch (ctl->jpeg.orientation) { | |
5588 | case 90: | |
5589 | exifInfo->orientation = EXIF_ORIENTATION_90; | |
5590 | break; | |
5591 | case 180: | |
5592 | exifInfo->orientation = EXIF_ORIENTATION_180; | |
5593 | break; | |
5594 | case 270: | |
5595 | exifInfo->orientation = EXIF_ORIENTATION_270; | |
5596 | break; | |
5597 | case 0: | |
5598 | default: | |
5599 | exifInfo->orientation = EXIF_ORIENTATION_UP; | |
5600 | break; | |
5601 | } | |
5602 | ||
5603 | //3 Date time | |
5604 | time_t rawtime; | |
5605 | struct tm *timeinfo; | |
5606 | time(&rawtime); | |
5607 | timeinfo = localtime(&rawtime); | |
5608 | strftime((char *)exifInfo->date_time, 20, "%Y:%m:%d %H:%M:%S", timeinfo); | |
5609 | ||
5610 | //2 0th IFD Exif Private Tags | |
5611 | //3 Exposure Time | |
5612 | int shutterSpeed = (dm->sensor.exposureTime/1000); | |
5613 | ||
9a77d67e SK |
5614 | // To display exposure time just above 500ms as 1/2sec, not 1 sec. |
5615 | if (shutterSpeed > 500000) | |
5616 | shutterSpeed -= 100000; | |
5617 | ||
eed7ed1b SK |
5618 | if (shutterSpeed < 0) { |
5619 | shutterSpeed = 100; | |
5620 | } | |
5621 | ||
5622 | exifInfo->exposure_time.num = 1; | |
5623 | // x us -> 1/x s */ | |
5624 | //exifInfo->exposure_time.den = (uint32_t)(1000000 / shutterSpeed); | |
5625 | exifInfo->exposure_time.den = (uint32_t)((double)1000000 / shutterSpeed); | |
5626 | ||
5627 | //3 ISO Speed Rating | |
5628 | exifInfo->iso_speed_rating = dm->aa.isoValue; | |
5629 | ||
5630 | uint32_t av, tv, bv, sv, ev; | |
5631 | av = APEX_FNUM_TO_APERTURE((double)exifInfo->fnumber.num / exifInfo->fnumber.den); | |
5632 | tv = APEX_EXPOSURE_TO_SHUTTER((double)exifInfo->exposure_time.num / exifInfo->exposure_time.den); | |
5633 | sv = APEX_ISO_TO_FILMSENSITIVITY(exifInfo->iso_speed_rating); | |
5634 | bv = av + tv - sv; | |
5635 | ev = av + tv; | |
5636 | //ALOGD("Shutter speed=%d us, iso=%d", shutterSpeed, exifInfo->iso_speed_rating); | |
5637 | ALOGD("AV=%d, TV=%d, SV=%d", av, tv, sv); | |
5638 | ||
5639 | //3 Shutter Speed | |
5640 | exifInfo->shutter_speed.num = tv * EXIF_DEF_APEX_DEN; | |
5641 | exifInfo->shutter_speed.den = EXIF_DEF_APEX_DEN; | |
5642 | //3 Brightness | |
5643 | exifInfo->brightness.num = bv*EXIF_DEF_APEX_DEN; | |
5644 | exifInfo->brightness.den = EXIF_DEF_APEX_DEN; | |
5645 | //3 Exposure Bias | |
5646 | if (ctl->aa.sceneMode== AA_SCENE_MODE_BEACH|| | |
5647 | ctl->aa.sceneMode== AA_SCENE_MODE_SNOW) { | |
5648 | exifInfo->exposure_bias.num = EXIF_DEF_APEX_DEN; | |
5649 | exifInfo->exposure_bias.den = EXIF_DEF_APEX_DEN; | |
5650 | } else { | |
5651 | exifInfo->exposure_bias.num = 0; | |
5652 | exifInfo->exposure_bias.den = 0; | |
5653 | } | |
5654 | //3 Metering Mode | |
5655 | /*switch (m_curCameraInfo->metering) { | |
5656 | case METERING_MODE_CENTER: | |
5657 | exifInfo->metering_mode = EXIF_METERING_CENTER; | |
5658 | break; | |
5659 | case METERING_MODE_MATRIX: | |
5660 | exifInfo->metering_mode = EXIF_METERING_MULTISPOT; | |
5661 | break; | |
5662 | case METERING_MODE_SPOT: | |
5663 | exifInfo->metering_mode = EXIF_METERING_SPOT; | |
5664 | break; | |
5665 | case METERING_MODE_AVERAGE: | |
5666 | default: | |
5667 | exifInfo->metering_mode = EXIF_METERING_AVERAGE; | |
5668 | break; | |
5669 | }*/ | |
5670 | exifInfo->metering_mode = EXIF_METERING_CENTER; | |
5671 | ||
5672 | //3 Flash | |
9257e29e YJ |
5673 | if (m_ctlInfo.flash.m_flashDecisionResult) |
5674 | exifInfo->flash = 1; | |
5675 | else | |
5676 | exifInfo->flash = EXIF_DEF_FLASH; | |
eed7ed1b SK |
5677 | |
5678 | //3 White Balance | |
53f62ad9 | 5679 | if (m_ctlInfo.awb.i_awbMode == AA_AWBMODE_WB_AUTO) |
eed7ed1b SK |
5680 | exifInfo->white_balance = EXIF_WB_AUTO; |
5681 | else | |
5682 | exifInfo->white_balance = EXIF_WB_MANUAL; | |
5683 | ||
5684 | //3 Scene Capture Type | |
5685 | switch (ctl->aa.sceneMode) { | |
5686 | case AA_SCENE_MODE_PORTRAIT: | |
5687 | exifInfo->scene_capture_type = EXIF_SCENE_PORTRAIT; | |
5688 | break; | |
5689 | case AA_SCENE_MODE_LANDSCAPE: | |
5690 | exifInfo->scene_capture_type = EXIF_SCENE_LANDSCAPE; | |
5691 | break; | |
5692 | case AA_SCENE_MODE_NIGHT_PORTRAIT: | |
5693 | exifInfo->scene_capture_type = EXIF_SCENE_NIGHT; | |
5694 | break; | |
5695 | default: | |
5696 | exifInfo->scene_capture_type = EXIF_SCENE_STANDARD; | |
5697 | break; | |
5698 | } | |
5699 | ||
5700 | //2 0th IFD GPS Info Tags | |
5701 | if (ctl->jpeg.gpsCoordinates[0] != 0 && ctl->jpeg.gpsCoordinates[1] != 0) { | |
5702 | ||
5703 | if (ctl->jpeg.gpsCoordinates[0] > 0) | |
5704 | strcpy((char *)exifInfo->gps_latitude_ref, "N"); | |
5705 | else | |
5706 | strcpy((char *)exifInfo->gps_latitude_ref, "S"); | |
5707 | ||
5708 | if (ctl->jpeg.gpsCoordinates[1] > 0) | |
5709 | strcpy((char *)exifInfo->gps_longitude_ref, "E"); | |
5710 | else | |
5711 | strcpy((char *)exifInfo->gps_longitude_ref, "W"); | |
5712 | ||
5713 | if (ctl->jpeg.gpsCoordinates[2] > 0) | |
5714 | exifInfo->gps_altitude_ref = 0; | |
5715 | else | |
5716 | exifInfo->gps_altitude_ref = 1; | |
5717 | ||
5718 | double latitude = fabs(ctl->jpeg.gpsCoordinates[0] / 10000.0); | |
5719 | double longitude = fabs(ctl->jpeg.gpsCoordinates[1] / 10000.0); | |
5720 | double altitude = fabs(ctl->jpeg.gpsCoordinates[2] / 100.0); | |
5721 | ||
5722 | exifInfo->gps_latitude[0].num = (uint32_t)latitude; | |
5723 | exifInfo->gps_latitude[0].den = 1; | |
5724 | exifInfo->gps_latitude[1].num = (uint32_t)((latitude - exifInfo->gps_latitude[0].num) * 60); | |
5725 | exifInfo->gps_latitude[1].den = 1; | |
5726 | exifInfo->gps_latitude[2].num = (uint32_t)((((latitude - exifInfo->gps_latitude[0].num) * 60) | |
5727 | - exifInfo->gps_latitude[1].num) * 60); | |
5728 | exifInfo->gps_latitude[2].den = 1; | |
5729 | ||
5730 | exifInfo->gps_longitude[0].num = (uint32_t)longitude; | |
5731 | exifInfo->gps_longitude[0].den = 1; | |
5732 | exifInfo->gps_longitude[1].num = (uint32_t)((longitude - exifInfo->gps_longitude[0].num) * 60); | |
5733 | exifInfo->gps_longitude[1].den = 1; | |
5734 | exifInfo->gps_longitude[2].num = (uint32_t)((((longitude - exifInfo->gps_longitude[0].num) * 60) | |
5735 | - exifInfo->gps_longitude[1].num) * 60); | |
5736 | exifInfo->gps_longitude[2].den = 1; | |
5737 | ||
5738 | exifInfo->gps_altitude.num = (uint32_t)altitude; | |
5739 | exifInfo->gps_altitude.den = 1; | |
5740 | ||
5741 | struct tm tm_data; | |
5742 | long timestamp; | |
5743 | timestamp = (long)ctl->jpeg.gpsTimestamp; | |
5744 | gmtime_r(×tamp, &tm_data); | |
5745 | exifInfo->gps_timestamp[0].num = tm_data.tm_hour; | |
5746 | exifInfo->gps_timestamp[0].den = 1; | |
5747 | exifInfo->gps_timestamp[1].num = tm_data.tm_min; | |
5748 | exifInfo->gps_timestamp[1].den = 1; | |
5749 | exifInfo->gps_timestamp[2].num = tm_data.tm_sec; | |
5750 | exifInfo->gps_timestamp[2].den = 1; | |
5751 | snprintf((char*)exifInfo->gps_datestamp, sizeof(exifInfo->gps_datestamp), | |
5752 | "%04d:%02d:%02d", tm_data.tm_year + 1900, tm_data.tm_mon + 1, tm_data.tm_mday); | |
5753 | ||
5754 | exifInfo->enableGps = true; | |
5755 | } else { | |
5756 | exifInfo->enableGps = false; | |
5757 | } | |
5758 | ||
5759 | //2 1th IFD TIFF Tags | |
5760 | exifInfo->widthThumb = ctl->jpeg.thumbnailSize[0]; | |
5761 | exifInfo->heightThumb = ctl->jpeg.thumbnailSize[1]; | |
5762 | } | |
5763 | ||
13d8c7b4 SK |
// Destructor for the main request-dispatch thread wrapper.
// Only logs; actual thread shutdown is driven via release()/SIGNAL_THREAD_RELEASE.
ExynosCameraHWInterface2::MainThread::~MainThread()
{
    ALOGV("(%s):", __FUNCTION__);
}
5768 | ||
5769 | void ExynosCameraHWInterface2::MainThread::release() | |
5770 | { | |
ad37861e | 5771 | ALOGV("(%s):", __func__); |
13d8c7b4 | 5772 | SetSignal(SIGNAL_THREAD_RELEASE); |
13d8c7b4 SK |
5773 | } |
5774 | ||
// Destructor for the sensor-control thread wrapper.
// Only logs; shutdown is requested separately through release().
ExynosCameraHWInterface2::SensorThread::~SensorThread()
{
    ALOGV("(%s):", __FUNCTION__);
}
5779 | ||
5780 | void ExynosCameraHWInterface2::SensorThread::release() | |
5781 | { | |
ad37861e | 5782 | ALOGV("(%s):", __func__); |
13d8c7b4 | 5783 | SetSignal(SIGNAL_THREAD_RELEASE); |
13d8c7b4 SK |
5784 | } |
5785 | ||
13d8c7b4 SK |
// Destructor for a stream-output thread wrapper. Only logs.
ExynosCameraHWInterface2::StreamThread::~StreamThread()
{
    ALOGV("(%s):", __FUNCTION__);
}
5790 | ||
5791 | void ExynosCameraHWInterface2::StreamThread::setParameter(stream_parameters_t * new_parameters) | |
5792 | { | |
5793 | ALOGV("DEBUG(%s):", __FUNCTION__); | |
5506cebf | 5794 | memcpy(&m_parameters, new_parameters, sizeof(stream_parameters_t)); |
c15a6b00 JS |
5795 | } |
5796 | ||
13d8c7b4 | 5797 | void ExynosCameraHWInterface2::StreamThread::release() |
c15a6b00 | 5798 | { |
9dd63e1f | 5799 | ALOGV("(%s):", __func__); |
13d8c7b4 | 5800 | SetSignal(SIGNAL_THREAD_RELEASE); |
13d8c7b4 SK |
5801 | } |
5802 | ||
5803 | int ExynosCameraHWInterface2::StreamThread::findBufferIndex(void * bufAddr) | |
5804 | { | |
5805 | int index; | |
5806 | for (index = 0 ; index < m_parameters.numSvcBuffers ; index++) { | |
5807 | if (m_parameters.svcBuffers[index].virt.extP[0] == bufAddr) | |
5808 | return index; | |
5809 | } | |
5810 | return -1; | |
c15a6b00 JS |
5811 | } |
5812 | ||
5506cebf SK |
5813 | int ExynosCameraHWInterface2::StreamThread::findBufferIndex(buffer_handle_t * bufHandle) |
5814 | { | |
5815 | int index; | |
5816 | for (index = 0 ; index < m_parameters.numSvcBuffers ; index++) { | |
5817 | if (m_parameters.svcBufHandle[index] == *bufHandle) | |
5818 | return index; | |
5819 | } | |
5820 | return -1; | |
5821 | } | |
5822 | ||
5823 | status_t ExynosCameraHWInterface2::StreamThread::attachSubStream(int stream_id, int priority) | |
9dd63e1f | 5824 | { |
5506cebf SK |
5825 | ALOGV("(%s): substream_id(%d)", __FUNCTION__, stream_id); |
5826 | int index, vacantIndex; | |
5827 | bool vacancy = false; | |
5828 | ||
5829 | for (index = 0 ; index < NUM_MAX_SUBSTREAM ; index++) { | |
5830 | if (!vacancy && m_attachedSubStreams[index].streamId == -1) { | |
5831 | vacancy = true; | |
5832 | vacantIndex = index; | |
5833 | } else if (m_attachedSubStreams[index].streamId == stream_id) { | |
5834 | return BAD_VALUE; | |
5835 | } | |
5836 | } | |
5837 | if (!vacancy) | |
5838 | return NO_MEMORY; | |
5839 | m_attachedSubStreams[vacantIndex].streamId = stream_id; | |
5840 | m_attachedSubStreams[vacantIndex].priority = priority; | |
5841 | m_numRegisteredStream++; | |
5842 | return NO_ERROR; | |
9dd63e1f SK |
5843 | } |
5844 | ||
5506cebf | 5845 | status_t ExynosCameraHWInterface2::StreamThread::detachSubStream(int stream_id) |
74d78ebe | 5846 | { |
5506cebf SK |
5847 | ALOGV("(%s): substream_id(%d)", __FUNCTION__, stream_id); |
5848 | int index; | |
5849 | bool found = false; | |
5850 | ||
5851 | for (index = 0 ; index < NUM_MAX_SUBSTREAM ; index++) { | |
5852 | if (m_attachedSubStreams[index].streamId == stream_id) { | |
5853 | found = true; | |
5854 | break; | |
5855 | } | |
5856 | } | |
5857 | if (!found) | |
5858 | return BAD_VALUE; | |
5859 | m_attachedSubStreams[index].streamId = -1; | |
5860 | m_attachedSubStreams[index].priority = 0; | |
5861 | m_numRegisteredStream--; | |
5862 | return NO_ERROR; | |
74d78ebe SK |
5863 | } |
5864 | ||
c15a6b00 JS |
5865 | int ExynosCameraHWInterface2::createIonClient(ion_client ionClient) |
5866 | { | |
5867 | if (ionClient == 0) { | |
5868 | ionClient = ion_client_create(); | |
5869 | if (ionClient < 0) { | |
13d8c7b4 | 5870 | ALOGE("[%s]src ion client create failed, value = %d\n", __FUNCTION__, ionClient); |
c15a6b00 JS |
5871 | return 0; |
5872 | } | |
5873 | } | |
c15a6b00 JS |
5874 | return ionClient; |
5875 | } | |
5876 | ||
5877 | int ExynosCameraHWInterface2::deleteIonClient(ion_client ionClient) | |
5878 | { | |
5879 | if (ionClient != 0) { | |
5880 | if (ionClient > 0) { | |
5881 | ion_client_destroy(ionClient); | |
5882 | } | |
5883 | ionClient = 0; | |
5884 | } | |
c15a6b00 JS |
5885 | return ionClient; |
5886 | } | |
5887 | ||
// Convenience overload: allocate iMemoryNum planes for buf with no
// ION cache flags requested (cacheFlag = 0).
int ExynosCameraHWInterface2::allocCameraMemory(ion_client ionClient, ExynosBuffer *buf, int iMemoryNum)
{
    return allocCameraMemory(ionClient, buf, iMemoryNum, 0);
}
5892 | ||
// Allocates and maps up to iMemoryNum planes of buf via ION.
// buf->size.extS[i] must be pre-filled; a zero size terminates the loop
// early (remaining planes are treated as unused). Bit i of cacheFlag
// requests ION_FLAG_CACHED for plane i.
// On any failure, every plane allocated so far is released via
// freeCameraMemory() and -1 is returned; returns 0 on success.
int ExynosCameraHWInterface2::allocCameraMemory(ion_client ionClient, ExynosBuffer *buf, int iMemoryNum, int cacheFlag)
{
    int ret = 0;
    int i = 0;
    int flag = 0;

    if (ionClient == 0) {
        ALOGE("[%s] ionClient is zero (%d)\n", __FUNCTION__, ionClient);
        return -1;
    }

    for (i = 0 ; i < iMemoryNum ; i++) {
        // Zero-sized plane marks the end of the used planes.
        if (buf->size.extS[i] == 0) {
            break;
        }
        // Per-plane cacheability: bit i of cacheFlag selects cached alloc.
        if (1 << i & cacheFlag)
            flag = ION_FLAG_CACHED;
        else
            flag = 0;
        buf->fd.extFd[i] = ion_alloc(ionClient, \
                buf->size.extS[i], 0, ION_HEAP_EXYNOS_MASK, flag);
        if ((buf->fd.extFd[i] == -1) ||(buf->fd.extFd[i] == 0)) {
            ALOGE("[%s]ion_alloc(%d) failed\n", __FUNCTION__, buf->size.extS[i]);
            // Normalize the fd to -1 so freeCameraMemory() skips it safely.
            buf->fd.extFd[i] = -1;
            freeCameraMemory(buf, iMemoryNum);
            return -1;
        }

        buf->virt.extP[i] = (char *)ion_map(buf->fd.extFd[i], \
                buf->size.extS[i], 0);
        if ((buf->virt.extP[i] == (char *)MAP_FAILED) || (buf->virt.extP[i] == NULL)) {
            ALOGE("[%s]src ion map failed(%d)\n", __FUNCTION__, buf->size.extS[i]);
            // Normalize to MAP_FAILED so cleanup won't try to unmap it.
            buf->virt.extP[i] = (char *)MAP_FAILED;
            freeCameraMemory(buf, iMemoryNum);
            return -1;
        }
        ALOGV("allocCameraMem : [%d][0x%08x] size(%d) flag(%d)", i, (unsigned int)(buf->virt.extP[i]), buf->size.extS[i], flag);
    }

    return ret;
}
5934 | ||
// Unmaps and frees every allocated plane of buf, then resets each plane
// descriptor to its unallocated sentinel state (fd=-1, virt=MAP_FAILED,
// size=0). Safe to call on partially allocated buffers: planes with
// fd == -1 are skipped, and an unmap failure is logged but does not
// prevent freeing the plane.
void ExynosCameraHWInterface2::freeCameraMemory(ExynosBuffer *buf, int iMemoryNum)
{

    int i = 0 ;
    int ret = 0;

    for (i=0;i<iMemoryNum;i++) {
        if (buf->fd.extFd[i] != -1) {
            // Only unmap planes that were successfully mapped.
            if (buf->virt.extP[i] != (char *)MAP_FAILED) {
                ret = ion_unmap(buf->virt.extP[i], buf->size.extS[i]);
                if (ret < 0)
                    ALOGE("ERR(%s)", __FUNCTION__);
            }
            ion_free(buf->fd.extFd[i]);
            ALOGV("freeCameraMemory : [%d][0x%08x] size(%d)", i, (unsigned int)(buf->virt.extP[i]), buf->size.extS[i]);
        }
        // Reset descriptor regardless of whether the plane was allocated.
        buf->fd.extFd[i] = -1;
        buf->virt.extP[i] = (char *)MAP_FAILED;
        buf->size.extS[i] = 0;
    }
}
5956 | ||
13d8c7b4 | 5957 | void ExynosCameraHWInterface2::initCameraMemory(ExynosBuffer *buf, int iMemoryNum) |
c15a6b00 JS |
5958 | { |
5959 | int i =0 ; | |
5960 | for (i=0;i<iMemoryNum;i++) { | |
13d8c7b4 SK |
5961 | buf->virt.extP[i] = (char *)MAP_FAILED; |
5962 | buf->fd.extFd[i] = -1; | |
5963 | buf->size.extS[i] = 0; | |
c15a6b00 JS |
5964 | } |
5965 | } | |
5966 | ||
5967 | ||
13d8c7b4 SK |
5968 | |
5969 | ||
// Singleton HAL state: this HAL allows at most one open camera2 device at
// a time (open() waits for g_cam2_device to become NULL before reusing it).
static camera2_device_t *g_cam2_device = NULL;
// True while the open device is usable; cleared during open/close so the
// C-ABI wrappers can short-circuit calls into a dying instance.
// NOTE(review): "vaild" is a typo for "valid"; kept because the name is
// referenced throughout this file.
static bool g_camera_vaild = false;
// Per-camera static-metadata helpers (0 = back, 1 = front), lazily created
// in HAL2_getCameraInfo() and never freed.
ExynosCamera2 * g_camera2[2] = { NULL, NULL };
c15a6b00 JS |
5973 | |
// hw_device_t close hook: tears down the singleton camera2 device.
// Deletes the C++ implementation object, frees the device struct, and
// clears the globals. g_cam2_device must be set to NULL last of the two
// globals touched here — HAL2_camera_device_open() busy-waits on it to
// detect that the previous device is fully gone.
static int HAL2_camera_device_close(struct hw_device_t* device)
{
    ALOGD("(%s): ENTER", __FUNCTION__);
    if (device) {

        camera2_device_t *cam_device = (camera2_device_t *)device;
        ALOGV("cam_device(0x%08x):", (unsigned int)cam_device);
        ALOGV("g_cam2_device(0x%08x):", (unsigned int)g_cam2_device);
        delete static_cast<ExynosCameraHWInterface2 *>(cam_device->priv);
        free(cam_device);
        g_camera_vaild = false;
        g_cam2_device = NULL;
    }

    ALOGD("(%s): EXIT", __FUNCTION__);
    return 0;
}
5991 | ||
// Recovers the C++ implementation object stashed in the device's priv
// pointer by HAL2_camera_device_open().
static inline ExynosCameraHWInterface2 *obj(const struct camera2_device *dev)
{
    return reinterpret_cast<ExynosCameraHWInterface2 *>(dev->priv);
}
5996 | ||
// C-ABI trampoline: forwards to ExynosCameraHWInterface2::setRequestQueueSrcOps().
static int HAL2_device_set_request_queue_src_ops(const struct camera2_device *dev,
            const camera2_request_queue_src_ops_t *request_src_ops)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->setRequestQueueSrcOps(request_src_ops);
}
6003 | ||
// C-ABI trampoline: forwards to ExynosCameraHWInterface2::notifyRequestQueueNotEmpty().
static int HAL2_device_notify_request_queue_not_empty(const struct camera2_device *dev)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->notifyRequestQueueNotEmpty();
}
6009 | ||
// C-ABI trampoline: forwards to ExynosCameraHWInterface2::setFrameQueueDstOps().
static int HAL2_device_set_frame_queue_dst_ops(const struct camera2_device *dev,
            const camera2_frame_queue_dst_ops_t *frame_dst_ops)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->setFrameQueueDstOps(frame_dst_ops);
}
6016 | ||
// C-ABI trampoline: forwards to ExynosCameraHWInterface2::getInProgressCount().
static int HAL2_device_get_in_progress_count(const struct camera2_device *dev)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->getInProgressCount();
}
6022 | ||
// C-ABI trampoline: forwards to ExynosCameraHWInterface2::flushCapturesInProgress().
static int HAL2_device_flush_captures_in_progress(const struct camera2_device *dev)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->flushCapturesInProgress();
}
6028 | ||
// C-ABI trampoline: forwards to ExynosCameraHWInterface2::constructDefaultRequest().
static int HAL2_device_construct_default_request(const struct camera2_device *dev,
            int request_template, camera_metadata_t **request)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->constructDefaultRequest(request_template, request);
}
6035 | ||
// C-ABI trampoline: forwards to ExynosCameraHWInterface2::allocateStream(),
// which fills in the actual format, gralloc usage and max buffer count for
// the stream the framework requested.
static int HAL2_device_allocate_stream(
            const struct camera2_device *dev,
            // inputs
            uint32_t width,
            uint32_t height,
            int format,
            const camera2_stream_ops_t *stream_ops,
            // outputs
            uint32_t *stream_id,
            uint32_t *format_actual,
            uint32_t *usage,
            uint32_t *max_buffers)
{
    ALOGV("(%s): ", __FUNCTION__);
    return obj(dev)->allocateStream(width, height, format, stream_ops,
                                    stream_id, format_actual, usage, max_buffers);
}
6053 | ||
c15a6b00 JS |
// C-ABI trampoline: forwards to ExynosCameraHWInterface2::registerStreamBuffers().
static int HAL2_device_register_stream_buffers(const struct camera2_device *dev,
            uint32_t stream_id,
            int num_buffers,
            buffer_handle_t *buffers)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->registerStreamBuffers(stream_id, num_buffers, buffers);
}
6062 | ||
// C-ABI trampoline: forwards to ExynosCameraHWInterface2::releaseStream().
// Silently reports success when the device is already marked invalid
// (mid-close), avoiding calls into a dying instance.
static int HAL2_device_release_stream(
        const struct camera2_device *dev,
            uint32_t stream_id)
{
    ALOGV("DEBUG(%s)(id: %d):", __FUNCTION__, stream_id);
    if (!g_camera_vaild)
        return 0;
    return obj(dev)->releaseStream(stream_id);
}
6072 | ||
// C-ABI trampoline: forwards to ExynosCameraHWInterface2::allocateReprocessStream().
static int HAL2_device_allocate_reprocess_stream(
           const struct camera2_device *dev,
            uint32_t width,
            uint32_t height,
            uint32_t format,
            const camera2_stream_in_ops_t *reprocess_stream_ops,
            // outputs
            uint32_t *stream_id,
            uint32_t *consumer_usage,
            uint32_t *max_buffers)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->allocateReprocessStream(width, height, format, reprocess_stream_ops,
                                    stream_id, consumer_usage, max_buffers);
}
6088 | ||
2b0421d1 EVT |
// C-ABI trampoline: forwards to
// ExynosCameraHWInterface2::allocateReprocessStreamFromStream(), which
// creates a reprocess input stream backed by an existing output stream.
static int HAL2_device_allocate_reprocess_stream_from_stream(
           const struct camera2_device *dev,
            uint32_t output_stream_id,
            const camera2_stream_in_ops_t *reprocess_stream_ops,
            // outputs
            uint32_t *stream_id)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->allocateReprocessStreamFromStream(output_stream_id,
                                    reprocess_stream_ops, stream_id);
}
6100 | ||
c15a6b00 JS |
// C-ABI trampoline: forwards to ExynosCameraHWInterface2::releaseReprocessStream().
static int HAL2_device_release_reprocess_stream(
        const struct camera2_device *dev,
            uint32_t stream_id)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->releaseReprocessStream(stream_id);
}
6108 | ||
// C-ABI trampoline: forwards to ExynosCameraHWInterface2::triggerAction()
// (e.g. autofocus / precapture triggers). Silently reports success when
// the device is already marked invalid (mid-close).
static int HAL2_device_trigger_action(const struct camera2_device *dev,
           uint32_t trigger_id,
            int ext1,
            int ext2)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    if (!g_camera_vaild)
        return 0;
    return obj(dev)->triggerAction(trigger_id, ext1, ext2);
}
6119 | ||
// C-ABI trampoline: forwards to ExynosCameraHWInterface2::setNotifyCallback().
static int HAL2_device_set_notify_callback(const struct camera2_device *dev,
            camera2_notify_callback notify_cb,
            void *user)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->setNotifyCallback(notify_cb, user);
}
6127 | ||
// C-ABI trampoline: forwards to ExynosCameraHWInterface2::getMetadataVendorTagOps().
static int HAL2_device_get_metadata_vendor_tag_ops(const struct camera2_device*dev,
            vendor_tag_query_ops_t **ops)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->getMetadataVendorTagOps(ops);
}
6134 | ||
// C-ABI trampoline: forwards to ExynosCameraHWInterface2::dump() for dumpsys.
static int HAL2_device_dump(const struct camera2_device *dev, int fd)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->dump(fd);
}
6140 | ||
6141 | ||
6142 | ||
6143 | ||
6144 | ||
// camera_module_t hook: this HAL exposes exactly two cameras
// (0 = back, 1 = front — see HAL2_getCameraInfo()).
static int HAL2_getNumberOfCameras()
{
    ALOGV("(%s): returning 2", __FUNCTION__);
    return 2;
}
6150 | ||
6151 | ||
c15a6b00 JS |
// camera_module_t hook: fills *info for cameraId (0 = back, 1 = front).
// Lazily constructs the per-camera ExynosCamera2 helper and caches the
// static metadata in a function-local static, so the (intentionally
// never-freed) metadata is built at most once per camera.
// Returns BAD_VALUE for an unknown id, or the constructStaticInfo error.
static int HAL2_getCameraInfo(int cameraId, struct camera_info *info)
{
    ALOGV("DEBUG(%s): cameraID: %d", __FUNCTION__, cameraId);
    static camera_metadata_t * mCameraInfo[2] = {NULL, NULL};

    status_t res;

    if (cameraId == 0) {
        info->facing = CAMERA_FACING_BACK;
        if (!g_camera2[0])
            g_camera2[0] = new ExynosCamera2(0);
    }
    else if (cameraId == 1) {
        info->facing = CAMERA_FACING_FRONT;
        if (!g_camera2[1])
            g_camera2[1] = new ExynosCamera2(1);
    }
    else
        return BAD_VALUE;

    info->orientation = 0;
    info->device_version = HARDWARE_DEVICE_API_VERSION(2, 0);
    if (mCameraInfo[cameraId] == NULL) {
        // Two-pass build: first call (true) sizes/allocates the metadata,
        // second call (false) fills in the entries.
        res = g_camera2[cameraId]->constructStaticInfo(&(mCameraInfo[cameraId]), cameraId, true);
        if (res != OK) {
            ALOGE("%s: Unable to allocate static info: %s (%d)",
                    __FUNCTION__, strerror(-res), res);
            return res;
        }
        res = g_camera2[cameraId]->constructStaticInfo(&(mCameraInfo[cameraId]), cameraId, false);
        if (res != OK) {
            ALOGE("%s: Unable to fill in static info: %s (%d)",
                    __FUNCTION__, strerror(-res), res);
            return res;
        }
    }
    info->static_camera_characteristics = mCameraInfo[cameraId];
    return NO_ERROR;
}
6191 | ||
// vtable wiring: maps each camera2 HAL op to its HAL2_device_* trampoline
// using GNU designated-initializer syntax (field : value).
#define SET_METHOD(m) m : HAL2_device_##m

static camera2_device_ops_t camera2_device_ops = {
        SET_METHOD(set_request_queue_src_ops),
        SET_METHOD(notify_request_queue_not_empty),
        SET_METHOD(set_frame_queue_dst_ops),
        SET_METHOD(get_in_progress_count),
        SET_METHOD(flush_captures_in_progress),
        SET_METHOD(construct_default_request),
        SET_METHOD(allocate_stream),
        SET_METHOD(register_stream_buffers),
        SET_METHOD(release_stream),
        SET_METHOD(allocate_reprocess_stream),
        SET_METHOD(allocate_reprocess_stream_from_stream),
        SET_METHOD(release_reprocess_stream),
        SET_METHOD(trigger_action),
        SET_METHOD(set_notify_callback),
        SET_METHOD(get_metadata_vendor_tag_ops),
        SET_METHOD(dump),
};

#undef SET_METHOD
6214 | ||
6215 | ||
6216 | static int HAL2_camera_device_open(const struct hw_module_t* module, | |
6217 | const char *id, | |
6218 | struct hw_device_t** device) | |
6219 | { | |
c15a6b00 | 6220 | int cameraId = atoi(id); |
6044e509 | 6221 | int openInvalid = 0; |
9dd63e1f | 6222 | |
b5237e6b | 6223 | g_camera_vaild = false; |
0eb27a9d | 6224 | ALOGD("\n\n>>> I'm Samsung's CameraHAL_2(ID:%d) <<<\n\n", cameraId); |
c15a6b00 | 6225 | if (cameraId < 0 || cameraId >= HAL2_getNumberOfCameras()) { |
13d8c7b4 | 6226 | ALOGE("ERR(%s):Invalid camera ID %s", __FUNCTION__, id); |
c15a6b00 JS |
6227 | return -EINVAL; |
6228 | } | |
6229 | ||
0eb27a9d | 6230 | ALOGD("g_cam2_device : 0x%08x", (unsigned int)g_cam2_device); |
c15a6b00 JS |
6231 | if (g_cam2_device) { |
6232 | if (obj(g_cam2_device)->getCameraId() == cameraId) { | |
0eb27a9d | 6233 | ALOGD("DEBUG(%s):returning existing camera ID %s", __FUNCTION__, id); |
c15a6b00 JS |
6234 | goto done; |
6235 | } else { | |
0eb27a9d | 6236 | ALOGD("(%s): START waiting for cam device free", __FUNCTION__); |
9dd63e1f | 6237 | while (g_cam2_device) |
041f38de | 6238 | usleep(SIG_WAITING_TICK); |
0eb27a9d | 6239 | ALOGD("(%s): END waiting for cam device free", __FUNCTION__); |
c15a6b00 JS |
6240 | } |
6241 | } | |
6242 | ||
6243 | g_cam2_device = (camera2_device_t *)malloc(sizeof(camera2_device_t)); | |
ad37861e | 6244 | ALOGV("g_cam2_device : 0x%08x", (unsigned int)g_cam2_device); |
9dd63e1f | 6245 | |
c15a6b00 JS |
6246 | if (!g_cam2_device) |
6247 | return -ENOMEM; | |
6248 | ||
6249 | g_cam2_device->common.tag = HARDWARE_DEVICE_TAG; | |
6250 | g_cam2_device->common.version = CAMERA_DEVICE_API_VERSION_2_0; | |
6251 | g_cam2_device->common.module = const_cast<hw_module_t *>(module); | |
6252 | g_cam2_device->common.close = HAL2_camera_device_close; | |
6253 | ||
6254 | g_cam2_device->ops = &camera2_device_ops; | |
6255 | ||
13d8c7b4 | 6256 | ALOGV("DEBUG(%s):open camera2 %s", __FUNCTION__, id); |
c15a6b00 | 6257 | |
6044e509 SK |
6258 | g_cam2_device->priv = new ExynosCameraHWInterface2(cameraId, g_cam2_device, g_camera2[cameraId], &openInvalid); |
6259 | if (!openInvalid) { | |
5506cebf | 6260 | ALOGE("DEBUG(%s): ExynosCameraHWInterface2 creation failed", __FUNCTION__); |
6f19b6cf | 6261 | return -ENODEV; |
6044e509 | 6262 | } |
c15a6b00 JS |
6263 | done: |
6264 | *device = (hw_device_t *)g_cam2_device; | |
13d8c7b4 | 6265 | ALOGV("DEBUG(%s):opened camera2 %s (%p)", __FUNCTION__, id, *device); |
b5237e6b | 6266 | g_camera_vaild = true; |
c15a6b00 JS |
6267 | |
6268 | return 0; | |
6269 | } | |
6270 | ||
6271 | ||
// Module method table handed to the framework; open() is the only method.
static hw_module_methods_t camera_module_methods = {
            open : HAL2_camera_device_open
};
6275 | ||
// The well-known HAL module symbol the Android framework dlopen()s and
// looks up by name; must have C linkage.
extern "C" {
    struct camera_module HAL_MODULE_INFO_SYM = {
      common : {
          tag                : HARDWARE_MODULE_TAG,
          module_api_version : CAMERA_MODULE_API_VERSION_2_0,
          hal_api_version    : HARDWARE_HAL_API_VERSION,
          id                 : CAMERA_HARDWARE_MODULE_ID,
          name               : "Exynos Camera HAL2",
          author             : "Samsung Corporation",
          methods            : &camera_module_methods,
          dso:                NULL,
          reserved:           {0},
      },
      get_number_of_cameras : HAL2_getNumberOfCameras,
      get_camera_info       : HAL2_getCameraInfo
    };
}
6293 | ||
6294 | }; // namespace android |