Manta: libcamera2: prevent HAL crash when ZSL stream freed
[GitHub/LineageOS/android_hardware_samsung_slsi_exynos5.git] / libcamera2 / ExynosCameraHWInterface2.cpp
1 /*
2 **
3 ** Copyright 2008, The Android Open Source Project
4 ** Copyright 2012, Samsung Electronics Co. LTD
5 **
6 ** Licensed under the Apache License, Version 2.0 (the "License");
7 ** you may not use this file except in compliance with the License.
8 ** You may obtain a copy of the License at
9 **
10 ** http://www.apache.org/licenses/LICENSE-2.0
11 **
12 ** Unless required by applicable law or agreed to in writing, software
13 ** distributed under the License is distributed on an "AS IS" BASIS,
14 ** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15 ** See the License for the specific language governing permissions and
16 ** limitations under the License.
17 */
18
19 /*!
20 * \file ExynosCameraHWInterface2.cpp
21 * \brief source file for Android Camera API 2.0 HAL
22 * \author Sungjoong Kang(sj3.kang@samsung.com)
23 * \date 2012/07/10
24 *
25 * <b>Revision History: </b>
26 * - 2012/05/31 : Sungjoong Kang(sj3.kang@samsung.com) \n
27 * Initial Release
28 *
29 * - 2012/07/10 : Sungjoong Kang(sj3.kang@samsung.com) \n
30 * 2nd Release
31 *
32 */
33
34 //#define LOG_NDEBUG 0
35 #define LOG_TAG "ExynosCameraHAL2"
36 #include <utils/Log.h>
37 #include <math.h>
38
39 #include "ExynosCameraHWInterface2.h"
40 #include "exynos_format.h"
41
42 namespace android {
43
44 void m_savePostView(const char *fname, uint8_t *buf, uint32_t size)
45 {
46 int nw;
47 int cnt = 0;
48 uint32_t written = 0;
49
50 ALOGV("opening file [%s], address[%x], size(%d)", fname, (unsigned int)buf, size);
51 int fd = open(fname, O_RDWR | O_CREAT, 0644);
52 if (fd < 0) {
53 ALOGE("failed to create file [%s]: %s", fname, strerror(errno));
54 return;
55 }
56
57 ALOGV("writing %d bytes to file [%s]", size, fname);
58 while (written < size) {
59 nw = ::write(fd, buf + written, size - written);
60 if (nw < 0) {
61 ALOGE("failed to write to file %d [%s]: %s",written,fname, strerror(errno));
62 break;
63 }
64 written += nw;
65 cnt++;
66 }
67 ALOGV("done writing %d bytes to file [%s] in %d passes",size, fname, cnt);
68 ::close(fd);
69 }
70
71 int get_pixel_depth(uint32_t fmt)
72 {
73 int depth = 0;
74
75 switch (fmt) {
76 case V4L2_PIX_FMT_JPEG:
77 depth = 8;
78 break;
79
80 case V4L2_PIX_FMT_NV12:
81 case V4L2_PIX_FMT_NV21:
82 case V4L2_PIX_FMT_YUV420:
83 case V4L2_PIX_FMT_YVU420M:
84 case V4L2_PIX_FMT_NV12M:
85 case V4L2_PIX_FMT_NV12MT:
86 depth = 12;
87 break;
88
89 case V4L2_PIX_FMT_RGB565:
90 case V4L2_PIX_FMT_YUYV:
91 case V4L2_PIX_FMT_YVYU:
92 case V4L2_PIX_FMT_UYVY:
93 case V4L2_PIX_FMT_VYUY:
94 case V4L2_PIX_FMT_NV16:
95 case V4L2_PIX_FMT_NV61:
96 case V4L2_PIX_FMT_YUV422P:
97 case V4L2_PIX_FMT_SBGGR10:
98 case V4L2_PIX_FMT_SBGGR12:
99 case V4L2_PIX_FMT_SBGGR16:
100 depth = 16;
101 break;
102
103 case V4L2_PIX_FMT_RGB32:
104 depth = 32;
105 break;
106 default:
107 ALOGE("Get depth failed(format : %d)", fmt);
108 break;
109 }
110
111 return depth;
112 }
113
114 int cam_int_s_fmt(node_info_t *node)
115 {
116 struct v4l2_format v4l2_fmt;
117 unsigned int framesize;
118 int ret;
119
120 memset(&v4l2_fmt, 0, sizeof(struct v4l2_format));
121
122 v4l2_fmt.type = node->type;
123 framesize = (node->width * node->height * get_pixel_depth(node->format)) / 8;
124
125 if (node->planes >= 1) {
126 v4l2_fmt.fmt.pix_mp.width = node->width;
127 v4l2_fmt.fmt.pix_mp.height = node->height;
128 v4l2_fmt.fmt.pix_mp.pixelformat = node->format;
129 v4l2_fmt.fmt.pix_mp.field = V4L2_FIELD_ANY;
130 } else {
131 ALOGE("%s:S_FMT, Out of bound : Number of element plane",__FUNCTION__);
132 }
133
134 /* Set up for capture */
135 ret = exynos_v4l2_s_fmt(node->fd, &v4l2_fmt);
136
137 if (ret < 0)
138 ALOGE("%s: exynos_v4l2_s_fmt fail (%d)",__FUNCTION__, ret);
139
140
141 return ret;
142 }
143
144 int cam_int_reqbufs(node_info_t *node)
145 {
146 struct v4l2_requestbuffers req;
147 int ret;
148
149 req.count = node->buffers;
150 req.type = node->type;
151 req.memory = node->memory;
152
153 ret = exynos_v4l2_reqbufs(node->fd, &req);
154
155 if (ret < 0)
156 ALOGE("%s: VIDIOC_REQBUFS (fd:%d) failed (%d)",__FUNCTION__,node->fd, ret);
157
158 return req.count;
159 }
160
161 int cam_int_qbuf(node_info_t *node, int index)
162 {
163 struct v4l2_buffer v4l2_buf;
164 struct v4l2_plane planes[VIDEO_MAX_PLANES];
165 int i;
166 int ret = 0;
167
168 v4l2_buf.m.planes = planes;
169 v4l2_buf.type = node->type;
170 v4l2_buf.memory = node->memory;
171 v4l2_buf.index = index;
172 v4l2_buf.length = node->planes;
173
174 for(i = 0; i < node->planes; i++){
175 v4l2_buf.m.planes[i].m.fd = (int)(node->buffer[index].fd.extFd[i]);
176 v4l2_buf.m.planes[i].length = (unsigned long)(node->buffer[index].size.extS[i]);
177 }
178
179 ret = exynos_v4l2_qbuf(node->fd, &v4l2_buf);
180
181 if (ret < 0)
182 ALOGE("%s: cam_int_qbuf failed (index:%d)(ret:%d)",__FUNCTION__, index, ret);
183
184 return ret;
185 }
186
187 int cam_int_streamon(node_info_t *node)
188 {
189 enum v4l2_buf_type type = node->type;
190 int ret;
191
192
193 ret = exynos_v4l2_streamon(node->fd, type);
194
195 if (ret < 0)
196 ALOGE("%s: VIDIOC_STREAMON failed [%d] (%d)",__FUNCTION__, node->fd,ret);
197
198 ALOGV("On streaming I/O... ... fd(%d)", node->fd);
199
200 return ret;
201 }
202
203 int cam_int_streamoff(node_info_t *node)
204 {
205 enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
206 int ret;
207
208
209 ALOGV("Off streaming I/O... fd(%d)", node->fd);
210 ret = exynos_v4l2_streamoff(node->fd, type);
211
212 if (ret < 0)
213 ALOGE("%s: VIDIOC_STREAMOFF failed (%d)",__FUNCTION__, ret);
214
215 return ret;
216 }
217
218 int isp_int_streamoff(node_info_t *node)
219 {
220 enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
221 int ret;
222
223 ALOGV("Off streaming I/O... fd(%d)", node->fd);
224 ret = exynos_v4l2_streamoff(node->fd, type);
225
226 if (ret < 0)
227 ALOGE("%s: VIDIOC_STREAMOFF failed (%d)",__FUNCTION__, ret);
228
229 return ret;
230 }
231
232 int cam_int_dqbuf(node_info_t *node)
233 {
234 struct v4l2_buffer v4l2_buf;
235 struct v4l2_plane planes[VIDEO_MAX_PLANES];
236 int ret;
237
238 v4l2_buf.type = node->type;
239 v4l2_buf.memory = node->memory;
240 v4l2_buf.m.planes = planes;
241 v4l2_buf.length = node->planes;
242
243 ret = exynos_v4l2_dqbuf(node->fd, &v4l2_buf);
244 if (ret < 0)
245 ALOGE("%s: VIDIOC_DQBUF failed (%d)",__FUNCTION__, ret);
246
247 return v4l2_buf.index;
248 }
249
250 int cam_int_dqbuf(node_info_t *node, int num_plane)
251 {
252 struct v4l2_buffer v4l2_buf;
253 struct v4l2_plane planes[VIDEO_MAX_PLANES];
254 int ret;
255
256 v4l2_buf.type = node->type;
257 v4l2_buf.memory = node->memory;
258 v4l2_buf.m.planes = planes;
259 v4l2_buf.length = num_plane;
260
261 ret = exynos_v4l2_dqbuf(node->fd, &v4l2_buf);
262 if (ret < 0)
263 ALOGE("%s: VIDIOC_DQBUF failed (%d)",__FUNCTION__, ret);
264
265 return v4l2_buf.index;
266 }
267
268 int cam_int_s_input(node_info_t *node, int index)
269 {
270 int ret;
271
272 ret = exynos_v4l2_s_input(node->fd, index);
273 if (ret < 0)
274 ALOGE("%s: VIDIOC_S_INPUT failed (%d)",__FUNCTION__, ret);
275
276 return ret;
277 }
278
279
280 gralloc_module_t const* ExynosCameraHWInterface2::m_grallocHal;
281
282 RequestManager::RequestManager(SignalDrivenThread* main_thread):
283 m_vdisEnable(false),
284 m_lastCompletedFrameCnt(-1),
285 m_lastAeMode(0),
286 m_lastAaMode(0),
287 m_lastAwbMode(0),
288 m_lastAeComp(0),
289 m_vdisBubbleEn(false)
290 {
291 m_metadataConverter = new MetadataConverter;
292 m_mainThread = main_thread;
293 ResetEntry();
294 m_sensorPipelineSkipCnt = 0;
295 return;
296 }
297
298 RequestManager::~RequestManager()
299 {
300 ALOGV("%s", __FUNCTION__);
301 if (m_metadataConverter != NULL) {
302 delete m_metadataConverter;
303 m_metadataConverter = NULL;
304 }
305
306 releaseSensorQ();
307 return;
308 }
309
310 void RequestManager::ResetEntry()
311 {
312 Mutex::Autolock lock(m_requestMutex);
313 Mutex::Autolock lock2(m_numOfEntriesLock);
314 for (int i=0 ; i<NUM_MAX_REQUEST_MGR_ENTRY; i++) {
315 memset(&(entries[i]), 0x00, sizeof(request_manager_entry_t));
316 entries[i].internal_shot.shot.ctl.request.frameCount = -1;
317 }
318 m_numOfEntries = 0;
319 m_entryInsertionIndex = -1;
320 m_entryProcessingIndex = -1;
321 m_entryFrameOutputIndex = -1;
322 }
323
324 int RequestManager::GetNumEntries()
325 {
326 Mutex::Autolock lock(m_numOfEntriesLock);
327 return m_numOfEntries;
328 }
329
330 void RequestManager::SetDefaultParameters(int cropX)
331 {
332 m_cropX = cropX;
333 }
334
335 bool RequestManager::IsRequestQueueFull()
336 {
337 Mutex::Autolock lock(m_requestMutex);
338 Mutex::Autolock lock2(m_numOfEntriesLock);
339 if (m_numOfEntries>=NUM_MAX_REQUEST_MGR_ENTRY)
340 return true;
341 else
342 return false;
343 }
344
345 void RequestManager::RegisterRequest(camera_metadata_t * new_request, int * afMode, uint32_t * afRegion)
346 {
347 ALOGV("DEBUG(%s):", __FUNCTION__);
348
349 Mutex::Autolock lock(m_requestMutex);
350 Mutex::Autolock lock2(m_numOfEntriesLock);
351
352 request_manager_entry * newEntry = NULL;
353 int newInsertionIndex = GetNextIndex(m_entryInsertionIndex);
354 ALOGV("DEBUG(%s): got lock, new insertIndex(%d), cnt before reg(%d)", __FUNCTION__,newInsertionIndex, m_numOfEntries );
355
356
357 newEntry = &(entries[newInsertionIndex]);
358
359 if (newEntry->status!=EMPTY) {
360 ALOGV("DEBUG(%s): Circular buffer abnormal ", __FUNCTION__);
361 return;
362 }
363 newEntry->status = REGISTERED;
364 newEntry->original_request = new_request;
365 memset(&(newEntry->internal_shot), 0, sizeof(struct camera2_shot_ext));
366 m_metadataConverter->ToInternalShot(new_request, &(newEntry->internal_shot));
367 newEntry->output_stream_count = 0;
368 if (newEntry->internal_shot.shot.ctl.request.outputStreams[0] & MASK_OUTPUT_SCP)
369 newEntry->output_stream_count++;
370
371 if (newEntry->internal_shot.shot.ctl.request.outputStreams[0] & MASK_OUTPUT_SCC)
372 newEntry->output_stream_count++;
373
374 m_numOfEntries++;
375 m_entryInsertionIndex = newInsertionIndex;
376
377
378 *afMode = (int)(newEntry->internal_shot.shot.ctl.aa.afMode);
379 afRegion[0] = newEntry->internal_shot.shot.ctl.aa.afRegions[0];
380 afRegion[1] = newEntry->internal_shot.shot.ctl.aa.afRegions[1];
381 afRegion[2] = newEntry->internal_shot.shot.ctl.aa.afRegions[2];
382 afRegion[3] = newEntry->internal_shot.shot.ctl.aa.afRegions[3];
383 ALOGV("## RegisterReq DONE num(%d), insert(%d), processing(%d), frame(%d), (frameCnt(%d))",
384 m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex, newEntry->internal_shot.shot.ctl.request.frameCount);
385 }
386
387 void RequestManager::DeregisterRequest(camera_metadata_t ** deregistered_request)
388 {
389 ALOGV("DEBUG(%s):", __FUNCTION__);
390 int frame_index;
391 request_manager_entry * currentEntry;
392
393 Mutex::Autolock lock(m_requestMutex);
394 Mutex::Autolock lock2(m_numOfEntriesLock);
395
396 frame_index = GetCompletedIndex();
397 currentEntry = &(entries[frame_index]);
398 if (currentEntry->status != COMPLETED) {
399 CAM_LOGD("DBG(%s): Circular buffer abnormal. processing(%d), frame(%d), status(%d) ", __FUNCTION__,
400 m_entryProcessingIndex, frame_index,(int)(currentEntry->status));
401 return;
402 }
403 if (deregistered_request) *deregistered_request = currentEntry->original_request;
404
405 m_lastCompletedFrameCnt = currentEntry->internal_shot.shot.ctl.request.frameCount;
406
407 currentEntry->status = EMPTY;
408 currentEntry->original_request = NULL;
409 memset(&(currentEntry->internal_shot), 0, sizeof(struct camera2_shot_ext));
410 currentEntry->internal_shot.shot.ctl.request.frameCount = -1;
411 currentEntry->output_stream_count = 0;
412 m_numOfEntries--;
413 ALOGV("## DeRegistReq DONE num(%d), insert(%d), processing(%d), frame(%d)",
414 m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex);
415
416 CheckCompleted(GetNextIndex(frame_index));
417 return;
418 }
419
420 bool RequestManager::PrepareFrame(size_t* num_entries, size_t* frame_size,
421 camera_metadata_t ** prepared_frame, int afState)
422 {
423 ALOGV("DEBUG(%s):", __FUNCTION__);
424 Mutex::Autolock lock(m_requestMutex);
425 status_t res = NO_ERROR;
426 int tempFrameOutputIndex = GetCompletedIndex();
427 request_manager_entry * currentEntry = &(entries[tempFrameOutputIndex]);
428 ALOGV("DEBUG(%s): processing(%d), frameOut(%d), insert(%d) recentlycompleted(%d)", __FUNCTION__,
429 m_entryProcessingIndex, m_entryFrameOutputIndex, m_entryInsertionIndex, m_completedIndex);
430
431 if (currentEntry->status != COMPLETED) {
432 ALOGV("DBG(%s): Circular buffer abnormal status(%d)", __FUNCTION__, (int)(currentEntry->status));
433
434 return false;
435 }
436 m_entryFrameOutputIndex = tempFrameOutputIndex;
437 m_tempFrameMetadata = place_camera_metadata(m_tempFrameMetadataBuf, 2000, 35, 500); //estimated
438 add_camera_metadata_entry(m_tempFrameMetadata, ANDROID_CONTROL_AF_STATE, &afState, 1);
439 res = m_metadataConverter->ToDynamicMetadata(&(currentEntry->internal_shot),
440 m_tempFrameMetadata);
441 if (res!=NO_ERROR) {
442 ALOGE("ERROR(%s): ToDynamicMetadata (%d) ", __FUNCTION__, res);
443 return false;
444 }
445 *num_entries = get_camera_metadata_entry_count(m_tempFrameMetadata);
446 *frame_size = get_camera_metadata_size(m_tempFrameMetadata);
447 *prepared_frame = m_tempFrameMetadata;
448 ALOGV("## PrepareFrame DONE: frameOut(%d) frameCnt-req(%d) timestamp(%lld)", m_entryFrameOutputIndex,
449 currentEntry->internal_shot.shot.ctl.request.frameCount, currentEntry->internal_shot.shot.dm.sensor.timeStamp);
450 // Dump();
451 return true;
452 }
453
454 int RequestManager::MarkProcessingRequest(ExynosBuffer* buf)
455 {
456 struct camera2_shot_ext * shot_ext;
457 struct camera2_shot_ext * request_shot;
458 int targetStreamIndex = 0;
459 request_manager_entry * newEntry = NULL;
460 static int count = 0;
461
462 Mutex::Autolock lock(m_requestMutex);
463 Mutex::Autolock lock2(m_numOfEntriesLock);
464 if (m_numOfEntries == 0) {
465 CAM_LOGD("DEBUG(%s): Request Manager Empty ", __FUNCTION__);
466 return -1;
467 }
468
469 if ((m_entryProcessingIndex == m_entryInsertionIndex)
470 && (entries[m_entryProcessingIndex].status == REQUESTED || entries[m_entryProcessingIndex].status == CAPTURED)) {
471 ALOGV("## MarkProcReq skipping(request underrun) - num(%d), insert(%d), processing(%d), frame(%d)",
472 m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex);
473 return -1;
474 }
475
476 int newProcessingIndex = GetNextIndex(m_entryProcessingIndex);
477 ALOGV("DEBUG(%s): index(%d)", __FUNCTION__, newProcessingIndex);
478
479 newEntry = &(entries[newProcessingIndex]);
480 request_shot = &(newEntry->internal_shot);
481 if (newEntry->status != REGISTERED) {
482 CAM_LOGD("DEBUG(%s)(%d): Circular buffer abnormal, numOfEntries(%d), status(%d)", __FUNCTION__, newProcessingIndex, m_numOfEntries, newEntry->status);
483 for (int i = 0; i < NUM_MAX_REQUEST_MGR_ENTRY; i++) {
484 CAM_LOGD("DBG: entrie[%d].stream output cnt = %d, framecnt(%d)", i, entries[i].output_stream_count, entries[i].internal_shot.shot.ctl.request.frameCount);
485 }
486 return -1;
487 }
488
489 newEntry->status = REQUESTED;
490
491 shot_ext = (struct camera2_shot_ext *)buf->virt.extP[1];
492
493 memset(shot_ext, 0x00, sizeof(struct camera2_shot_ext));
494 shot_ext->shot.ctl.request.frameCount = request_shot->shot.ctl.request.frameCount;
495 shot_ext->request_sensor = 1;
496 shot_ext->dis_bypass = 1;
497 shot_ext->dnr_bypass = 1;
498 shot_ext->fd_bypass = 1;
499 shot_ext->setfile = 0;
500
501 targetStreamIndex = newEntry->internal_shot.shot.ctl.request.outputStreams[0];
502 shot_ext->shot.ctl.request.outputStreams[0] = targetStreamIndex;
503 if (targetStreamIndex & MASK_OUTPUT_SCP)
504 shot_ext->request_scp = 1;
505
506 if (targetStreamIndex & MASK_OUTPUT_SCC)
507 shot_ext->request_scc = 1;
508
509 if (shot_ext->shot.ctl.stats.faceDetectMode != FACEDETECT_MODE_OFF)
510 shot_ext->fd_bypass = 0;
511
512 if (count == 0){
513 shot_ext->shot.ctl.aa.mode = AA_CONTROL_AUTO;
514 } else
515 shot_ext->shot.ctl.aa.mode = AA_CONTROL_NONE;
516
517 count++;
518 shot_ext->shot.ctl.request.metadataMode = METADATA_MODE_FULL;
519 shot_ext->shot.ctl.stats.faceDetectMode = FACEDETECT_MODE_FULL;
520 shot_ext->shot.magicNumber = 0x23456789;
521 shot_ext->shot.ctl.sensor.exposureTime = 0;
522 shot_ext->shot.ctl.sensor.frameDuration = 33*1000*1000;
523 shot_ext->shot.ctl.sensor.sensitivity = 0;
524
525
526 shot_ext->shot.ctl.scaler.cropRegion[0] = newEntry->internal_shot.shot.ctl.scaler.cropRegion[0];
527 shot_ext->shot.ctl.scaler.cropRegion[1] = newEntry->internal_shot.shot.ctl.scaler.cropRegion[1];
528 shot_ext->shot.ctl.scaler.cropRegion[2] = newEntry->internal_shot.shot.ctl.scaler.cropRegion[2];
529
530 m_entryProcessingIndex = newProcessingIndex;
531 return newProcessingIndex;
532 }
533
534 void RequestManager::NotifyStreamOutput(int frameCnt)
535 {
536 int index;
537
538 Mutex::Autolock lock(m_requestMutex);
539 ALOGV("DEBUG(%s): frameCnt(%d)", __FUNCTION__, frameCnt);
540
541 index = FindEntryIndexByFrameCnt(frameCnt);
542 if (index == -1) {
543 ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
544 return;
545 }
546 ALOGV("DEBUG(%s): frameCnt(%d), last cnt (%d)", __FUNCTION__, frameCnt, entries[index].output_stream_count);
547
548 entries[index].output_stream_count--; //TODO : match stream id also
549 CheckCompleted(index);
550 }
551
552 void RequestManager::CheckCompleted(int index)
553 {
554 if ((entries[index].status == METADONE || entries[index].status == COMPLETED)
555 && (entries[index].output_stream_count <= 0)){
556 ALOGV("(%s): Completed(index:%d)(frameCnt:%d)", __FUNCTION__,
557 index, entries[index].internal_shot.shot.ctl.request.frameCount );
558 entries[index].status = COMPLETED;
559 if (m_lastCompletedFrameCnt + 1 == (int)entries[index].internal_shot.shot.ctl.request.frameCount)
560 m_mainThread->SetSignal(SIGNAL_MAIN_STREAM_OUTPUT_DONE);
561 }
562 }
563
564 int RequestManager::GetCompletedIndex()
565 {
566 return FindEntryIndexByFrameCnt(m_lastCompletedFrameCnt + 1);
567 }
568
569 void RequestManager::pushSensorQ(int index)
570 {
571 Mutex::Autolock lock(m_requestMutex);
572 m_sensorQ.push_back(index);
573 }
574
575 int RequestManager::popSensorQ()
576 {
577 List<int>::iterator sensor_token;
578 int index;
579
580 Mutex::Autolock lock(m_requestMutex);
581
582 if(m_sensorQ.size() == 0)
583 return -1;
584
585 sensor_token = m_sensorQ.begin()++;
586 index = *sensor_token;
587 m_sensorQ.erase(sensor_token);
588
589 return (index);
590 }
591
592 void RequestManager::releaseSensorQ()
593 {
594 List<int>::iterator r;
595
596 Mutex::Autolock lock(m_requestMutex);
597 ALOGV("(%s)m_sensorQ.size : %d", __FUNCTION__, m_sensorQ.size());
598
599 while(m_sensorQ.size() > 0){
600 r = m_sensorQ.begin()++;
601 m_sensorQ.erase(r);
602 }
603 return;
604 }
605
606 void RequestManager::ApplyDynamicMetadata(struct camera2_shot_ext *shot_ext)
607 {
608 int index;
609 struct camera2_shot_ext * request_shot;
610 nsecs_t timeStamp;
611 int i;
612
613 Mutex::Autolock lock(m_requestMutex);
614 ALOGV("DEBUG(%s): frameCnt(%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
615
616 for (i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
617 if((entries[i].internal_shot.shot.ctl.request.frameCount == shot_ext->shot.ctl.request.frameCount)
618 && (entries[i].status == CAPTURED)){
619 entries[i].status = METADONE;
620 break;
621 }
622 }
623
624 if (i == NUM_MAX_REQUEST_MGR_ENTRY){
625 ALOGE("[%s] no entry found(framecount:%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
626 return;
627 }
628
629 request_manager_entry * newEntry = &(entries[i]);
630 request_shot = &(newEntry->internal_shot);
631
632 timeStamp = request_shot->shot.dm.sensor.timeStamp;
633 memcpy(&(request_shot->shot.dm), &(shot_ext->shot.dm), sizeof(struct camera2_dm));
634 request_shot->shot.dm.sensor.timeStamp = timeStamp;
635 m_lastTimeStamp = timeStamp;
636 CheckCompleted(i);
637 }
638
639 void RequestManager::UpdateIspParameters(struct camera2_shot_ext *shot_ext, int frameCnt, ctl_request_info_t *ctl_info)
640 {
641 int index, targetStreamIndex;
642 struct camera2_shot_ext * request_shot;
643
644 ALOGV("DEBUG(%s): updating info with frameCnt(%d)", __FUNCTION__, frameCnt);
645 if (frameCnt < 0)
646 return;
647
648 index = FindEntryIndexByFrameCnt(frameCnt);
649 if (index == -1) {
650 ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
651 return;
652 }
653
654 request_manager_entry * newEntry = &(entries[index]);
655 request_shot = &(newEntry->internal_shot);
656 memcpy(&(shot_ext->shot.ctl), &(request_shot->shot.ctl), sizeof(struct camera2_ctl));
657 shot_ext->shot.ctl.request.frameCount = frameCnt;
658 shot_ext->request_sensor = 1;
659 shot_ext->dis_bypass = 1;
660 shot_ext->dnr_bypass = 1;
661 shot_ext->fd_bypass = 1;
662 shot_ext->drc_bypass = 1;
663 shot_ext->setfile = 0;
664
665 shot_ext->request_scc = 0;
666 shot_ext->request_scp = 0;
667
668 shot_ext->isReprocessing = request_shot->isReprocessing;
669 shot_ext->reprocessInput = request_shot->reprocessInput;
670 shot_ext->shot.ctl.request.outputStreams[0] = 0;
671
672 shot_ext->awb_mode_dm = request_shot->awb_mode_dm;
673
674 shot_ext->shot.ctl.scaler.cropRegion[0] = request_shot->shot.ctl.scaler.cropRegion[0];
675 shot_ext->shot.ctl.scaler.cropRegion[1] = request_shot->shot.ctl.scaler.cropRegion[1];
676 shot_ext->shot.ctl.scaler.cropRegion[2] = request_shot->shot.ctl.scaler.cropRegion[2];
677
678 // mapping flash UI mode from aeMode
679 if (request_shot->shot.ctl.aa.aeMode >= AA_AEMODE_ON) {
680 if (request_shot->shot.ctl.aa.captureIntent == AA_CAPTURE_INTENT_PREVIEW)
681 ctl_info->flash.i_flashMode = request_shot->shot.ctl.aa.aeMode;
682 else if (request_shot->shot.ctl.aa.captureIntent == AA_CAPTURE_INTENT_VIDEO_RECORD)
683 ctl_info->flash.i_flashMode = request_shot->shot.ctl.aa.aeMode;
684 request_shot->shot.ctl.aa.aeMode = AA_AEMODE_ON;
685 }
686
687 // Apply ae/awb lock or unlock
688 if (request_shot->ae_lock == AEMODE_LOCK_ON)
689 request_shot->shot.ctl.aa.aeMode = AA_AEMODE_LOCKED;
690 if (request_shot->awb_lock == AWBMODE_LOCK_ON)
691 request_shot->shot.ctl.aa.awbMode = AA_AWBMODE_LOCKED;
692
693 if (m_lastAaMode == request_shot->shot.ctl.aa.mode) {
694 shot_ext->shot.ctl.aa.mode = (enum aa_mode)(0);
695 }
696 else {
697 shot_ext->shot.ctl.aa.mode = request_shot->shot.ctl.aa.mode;
698 m_lastAaMode = (int)(shot_ext->shot.ctl.aa.mode);
699 }
700 if (m_lastAeMode == request_shot->shot.ctl.aa.aeMode) {
701 shot_ext->shot.ctl.aa.aeMode = (enum aa_aemode)(0);
702 }
703 else {
704 shot_ext->shot.ctl.aa.aeMode = request_shot->shot.ctl.aa.aeMode;
705 m_lastAeMode = (int)(shot_ext->shot.ctl.aa.aeMode);
706 }
707 if (m_lastAwbMode == request_shot->shot.ctl.aa.awbMode) {
708 shot_ext->shot.ctl.aa.awbMode = (enum aa_awbmode)(0);
709 }
710 else {
711 shot_ext->shot.ctl.aa.awbMode = request_shot->shot.ctl.aa.awbMode;
712 m_lastAwbMode = (int)(shot_ext->shot.ctl.aa.awbMode);
713 }
714 if (m_lastAeComp == request_shot->shot.ctl.aa.aeExpCompensation) {
715 shot_ext->shot.ctl.aa.aeExpCompensation = 0;
716 }
717 else {
718 shot_ext->shot.ctl.aa.aeExpCompensation = request_shot->shot.ctl.aa.aeExpCompensation;
719 m_lastAeComp = (int)(shot_ext->shot.ctl.aa.aeExpCompensation);
720 }
721
722 if (request_shot->shot.ctl.aa.videoStabilizationMode && m_vdisEnable) {
723 m_vdisBubbleEn = true;
724 shot_ext->dis_bypass = 0;
725 shot_ext->dnr_bypass = 0;
726 } else {
727 m_vdisBubbleEn = false;
728 shot_ext->dis_bypass = 1;
729 shot_ext->dnr_bypass = 1;
730 }
731
732 shot_ext->shot.ctl.aa.afTrigger = 0;
733
734 targetStreamIndex = newEntry->internal_shot.shot.ctl.request.outputStreams[0];
735 shot_ext->shot.ctl.request.outputStreams[0] = targetStreamIndex;
736 if (targetStreamIndex & MASK_OUTPUT_SCP)
737 shot_ext->request_scp = 1;
738
739 if (targetStreamIndex & MASK_OUTPUT_SCC)
740 shot_ext->request_scc = 1;
741
742 if (shot_ext->shot.ctl.stats.faceDetectMode != FACEDETECT_MODE_OFF)
743 shot_ext->fd_bypass = 0;
744
745 shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = request_shot->shot.ctl.aa.aeTargetFpsRange[0];
746 shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = request_shot->shot.ctl.aa.aeTargetFpsRange[1];
747
748 ALOGV("(%s): applied aa(%d) aemode(%d) expComp(%d), awb(%d) afmode(%d), ", __FUNCTION__,
749 (int)(shot_ext->shot.ctl.aa.mode), (int)(shot_ext->shot.ctl.aa.aeMode),
750 (int)(shot_ext->shot.ctl.aa.aeExpCompensation), (int)(shot_ext->shot.ctl.aa.awbMode),
751 (int)(shot_ext->shot.ctl.aa.afMode));
752 }
753
754 bool RequestManager::IsVdisEnable(void)
755 {
756 return m_vdisBubbleEn;
757 }
758
759 int RequestManager::FindEntryIndexByFrameCnt(int frameCnt)
760 {
761 for (int i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
762 if ((int)entries[i].internal_shot.shot.ctl.request.frameCount == frameCnt)
763 return i;
764 }
765 return -1;
766 }
767
768 void RequestManager::RegisterTimestamp(int frameCnt, nsecs_t * frameTime)
769 {
770 int index = FindEntryIndexByFrameCnt(frameCnt);
771 if (index == -1) {
772 ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
773 return;
774 }
775
776 request_manager_entry * currentEntry = &(entries[index]);
777 if (currentEntry->internal_shot.isReprocessing == 1) {
778 ALOGV("DEBUG(%s): REPROCESSING : preserving timestamp for reqIndex(%d) frameCnt(%d) (%lld)", __FUNCTION__,
779 index, frameCnt, currentEntry->internal_shot.shot.dm.sensor.timeStamp);
780 } else {
781 currentEntry->internal_shot.shot.dm.sensor.timeStamp = *((uint64_t*)frameTime);
782 ALOGV("DEBUG(%s): applied timestamp for reqIndex(%d) frameCnt(%d) (%lld)", __FUNCTION__,
783 index, frameCnt, currentEntry->internal_shot.shot.dm.sensor.timeStamp);
784 }
785 }
786
787
788 nsecs_t RequestManager::GetTimestampByFrameCnt(int frameCnt)
789 {
790 int index = FindEntryIndexByFrameCnt(frameCnt);
791 if (index == -1) {
792 ALOGE("ERR(%s): Cannot find entry for frameCnt(%d) returning saved time(%lld)", __FUNCTION__, frameCnt, m_lastTimeStamp);
793 return m_lastTimeStamp;
794 }
795 else
796 return GetTimestamp(index);
797 }
798
799 nsecs_t RequestManager::GetTimestamp(int index)
800 {
801 Mutex::Autolock lock(m_requestMutex);
802 if (index < 0 || index >= NUM_MAX_REQUEST_MGR_ENTRY) {
803 ALOGE("ERR(%s): Request entry outside of bounds (%d)", __FUNCTION__, index);
804 return 0;
805 }
806
807 request_manager_entry * currentEntry = &(entries[index]);
808 nsecs_t frameTime = currentEntry->internal_shot.shot.dm.sensor.timeStamp;
809 if (frameTime == 0) {
810 ALOGV("DEBUG(%s): timestamp null, returning saved value", __FUNCTION__);
811 frameTime = m_lastTimeStamp;
812 }
813 ALOGV("DEBUG(%s): Returning timestamp for reqIndex(%d) (%lld)", __FUNCTION__, index, frameTime);
814 return frameTime;
815 }
816
817 uint8_t RequestManager::GetOutputStreamByFrameCnt(int frameCnt)
818 {
819 int index = FindEntryIndexByFrameCnt(frameCnt);
820 if (index == -1) {
821 ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
822 return 0;
823 }
824 else
825 return GetOutputStream(index);
826 }
827
828 uint8_t RequestManager::GetOutputStream(int index)
829 {
830 Mutex::Autolock lock(m_requestMutex);
831 if (index < 0 || index >= NUM_MAX_REQUEST_MGR_ENTRY) {
832 ALOGE("ERR(%s): Request entry outside of bounds (%d)", __FUNCTION__, index);
833 return 0;
834 }
835
836 request_manager_entry * currentEntry = &(entries[index]);
837 return currentEntry->internal_shot.shot.ctl.request.outputStreams[0];
838 }
839
840 camera2_shot_ext * RequestManager::GetInternalShotExtByFrameCnt(int frameCnt)
841 {
842 int index = FindEntryIndexByFrameCnt(frameCnt);
843 if (index == -1) {
844 ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
845 return 0;
846 }
847 else
848 return GetInternalShotExt(index);
849 }
850
851 camera2_shot_ext * RequestManager::GetInternalShotExt(int index)
852 {
853 Mutex::Autolock lock(m_requestMutex);
854 if (index < 0 || index >= NUM_MAX_REQUEST_MGR_ENTRY) {
855 ALOGE("ERR(%s): Request entry outside of bounds (%d)", __FUNCTION__, index);
856 return 0;
857 }
858
859 request_manager_entry * currentEntry = &(entries[index]);
860 return &currentEntry->internal_shot;
861 }
862
863 int RequestManager::FindFrameCnt(struct camera2_shot_ext * shot_ext)
864 {
865 Mutex::Autolock lock(m_requestMutex);
866 int i;
867
868 if (m_numOfEntries == 0) {
869 CAM_LOGD("DBG(%s): No Entry found", __FUNCTION__);
870 return -1;
871 }
872
873 for (i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
874 if(entries[i].internal_shot.shot.ctl.request.frameCount != shot_ext->shot.ctl.request.frameCount)
875 continue;
876
877 if (entries[i].status == REQUESTED) {
878 entries[i].status = CAPTURED;
879 return entries[i].internal_shot.shot.ctl.request.frameCount;
880 }
881 CAM_LOGE("ERR(%s): frameCount(%d), index(%d), status(%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount, i, entries[i].status);
882
883 }
884 CAM_LOGD("(%s): No Entry found frame count(%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
885
886 return -1;
887 }
888
889 void RequestManager::SetInitialSkip(int count)
890 {
891 ALOGV("(%s): Pipeline Restarting. setting cnt(%d) - current(%d)", __FUNCTION__, count, m_sensorPipelineSkipCnt);
892 if (count > m_sensorPipelineSkipCnt)
893 m_sensorPipelineSkipCnt = count;
894 }
895
896 int RequestManager::GetSkipCnt()
897 {
898 ALOGV("(%s): skip cnt(%d)", __FUNCTION__, m_sensorPipelineSkipCnt);
899 if (m_sensorPipelineSkipCnt == 0)
900 return m_sensorPipelineSkipCnt;
901 else
902 return --m_sensorPipelineSkipCnt;
903 }
904
905 void RequestManager::Dump(void)
906 {
907 int i = 0;
908 request_manager_entry * currentEntry;
909 Mutex::Autolock lock(m_numOfEntriesLock);
910 ALOGD("## Dump totalentry(%d), insert(%d), processing(%d), frame(%d)",
911 m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex);
912
913 for (i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
914 currentEntry = &(entries[i]);
915 ALOGD("[%2d] status[%d] frameCnt[%3d] numOutput[%d] outstream[0]-%x ", i,
916 currentEntry->status, currentEntry->internal_shot.shot.ctl.request.frameCount,
917 currentEntry->output_stream_count,
918 currentEntry->internal_shot.shot.ctl.request.outputStreams[0]);
919 }
920 }
921
922 int RequestManager::GetNextIndex(int index)
923 {
924 index++;
925 if (index >= NUM_MAX_REQUEST_MGR_ENTRY)
926 index = 0;
927
928 return index;
929 }
930
931 int RequestManager::GetPrevIndex(int index)
932 {
933 index--;
934 if (index < 0)
935 index = NUM_MAX_REQUEST_MGR_ENTRY-1;
936
937 return index;
938 }
939
940 ExynosCameraHWInterface2::ExynosCameraHWInterface2(int cameraId, camera2_device_t *dev, ExynosCamera2 * camera, int *openInvalid):
941 m_requestQueueOps(NULL),
942 m_frameQueueOps(NULL),
943 m_callbackCookie(NULL),
944 m_numOfRemainingReqInSvc(0),
945 m_isRequestQueuePending(false),
946 m_isRequestQueueNull(true),
947 m_halDevice(dev),
948 m_ionCameraClient(0),
949 m_isIspStarted(false),
950 m_sccLocalBufferValid(false),
951 m_cameraId(cameraId),
952 m_scp_closing(false),
953 m_scp_closed(false),
954 m_wideAspect(false),
955 m_zoomRatio(1),
956 m_vdisBubbleCnt(0),
957 m_vdisDupFrame(0),
958 m_jpegEncodingCount(0),
959 m_scpForceSuspended(false),
960 m_afState(HAL_AFSTATE_INACTIVE),
961 m_afTriggerId(0),
962 m_afMode(NO_CHANGE),
963 m_afMode2(NO_CHANGE),
964 m_IsAfModeUpdateRequired(false),
965 m_IsAfTriggerRequired(false),
966 m_IsAfLockRequired(false),
967 m_serviceAfState(ANDROID_CONTROL_AF_STATE_INACTIVE),
968 m_afPendingTriggerId(0),
969 m_afModeWaitingCnt(0),
970 m_scpOutputSignalCnt(0),
971 m_scpOutputImageCnt(0),
972 m_nightCaptureCnt(0),
973 m_nightCaptureFrameCnt(0),
974 m_lastSceneMode(0),
975 m_thumbNailW(160),
976 m_thumbNailH(120)
977 {
978 ALOGD("(%s): ENTER", __FUNCTION__);
979 int ret = 0;
980 int res = 0;
981
982 m_exynosPictureCSC = NULL;
983 m_exynosVideoCSC = NULL;
984
985 if (!m_grallocHal) {
986 ret = hw_get_module(GRALLOC_HARDWARE_MODULE_ID, (const hw_module_t **)&m_grallocHal);
987 if (ret)
988 ALOGE("ERR(%s):Fail on loading gralloc HAL", __FUNCTION__);
989 }
990
991 m_camera2 = camera;
992 m_ionCameraClient = createIonClient(m_ionCameraClient);
993 if(m_ionCameraClient == 0)
994 ALOGE("ERR(%s):Fail on ion_client_create", __FUNCTION__);
995
996
997 m_BayerManager = new BayerBufManager();
998 m_mainThread = new MainThread(this);
999 m_requestManager = new RequestManager((SignalDrivenThread*)(m_mainThread.get()));
1000 *openInvalid = InitializeISPChain();
1001 if (*openInvalid < 0) {
1002 ALOGD("(%s): ISP chain init failed. exiting", __FUNCTION__);
1003 // clean process
1004 // 1. close video nodes
1005 // SCP
1006 res = exynos_v4l2_close(m_camera_info.scp.fd);
1007 if (res != NO_ERROR ) {
1008 ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
1009 }
1010 // SCC
1011 res = exynos_v4l2_close(m_camera_info.capture.fd);
1012 if (res != NO_ERROR ) {
1013 ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
1014 }
1015 // Sensor
1016 res = exynos_v4l2_close(m_camera_info.sensor.fd);
1017 if (res != NO_ERROR ) {
1018 ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
1019 }
1020 // ISP
1021 res = exynos_v4l2_close(m_camera_info.isp.fd);
1022 if (res != NO_ERROR ) {
1023 ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
1024 }
1025 } else {
1026 m_sensorThread = new SensorThread(this);
1027 m_mainThread->Start("MainThread", PRIORITY_DEFAULT, 0);
1028 m_sensorThread->Start("SensorThread", PRIORITY_DEFAULT, 0);
1029 ALOGV("DEBUG(%s): created sensorthread ", __FUNCTION__);
1030
1031 for (int i = 0 ; i < STREAM_ID_LAST+1 ; i++)
1032 m_subStreams[i].type = SUBSTREAM_TYPE_NONE;
1033 CSC_METHOD cscMethod = CSC_METHOD_HW;
1034 m_exynosPictureCSC = csc_init(cscMethod);
1035 if (m_exynosPictureCSC == NULL)
1036 ALOGE("ERR(%s): csc_init() fail", __FUNCTION__);
1037 csc_set_hw_property(m_exynosPictureCSC, CSC_HW_PROPERTY_FIXED_NODE, PICTURE_GSC_NODE_NUM);
1038 csc_set_hw_property(m_exynosPictureCSC, CSC_HW_PROPERTY_HW_TYPE, CSC_HW_TYPE_GSCALER);
1039
1040 m_exynosVideoCSC = csc_init(cscMethod);
1041 if (m_exynosVideoCSC == NULL)
1042 ALOGE("ERR(%s): csc_init() fail", __FUNCTION__);
1043 csc_set_hw_property(m_exynosVideoCSC, CSC_HW_PROPERTY_FIXED_NODE, VIDEO_GSC_NODE_NUM);
1044 csc_set_hw_property(m_exynosVideoCSC, CSC_HW_PROPERTY_HW_TYPE, CSC_HW_TYPE_GSCALER);
1045
1046 m_setExifFixedAttribute();
1047
1048 // contol information clear
1049 // flash
1050 m_ctlInfo.flash.i_flashMode = AA_AEMODE_ON;
1051 m_ctlInfo.flash.m_afFlashDoneFlg= false;
1052 m_ctlInfo.flash.m_flashEnableFlg = false;
1053 m_ctlInfo.flash.m_flashFrameCount = 0;
1054 m_ctlInfo.flash.m_flashCnt = 0;
1055 m_ctlInfo.flash.m_flashTimeOut = 0;
1056 m_ctlInfo.flash.m_flashDecisionResult = false;
1057 m_ctlInfo.flash.m_flashTorchMode = false;
1058 m_ctlInfo.flash.m_precaptureState = 0;
1059 m_ctlInfo.flash.m_precaptureTriggerId = 0;
1060 // ae
1061 m_ctlInfo.ae.aeStateNoti = AE_STATE_INACTIVE;
1062 // af
1063 m_ctlInfo.af.m_afTriggerTimeOut = 0;
1064 // scene
1065 m_ctlInfo.scene.prevSceneMode = AA_SCENE_MODE_MAX;
1066 }
1067 ALOGD("(%s): EXIT", __FUNCTION__);
1068 }
1069
1070 ExynosCameraHWInterface2::~ExynosCameraHWInterface2()
1071 {
1072 ALOGD("(%s): ENTER", __FUNCTION__);
1073 this->release();
1074 ALOGD("(%s): EXIT", __FUNCTION__);
1075 }
1076
1077 void ExynosCameraHWInterface2::release()
1078 {
1079 int i, res;
1080 ALOGD("(HAL2::release): ENTER");
1081
1082 if (m_streamThreads[1] != NULL) {
1083 m_streamThreads[1]->release();
1084 m_streamThreads[1]->SetSignal(SIGNAL_THREAD_TERMINATE);
1085 }
1086
1087 if (m_streamThreads[0] != NULL) {
1088 m_streamThreads[0]->release();
1089 m_streamThreads[0]->SetSignal(SIGNAL_THREAD_TERMINATE);
1090 }
1091
1092 if (m_sensorThread != NULL) {
1093 m_sensorThread->release();
1094 }
1095
1096 if (m_mainThread != NULL) {
1097 m_mainThread->release();
1098 }
1099
1100 if (m_exynosPictureCSC)
1101 csc_deinit(m_exynosPictureCSC);
1102 m_exynosPictureCSC = NULL;
1103
1104 if (m_exynosVideoCSC)
1105 csc_deinit(m_exynosVideoCSC);
1106 m_exynosVideoCSC = NULL;
1107
1108 if (m_streamThreads[1] != NULL) {
1109 ALOGD("(HAL2::release): START Waiting for (indirect) stream thread 1 termination");
1110 while (!m_streamThreads[1]->IsTerminated())
1111 usleep(SIG_WAITING_TICK);
1112 ALOGD("(HAL2::release): END Waiting for (indirect) stream thread 1 termination");
1113 m_streamThreads[1] = NULL;
1114 }
1115
1116 if (m_streamThreads[0] != NULL) {
1117 ALOGD("(HAL2::release): START Waiting for (indirect) stream thread 0 termination");
1118 while (!m_streamThreads[0]->IsTerminated())
1119 usleep(SIG_WAITING_TICK);
1120 ALOGD("(HAL2::release): END Waiting for (indirect) stream thread 0 termination");
1121 m_streamThreads[0] = NULL;
1122 }
1123
1124 if (m_sensorThread != NULL) {
1125 ALOGD("(HAL2::release): START Waiting for (indirect) sensor thread termination");
1126 while (!m_sensorThread->IsTerminated())
1127 usleep(SIG_WAITING_TICK);
1128 ALOGD("(HAL2::release): END Waiting for (indirect) sensor thread termination");
1129 m_sensorThread = NULL;
1130 }
1131
1132 if (m_mainThread != NULL) {
1133 ALOGD("(HAL2::release): START Waiting for (indirect) main thread termination");
1134 while (!m_mainThread->IsTerminated())
1135 usleep(SIG_WAITING_TICK);
1136 ALOGD("(HAL2::release): END Waiting for (indirect) main thread termination");
1137 m_mainThread = NULL;
1138 }
1139
1140 if (m_requestManager != NULL) {
1141 delete m_requestManager;
1142 m_requestManager = NULL;
1143 }
1144
1145 if (m_BayerManager != NULL) {
1146 delete m_BayerManager;
1147 m_BayerManager = NULL;
1148 }
1149 for (i = 0; i < NUM_BAYER_BUFFERS; i++)
1150 freeCameraMemory(&m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes);
1151
1152 if (m_sccLocalBufferValid) {
1153 for (i = 0; i < NUM_SCC_BUFFERS; i++)
1154 #ifdef ENABLE_FRAME_SYNC
1155 freeCameraMemory(&m_sccLocalBuffer[i], 2);
1156 #else
1157 freeCameraMemory(&m_sccLocalBuffer[i], 1);
1158 #endif
1159 }
1160 else {
1161 for (i = 0; i < NUM_SCC_BUFFERS; i++)
1162 freeCameraMemory(&m_camera_info.capture.buffer[i], m_camera_info.capture.planes);
1163 }
1164
1165 ALOGV("DEBUG(%s): calling exynos_v4l2_close - sensor", __FUNCTION__);
1166 res = exynos_v4l2_close(m_camera_info.sensor.fd);
1167 if (res != NO_ERROR ) {
1168 ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
1169 }
1170
1171 ALOGV("DEBUG(%s): calling exynos_v4l2_close - isp", __FUNCTION__);
1172 res = exynos_v4l2_close(m_camera_info.isp.fd);
1173 if (res != NO_ERROR ) {
1174 ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
1175 }
1176
1177 ALOGV("DEBUG(%s): calling exynos_v4l2_close - capture", __FUNCTION__);
1178 res = exynos_v4l2_close(m_camera_info.capture.fd);
1179 if (res != NO_ERROR ) {
1180 ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
1181 }
1182
1183 ALOGV("DEBUG(%s): calling exynos_v4l2_close - scp", __FUNCTION__);
1184 res = exynos_v4l2_close(m_camera_info.scp.fd);
1185 if (res != NO_ERROR ) {
1186 ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
1187 }
1188 ALOGV("DEBUG(%s): calling deleteIonClient", __FUNCTION__);
1189 deleteIonClient(m_ionCameraClient);
1190
1191 ALOGD("(HAL2::release): EXIT");
1192 }
1193
1194 int ExynosCameraHWInterface2::InitializeISPChain()
1195 {
1196 char node_name[30];
1197 int fd = 0;
1198 int i;
1199 int ret = 0;
1200
1201 /* Open Sensor */
1202 memset(&node_name, 0x00, sizeof(char[30]));
1203 sprintf(node_name, "%s%d", NODE_PREFIX, 40);
1204 fd = exynos_v4l2_open(node_name, O_RDWR, 0);
1205
1206 if (fd < 0) {
1207 ALOGE("ERR(%s): failed to open sensor video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
1208 }
1209 else {
1210 ALOGV("DEBUG(%s): sensor video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
1211 }
1212 m_camera_info.sensor.fd = fd;
1213
1214 /* Open ISP */
1215 memset(&node_name, 0x00, sizeof(char[30]));
1216 sprintf(node_name, "%s%d", NODE_PREFIX, 41);
1217 fd = exynos_v4l2_open(node_name, O_RDWR, 0);
1218
1219 if (fd < 0) {
1220 ALOGE("ERR(%s): failed to open isp video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
1221 }
1222 else {
1223 ALOGV("DEBUG(%s): isp video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
1224 }
1225 m_camera_info.isp.fd = fd;
1226
1227 /* Open ScalerC */
1228 memset(&node_name, 0x00, sizeof(char[30]));
1229 sprintf(node_name, "%s%d", NODE_PREFIX, 42);
1230 fd = exynos_v4l2_open(node_name, O_RDWR, 0);
1231
1232 if (fd < 0) {
1233 ALOGE("ERR(%s): failed to open capture video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
1234 }
1235 else {
1236 ALOGV("DEBUG(%s): capture video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
1237 }
1238 m_camera_info.capture.fd = fd;
1239
1240 /* Open ScalerP */
1241 memset(&node_name, 0x00, sizeof(char[30]));
1242 sprintf(node_name, "%s%d", NODE_PREFIX, 44);
1243 fd = exynos_v4l2_open(node_name, O_RDWR, 0);
1244 if (fd < 0) {
1245 ALOGE("DEBUG(%s): failed to open preview video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
1246 }
1247 else {
1248 ALOGV("DEBUG(%s): preview video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
1249 }
1250 m_camera_info.scp.fd = fd;
1251
1252 if(m_cameraId == 0)
1253 m_camera_info.sensor_id = SENSOR_NAME_S5K4E5;
1254 else
1255 m_camera_info.sensor_id = SENSOR_NAME_S5K6A3;
1256
1257 memset(&m_camera_info.dummy_shot, 0x00, sizeof(struct camera2_shot_ext));
1258 m_camera_info.dummy_shot.shot.ctl.request.metadataMode = METADATA_MODE_FULL;
1259 m_camera_info.dummy_shot.shot.magicNumber = 0x23456789;
1260
1261 m_camera_info.dummy_shot.dis_bypass = 1;
1262 m_camera_info.dummy_shot.dnr_bypass = 1;
1263 m_camera_info.dummy_shot.fd_bypass = 1;
1264
1265 /*sensor setting*/
1266 m_camera_info.dummy_shot.shot.ctl.sensor.exposureTime = 0;
1267 m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 0;
1268 m_camera_info.dummy_shot.shot.ctl.sensor.sensitivity = 0;
1269
1270 m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[0] = 0;
1271 m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[1] = 0;
1272
1273 /*request setting*/
1274 m_camera_info.dummy_shot.request_sensor = 1;
1275 m_camera_info.dummy_shot.request_scc = 0;
1276 m_camera_info.dummy_shot.request_scp = 0;
1277 m_camera_info.dummy_shot.shot.ctl.request.outputStreams[0] = 0;
1278
1279 m_camera_info.sensor.width = m_camera2->getSensorRawW();
1280 m_camera_info.sensor.height = m_camera2->getSensorRawH();
1281
1282 m_camera_info.sensor.format = V4L2_PIX_FMT_SBGGR16;
1283 m_camera_info.sensor.planes = 2;
1284 m_camera_info.sensor.buffers = NUM_BAYER_BUFFERS;
1285 m_camera_info.sensor.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1286 m_camera_info.sensor.memory = V4L2_MEMORY_DMABUF;
1287
1288 for(i = 0; i < m_camera_info.sensor.buffers; i++){
1289 int res;
1290 initCameraMemory(&m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes);
1291 m_camera_info.sensor.buffer[i].size.extS[0] = m_camera_info.sensor.width*m_camera_info.sensor.height*2;
1292 m_camera_info.sensor.buffer[i].size.extS[1] = 8*1024; // HACK, driver use 8*1024, should be use predefined value
1293 res = allocCameraMemory(m_ionCameraClient, &m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes, 1<<1);
1294 if (res) {
1295 ALOGE("ERROR(%s): failed to allocateCameraMemory for sensor buffer %d", __FUNCTION__, i);
1296 // Free allocated sensor buffers
1297 for (int j = 0; j < i; j++) {
1298 freeCameraMemory(&m_camera_info.sensor.buffer[j], m_camera_info.sensor.planes);
1299 }
1300 return false;
1301 }
1302 }
1303
1304 m_camera_info.isp.width = m_camera_info.sensor.width;
1305 m_camera_info.isp.height = m_camera_info.sensor.height;
1306 m_camera_info.isp.format = m_camera_info.sensor.format;
1307 m_camera_info.isp.planes = m_camera_info.sensor.planes;
1308 m_camera_info.isp.buffers = m_camera_info.sensor.buffers;
1309 m_camera_info.isp.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
1310 m_camera_info.isp.memory = V4L2_MEMORY_DMABUF;
1311
1312 for(i = 0; i < m_camera_info.isp.buffers; i++){
1313 initCameraMemory(&m_camera_info.isp.buffer[i], m_camera_info.isp.planes);
1314 m_camera_info.isp.buffer[i].size.extS[0] = m_camera_info.sensor.buffer[i].size.extS[0];
1315 m_camera_info.isp.buffer[i].size.extS[1] = m_camera_info.sensor.buffer[i].size.extS[1];
1316 m_camera_info.isp.buffer[i].fd.extFd[0] = m_camera_info.sensor.buffer[i].fd.extFd[0];
1317 m_camera_info.isp.buffer[i].fd.extFd[1] = m_camera_info.sensor.buffer[i].fd.extFd[1];
1318 m_camera_info.isp.buffer[i].virt.extP[0] = m_camera_info.sensor.buffer[i].virt.extP[0];
1319 m_camera_info.isp.buffer[i].virt.extP[1] = m_camera_info.sensor.buffer[i].virt.extP[1];
1320 };
1321
1322 /* init ISP */
1323 ret = cam_int_s_input(&(m_camera_info.isp), m_camera_info.sensor_id);
1324 if (ret < 0) {
1325 ALOGE("ERR(%s): cam_int_s_input(%d) failed!!!! ", __FUNCTION__, m_camera_info.sensor_id);
1326 return false;
1327 }
1328 cam_int_s_fmt(&(m_camera_info.isp));
1329 ALOGV("DEBUG(%s): isp calling reqbuf", __FUNCTION__);
1330 cam_int_reqbufs(&(m_camera_info.isp));
1331 ALOGV("DEBUG(%s): isp calling querybuf", __FUNCTION__);
1332 ALOGV("DEBUG(%s): isp mem alloc done", __FUNCTION__);
1333
1334 /* init Sensor */
1335 cam_int_s_input(&(m_camera_info.sensor), m_camera_info.sensor_id);
1336 ALOGV("DEBUG(%s): sensor s_input done", __FUNCTION__);
1337 if (cam_int_s_fmt(&(m_camera_info.sensor))< 0) {
1338 ALOGE("ERR(%s): sensor s_fmt fail", __FUNCTION__);
1339 }
1340 ALOGV("DEBUG(%s): sensor s_fmt done", __FUNCTION__);
1341 cam_int_reqbufs(&(m_camera_info.sensor));
1342 ALOGV("DEBUG(%s): sensor reqbuf done", __FUNCTION__);
1343 for (i = 0; i < m_camera_info.sensor.buffers; i++) {
1344 ALOGV("DEBUG(%s): sensor initial QBUF [%d]", __FUNCTION__, i);
1345 m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 33*1000*1000; // apply from frame #1
1346 m_camera_info.dummy_shot.shot.ctl.request.frameCount = -1;
1347 memcpy( m_camera_info.sensor.buffer[i].virt.extP[1], &(m_camera_info.dummy_shot),
1348 sizeof(struct camera2_shot_ext));
1349 }
1350
1351 for (i = 0; i < NUM_MIN_SENSOR_QBUF; i++)
1352 cam_int_qbuf(&(m_camera_info.sensor), i);
1353
1354 for (i = NUM_MIN_SENSOR_QBUF; i < m_camera_info.sensor.buffers; i++)
1355 m_requestManager->pushSensorQ(i);
1356
1357 ALOGV("== stream_on :: sensor");
1358 cam_int_streamon(&(m_camera_info.sensor));
1359 m_camera_info.sensor.status = true;
1360
1361 /* init Capture */
1362 m_camera_info.capture.width = m_camera2->getSensorW();
1363 m_camera_info.capture.height = m_camera2->getSensorH();
1364 m_camera_info.capture.format = V4L2_PIX_FMT_YUYV;
1365 #ifdef ENABLE_FRAME_SYNC
1366 m_camera_info.capture.planes = 2;
1367 #else
1368 m_camera_info.capture.planes = 1;
1369 #endif
1370 m_camera_info.capture.buffers = NUM_SCC_BUFFERS;
1371 m_camera_info.capture.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1372 m_camera_info.capture.memory = V4L2_MEMORY_DMABUF;
1373
1374 m_camera_info.capture.status = false;
1375
1376 return true;
1377 }
1378
1379 void ExynosCameraHWInterface2::StartSCCThread(bool threadExists)
1380 {
1381 ALOGV("(%s)", __FUNCTION__);
1382 StreamThread *AllocatedStream;
1383 stream_parameters_t newParameters;
1384 uint32_t format_actual;
1385
1386
1387 if (!threadExists) {
1388 m_streamThreads[1] = new StreamThread(this, 1);
1389 }
1390 AllocatedStream = (StreamThread*)(m_streamThreads[1].get());
1391 if (!threadExists) {
1392 AllocatedStream->Start("StreamThread", PRIORITY_DEFAULT, 0);
1393 m_streamThreadInitialize((SignalDrivenThread*)AllocatedStream);
1394 AllocatedStream->m_numRegisteredStream = 1;
1395 }
1396 AllocatedStream->m_index = 1;
1397
1398 format_actual = HAL_PIXEL_FORMAT_YCbCr_422_I; // YUYV
1399
1400 newParameters.width = m_camera2->getSensorW();
1401 newParameters.height = m_camera2->getSensorH();
1402 newParameters.format = format_actual;
1403 newParameters.streamOps = NULL;
1404 newParameters.numHwBuffers = NUM_SCC_BUFFERS;
1405 #ifdef ENABLE_FRAME_SYNC
1406 newParameters.planes = 2;
1407 #else
1408 newParameters.planes = 1;
1409 #endif
1410
1411 newParameters.numSvcBufsInHal = 0;
1412
1413 newParameters.node = &m_camera_info.capture;
1414
1415 AllocatedStream->streamType = STREAM_TYPE_INDIRECT;
1416 ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, AllocatedStream->m_numRegisteredStream);
1417
1418 if (!threadExists) {
1419 if (!m_sccLocalBufferValid) {
1420 for (int i = 0; i < m_camera_info.capture.buffers; i++){
1421 initCameraMemory(&m_camera_info.capture.buffer[i], newParameters.node->planes);
1422 m_camera_info.capture.buffer[i].size.extS[0] = m_camera_info.capture.width*m_camera_info.capture.height*2;
1423 #ifdef ENABLE_FRAME_SYNC
1424 m_camera_info.capture.buffer[i].size.extS[1] = 4*1024; // HACK, driver use 4*1024, should be use predefined value
1425 allocCameraMemory(m_ionCameraClient, &m_camera_info.capture.buffer[i], m_camera_info.capture.planes, 1<<1);
1426 #else
1427 allocCameraMemory(m_ionCameraClient, &m_camera_info.capture.buffer[i], m_camera_info.capture.planes);
1428 #endif
1429 m_sccLocalBuffer[i] = m_camera_info.capture.buffer[i];
1430 }
1431 m_sccLocalBufferValid = true;
1432 }
1433 } else {
1434 if (m_sccLocalBufferValid) {
1435 for (int i = 0; i < m_camera_info.capture.buffers; i++)
1436 m_camera_info.capture.buffer[i] = m_sccLocalBuffer[i];
1437 } else {
1438 ALOGE("(%s): SCC Thread starting with no buffer", __FUNCTION__);
1439 }
1440 }
1441 cam_int_s_input(newParameters.node, m_camera_info.sensor_id);
1442 m_camera_info.capture.buffers = NUM_SCC_BUFFERS;
1443 cam_int_s_fmt(newParameters.node);
1444 ALOGV("DEBUG(%s): capture calling reqbuf", __FUNCTION__);
1445 cam_int_reqbufs(newParameters.node);
1446 ALOGV("DEBUG(%s): capture calling querybuf", __FUNCTION__);
1447
1448 for (int i = 0; i < newParameters.node->buffers; i++) {
1449 ALOGV("DEBUG(%s): capture initial QBUF [%d]", __FUNCTION__, i);
1450 cam_int_qbuf(newParameters.node, i);
1451 newParameters.svcBufStatus[i] = ON_DRIVER;
1452 }
1453
1454 ALOGV("== stream_on :: capture");
1455 if (cam_int_streamon(newParameters.node) < 0) {
1456 ALOGE("ERR(%s): capture stream on fail", __FUNCTION__);
1457 } else {
1458 m_camera_info.capture.status = true;
1459 }
1460
1461 AllocatedStream->setParameter(&newParameters);
1462 AllocatedStream->m_activated = true;
1463 AllocatedStream->m_isBufferInit = true;
1464 }
1465
1466 void ExynosCameraHWInterface2::StartISP()
1467 {
1468 ALOGV("== stream_on :: isp");
1469 cam_int_streamon(&(m_camera_info.isp));
1470 exynos_v4l2_s_ctrl(m_camera_info.sensor.fd, V4L2_CID_IS_S_STREAM, IS_ENABLE_STREAM);
1471 }
1472
1473 int ExynosCameraHWInterface2::getCameraId() const
1474 {
1475 return m_cameraId;
1476 }
1477
1478 int ExynosCameraHWInterface2::setRequestQueueSrcOps(const camera2_request_queue_src_ops_t *request_src_ops)
1479 {
1480 ALOGV("DEBUG(%s):", __FUNCTION__);
1481 if ((NULL != request_src_ops) && (NULL != request_src_ops->dequeue_request)
1482 && (NULL != request_src_ops->free_request) && (NULL != request_src_ops->request_count)) {
1483 m_requestQueueOps = (camera2_request_queue_src_ops_t*)request_src_ops;
1484 return 0;
1485 }
1486 else {
1487 ALOGE("DEBUG(%s):setRequestQueueSrcOps : NULL arguments", __FUNCTION__);
1488 return 1;
1489 }
1490 }
1491
1492 int ExynosCameraHWInterface2::notifyRequestQueueNotEmpty()
1493 {
1494 int i = 0;
1495
1496 ALOGV("DEBUG(%s):setting [SIGNAL_MAIN_REQ_Q_NOT_EMPTY] current(%d)", __FUNCTION__, m_requestManager->GetNumEntries());
1497 if ((NULL==m_frameQueueOps)|| (NULL==m_requestQueueOps)) {
1498 ALOGE("DEBUG(%s):queue ops NULL. ignoring request", __FUNCTION__);
1499 return 0;
1500 }
1501 m_isRequestQueueNull = false;
1502 if (m_requestManager->GetNumEntries() == 0)
1503 m_requestManager->SetInitialSkip(0);
1504
1505 if (m_isIspStarted == false) {
1506 /* isp */
1507 m_camera_info.sensor.buffers = NUM_BAYER_BUFFERS;
1508 m_camera_info.isp.buffers = m_camera_info.sensor.buffers;
1509 cam_int_s_fmt(&(m_camera_info.isp));
1510 cam_int_reqbufs(&(m_camera_info.isp));
1511
1512 /* sensor */
1513 if (m_camera_info.sensor.status == false) {
1514 cam_int_s_fmt(&(m_camera_info.sensor));
1515 cam_int_reqbufs(&(m_camera_info.sensor));
1516
1517 for (i = 0; i < m_camera_info.sensor.buffers; i++) {
1518 ALOGV("DEBUG(%s): sensor initial QBUF [%d]", __FUNCTION__, i);
1519 m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 33*1000*1000; // apply from frame #1
1520 m_camera_info.dummy_shot.shot.ctl.request.frameCount = -1;
1521 memcpy( m_camera_info.sensor.buffer[i].virt.extP[1], &(m_camera_info.dummy_shot),
1522 sizeof(struct camera2_shot_ext));
1523 }
1524 for (i = 0; i < NUM_MIN_SENSOR_QBUF; i++)
1525 cam_int_qbuf(&(m_camera_info.sensor), i);
1526
1527 for (i = NUM_MIN_SENSOR_QBUF; i < m_camera_info.sensor.buffers; i++)
1528 m_requestManager->pushSensorQ(i);
1529 ALOGV("DEBUG(%s): calling sensor streamon", __FUNCTION__);
1530 cam_int_streamon(&(m_camera_info.sensor));
1531 m_camera_info.sensor.status = true;
1532 }
1533 }
1534 if (!(m_streamThreads[1].get())) {
1535 ALOGV("DEBUG(%s): stream thread 1 not exist. starting without stream", __FUNCTION__);
1536 StartSCCThread(false);
1537 } else {
1538 if (m_streamThreads[1]->m_activated == false) {
1539 ALOGV("DEBUG(%s): stream thread 1 suspended. restarting", __FUNCTION__);
1540 StartSCCThread(true);
1541 } else {
1542 if (m_camera_info.capture.status == false) {
1543 m_camera_info.capture.buffers = NUM_SCC_BUFFERS;
1544 cam_int_s_fmt(&(m_camera_info.capture));
1545 ALOGV("DEBUG(%s): capture calling reqbuf", __FUNCTION__);
1546 cam_int_reqbufs(&(m_camera_info.capture));
1547 ALOGV("DEBUG(%s): capture calling querybuf", __FUNCTION__);
1548
1549 if (m_streamThreads[1]->streamType == STREAM_TYPE_DIRECT) {
1550 StreamThread * targetStream = m_streamThreads[1].get();
1551 stream_parameters_t *targetStreamParms = &(targetStream->m_parameters);
1552 node_info_t *currentNode = targetStreamParms->node;
1553
1554 struct v4l2_buffer v4l2_buf;
1555 struct v4l2_plane planes[VIDEO_MAX_PLANES];
1556
1557 for (i = 0 ; i < targetStreamParms->numSvcBuffers ; i++) {
1558 v4l2_buf.m.planes = planes;
1559 v4l2_buf.type = currentNode->type;
1560 v4l2_buf.memory = currentNode->memory;
1561
1562 v4l2_buf.length = currentNode->planes;
1563 v4l2_buf.index = i;
1564 ExynosBuffer metaBuf = targetStreamParms->metaBuffers[i];
1565
1566 if (i < currentNode->buffers) {
1567 #ifdef ENABLE_FRAME_SYNC
1568 v4l2_buf.m.planes[0].m.fd = targetStreamParms->svcBuffers[i].fd.extFd[0];
1569 v4l2_buf.m.planes[2].m.fd = targetStreamParms->svcBuffers[i].fd.extFd[1];
1570 v4l2_buf.m.planes[1].m.fd = targetStreamParms->svcBuffers[i].fd.extFd[2];
1571 v4l2_buf.length += targetStreamParms->metaPlanes;
1572 v4l2_buf.m.planes[v4l2_buf.length-1].m.fd = metaBuf.fd.extFd[0];
1573 v4l2_buf.m.planes[v4l2_buf.length-1].length = metaBuf.size.extS[0];
1574
1575 ALOGV("Qbuf metaBuf: fd(%d), length(%d) plane(%d)", metaBuf.fd.extFd[0], metaBuf.size.extS[0], v4l2_buf.length);
1576 #endif
1577 if (exynos_v4l2_qbuf(currentNode->fd, &v4l2_buf) < 0) {
1578 ALOGE("ERR(%s): exynos_v4l2_qbuf() fail fd(%d)", __FUNCTION__, currentNode->fd);
1579 }
1580 ALOGV("DEBUG(%s): exynos_v4l2_qbuf() success fd(%d)", __FUNCTION__, currentNode->fd);
1581 targetStreamParms->svcBufStatus[i] = REQUIRES_DQ_FROM_SVC;
1582 }
1583 else {
1584 targetStreamParms->svcBufStatus[i] = ON_SERVICE;
1585 }
1586
1587 }
1588
1589 } else {
1590 for (int i = 0; i < m_camera_info.capture.buffers; i++) {
1591 ALOGV("DEBUG(%s): capture initial QBUF [%d]", __FUNCTION__, i);
1592 cam_int_qbuf(&(m_camera_info.capture), i);
1593 }
1594 }
1595 ALOGV("== stream_on :: capture");
1596 if (cam_int_streamon(&(m_camera_info.capture)) < 0) {
1597 ALOGE("ERR(%s): capture stream on fail", __FUNCTION__);
1598 } else {
1599 m_camera_info.capture.status = true;
1600 }
1601 }
1602 if (m_scpForceSuspended) {
1603 m_scpForceSuspended = false;
1604 }
1605 }
1606 }
1607 if (m_isIspStarted == false) {
1608 StartISP();
1609 ALOGV("DEBUG(%s):starting sensor thread", __FUNCTION__);
1610 m_requestManager->SetInitialSkip(6);
1611 m_sensorThread->Start("SensorThread", PRIORITY_DEFAULT, 0);
1612 m_isIspStarted = true;
1613 }
1614 m_mainThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY);
1615 return 0;
1616 }
1617
1618 int ExynosCameraHWInterface2::setFrameQueueDstOps(const camera2_frame_queue_dst_ops_t *frame_dst_ops)
1619 {
1620 ALOGV("DEBUG(%s):", __FUNCTION__);
1621 if ((NULL != frame_dst_ops) && (NULL != frame_dst_ops->dequeue_frame)
1622 && (NULL != frame_dst_ops->cancel_frame) && (NULL !=frame_dst_ops->enqueue_frame)) {
1623 m_frameQueueOps = (camera2_frame_queue_dst_ops_t *)frame_dst_ops;
1624 return 0;
1625 }
1626 else {
1627 ALOGE("DEBUG(%s):setFrameQueueDstOps : NULL arguments", __FUNCTION__);
1628 return 1;
1629 }
1630 }
1631
1632 int ExynosCameraHWInterface2::getInProgressCount()
1633 {
1634 int inProgressJpeg;
1635 int inProgressCount;
1636
1637 {
1638 Mutex::Autolock lock(m_jpegEncoderLock);
1639 inProgressJpeg = m_jpegEncodingCount;
1640 inProgressCount = m_requestManager->GetNumEntries();
1641 }
1642 ALOGV("DEBUG(%s): # of dequeued req (%d) jpeg(%d) = (%d)", __FUNCTION__,
1643 inProgressCount, inProgressJpeg, (inProgressCount + inProgressJpeg));
1644 return (inProgressCount + inProgressJpeg);
1645 }
1646
1647 int ExynosCameraHWInterface2::flushCapturesInProgress()
1648 {
1649 return 0;
1650 }
1651
1652 int ExynosCameraHWInterface2::constructDefaultRequest(int request_template, camera_metadata_t **request)
1653 {
1654 ALOGV("DEBUG(%s): making template (%d) ", __FUNCTION__, request_template);
1655
1656 if (request == NULL) return BAD_VALUE;
1657 if (request_template < 0 || request_template >= CAMERA2_TEMPLATE_COUNT) {
1658 return BAD_VALUE;
1659 }
1660 status_t res;
1661 // Pass 1, calculate size and allocate
1662 res = m_camera2->constructDefaultRequest(request_template,
1663 request,
1664 true);
1665 if (res != OK) {
1666 return res;
1667 }
1668 // Pass 2, build request
1669 res = m_camera2->constructDefaultRequest(request_template,
1670 request,
1671 false);
1672 if (res != OK) {
1673 ALOGE("Unable to populate new request for template %d",
1674 request_template);
1675 }
1676
1677 return res;
1678 }
1679
1680 int ExynosCameraHWInterface2::allocateStream(uint32_t width, uint32_t height, int format, const camera2_stream_ops_t *stream_ops,
1681 uint32_t *stream_id, uint32_t *format_actual, uint32_t *usage, uint32_t *max_buffers)
1682 {
1683 ALOGD("(%s): stream width(%d) height(%d) format(%x)", __FUNCTION__, width, height, format);
1684 bool useDirectOutput = false;
1685 StreamThread *AllocatedStream;
1686 stream_parameters_t newParameters;
1687 substream_parameters_t *subParameters;
1688 StreamThread *parentStream;
1689 status_t res;
1690 int allocCase = 0;
1691
1692 if ((format == HAL_PIXEL_FORMAT_IMPLEMENTATION_DEFINED || format == CAMERA2_HAL_PIXEL_FORMAT_OPAQUE) &&
1693 m_camera2->isSupportedResolution(width, height)) {
1694 if (!(m_streamThreads[0].get())) {
1695 ALOGV("DEBUG(%s): stream 0 not exist", __FUNCTION__);
1696 allocCase = 0;
1697 }
1698 else {
1699 if ((m_streamThreads[0].get())->m_activated == true) {
1700 ALOGV("DEBUG(%s): stream 0 exists and activated.", __FUNCTION__);
1701 allocCase = 1;
1702 }
1703 else {
1704 ALOGV("DEBUG(%s): stream 0 exists and deactivated.", __FUNCTION__);
1705 allocCase = 2;
1706 }
1707 }
1708
1709 // TODO : instead of that, use calculate aspect ratio and selection with calculated ratio.
1710 if ((width == 1920 && height == 1080) || (width == 1280 && height == 720)
1711 || (width == 720 && height == 480) || (width == 1440 && height == 960)
1712 || (width == 1344 && height == 896)) {
1713 m_wideAspect = true;
1714 } else {
1715 m_wideAspect = false;
1716 }
1717 ALOGV("DEBUG(%s): m_wideAspect (%d)", __FUNCTION__, m_wideAspect);
1718
1719 if (allocCase == 0 || allocCase == 2) {
1720 *stream_id = STREAM_ID_PREVIEW;
1721
1722 m_streamThreads[0] = new StreamThread(this, *stream_id);
1723
1724 AllocatedStream = (StreamThread*)(m_streamThreads[0].get());
1725 AllocatedStream->Start("StreamThread", PRIORITY_DEFAULT, 0);
1726 m_streamThreadInitialize((SignalDrivenThread*)AllocatedStream);
1727
1728 *format_actual = HAL_PIXEL_FORMAT_EXYNOS_YV12;
1729 *usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
1730 if (m_wideAspect)
1731 *usage |= GRALLOC_USAGE_PRIVATE_CHROMA;
1732 *max_buffers = 7;
1733
1734 newParameters.width = width;
1735 newParameters.height = height;
1736 newParameters.format = *format_actual;
1737 newParameters.streamOps = stream_ops;
1738 newParameters.usage = *usage;
1739 newParameters.numHwBuffers = NUM_SCP_BUFFERS;
1740 newParameters.numOwnSvcBuffers = *max_buffers;
1741 newParameters.planes = NUM_PLANES(*format_actual);
1742 newParameters.metaPlanes = 1;
1743 newParameters.numSvcBufsInHal = 0;
1744 newParameters.minUndequedBuffer = 3;
1745 newParameters.needsIonMap = true;
1746
1747 newParameters.node = &m_camera_info.scp;
1748 newParameters.node->type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1749 newParameters.node->memory = V4L2_MEMORY_DMABUF;
1750
1751 AllocatedStream->streamType = STREAM_TYPE_DIRECT;
1752 AllocatedStream->m_index = 0;
1753 AllocatedStream->setParameter(&newParameters);
1754 AllocatedStream->m_activated = true;
1755 AllocatedStream->m_numRegisteredStream = 1;
1756 ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, AllocatedStream->m_numRegisteredStream);
1757 m_requestManager->SetDefaultParameters(m_camera2->getSensorW());
1758 m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[2] = m_camera2->getSensorW();
1759 if (m_subStreams[STREAM_ID_RECORD].type != SUBSTREAM_TYPE_NONE)
1760 AllocatedStream->attachSubStream(STREAM_ID_RECORD, 10);
1761 if (m_subStreams[STREAM_ID_PRVCB].type != SUBSTREAM_TYPE_NONE)
1762 AllocatedStream->attachSubStream(STREAM_ID_PRVCB, 70);
1763
1764 // set video stabilization killswitch
1765 m_requestManager->m_vdisEnable = width > 352 && height > 288;
1766
1767 return 0;
1768 } else if (allocCase == 1) {
1769 *stream_id = STREAM_ID_RECORD;
1770
1771 subParameters = &m_subStreams[STREAM_ID_RECORD];
1772 memset(subParameters, 0, sizeof(substream_parameters_t));
1773
1774 parentStream = (StreamThread*)(m_streamThreads[0].get());
1775 if (!parentStream) {
1776 return 1;
1777 }
1778
1779 *format_actual = HAL_PIXEL_FORMAT_YCbCr_420_SP; // NV12M
1780 *usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
1781 if (m_wideAspect)
1782 *usage |= GRALLOC_USAGE_PRIVATE_CHROMA;
1783 *max_buffers = 7;
1784
1785 subParameters->type = SUBSTREAM_TYPE_RECORD;
1786 subParameters->width = width;
1787 subParameters->height = height;
1788 subParameters->format = *format_actual;
1789 subParameters->svcPlanes = NUM_PLANES(*format_actual);
1790 subParameters->streamOps = stream_ops;
1791 subParameters->usage = *usage;
1792 subParameters->numOwnSvcBuffers = *max_buffers;
1793 subParameters->numSvcBufsInHal = 0;
1794 subParameters->needBufferInit = false;
1795 subParameters->minUndequedBuffer = 2;
1796
1797 res = parentStream->attachSubStream(STREAM_ID_RECORD, 20);
1798 if (res != NO_ERROR) {
1799 ALOGE("(%s): substream attach failed. res(%d)", __FUNCTION__, res);
1800 return 1;
1801 }
1802 ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, parentStream->m_numRegisteredStream);
1803 ALOGV("(%s): Enabling Record", __FUNCTION__);
1804 return 0;
1805 }
1806 }
1807 else if ((format == CAMERA2_HAL_PIXEL_FORMAT_ZSL)
1808 && ((int32_t)width == m_camera2->getSensorW()) && ((int32_t)height == m_camera2->getSensorH())) {
1809
1810 if (!(m_streamThreads[1].get())) {
1811 ALOGV("DEBUG(%s): stream thread 1 not exist", __FUNCTION__);
1812 useDirectOutput = true;
1813 }
1814 else {
1815 ALOGV("DEBUG(%s): stream thread 1 exists and deactivated.", __FUNCTION__);
1816 useDirectOutput = false;
1817 }
1818 if (useDirectOutput) {
1819 *stream_id = STREAM_ID_ZSL;
1820
1821 m_streamThreads[1] = new StreamThread(this, *stream_id);
1822 AllocatedStream = (StreamThread*)(m_streamThreads[1].get());
1823 AllocatedStream->Start("StreamThread", PRIORITY_DEFAULT, 0);
1824 m_streamThreadInitialize((SignalDrivenThread*)AllocatedStream);
1825
1826 *format_actual = HAL_PIXEL_FORMAT_EXYNOS_YV12;
1827
1828 *format_actual = HAL_PIXEL_FORMAT_YCbCr_422_I; // YUYV
1829 *usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
1830 if (m_wideAspect)
1831 *usage |= GRALLOC_USAGE_PRIVATE_CHROMA;
1832 *max_buffers = 7;
1833
1834 newParameters.width = width;
1835 newParameters.height = height;
1836 newParameters.format = *format_actual;
1837 newParameters.streamOps = stream_ops;
1838 newParameters.usage = *usage;
1839 newParameters.numHwBuffers = NUM_SCC_BUFFERS;
1840 newParameters.numOwnSvcBuffers = *max_buffers;
1841 newParameters.planes = NUM_PLANES(*format_actual);
1842 newParameters.metaPlanes = 1;
1843
1844 newParameters.numSvcBufsInHal = 0;
1845 newParameters.minUndequedBuffer = 2;
1846 newParameters.needsIonMap = false;
1847
1848 newParameters.node = &m_camera_info.capture;
1849 newParameters.node->type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1850 newParameters.node->memory = V4L2_MEMORY_DMABUF;
1851
1852 AllocatedStream->streamType = STREAM_TYPE_DIRECT;
1853 AllocatedStream->m_index = 1;
1854 AllocatedStream->setParameter(&newParameters);
1855 AllocatedStream->m_activated = true;
1856 AllocatedStream->m_numRegisteredStream = 1;
1857 ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, AllocatedStream->m_numRegisteredStream);
1858 return 0;
1859 } else {
1860 bool bJpegExists = false;
1861 AllocatedStream = (StreamThread*)(m_streamThreads[1].get());
1862 subParameters = &m_subStreams[STREAM_ID_JPEG];
1863 if (subParameters->type == SUBSTREAM_TYPE_JPEG) {
1864 ALOGD("(%s): jpeg stream exists", __FUNCTION__);
1865 bJpegExists = true;
1866 AllocatedStream->detachSubStream(STREAM_ID_JPEG);
1867 }
1868 AllocatedStream->m_releasing = true;
1869 ALOGD("START stream thread 1 release %d", __LINE__);
1870 do {
1871 AllocatedStream->release();
1872 usleep(SIG_WAITING_TICK);
1873 } while (AllocatedStream->m_releasing);
1874 ALOGD("END stream thread 1 release %d", __LINE__);
1875
1876 *stream_id = STREAM_ID_ZSL;
1877
1878 m_streamThreadInitialize((SignalDrivenThread*)AllocatedStream);
1879
1880 *format_actual = HAL_PIXEL_FORMAT_EXYNOS_YV12;
1881
1882 *format_actual = HAL_PIXEL_FORMAT_YCbCr_422_I; // YUYV
1883 *usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
1884 if (m_wideAspect)
1885 *usage |= GRALLOC_USAGE_PRIVATE_CHROMA;
1886 *max_buffers = 7;
1887
1888 newParameters.width = width;
1889 newParameters.height = height;
1890 newParameters.format = *format_actual;
1891 newParameters.streamOps = stream_ops;
1892 newParameters.usage = *usage;
1893 newParameters.numHwBuffers = NUM_SCC_BUFFERS;
1894 newParameters.numOwnSvcBuffers = *max_buffers;
1895 newParameters.planes = NUM_PLANES(*format_actual);
1896 newParameters.metaPlanes = 1;
1897
1898 newParameters.numSvcBufsInHal = 0;
1899 newParameters.minUndequedBuffer = 2;
1900 newParameters.needsIonMap = false;
1901
1902 newParameters.node = &m_camera_info.capture;
1903 newParameters.node->type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1904 newParameters.node->memory = V4L2_MEMORY_DMABUF;
1905
1906 AllocatedStream->streamType = STREAM_TYPE_DIRECT;
1907 AllocatedStream->m_index = 1;
1908 AllocatedStream->setParameter(&newParameters);
1909 AllocatedStream->m_activated = true;
1910 AllocatedStream->m_numRegisteredStream = 1;
1911 if (bJpegExists) {
1912 AllocatedStream->attachSubStream(STREAM_ID_JPEG, 10);
1913 }
1914 ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, AllocatedStream->m_numRegisteredStream);
1915 return 0;
1916
1917 }
1918 }
1919 else if (format == HAL_PIXEL_FORMAT_BLOB
1920 && m_camera2->isSupportedJpegResolution(width, height)) {
1921 *stream_id = STREAM_ID_JPEG;
1922
1923 subParameters = &m_subStreams[*stream_id];
1924 memset(subParameters, 0, sizeof(substream_parameters_t));
1925
1926 if (!(m_streamThreads[1].get())) {
1927 ALOGV("DEBUG(%s): stream thread 1 not exist", __FUNCTION__);
1928 StartSCCThread(false);
1929 }
1930 else if (m_streamThreads[1]->m_activated == false) {
1931 ALOGV("DEBUG(%s): stream thread 1 suspended. restarting", __FUNCTION__);
1932 StartSCCThread(true);
1933 }
1934 parentStream = (StreamThread*)(m_streamThreads[1].get());
1935
1936 *format_actual = HAL_PIXEL_FORMAT_BLOB;
1937 *usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
1938 if (m_wideAspect)
1939 *usage |= GRALLOC_USAGE_PRIVATE_CHROMA;
1940 *max_buffers = 5;
1941
1942 subParameters->type = SUBSTREAM_TYPE_JPEG;
1943 subParameters->width = width;
1944 subParameters->height = height;
1945 subParameters->format = *format_actual;
1946 subParameters->svcPlanes = 1;
1947 parentStream->m_parameters.streamOps =
1948 subParameters->streamOps = stream_ops;
1949 subParameters->usage = *usage;
1950 subParameters->numOwnSvcBuffers = *max_buffers;
1951 subParameters->numSvcBufsInHal = 0;
1952 subParameters->needBufferInit = false;
1953 subParameters->minUndequedBuffer = 2;
1954
1955 res = parentStream->attachSubStream(STREAM_ID_JPEG, 10);
1956 if (res != NO_ERROR) {
1957 ALOGE("(%s): substream attach failed. res(%d)", __FUNCTION__, res);
1958 return 1;
1959 }
1960 ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, parentStream->m_numRegisteredStream);
1961 ALOGV("(%s): Enabling Jpeg", __FUNCTION__);
1962 return 0;
1963 }
1964 else if (format == HAL_PIXEL_FORMAT_YCrCb_420_SP || format == HAL_PIXEL_FORMAT_YV12) {
1965 *stream_id = STREAM_ID_PRVCB;
1966
1967 subParameters = &m_subStreams[STREAM_ID_PRVCB];
1968 memset(subParameters, 0, sizeof(substream_parameters_t));
1969
1970 parentStream = (StreamThread*)(m_streamThreads[0].get());
1971 if (!parentStream) {
1972 return 1;
1973 }
1974
1975 *format_actual = format;
1976 *usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
1977 if (m_wideAspect)
1978 *usage |= GRALLOC_USAGE_PRIVATE_CHROMA;
1979 *max_buffers = 7;
1980
1981 subParameters->type = SUBSTREAM_TYPE_PRVCB;
1982 subParameters->width = width;
1983 subParameters->height = height;
1984 subParameters->format = *format_actual;
1985 subParameters->svcPlanes = NUM_PLANES(*format_actual);
1986 parentStream->m_parameters.streamOps =
1987 subParameters->streamOps = stream_ops;
1988 subParameters->usage = *usage;
1989 subParameters->numOwnSvcBuffers = *max_buffers;
1990 subParameters->numSvcBufsInHal = 0;
1991 subParameters->needBufferInit = false;
1992 subParameters->minUndequedBuffer = 2;
1993
1994 if (format == HAL_PIXEL_FORMAT_YCrCb_420_SP) {
1995 subParameters->internalFormat = HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP;
1996 subParameters->internalPlanes = NUM_PLANES(HAL_PIXEL_FORMAT_EXYNOS_YCrCb_420_SP);
1997 }
1998 else {
1999 subParameters->internalFormat = HAL_PIXEL_FORMAT_EXYNOS_YV12;
2000 subParameters->internalPlanes = NUM_PLANES(HAL_PIXEL_FORMAT_EXYNOS_YV12);
2001 }
2002
2003 res = parentStream->attachSubStream(STREAM_ID_PRVCB, 20);
2004 if (res != NO_ERROR) {
2005 ALOGE("(%s): substream attach failed. res(%d)", __FUNCTION__, res);
2006 return 1;
2007 }
2008 ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, parentStream->m_numRegisteredStream);
2009 ALOGV("(%s): Enabling previewcb", __FUNCTION__);
2010 return 0;
2011 }
2012 ALOGE("(%s): Unsupported Pixel Format", __FUNCTION__);
2013 return 1;
2014 }
2015
2016 int ExynosCameraHWInterface2::registerStreamBuffers(uint32_t stream_id,
2017 int num_buffers, buffer_handle_t *registeringBuffers)
2018 {
2019 int i,j;
2020 void *virtAddr[3];
2021 int plane_index = 0;
2022 StreamThread * targetStream;
2023 stream_parameters_t *targetStreamParms;
2024 node_info_t *currentNode;
2025
2026 struct v4l2_buffer v4l2_buf;
2027 struct v4l2_plane planes[VIDEO_MAX_PLANES];
2028
2029 ALOGD("(%s): stream_id(%d), num_buff(%d), handle(%x) ", __FUNCTION__,
2030 stream_id, num_buffers, (uint32_t)registeringBuffers);
2031
2032 if (stream_id == STREAM_ID_PREVIEW && m_streamThreads[0].get()) {
2033 targetStream = m_streamThreads[0].get();
2034 targetStreamParms = &(m_streamThreads[0]->m_parameters);
2035
2036 }
2037 else if (stream_id == STREAM_ID_JPEG || stream_id == STREAM_ID_RECORD || stream_id == STREAM_ID_PRVCB) {
2038 substream_parameters_t *targetParms;
2039 targetParms = &m_subStreams[stream_id];
2040
2041 targetParms->numSvcBuffers = num_buffers;
2042
2043 for (i = 0 ; i < targetParms->numSvcBuffers ; i++) {
2044 ALOGV("(%s): registering substream(%d) Buffers[%d] (%x) ", __FUNCTION__,
2045 i, stream_id, (uint32_t)(registeringBuffers[i]));
2046 if (m_grallocHal) {
2047 if (m_grallocHal->lock(m_grallocHal, registeringBuffers[i],
2048 targetParms->usage, 0, 0,
2049 targetParms->width, targetParms->height, virtAddr) != 0) {
2050 ALOGE("ERR(%s): could not obtain gralloc buffer", __FUNCTION__);
2051 }
2052 else {
2053 ExynosBuffer currentBuf;
2054 const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(registeringBuffers[i]);
2055 if (targetParms->svcPlanes == 1) {
2056 currentBuf.fd.extFd[0] = priv_handle->fd;
2057 currentBuf.size.extS[0] = priv_handle->size;
2058 currentBuf.size.extS[1] = 0;
2059 currentBuf.size.extS[2] = 0;
2060 } else if (targetParms->svcPlanes == 2) {
2061 currentBuf.fd.extFd[0] = priv_handle->fd;
2062 currentBuf.fd.extFd[1] = priv_handle->fd1;
2063
2064 } else if (targetParms->svcPlanes == 3) {
2065 currentBuf.fd.extFd[0] = priv_handle->fd;
2066 currentBuf.fd.extFd[1] = priv_handle->fd1;
2067 currentBuf.fd.extFd[2] = priv_handle->fd2;
2068 }
2069 for (plane_index = 0 ; plane_index < targetParms->svcPlanes ; plane_index++) {
2070 currentBuf.virt.extP[plane_index] = (char *)virtAddr[plane_index];
2071 CAM_LOGV("DEBUG(%s): plane(%d): fd(%d) addr(%x) size(%d)",
2072 __FUNCTION__, plane_index, currentBuf.fd.extFd[plane_index],
2073 (unsigned int)currentBuf.virt.extP[plane_index], currentBuf.size.extS[plane_index]);
2074 }
2075 targetParms->svcBufStatus[i] = ON_SERVICE;
2076 targetParms->svcBuffers[i] = currentBuf;
2077 targetParms->svcBufHandle[i] = registeringBuffers[i];
2078 }
2079 }
2080 }
2081 targetParms->needBufferInit = true;
2082 return 0;
2083 }
2084 else if (stream_id == STREAM_ID_ZSL && m_streamThreads[1].get()) {
2085 targetStream = m_streamThreads[1].get();
2086 targetStreamParms = &(m_streamThreads[1]->m_parameters);
2087 }
2088 else {
2089 ALOGE("(%s): unregistered stream id (%d)", __FUNCTION__, stream_id);
2090 return 1;
2091 }
2092
2093 if (targetStream->streamType == STREAM_TYPE_DIRECT) {
2094 if (num_buffers < targetStreamParms->numHwBuffers) {
2095 ALOGE("ERR(%s) registering insufficient num of buffers (%d) < (%d)",
2096 __FUNCTION__, num_buffers, targetStreamParms->numHwBuffers);
2097 return 1;
2098 }
2099 }
2100 CAM_LOGV("DEBUG(%s): format(%x) width(%d), height(%d) svcPlanes(%d)",
2101 __FUNCTION__, targetStreamParms->format, targetStreamParms->width,
2102 targetStreamParms->height, targetStreamParms->planes);
2103 targetStreamParms->numSvcBuffers = num_buffers;
2104 currentNode = targetStreamParms->node;
2105 currentNode->width = targetStreamParms->width;
2106 currentNode->height = targetStreamParms->height;
2107 currentNode->format = HAL_PIXEL_FORMAT_2_V4L2_PIX(targetStreamParms->format);
2108 currentNode->planes = targetStreamParms->planes;
2109 currentNode->buffers = targetStreamParms->numHwBuffers;
2110 cam_int_s_input(currentNode, m_camera_info.sensor_id);
2111 cam_int_s_fmt(currentNode);
2112 cam_int_reqbufs(currentNode);
2113 for (i = 0 ; i < targetStreamParms->numSvcBuffers ; i++) {
2114 ALOGV("DEBUG(%s): registering Stream Buffers[%d] (%x) ", __FUNCTION__,
2115 i, (uint32_t)(registeringBuffers[i]));
2116 v4l2_buf.m.planes = planes;
2117 v4l2_buf.type = currentNode->type;
2118 v4l2_buf.memory = currentNode->memory;
2119 v4l2_buf.index = i;
2120 v4l2_buf.length = currentNode->planes;
2121
2122 ExynosBuffer currentBuf;
2123 ExynosBuffer metaBuf;
2124 const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(registeringBuffers[i]);
2125
2126 m_getAlignedYUVSize(currentNode->format,
2127 currentNode->width, currentNode->height, &currentBuf);
2128
2129 ALOGV("DEBUG(%s): ion_size(%d), stride(%d), ", __FUNCTION__, priv_handle->size, priv_handle->stride);
2130 if (currentNode->planes == 1) {
2131 v4l2_buf.m.planes[0].m.fd = priv_handle->fd;
2132 currentBuf.fd.extFd[0] = priv_handle->fd;
2133 currentBuf.size.extS[0] = priv_handle->size;
2134 currentBuf.size.extS[1] = 0;
2135 currentBuf.size.extS[2] = 0;
2136 } else if (currentNode->planes == 2) {
2137 v4l2_buf.m.planes[0].m.fd = priv_handle->fd;
2138 v4l2_buf.m.planes[1].m.fd = priv_handle->fd1;
2139 currentBuf.fd.extFd[0] = priv_handle->fd;
2140 currentBuf.fd.extFd[1] = priv_handle->fd1;
2141
2142 } else if (currentNode->planes == 3) {
2143 v4l2_buf.m.planes[0].m.fd = priv_handle->fd;
2144 v4l2_buf.m.planes[2].m.fd = priv_handle->fd1;
2145 v4l2_buf.m.planes[1].m.fd = priv_handle->fd2;
2146 currentBuf.fd.extFd[0] = priv_handle->fd;
2147 currentBuf.fd.extFd[2] = priv_handle->fd1;
2148 currentBuf.fd.extFd[1] = priv_handle->fd2;
2149 }
2150
2151 for (plane_index = 0 ; plane_index < (int)v4l2_buf.length ; plane_index++) {
2152 if (targetStreamParms->needsIonMap)
2153 currentBuf.virt.extP[plane_index] = (char *)ion_map(currentBuf.fd.extFd[plane_index], currentBuf.size.extS[plane_index], 0);
2154 v4l2_buf.m.planes[plane_index].length = currentBuf.size.extS[plane_index];
2155 ALOGV("(%s): MAPPING plane(%d): fd(%d) addr(%x), length(%d)",
2156 __FUNCTION__, plane_index, v4l2_buf.m.planes[plane_index].m.fd,
2157 (unsigned int)currentBuf.virt.extP[plane_index],
2158 v4l2_buf.m.planes[plane_index].length);
2159 }
2160
2161 if (i < currentNode->buffers) {
2162
2163
2164 #ifdef ENABLE_FRAME_SYNC
2165 /* add plane for metadata*/
2166 metaBuf.size.extS[0] = 4*1024;
2167 allocCameraMemory(m_ionCameraClient , &metaBuf, 1, 1<<0);
2168
2169 v4l2_buf.length += targetStreamParms->metaPlanes;
2170 v4l2_buf.m.planes[v4l2_buf.length-1].m.fd = metaBuf.fd.extFd[0];
2171 v4l2_buf.m.planes[v4l2_buf.length-1].length = metaBuf.size.extS[0];
2172
2173 ALOGV("Qbuf metaBuf: fd(%d), length(%d) plane(%d)", metaBuf.fd.extFd[0], metaBuf.size.extS[0], v4l2_buf.length);
2174 #endif
2175 if (exynos_v4l2_qbuf(currentNode->fd, &v4l2_buf) < 0) {
2176 ALOGE("ERR(%s): stream id(%d) exynos_v4l2_qbuf() fail fd(%d)",
2177 __FUNCTION__, stream_id, currentNode->fd);
2178 }
2179 ALOGV("DEBUG(%s): stream id(%d) exynos_v4l2_qbuf() success fd(%d)",
2180 __FUNCTION__, stream_id, currentNode->fd);
2181 targetStreamParms->svcBufStatus[i] = REQUIRES_DQ_FROM_SVC;
2182 }
2183 else {
2184 targetStreamParms->svcBufStatus[i] = ON_SERVICE;
2185 }
2186
2187 targetStreamParms->svcBuffers[i] = currentBuf;
2188 targetStreamParms->metaBuffers[i] = metaBuf;
2189 targetStreamParms->svcBufHandle[i] = registeringBuffers[i];
2190 }
2191
2192 ALOGV("DEBUG(%s): calling streamon stream id = %d", __FUNCTION__, stream_id);
2193 cam_int_streamon(targetStreamParms->node);
2194 ALOGV("DEBUG(%s): calling streamon END", __FUNCTION__);
2195 currentNode->status = true;
2196 ALOGV("DEBUG(%s): END registerStreamBuffers", __FUNCTION__);
2197
2198 return 0;
2199 }
2200
2201 int ExynosCameraHWInterface2::releaseStream(uint32_t stream_id)
2202 {
2203 StreamThread *targetStream;
2204 status_t res = NO_ERROR;
2205 ALOGD("(%s): stream_id(%d)", __FUNCTION__, stream_id);
2206 bool releasingScpMain = false;
2207
2208 if (stream_id == STREAM_ID_PREVIEW) {
2209 targetStream = (StreamThread*)(m_streamThreads[0].get());
2210 if (!targetStream) {
2211 ALOGW("(%s): Stream Not Exists", __FUNCTION__);
2212 return NO_ERROR;
2213 }
2214 targetStream->m_numRegisteredStream--;
2215 ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, targetStream->m_numRegisteredStream);
2216 releasingScpMain = true;
2217 if (targetStream->m_parameters.needsIonMap) {
2218 for (int i = 0; i < targetStream->m_parameters.numSvcBuffers; i++) {
2219 for (int j = 0; j < targetStream->m_parameters.planes; j++) {
2220 ion_unmap(targetStream->m_parameters.svcBuffers[i].virt.extP[j],
2221 targetStream->m_parameters.svcBuffers[i].size.extS[j]);
2222 ALOGV("(%s) ummap stream buffer[%d], plane(%d), fd %d vaddr %x", __FUNCTION__, i, j,
2223 targetStream->m_parameters.svcBuffers[i].fd.extFd[j], (unsigned int)(targetStream->m_parameters.svcBuffers[i].virt.extP[j]));
2224 }
2225 }
2226 }
2227 } else if (stream_id == STREAM_ID_JPEG) {
2228 if (m_resizeBuf.size.s != 0) {
2229 freeCameraMemory(&m_resizeBuf, 1);
2230 }
2231 memset(&m_subStreams[stream_id], 0, sizeof(substream_parameters_t));
2232
2233 targetStream = (StreamThread*)(m_streamThreads[1].get());
2234 if (!targetStream) {
2235 ALOGW("(%s): Stream Not Exists", __FUNCTION__);
2236 return NO_ERROR;
2237 }
2238
2239 if (targetStream->detachSubStream(stream_id) != NO_ERROR) {
2240 ALOGE("(%s): substream detach failed. res(%d)", __FUNCTION__, res);
2241 return 1;
2242 }
2243 ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, targetStream->m_numRegisteredStream);
2244 return 0;
2245 } else if (stream_id == STREAM_ID_RECORD) {
2246 memset(&m_subStreams[stream_id], 0, sizeof(substream_parameters_t));
2247
2248 targetStream = (StreamThread*)(m_streamThreads[0].get());
2249 if (!targetStream) {
2250 ALOGW("(%s): Stream Not Exists", __FUNCTION__);
2251 return NO_ERROR;
2252 }
2253
2254 if (targetStream->detachSubStream(stream_id) != NO_ERROR) {
2255 ALOGE("(%s): substream detach failed. res(%d)", __FUNCTION__, res);
2256 return 1;
2257 }
2258
2259 if (targetStream->m_numRegisteredStream != 0)
2260 return 0;
2261 } else if (stream_id == STREAM_ID_PRVCB) {
2262 if (m_previewCbBuf.size.s != 0) {
2263 freeCameraMemory(&m_previewCbBuf, m_subStreams[stream_id].internalPlanes);
2264 }
2265 memset(&m_subStreams[stream_id], 0, sizeof(substream_parameters_t));
2266
2267 targetStream = (StreamThread*)(m_streamThreads[0].get());
2268 if (!targetStream) {
2269 ALOGW("(%s): Stream Not Exists", __FUNCTION__);
2270 return NO_ERROR;
2271 }
2272
2273 if (targetStream->detachSubStream(stream_id) != NO_ERROR) {
2274 ALOGE("(%s): substream detach failed. res(%d)", __FUNCTION__, res);
2275 return 1;
2276 }
2277
2278 if (targetStream->m_numRegisteredStream != 0)
2279 return 0;
2280 } else if (stream_id == STREAM_ID_ZSL) {
2281 targetStream = (StreamThread*)(m_streamThreads[1].get());
2282 if (!targetStream) {
2283 ALOGW("(%s): Stream Not Exists", __FUNCTION__);
2284 return NO_ERROR;
2285 }
2286
2287 targetStream->m_numRegisteredStream--;
2288 ALOGV("(%s): m_numRegisteredStream = %d", __FUNCTION__, targetStream->m_numRegisteredStream);
2289 if (targetStream->m_parameters.needsIonMap) {
2290 for (int i = 0; i < targetStream->m_parameters.numSvcBuffers; i++) {
2291 for (int j = 0; j < targetStream->m_parameters.planes; j++) {
2292 ion_unmap(targetStream->m_parameters.svcBuffers[i].virt.extP[j],
2293 targetStream->m_parameters.svcBuffers[i].size.extS[j]);
2294 ALOGV("(%s) ummap stream buffer[%d], plane(%d), fd %d vaddr %x", __FUNCTION__, i, j,
2295 targetStream->m_parameters.svcBuffers[i].fd.extFd[j], (unsigned int)(targetStream->m_parameters.svcBuffers[i].virt.extP[j]));
2296 }
2297 }
2298 }
2299 } else {
2300 ALOGE("ERR:(%s): wrong stream id (%d)", __FUNCTION__, stream_id);
2301 return 1;
2302 }
2303
2304 if (m_sensorThread != NULL && releasingScpMain) {
2305 m_sensorThread->release();
2306 ALOGD("(%s): START Waiting for (indirect) sensor thread termination", __FUNCTION__);
2307 while (!m_sensorThread->IsTerminated())
2308 usleep(SIG_WAITING_TICK);
2309 ALOGD("(%s): END Waiting for (indirect) sensor thread termination", __FUNCTION__);
2310 }
2311
2312 if (m_streamThreads[1]->m_numRegisteredStream == 0 && m_streamThreads[1]->m_activated) {
2313 ALOGV("(%s): deactivating stream thread 1 ", __FUNCTION__);
2314 targetStream = (StreamThread*)(m_streamThreads[1].get());
2315 targetStream->m_releasing = true;
2316 ALOGD("START stream thread release %d", __LINE__);
2317 do {
2318 targetStream->release();
2319 usleep(SIG_WAITING_TICK);
2320 } while (targetStream->m_releasing);
2321 m_camera_info.capture.status = false;
2322 ALOGD("END stream thread release %d", __LINE__);
2323 }
2324
2325 if (releasingScpMain || (m_streamThreads[0].get() != NULL && m_streamThreads[0]->m_numRegisteredStream == 0 && m_streamThreads[0]->m_activated)) {
2326 ALOGV("(%s): deactivating stream thread 0", __FUNCTION__);
2327 targetStream = (StreamThread*)(m_streamThreads[0].get());
2328 targetStream->m_releasing = true;
2329 ALOGD("(%s): START Waiting for (indirect) stream thread release - line(%d)", __FUNCTION__, __LINE__);
2330 do {
2331 targetStream->release();
2332 usleep(SIG_WAITING_TICK);
2333 } while (targetStream->m_releasing);
2334 ALOGD("(%s): END Waiting for (indirect) stream thread release - line(%d)", __FUNCTION__, __LINE__);
2335 targetStream->SetSignal(SIGNAL_THREAD_TERMINATE);
2336
2337 if (targetStream != NULL) {
2338 ALOGD("(%s): START Waiting for (indirect) stream thread termination", __FUNCTION__);
2339 while (!targetStream->IsTerminated())
2340 usleep(SIG_WAITING_TICK);
2341 ALOGD("(%s): END Waiting for (indirect) stream thread termination", __FUNCTION__);
2342 m_streamThreads[0] = NULL;
2343 }
2344 if (m_camera_info.capture.status == true) {
2345 m_scpForceSuspended = true;
2346 }
2347 m_isIspStarted = false;
2348 }
2349 ALOGV("(%s): END", __FUNCTION__);
2350 return 0;
2351 }
2352
2353 int ExynosCameraHWInterface2::allocateReprocessStream(
2354 uint32_t /*width*/, uint32_t /*height*/, uint32_t /*format*/,
2355 const camera2_stream_in_ops_t* /*reprocess_stream_ops*/,
2356 uint32_t* /*stream_id*/, uint32_t* /*consumer_usage*/, uint32_t* /*max_buffers*/)
2357 {
2358 ALOGV("DEBUG(%s):", __FUNCTION__);
2359 return 0;
2360 }
2361
2362 int ExynosCameraHWInterface2::allocateReprocessStreamFromStream(
2363 uint32_t output_stream_id,
2364 const camera2_stream_in_ops_t *reprocess_stream_ops,
2365 // outputs
2366 uint32_t *stream_id)
2367 {
2368 ALOGD("(%s): output_stream_id(%d)", __FUNCTION__, output_stream_id);
2369 *stream_id = STREAM_ID_JPEG_REPROCESS;
2370
2371 m_reprocessStreamId = *stream_id;
2372 m_reprocessOps = reprocess_stream_ops;
2373 m_reprocessOutputStreamId = output_stream_id;
2374 return 0;
2375 }
2376
2377 int ExynosCameraHWInterface2::releaseReprocessStream(uint32_t stream_id)
2378 {
2379 ALOGD("(%s): stream_id(%d)", __FUNCTION__, stream_id);
2380 if (stream_id == STREAM_ID_JPEG_REPROCESS) {
2381 m_reprocessStreamId = 0;
2382 m_reprocessOps = NULL;
2383 m_reprocessOutputStreamId = 0;
2384 return 0;
2385 }
2386 return 1;
2387 }
2388
2389 int ExynosCameraHWInterface2::triggerAction(uint32_t trigger_id, int ext1, int ext2)
2390 {
2391 Mutex::Autolock lock(m_afModeTriggerLock);
2392 ALOGV("DEBUG(%s): id(%x), %d, %d", __FUNCTION__, trigger_id, ext1, ext2);
2393
2394 switch (trigger_id) {
2395 case CAMERA2_TRIGGER_AUTOFOCUS:
2396 ALOGV("DEBUG(%s):TRIGGER_AUTOFOCUS id(%d)", __FUNCTION__, ext1);
2397 OnAfTrigger(ext1);
2398 break;
2399
2400 case CAMERA2_TRIGGER_CANCEL_AUTOFOCUS:
2401 ALOGV("DEBUG(%s):CANCEL_AUTOFOCUS id(%d)", __FUNCTION__, ext1);
2402 OnAfCancel(ext1);
2403 break;
2404 case CAMERA2_TRIGGER_PRECAPTURE_METERING:
2405 ALOGV("DEBUG(%s):CAMERA2_TRIGGER_PRECAPTURE_METERING id(%d)", __FUNCTION__, ext1);
2406 OnPrecaptureMeteringTriggerStart(ext1);
2407 break;
2408 default:
2409 break;
2410 }
2411 return 0;
2412 }
2413
2414 int ExynosCameraHWInterface2::setNotifyCallback(camera2_notify_callback notify_cb, void *user)
2415 {
2416 ALOGV("DEBUG(%s): cb_addr(%x)", __FUNCTION__, (unsigned int)notify_cb);
2417 m_notifyCb = notify_cb;
2418 m_callbackCookie = user;
2419 return 0;
2420 }
2421
2422 int ExynosCameraHWInterface2::getMetadataVendorTagOps(vendor_tag_query_ops_t **ops)
2423 {
2424 ALOGV("DEBUG(%s):", __FUNCTION__);
2425 *ops = NULL;
2426 return 0;
2427 }
2428
2429 int ExynosCameraHWInterface2::dump(int /*fd*/)
2430 {
2431 ALOGV("DEBUG(%s):", __FUNCTION__);
2432 return 0;
2433 }
2434
2435 void ExynosCameraHWInterface2::m_getAlignedYUVSize(int colorFormat, int w, int h, ExynosBuffer *buf)
2436 {
2437 switch (colorFormat) {
2438 // 1p
2439 case V4L2_PIX_FMT_RGB565 :
2440 case V4L2_PIX_FMT_YUYV :
2441 case V4L2_PIX_FMT_UYVY :
2442 case V4L2_PIX_FMT_VYUY :
2443 case V4L2_PIX_FMT_YVYU :
2444 buf->size.extS[0] = FRAME_SIZE(V4L2_PIX_2_HAL_PIXEL_FORMAT(colorFormat), w, h);
2445 buf->size.extS[1] = 0;
2446 buf->size.extS[2] = 0;
2447 break;
2448 // 2p
2449 case V4L2_PIX_FMT_NV12 :
2450 case V4L2_PIX_FMT_NV12T :
2451 case V4L2_PIX_FMT_NV21 :
2452 buf->size.extS[0] = ALIGN(w, 16) * ALIGN(h, 16);
2453 buf->size.extS[1] = ALIGN(w/2, 16) * ALIGN(h/2, 16);
2454 buf->size.extS[2] = 0;
2455 break;
2456 case V4L2_PIX_FMT_NV12M :
2457 case V4L2_PIX_FMT_NV12MT_16X16 :
2458 case V4L2_PIX_FMT_NV21M:
2459 buf->size.extS[0] = ALIGN(w, 16) * ALIGN(h, 16);
2460 buf->size.extS[1] = ALIGN(buf->size.extS[0] / 2, 256);
2461 buf->size.extS[2] = 0;
2462 break;
2463 case V4L2_PIX_FMT_NV16 :
2464 case V4L2_PIX_FMT_NV61 :
2465 buf->size.extS[0] = ALIGN(w, 16) * ALIGN(h, 16);
2466 buf->size.extS[1] = ALIGN(w, 16) * ALIGN(h, 16);
2467 buf->size.extS[2] = 0;
2468 break;
2469 // 3p
2470 case V4L2_PIX_FMT_YUV420 :
2471 case V4L2_PIX_FMT_YVU420 :
2472 buf->size.extS[0] = (w * h);
2473 buf->size.extS[1] = (w * h) >> 2;
2474 buf->size.extS[2] = (w * h) >> 2;
2475 break;
2476 case V4L2_PIX_FMT_YUV420M:
2477 case V4L2_PIX_FMT_YVU420M :
2478 buf->size.extS[0] = ALIGN(w, 32) * ALIGN(h, 16);
2479 buf->size.extS[1] = ALIGN(w/2, 16) * ALIGN(h/2, 8);
2480 buf->size.extS[2] = ALIGN(w/2, 16) * ALIGN(h/2, 8);
2481 break;
2482 case V4L2_PIX_FMT_YUV422P :
2483 buf->size.extS[0] = ALIGN(w, 16) * ALIGN(h, 16);
2484 buf->size.extS[1] = ALIGN(w/2, 16) * ALIGN(h/2, 8);
2485 buf->size.extS[2] = ALIGN(w/2, 16) * ALIGN(h/2, 8);
2486 break;
2487 default:
2488 ALOGE("ERR(%s):unmatched colorFormat(%d)", __FUNCTION__, colorFormat);
2489 return;
2490 break;
2491 }
2492 }
2493
2494 bool ExynosCameraHWInterface2::m_getRatioSize(int src_w, int src_h,
2495 int dst_w, int dst_h,
2496 int *crop_x, int *crop_y,
2497 int *crop_w, int *crop_h,
2498 int zoom)
2499 {
2500 *crop_w = src_w;
2501 *crop_h = src_h;
2502
2503 if ( src_w != dst_w
2504 || src_h != dst_h) {
2505 float src_ratio = 1.0f;
2506 float dst_ratio = 1.0f;
2507
2508 // ex : 1024 / 768
2509 src_ratio = (float)src_w / (float)src_h;
2510
2511 // ex : 352 / 288
2512 dst_ratio = (float)dst_w / (float)dst_h;
2513
2514 if (dst_w * dst_h < src_w * src_h) {
2515 if (dst_ratio <= src_ratio) {
2516 // shrink w
2517 *crop_w = src_h * dst_ratio;
2518 *crop_h = src_h;
2519 } else {
2520 // shrink h
2521 *crop_w = src_w;
2522 *crop_h = src_w / dst_ratio;
2523 }
2524 } else {
2525 if (dst_ratio <= src_ratio) {
2526 // shrink w
2527 *crop_w = src_h * dst_ratio;
2528 *crop_h = src_h;
2529 } else {
2530 // shrink h
2531 *crop_w = src_w;
2532 *crop_h = src_w / dst_ratio;
2533 }
2534 }
2535 }
2536
2537 if (zoom != 0) {
2538 float zoomLevel = ((float)zoom + 10.0) / 10.0;
2539 *crop_w = (int)((float)*crop_w / zoomLevel);
2540 *crop_h = (int)((float)*crop_h / zoomLevel);
2541 }
2542
2543 #define CAMERA_CROP_WIDTH_RESTRAIN_NUM (0x2)
2544 unsigned int w_align = (*crop_w & (CAMERA_CROP_WIDTH_RESTRAIN_NUM - 1));
2545 if (w_align != 0) {
2546 if ( (CAMERA_CROP_WIDTH_RESTRAIN_NUM >> 1) <= w_align
2547 && (int)(*crop_w + (CAMERA_CROP_WIDTH_RESTRAIN_NUM - w_align)) <= dst_w) {
2548 *crop_w += (CAMERA_CROP_WIDTH_RESTRAIN_NUM - w_align);
2549 }
2550 else
2551 *crop_w -= w_align;
2552 }
2553
2554 #define CAMERA_CROP_HEIGHT_RESTRAIN_NUM (0x2)
2555 unsigned int h_align = (*crop_h & (CAMERA_CROP_HEIGHT_RESTRAIN_NUM - 1));
2556 if (h_align != 0) {
2557 if ( (CAMERA_CROP_HEIGHT_RESTRAIN_NUM >> 1) <= h_align
2558 && (int)(*crop_h + (CAMERA_CROP_HEIGHT_RESTRAIN_NUM - h_align)) <= dst_h) {
2559 *crop_h += (CAMERA_CROP_HEIGHT_RESTRAIN_NUM - h_align);
2560 }
2561 else
2562 *crop_h -= h_align;
2563 }
2564
2565 *crop_x = (src_w - *crop_w) >> 1;
2566 *crop_y = (src_h - *crop_h) >> 1;
2567
2568 if (*crop_x & (CAMERA_CROP_WIDTH_RESTRAIN_NUM >> 1))
2569 *crop_x -= 1;
2570
2571 if (*crop_y & (CAMERA_CROP_HEIGHT_RESTRAIN_NUM >> 1))
2572 *crop_y -= 1;
2573
2574 return true;
2575 }
2576
2577 BayerBufManager::BayerBufManager()
2578 {
2579 ALOGV("DEBUG(%s): ", __FUNCTION__);
2580 for (int i = 0; i < NUM_BAYER_BUFFERS ; i++) {
2581 entries[i].status = BAYER_ON_HAL_EMPTY;
2582 entries[i].reqFrameCnt = 0;
2583 }
2584 sensorEnqueueHead = 0;
2585 sensorDequeueHead = 0;
2586 ispEnqueueHead = 0;
2587 ispDequeueHead = 0;
2588 numOnSensor = 0;
2589 numOnIsp = 0;
2590 numOnHalFilled = 0;
2591 numOnHalEmpty = NUM_BAYER_BUFFERS;
2592 }
2593
2594 BayerBufManager::~BayerBufManager()
2595 {
2596 ALOGV("%s", __FUNCTION__);
2597 }
2598
2599 int BayerBufManager::GetIndexForSensorEnqueue()
2600 {
2601 int ret = 0;
2602 if (numOnHalEmpty == 0)
2603 ret = -1;
2604 else
2605 ret = sensorEnqueueHead;
2606 ALOGV("DEBUG(%s): returning (%d)", __FUNCTION__, ret);
2607 return ret;
2608 }
2609
2610 int BayerBufManager::MarkSensorEnqueue(int index)
2611 {
2612 ALOGV("DEBUG(%s) : BayerIndex[%d] ", __FUNCTION__, index);
2613
2614 // sanity check
2615 if (index != sensorEnqueueHead) {
2616 ALOGV("DEBUG(%s) : Abnormal BayerIndex[%d] - expected[%d]", __FUNCTION__, index, sensorEnqueueHead);
2617 return -1;
2618 }
2619 if (entries[index].status != BAYER_ON_HAL_EMPTY) {
2620 ALOGV("DEBUG(%s) : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
2621 index, entries[index].status, BAYER_ON_HAL_EMPTY);
2622 return -1;
2623 }
2624
2625 entries[index].status = BAYER_ON_SENSOR;
2626 entries[index].reqFrameCnt = 0;
2627 numOnHalEmpty--;
2628 numOnSensor++;
2629 sensorEnqueueHead = GetNextIndex(index);
2630 ALOGV("DEBUG(%s) END: HAL-e(%d) HAL-f(%d) Sensor(%d) ISP(%d) ",
2631 __FUNCTION__, numOnHalEmpty, numOnHalFilled, numOnSensor, numOnIsp);
2632 return 0;
2633 }
2634
2635 int BayerBufManager::MarkSensorDequeue(int index, int reqFrameCnt, nsecs_t* /*timeStamp*/)
2636 {
2637 ALOGV("DEBUG(%s) : BayerIndex[%d] reqFrameCnt(%d)", __FUNCTION__, index, reqFrameCnt);
2638
2639 if (entries[index].status != BAYER_ON_SENSOR) {
2640 ALOGE("DEBUG(%s) : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
2641 index, entries[index].status, BAYER_ON_SENSOR);
2642 return -1;
2643 }
2644
2645 entries[index].status = BAYER_ON_HAL_FILLED;
2646 numOnHalFilled++;
2647 numOnSensor--;
2648
2649 return 0;
2650 }
2651
2652 int BayerBufManager::GetIndexForIspEnqueue(int *reqFrameCnt)
2653 {
2654 int ret = 0;
2655 if (numOnHalFilled == 0)
2656 ret = -1;
2657 else {
2658 *reqFrameCnt = entries[ispEnqueueHead].reqFrameCnt;
2659 ret = ispEnqueueHead;
2660 }
2661 ALOGV("DEBUG(%s): returning BayerIndex[%d]", __FUNCTION__, ret);
2662 return ret;
2663 }
2664
2665 int BayerBufManager::GetIndexForIspDequeue(int *reqFrameCnt)
2666 {
2667 int ret = 0;
2668 if (numOnIsp == 0)
2669 ret = -1;
2670 else {
2671 *reqFrameCnt = entries[ispDequeueHead].reqFrameCnt;
2672 ret = ispDequeueHead;
2673 }
2674 ALOGV("DEBUG(%s): returning BayerIndex[%d]", __FUNCTION__, ret);
2675 return ret;
2676 }
2677
2678 int BayerBufManager::MarkIspEnqueue(int index)
2679 {
2680 ALOGV("DEBUG(%s) : BayerIndex[%d] ", __FUNCTION__, index);
2681
2682 // sanity check
2683 if (index != ispEnqueueHead) {
2684 ALOGV("DEBUG(%s) : Abnormal BayerIndex[%d] - expected[%d]", __FUNCTION__, index, ispEnqueueHead);
2685 return -1;
2686 }
2687 if (entries[index].status != BAYER_ON_HAL_FILLED) {
2688 ALOGV("DEBUG(%s) : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
2689 index, entries[index].status, BAYER_ON_HAL_FILLED);
2690 return -1;
2691 }
2692
2693 entries[index].status = BAYER_ON_ISP;
2694 numOnHalFilled--;
2695 numOnIsp++;
2696 ispEnqueueHead = GetNextIndex(index);
2697 ALOGV("DEBUG(%s) END: HAL-e(%d) HAL-f(%d) Sensor(%d) ISP(%d) ",
2698 __FUNCTION__, numOnHalEmpty, numOnHalFilled, numOnSensor, numOnIsp);
2699 return 0;
2700 }
2701
2702 int BayerBufManager::MarkIspDequeue(int index)
2703 {
2704 ALOGV("DEBUG(%s) : BayerIndex[%d]", __FUNCTION__, index);
2705
2706 // sanity check
2707 if (index != ispDequeueHead) {
2708 ALOGV("DEBUG(%s) : Abnormal BayerIndex[%d] - expected[%d]", __FUNCTION__, index, ispDequeueHead);
2709 return -1;
2710 }
2711 if (entries[index].status != BAYER_ON_ISP) {
2712 ALOGV("DEBUG(%s) : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
2713 index, entries[index].status, BAYER_ON_ISP);
2714 return -1;
2715 }
2716
2717 entries[index].status = BAYER_ON_HAL_EMPTY;
2718 entries[index].reqFrameCnt = 0;
2719 numOnHalEmpty++;
2720 numOnIsp--;
2721 ispDequeueHead = GetNextIndex(index);
2722 ALOGV("DEBUG(%s) END: HAL-e(%d) HAL-f(%d) Sensor(%d) ISP(%d) ",
2723 __FUNCTION__, numOnHalEmpty, numOnHalFilled, numOnSensor, numOnIsp);
2724 return 0;
2725 }
2726
2727 int BayerBufManager::GetNumOnSensor()
2728 {
2729 return numOnSensor;
2730 }
2731
2732 int BayerBufManager::GetNumOnHalFilled()
2733 {
2734 return numOnHalFilled;
2735 }
2736
2737 int BayerBufManager::GetNumOnIsp()
2738 {
2739 return numOnIsp;
2740 }
2741
2742 int BayerBufManager::GetNextIndex(int index)
2743 {
2744 index++;
2745 if (index >= NUM_BAYER_BUFFERS)
2746 index = 0;
2747
2748 return index;
2749 }
2750
2751 void ExynosCameraHWInterface2::m_mainThreadFunc(SignalDrivenThread * self)
2752 {
2753 camera_metadata_t *currentRequest = NULL;
2754 camera_metadata_t *currentFrame = NULL;
2755 size_t numEntries = 0;
2756 size_t frameSize = 0;
2757 camera_metadata_t * preparedFrame = NULL;
2758 camera_metadata_t *deregisteredRequest = NULL;
2759 uint32_t currentSignal = self->GetProcessingSignal();
2760 MainThread * selfThread = ((MainThread*)self);
2761 int res = 0;
2762
2763 int ret;
2764 int afMode;
2765 uint32_t afRegion[4];
2766
2767 ALOGV("DEBUG(%s): m_mainThreadFunc (%x)", __FUNCTION__, currentSignal);
2768
2769 if (currentSignal & SIGNAL_THREAD_RELEASE) {
2770 ALOGV("DEBUG(%s): processing SIGNAL_THREAD_RELEASE", __FUNCTION__);
2771
2772 ALOGV("DEBUG(%s): processing SIGNAL_THREAD_RELEASE DONE", __FUNCTION__);
2773 selfThread->SetSignal(SIGNAL_THREAD_TERMINATE);
2774 return;
2775 }
2776
2777 if (currentSignal & SIGNAL_MAIN_REQ_Q_NOT_EMPTY) {
2778 ALOGV("DEBUG(%s): MainThread processing SIGNAL_MAIN_REQ_Q_NOT_EMPTY", __FUNCTION__);
2779 if (m_requestManager->IsRequestQueueFull()==false) {
2780 Mutex::Autolock lock(m_afModeTriggerLock);
2781 m_requestQueueOps->dequeue_request(m_requestQueueOps, &currentRequest);
2782 if (NULL == currentRequest) {
2783 ALOGD("DEBUG(%s)(0x%x): No more service requests left in the queue ", __FUNCTION__, currentSignal);
2784 m_isRequestQueueNull = true;
2785 if (m_requestManager->IsVdisEnable())
2786 m_vdisBubbleCnt = 1;
2787 }
2788 else {
2789 m_requestManager->RegisterRequest(currentRequest, &afMode, afRegion);
2790
2791 SetAfMode((enum aa_afmode)afMode);
2792 SetAfRegion(afRegion);
2793
2794 m_numOfRemainingReqInSvc = m_requestQueueOps->request_count(m_requestQueueOps);
2795 ALOGV("DEBUG(%s): remaining req cnt (%d)", __FUNCTION__, m_numOfRemainingReqInSvc);
2796 if (m_requestManager->IsRequestQueueFull()==false)
2797 selfThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY); // dequeue repeatedly
2798
2799 m_sensorThread->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING);
2800 }
2801 }
2802 else {
2803 m_isRequestQueuePending = true;
2804 }
2805 }
2806
2807 if (currentSignal & SIGNAL_MAIN_STREAM_OUTPUT_DONE) {
2808 ALOGV("DEBUG(%s): MainThread processing SIGNAL_MAIN_STREAM_OUTPUT_DONE", __FUNCTION__);
2809 /*while (1)*/ {
2810 ret = m_requestManager->PrepareFrame(&numEntries, &frameSize, &preparedFrame, GetAfStateForService());
2811 if (ret == false)
2812 CAM_LOGE("ERR(%s): PrepareFrame ret = %d", __FUNCTION__, ret);
2813
2814 m_requestManager->DeregisterRequest(&deregisteredRequest);
2815
2816 ret = m_requestQueueOps->free_request(m_requestQueueOps, deregisteredRequest);
2817 if (ret < 0)
2818 CAM_LOGE("ERR(%s): free_request ret = %d", __FUNCTION__, ret);
2819
2820 ret = m_frameQueueOps->dequeue_frame(m_frameQueueOps, numEntries, frameSize, &currentFrame);
2821 if (ret < 0)
2822 CAM_LOGE("ERR(%s): dequeue_frame ret = %d", __FUNCTION__, ret);
2823
2824 if (currentFrame==NULL) {
2825 ALOGV("DBG(%s): frame dequeue returned NULL",__FUNCTION__ );
2826 }
2827 else {
2828 ALOGV("DEBUG(%s): frame dequeue done. numEntries(%d) frameSize(%d)",__FUNCTION__ , numEntries, frameSize);
2829 }
2830 res = append_camera_metadata(currentFrame, preparedFrame);
2831 if (res==0) {
2832 ALOGV("DEBUG(%s): frame metadata append success",__FUNCTION__);
2833 m_frameQueueOps->enqueue_frame(m_frameQueueOps, currentFrame);
2834 }
2835 else {
2836 ALOGE("ERR(%s): frame metadata append fail (%d)",__FUNCTION__, res);
2837 }
2838 }
2839 if (!m_isRequestQueueNull) {
2840 selfThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY);
2841 }
2842
2843 if (getInProgressCount()>0) {
2844 ALOGV("DEBUG(%s): STREAM_OUTPUT_DONE and signalling REQ_PROCESSING",__FUNCTION__);
2845 m_sensorThread->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING);
2846 }
2847 }
2848 ALOGV("DEBUG(%s): MainThread Exit", __FUNCTION__);
2849 return;
2850 }
2851
2852 void ExynosCameraHWInterface2::DumpInfoWithShot(struct camera2_shot_ext * shot_ext)
2853 {
2854 ALOGD("#### common Section");
2855 ALOGD("#### magic(%x) ",
2856 shot_ext->shot.magicNumber);
2857 ALOGD("#### ctl Section");
2858 ALOGD("#### meta(%d) aper(%f) exp(%lld) duration(%lld) ISO(%d) AWB(%d)",
2859 shot_ext->shot.ctl.request.metadataMode,
2860 shot_ext->shot.ctl.lens.aperture,
2861 shot_ext->shot.ctl.sensor.exposureTime,
2862 shot_ext->shot.ctl.sensor.frameDuration,
2863 shot_ext->shot.ctl.sensor.sensitivity,
2864 shot_ext->shot.ctl.aa.awbMode);
2865
2866 ALOGD("#### OutputStream Sensor(%d) SCP(%d) SCC(%d) streams(%x)",
2867 shot_ext->request_sensor, shot_ext->request_scp, shot_ext->request_scc,
2868 shot_ext->shot.ctl.request.outputStreams[0]);
2869
2870 ALOGD("#### DM Section");
2871 ALOGD("#### meta(%d) aper(%f) exp(%lld) duration(%lld) ISO(%d) timestamp(%lld) AWB(%d) cnt(%d)",
2872 shot_ext->shot.dm.request.metadataMode,
2873 shot_ext->shot.dm.lens.aperture,
2874 shot_ext->shot.dm.sensor.exposureTime,
2875 shot_ext->shot.dm.sensor.frameDuration,
2876 shot_ext->shot.dm.sensor.sensitivity,
2877 shot_ext->shot.dm.sensor.timeStamp,
2878 shot_ext->shot.dm.aa.awbMode,
2879 shot_ext->shot.dm.request.frameCount );
2880 }
2881
2882 void ExynosCameraHWInterface2::m_preCaptureSetter(struct camera2_shot_ext * shot_ext)
2883 {
2884 // Flash
2885 switch (m_ctlInfo.flash.m_flashCnt) {
2886 case IS_FLASH_STATE_ON:
2887 ALOGV("(%s): [Flash] Flash ON for Capture (%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
2888 // check AF locked
2889 if (m_ctlInfo.flash.m_precaptureTriggerId > 0) {
2890 if (m_ctlInfo.flash.m_flashTimeOut == 0) {
2891 if (m_ctlInfo.flash.i_flashMode == AA_AEMODE_ON_ALWAYS_FLASH) {
2892 shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_ON_ALWAYS;
2893 m_ctlInfo.flash.m_flashTimeOut = 5;
2894 } else
2895 shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_ON;
2896 m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON_WAIT;
2897 } else {
2898 m_ctlInfo.flash.m_flashTimeOut--;
2899 }
2900 } else {
2901 if (m_ctlInfo.flash.i_flashMode == AA_AEMODE_ON_ALWAYS_FLASH) {
2902 shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_ON_ALWAYS;
2903 m_ctlInfo.flash.m_flashTimeOut = 5;
2904 } else
2905 shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_ON;
2906 m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON_WAIT;
2907 }
2908 break;
2909 case IS_FLASH_STATE_ON_WAIT:
2910 break;
2911 case IS_FLASH_STATE_ON_DONE:
2912 if (!m_ctlInfo.flash.m_afFlashDoneFlg)
2913 // auto transition at pre-capture trigger
2914 m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_AE_AWB_LOCK;
2915 break;
2916 case IS_FLASH_STATE_AUTO_AE_AWB_LOCK:
2917 ALOGV("(%s): [Flash] IS_FLASH_AF_AUTO_AE_AWB_LOCK (%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
2918 shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_AUTO;
2919 //shot_ext->shot.ctl.aa.aeMode = AA_AEMODE_LOCKED;
2920 shot_ext->shot.ctl.aa.awbMode = AA_AWBMODE_LOCKED;
2921 m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AE_AWB_LOCK_WAIT;
2922 break;
2923 case IS_FLASH_STATE_AE_AWB_LOCK_WAIT:
2924 case IS_FLASH_STATE_AUTO_WAIT:
2925 shot_ext->shot.ctl.aa.aeMode =(enum aa_aemode)0;
2926 shot_ext->shot.ctl.aa.awbMode = (enum aa_awbmode)0;
2927 break;
2928 case IS_FLASH_STATE_AUTO_DONE:
2929 ALOGV("(%s): [Flash] IS_FLASH_AF_AUTO DONE (%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
2930 shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_OFF;
2931 break;
2932 case IS_FLASH_STATE_AUTO_OFF:
2933 ALOGV("(%s): [Flash] IS_FLASH_AF_AUTO Clear (%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
2934 shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_OFF;
2935 m_ctlInfo.flash.m_flashEnableFlg = false;
2936 break;
2937 case IS_FLASH_STATE_CAPTURE:
2938 ALOGV("(%s): [Flash] IS_FLASH_CAPTURE (%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
2939 m_ctlInfo.flash.m_flashTimeOut = FLASH_STABLE_WAIT_TIMEOUT;
2940 shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_CAPTURE;
2941 shot_ext->request_scc = 0;
2942 shot_ext->request_scp = 0;
2943 m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE_WAIT; // auto transition
2944 break;
2945 case IS_FLASH_STATE_CAPTURE_WAIT:
2946 shot_ext->request_scc = 0;
2947 shot_ext->request_scp = 0;
2948 break;
2949 case IS_FLASH_STATE_CAPTURE_JPEG:
2950 ALOGV("(%s): [Flash] Flash Capture (%d)!!!!!", __FUNCTION__, (FLASH_STABLE_WAIT_TIMEOUT -m_ctlInfo.flash.m_flashTimeOut));
2951 shot_ext->request_scc = 1;
2952 shot_ext->request_scp = 1;
2953 m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE_END; // auto transition
2954 break;
2955 case IS_FLASH_STATE_CAPTURE_END:
2956 ALOGV("(%s): [Flash] Flash Capture END (%d)", __FUNCTION__, shot_ext->shot.ctl.request.frameCount);
2957 shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_OFF;
2958 shot_ext->request_scc = 0;
2959 shot_ext->request_scp = 0;
2960 m_ctlInfo.flash.m_flashEnableFlg = false;
2961 m_ctlInfo.flash.m_flashCnt = 0;
2962 m_ctlInfo.flash.m_afFlashDoneFlg= false;
2963 break;
2964 case IS_FLASH_STATE_NONE:
2965 break;
2966 default:
2967 ALOGE("(%s): [Flash] flash state error!! (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt);
2968 }
2969 }
2970
2971 void ExynosCameraHWInterface2::m_preCaptureListenerSensor(struct camera2_shot_ext * shot_ext)
2972 {
2973 // Flash
2974 switch (m_ctlInfo.flash.m_flashCnt) {
2975 case IS_FLASH_STATE_AUTO_WAIT:
2976 if (m_ctlInfo.flash.m_flashDecisionResult) {
2977 if (shot_ext->shot.dm.flash.flashMode == CAM2_FLASH_MODE_OFF) {
2978 m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_DONE;
2979 ALOGV("(%s): [Flash] Lis : AUTO -> OFF (%d)", __FUNCTION__, shot_ext->shot.dm.flash.flashMode);
2980 } else {
2981 ALOGV("(%s): [Flash] Waiting : AUTO -> OFF", __FUNCTION__);
2982 }
2983 } else {
2984 //If flash isn't activated at flash auto mode, skip flash auto control
2985 m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_DONE;
2986 ALOGV("(%s): [Flash] Skip : AUTO -> OFF", __FUNCTION__);
2987 }
2988 break;
2989 }
2990 }
2991
2992 void ExynosCameraHWInterface2::m_preCaptureListenerISP(struct camera2_shot_ext * shot_ext)
2993 {
2994 // Flash
2995 switch (m_ctlInfo.flash.m_flashCnt) {
2996 case IS_FLASH_STATE_ON_WAIT:
2997 if (shot_ext->shot.dm.flash.decision > 0) {
2998 // store decision result to skip capture sequenece
2999 ALOGV("(%s): [Flash] IS_FLASH_ON, decision - %d", __FUNCTION__, shot_ext->shot.dm.flash.decision);
3000 if (shot_ext->shot.dm.flash.decision == 2)
3001 m_ctlInfo.flash.m_flashDecisionResult = false;
3002 else
3003 m_ctlInfo.flash.m_flashDecisionResult = true;
3004 m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON_DONE;
3005 } else {
3006 if (m_ctlInfo.flash.m_flashTimeOut == 0) {
3007 ALOGV("(%s): [Flash] Timeout IS_FLASH_ON, decision is false setting", __FUNCTION__);
3008 m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON_DONE;
3009 m_ctlInfo.flash.m_flashDecisionResult = false;
3010 } else {
3011 m_ctlInfo.flash.m_flashTimeOut--;
3012 }
3013 }
3014 break;
3015 case IS_FLASH_STATE_AE_AWB_LOCK_WAIT:
3016 if (shot_ext->shot.dm.aa.awbMode == AA_AWBMODE_LOCKED) {
3017 ALOGV("(%s): [Flash] FLASH_AUTO_AE_AWB_LOCK_WAIT - %d", __FUNCTION__, shot_ext->shot.dm.aa.awbMode);
3018 m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_WAIT;
3019 } else {
3020 ALOGV("(%s): [Flash] Waiting : AA_AWBMODE_LOCKED", __FUNCTION__);
3021 }
3022 break;
3023 case IS_FLASH_STATE_CAPTURE_WAIT:
3024 if (m_ctlInfo.flash.m_flashDecisionResult) {
3025 if (shot_ext->shot.dm.flash.firingStable) {
3026 m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE_JPEG;
3027 } else {
3028 if (m_ctlInfo.flash.m_flashTimeOut == 0) {
3029 ALOGE("(%s): [Flash] Wait firingStable time-out!!", __FUNCTION__);
3030 m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE_JPEG;
3031 } else {
3032 ALOGV("(%s): [Flash] Wait firingStable - %d", __FUNCTION__, m_ctlInfo.flash.m_flashTimeOut);
3033 m_ctlInfo.flash.m_flashTimeOut--;
3034 }
3035 }
3036 } else {
3037 m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE_JPEG;
3038 }
3039 break;
3040 }
3041 }
3042
3043 void ExynosCameraHWInterface2::m_preCaptureAeState(struct camera2_shot_ext * shot_ext)
3044 {
3045 switch (m_ctlInfo.flash.i_flashMode) {
3046 case AA_AEMODE_ON:
3047 // At flash off mode, capture can be done as zsl capture
3048 shot_ext->shot.dm.aa.aeState = AE_STATE_CONVERGED;
3049 break;
3050 case AA_AEMODE_ON_AUTO_FLASH:
3051 // At flash auto mode, main flash have to be done if pre-flash was done.
3052 if (m_ctlInfo.flash.m_flashDecisionResult && m_ctlInfo.flash.m_afFlashDoneFlg)
3053 shot_ext->shot.dm.aa.aeState = AE_STATE_FLASH_REQUIRED;
3054 // FALLTHRU
3055 default:
3056 break;
3057 }
3058 }
3059
3060 void ExynosCameraHWInterface2::m_updateAfRegion(struct camera2_shot_ext * shot_ext)
3061 {
3062 shot_ext->shot.ctl.aa.afRegions[0] = currentAfRegion[0];
3063 shot_ext->shot.ctl.aa.afRegions[1] = currentAfRegion[1];
3064 shot_ext->shot.ctl.aa.afRegions[2] = currentAfRegion[2];
3065 shot_ext->shot.ctl.aa.afRegions[3] = currentAfRegion[3];
3066 }
3067
3068 void ExynosCameraHWInterface2::SetAfRegion(uint32_t * afRegion)
3069 {
3070 currentAfRegion[0] = afRegion[0];
3071 currentAfRegion[1] = afRegion[1];
3072 currentAfRegion[2] = afRegion[2];
3073 currentAfRegion[3] = afRegion[3];
3074 }
3075
3076 void ExynosCameraHWInterface2::m_afTrigger(struct camera2_shot_ext * shot_ext, int mode)
3077 {
3078 if (m_afState == HAL_AFSTATE_SCANNING) {
3079 ALOGD("(%s): restarting trigger ", __FUNCTION__);
3080 } else if (!mode) {
3081 if (m_afState != HAL_AFSTATE_NEEDS_COMMAND)
3082 ALOGD("(%s): wrong trigger state %d", __FUNCTION__, m_afState);
3083 else
3084 m_afState = HAL_AFSTATE_STARTED;
3085 }
3086 ALOGD("### AF Triggering with mode (%d) (%d)", m_afMode, m_afState);
3087 shot_ext->shot.ctl.aa.afTrigger = 1;
3088 shot_ext->shot.ctl.aa.afMode = m_afMode;
3089 m_IsAfTriggerRequired = false;
3090 }
3091
3092 void ExynosCameraHWInterface2::m_sensorThreadFunc(SignalDrivenThread * self)
3093 {
3094 uint32_t currentSignal = self->GetProcessingSignal();
3095 SensorThread * selfThread = ((SensorThread*)self);
3096 int index;
3097 int index_isp;
3098 status_t res;
3099 nsecs_t frameTime;
3100 int bayersOnSensor = 0, bayersOnIsp = 0;
3101 int j = 0;
3102 bool isCapture = false;
3103 ALOGV("DEBUG(%s): m_sensorThreadFunc (%x)", __FUNCTION__, currentSignal);
3104
3105 if (currentSignal & SIGNAL_THREAD_RELEASE) {
3106 CAM_LOGD("(%s): ENTER processing SIGNAL_THREAD_RELEASE", __FUNCTION__);
3107
3108 ALOGV("(%s): calling sensor streamoff", __FUNCTION__);
3109 cam_int_streamoff(&(m_camera_info.sensor));
3110 ALOGV("(%s): calling sensor streamoff done", __FUNCTION__);
3111
3112 m_camera_info.sensor.buffers = 0;
3113 ALOGV("DEBUG(%s): sensor calling reqbuf 0 ", __FUNCTION__);
3114 cam_int_reqbufs(&(m_camera_info.sensor));
3115 ALOGV("DEBUG(%s): sensor calling reqbuf 0 done", __FUNCTION__);
3116 m_camera_info.sensor.status = false;
3117
3118 ALOGV("(%s): calling ISP streamoff", __FUNCTION__);
3119 isp_int_streamoff(&(m_camera_info.isp));
3120 ALOGV("(%s): calling ISP streamoff done", __FUNCTION__);
3121
3122 m_camera_info.isp.buffers = 0;
3123 ALOGV("DEBUG(%s): isp calling reqbuf 0 ", __FUNCTION__);
3124 cam_int_reqbufs(&(m_camera_info.isp));
3125 ALOGV("DEBUG(%s): isp calling reqbuf 0 done", __FUNCTION__);
3126
3127 exynos_v4l2_s_ctrl(m_camera_info.sensor.fd, V4L2_CID_IS_S_STREAM, IS_DISABLE_STREAM);
3128
3129 m_requestManager->releaseSensorQ();
3130 m_requestManager->ResetEntry();
3131 ALOGV("(%s): EXIT processing SIGNAL_THREAD_RELEASE", __FUNCTION__);
3132 selfThread->SetSignal(SIGNAL_THREAD_TERMINATE);
3133 return;
3134 }
3135
3136 if (currentSignal & SIGNAL_SENSOR_START_REQ_PROCESSING)
3137 {
3138 ALOGV("DEBUG(%s): SensorThread processing SIGNAL_SENSOR_START_REQ_PROCESSING", __FUNCTION__);
3139 int targetStreamIndex = 0, i=0;
3140 int matchedFrameCnt = -1, processingReqIndex;
3141 struct camera2_shot_ext *shot_ext;
3142 struct camera2_shot_ext *shot_ext_capture;
3143 bool triggered = false;
3144
3145 /* dqbuf from sensor */
3146 ALOGV("Sensor DQbuf start");
3147 index = cam_int_dqbuf(&(m_camera_info.sensor));
3148 m_requestManager->pushSensorQ(index);
3149 ALOGV("Sensor DQbuf done(%d)", index);
3150 shot_ext = (struct camera2_shot_ext *)(m_camera_info.sensor.buffer[index].virt.extP[1]);
3151
3152 if (m_nightCaptureCnt != 0) {
3153 matchedFrameCnt = m_nightCaptureFrameCnt;
3154 } else if (m_ctlInfo.flash.m_flashCnt >= IS_FLASH_STATE_CAPTURE) {
3155 matchedFrameCnt = m_ctlInfo.flash.m_flashFrameCount;
3156 ALOGV("Skip frame, request is fixed at %d", matchedFrameCnt);
3157 } else {
3158 matchedFrameCnt = m_requestManager->FindFrameCnt(shot_ext);
3159 }
3160
3161 if (matchedFrameCnt == -1 && m_vdisBubbleCnt > 0) {
3162 matchedFrameCnt = m_vdisDupFrame;
3163 }
3164
3165 if (matchedFrameCnt != -1) {
3166 if (m_vdisBubbleCnt == 0 || m_vdisDupFrame != matchedFrameCnt) {
3167 frameTime = systemTime();
3168 m_requestManager->RegisterTimestamp(matchedFrameCnt, &frameTime);
3169 m_requestManager->UpdateIspParameters(shot_ext, matchedFrameCnt, &m_ctlInfo);
3170 } else {
3171 ALOGV("bubble for vids: m_vdisBubbleCnt %d, matchedFrameCnt %d", m_vdisDupFrame, matchedFrameCnt);
3172 }
3173
3174 // face af mode setting in case of face priority scene mode
3175 if (m_ctlInfo.scene.prevSceneMode != shot_ext->shot.ctl.aa.sceneMode) {
3176 ALOGV("(%s): Scene mode changed (%d)", __FUNCTION__, shot_ext->shot.ctl.aa.sceneMode);
3177 m_ctlInfo.scene.prevSceneMode = shot_ext->shot.ctl.aa.sceneMode;
3178 }
3179
3180 m_zoomRatio = (float)m_camera2->getSensorW() / (float)shot_ext->shot.ctl.scaler.cropRegion[2];
3181 float zoomLeft, zoomTop, zoomWidth, zoomHeight;
3182 int crop_x = 0, crop_y = 0, crop_w = 0, crop_h = 0;
3183
3184 m_getRatioSize(m_camera2->getSensorW(), m_camera2->getSensorH(),
3185 m_streamThreads[0]->m_parameters.width, m_streamThreads[0]->m_parameters.height,
3186 &crop_x, &crop_y,
3187 &crop_w, &crop_h,
3188 0);
3189
3190 if (m_streamThreads[0]->m_parameters.width >= m_streamThreads[0]->m_parameters.height) {
3191 zoomWidth = m_camera2->getSensorW() / m_zoomRatio;
3192 zoomHeight = zoomWidth *
3193 m_streamThreads[0]->m_parameters.height / m_streamThreads[0]->m_parameters.width;
3194 } else {
3195 zoomHeight = m_camera2->getSensorH() / m_zoomRatio;
3196 zoomWidth = zoomHeight *
3197 m_streamThreads[0]->m_parameters.width / m_streamThreads[0]->m_parameters.height;
3198 }
3199 zoomLeft = (crop_w - zoomWidth) / 2;
3200 zoomTop = (crop_h - zoomHeight) / 2;
3201
3202 int32_t new_cropRegion[3] = { (int32_t)zoomLeft, (int32_t)zoomTop, (int32_t)zoomWidth };
3203
3204 int cropCompensation = (new_cropRegion[0] * 2 + new_cropRegion[2]) - ALIGN(crop_w, 4);
3205 if (cropCompensation)
3206 new_cropRegion[2] -= cropCompensation;
3207
3208 shot_ext->shot.ctl.scaler.cropRegion[0] = new_cropRegion[0];
3209 shot_ext->shot.ctl.scaler.cropRegion[1] = new_cropRegion[1];
3210 shot_ext->shot.ctl.scaler.cropRegion[2] = new_cropRegion[2];
3211 if (m_IsAfModeUpdateRequired && (m_ctlInfo.flash.m_precaptureTriggerId == 0)) {
3212 ALOGD("### Applying AF Mode change(Mode %d) ", m_afMode);
3213 shot_ext->shot.ctl.aa.afMode = m_afMode;
3214 if (m_afMode == AA_AFMODE_CONTINUOUS_VIDEO || m_afMode == AA_AFMODE_CONTINUOUS_PICTURE) {
3215 ALOGD("### With Automatic triger for continuous modes");
3216 m_afState = HAL_AFSTATE_STARTED;
3217 shot_ext->shot.ctl.aa.afTrigger = 1;
3218 triggered = true;
3219 if ((m_ctlInfo.scene.prevSceneMode == AA_SCENE_MODE_UNSUPPORTED) ||
3220 (m_ctlInfo.scene.prevSceneMode == AA_SCENE_MODE_FACE_PRIORITY)) {
3221 switch (m_afMode) {
3222 case AA_AFMODE_CONTINUOUS_PICTURE:
3223 shot_ext->shot.ctl.aa.afMode = AA_AFMODE_CONTINUOUS_PICTURE;
3224 ALOGD("### Face AF Mode change (Mode %d) ", shot_ext->shot.ctl.aa.afMode);
3225 // FALLTHRU
3226 default:
3227 break;
3228 }
3229 }
3230 // reset flash result
3231 if (m_ctlInfo.flash.m_afFlashDoneFlg) {
3232 m_ctlInfo.flash.m_flashEnableFlg = false;
3233 m_ctlInfo.flash.m_afFlashDoneFlg = false;
3234 m_ctlInfo.flash.m_flashDecisionResult = false;
3235 m_ctlInfo.flash.m_flashCnt = 0;
3236 }
3237 m_ctlInfo.af.m_afTriggerTimeOut = 1;
3238 }
3239
3240 m_IsAfModeUpdateRequired = false;
3241 // support inifinity focus mode
3242 if ((m_afMode == AA_AFMODE_MANUAL) && ( shot_ext->shot.ctl.lens.focusDistance == 0)) {
3243 shot_ext->shot.ctl.aa.afMode = AA_AFMODE_INFINITY;
3244 shot_ext->shot.ctl.aa.afTrigger = 1;
3245 triggered = true;
3246 }
3247 if (m_afMode2 != NO_CHANGE) {
3248 enum aa_afmode tempAfMode = m_afMode2;
3249 m_afMode2 = NO_CHANGE;
3250 SetAfMode(tempAfMode);
3251 }
3252 }
3253 else {
3254 shot_ext->shot.ctl.aa.afMode = NO_CHANGE;
3255 }
3256 if (m_IsAfTriggerRequired) {
3257 if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
3258 // flash case
3259 if (m_ctlInfo.flash.m_flashCnt == IS_FLASH_STATE_ON_DONE) {
3260 if ((m_afMode != AA_AFMODE_AUTO) && (m_afMode != AA_AFMODE_MACRO)) {
3261 // Flash is enabled and start AF
3262 m_afTrigger(shot_ext, 1);
3263 } else {
3264 m_afTrigger(shot_ext, 0);
3265 }
3266 }
3267 } else {
3268 // non-flash case
3269 m_afTrigger(shot_ext, 0);
3270 }
3271 } else {
3272 shot_ext->shot.ctl.aa.afTrigger = 0;
3273 }
3274
3275 if (m_wideAspect) {
3276 shot_ext->setfile = ISS_SUB_SCENARIO_VIDEO;
3277 } else {
3278 shot_ext->setfile = ISS_SUB_SCENARIO_STILL;
3279 }
3280 if (triggered)
3281 shot_ext->shot.ctl.aa.afTrigger = 1;
3282
3283 // TODO : check collision with AFMode Update
3284 if (m_IsAfLockRequired) {
3285 shot_ext->shot.ctl.aa.afMode = AA_AFMODE_OFF;
3286 m_IsAfLockRequired = false;
3287 }
3288 ALOGV("### Isp Qbuf start(%d) count (%d), SCP(%d) SCC(%d) DIS(%d) shot_size(%d)",
3289 index,
3290 shot_ext->shot.ctl.request.frameCount,
3291 shot_ext->request_scp,
3292 shot_ext->request_scc,
3293 shot_ext->dis_bypass, sizeof(camera2_shot));
3294
3295 // update AF region
3296 m_updateAfRegion(shot_ext);
3297
3298 m_lastSceneMode = shot_ext->shot.ctl.aa.sceneMode;
3299 if (shot_ext->shot.ctl.aa.sceneMode == AA_SCENE_MODE_NIGHT
3300 && shot_ext->shot.ctl.aa.aeMode == AA_AEMODE_LOCKED)
3301 shot_ext->shot.ctl.aa.aeMode = AA_AEMODE_ON;
3302 if (m_nightCaptureCnt == 0) {
3303 if (shot_ext->shot.ctl.aa.captureIntent == AA_CAPTURE_INTENT_STILL_CAPTURE
3304 && shot_ext->shot.ctl.aa.sceneMode == AA_SCENE_MODE_NIGHT) {
3305 shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_NIGHT_CAPTURE;
3306 shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 2;
3307 shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
3308 m_nightCaptureCnt = 4;
3309 m_nightCaptureFrameCnt = matchedFrameCnt;
3310 shot_ext->request_scc = 0;
3311 }
3312 }
3313 else if (m_nightCaptureCnt == 1) {
3314 shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_NIGHT_CAPTURE;
3315 shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 30;
3316 shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
3317 m_nightCaptureCnt--;
3318 m_nightCaptureFrameCnt = 0;
3319 shot_ext->request_scc = 1;
3320 }
3321 else if (m_nightCaptureCnt == 2) {
3322 shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_NIGHT_CAPTURE;
3323 shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 2;
3324 shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
3325 m_nightCaptureCnt--;
3326 shot_ext->request_scc = 0;
3327 }
3328 else if (m_nightCaptureCnt == 3) {
3329 shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_NIGHT_CAPTURE;
3330 shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 2;
3331 shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
3332 m_nightCaptureCnt--;
3333 shot_ext->request_scc = 0;
3334 }
3335 else if (m_nightCaptureCnt == 4) {
3336 shot_ext->shot.ctl.aa.sceneMode = AA_SCENE_MODE_NIGHT_CAPTURE;
3337 shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 2;
3338 shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
3339 m_nightCaptureCnt--;
3340 shot_ext->request_scc = 0;
3341 }
3342
3343 switch (shot_ext->shot.ctl.aa.aeTargetFpsRange[1]) {
3344 case 15:
3345 shot_ext->shot.ctl.sensor.frameDuration = (66666 * 1000);
3346 break;
3347
3348 case 24:
3349 shot_ext->shot.ctl.sensor.frameDuration = (41666 * 1000);
3350 break;
3351
3352 case 25:
3353 shot_ext->shot.ctl.sensor.frameDuration = (40000 * 1000);
3354 break;
3355
3356 case 30:
3357 default:
3358 shot_ext->shot.ctl.sensor.frameDuration = (33333 * 1000);
3359 break;
3360 }
3361 shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
3362
3363 // Flash mode
3364 // Keep and Skip request_scc = 1 at flash enable mode to operate flash sequence
3365 if ((m_ctlInfo.flash.i_flashMode >= AA_AEMODE_ON_AUTO_FLASH)
3366 && (shot_ext->shot.ctl.aa.captureIntent == AA_CAPTURE_INTENT_STILL_CAPTURE)
3367 && (m_cameraId == 0)) {
3368 if (!m_ctlInfo.flash.m_flashDecisionResult) {
3369 m_ctlInfo.flash.m_flashEnableFlg = false;
3370 m_ctlInfo.flash.m_afFlashDoneFlg = false;
3371 m_ctlInfo.flash.m_flashCnt = 0;
3372 } else if ((m_ctlInfo.flash.m_flashCnt == IS_FLASH_STATE_AUTO_DONE) ||
3373 (m_ctlInfo.flash.m_flashCnt == IS_FLASH_STATE_AUTO_OFF)) {
3374 ALOGD("(%s): [Flash] Flash capture start : skip request scc 1#####", __FUNCTION__);
3375 shot_ext->request_scc = 0;
3376 m_ctlInfo.flash.m_flashFrameCount = matchedFrameCnt;
3377 m_ctlInfo.flash.m_flashEnableFlg = true;
3378 m_ctlInfo.flash.m_afFlashDoneFlg = false;
3379 m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_CAPTURE;
3380 } else if (m_ctlInfo.flash.m_flashCnt < IS_FLASH_STATE_AUTO_DONE) {
3381 ALOGE("(%s): [Flash] Flash capture Error- wrong state !!!!!! (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt);
3382 shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_OFF;
3383 m_ctlInfo.flash.m_flashEnableFlg = false;
3384 m_ctlInfo.flash.m_afFlashDoneFlg= false;
3385 m_ctlInfo.flash.m_flashCnt = 0;
3386 }
3387 } else if (shot_ext->shot.ctl.aa.captureIntent == AA_CAPTURE_INTENT_STILL_CAPTURE) {
3388 m_ctlInfo.flash.m_flashDecisionResult = false;
3389 }
3390
3391 if (shot_ext->shot.ctl.flash.flashMode == CAM2_FLASH_MODE_TORCH) {
3392 if (m_ctlInfo.flash.m_flashTorchMode == false) {
3393 m_ctlInfo.flash.m_flashTorchMode = true;
3394 }
3395 } else {
3396 if (m_ctlInfo.flash.m_flashTorchMode == true) {
3397 shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_OFF;
3398 shot_ext->shot.ctl.flash.firingPower = 0;
3399 m_ctlInfo.flash.m_flashTorchMode = false;
3400 } else {
3401 shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_NOP;
3402 }
3403 }
3404
3405 if (shot_ext->isReprocessing) {
3406 ALOGV("(%s): Sending signal for Reprocess request", __FUNCTION__);
3407 m_currentReprocessOutStreams = shot_ext->shot.ctl.request.outputStreams[0];
3408 shot_ext->request_scp = 0;
3409 shot_ext->request_scc = 0;
3410 m_reprocessingFrameCnt = shot_ext->shot.ctl.request.frameCount;
3411 m_ctlInfo.flash.m_flashDecisionResult = false;
3412 memcpy(&m_jpegMetadata, (void*)(m_requestManager->GetInternalShotExtByFrameCnt(m_reprocessingFrameCnt)),
3413 sizeof(struct camera2_shot_ext));
3414 m_streamThreads[1]->SetSignal(SIGNAL_STREAM_REPROCESSING_START);
3415 m_ctlInfo.flash.m_flashEnableFlg = false;
3416 }
3417
3418 if (m_ctlInfo.flash.m_flashEnableFlg) {
3419 m_preCaptureListenerSensor(shot_ext);
3420 m_preCaptureSetter(shot_ext);
3421 }
3422
3423 ALOGV("(%s): queued aa(%d) aemode(%d) awb(%d) afmode(%d) trigger(%d)", __FUNCTION__,
3424 (int)(shot_ext->shot.ctl.aa.mode), (int)(shot_ext->shot.ctl.aa.aeMode),
3425 (int)(shot_ext->shot.ctl.aa.awbMode), (int)(shot_ext->shot.ctl.aa.afMode),
3426 (int)(shot_ext->shot.ctl.aa.afTrigger));
3427
3428 if (m_vdisBubbleCnt > 0 && m_vdisDupFrame == matchedFrameCnt) {
3429 shot_ext->dis_bypass = 1;
3430 shot_ext->dnr_bypass = 1;
3431 shot_ext->request_scp = 0;
3432 shot_ext->request_scc = 0;
3433 m_vdisBubbleCnt--;
3434 matchedFrameCnt = -1;
3435 } else {
3436 m_vdisDupFrame = matchedFrameCnt;
3437 }
3438 if (m_scpForceSuspended)
3439 shot_ext->request_scc = 0;
3440
3441 uint32_t current_scp = shot_ext->request_scp;
3442 uint32_t current_scc = shot_ext->request_scc;
3443
3444 if (shot_ext->shot.dm.request.frameCount == 0) {
3445 CAM_LOGE("ERR(%s): dm.request.frameCount = %d", __FUNCTION__, shot_ext->shot.dm.request.frameCount);
3446 }
3447
3448 cam_int_qbuf(&(m_camera_info.isp), index);
3449
3450 ALOGV("### isp DQBUF start");
3451 index_isp = cam_int_dqbuf(&(m_camera_info.isp));
3452
3453 shot_ext = (struct camera2_shot_ext *)(m_camera_info.isp.buffer[index_isp].virt.extP[1]);
3454
3455 if (m_ctlInfo.flash.m_flashEnableFlg)
3456 m_preCaptureListenerISP(shot_ext);
3457
3458 ALOGV("### Isp DQbuf done(%d) count (%d), SCP(%d) SCC(%d) dis_bypass(%d) dnr_bypass(%d) shot_size(%d)",
3459 index,
3460 shot_ext->shot.ctl.request.frameCount,
3461 shot_ext->request_scp,
3462 shot_ext->request_scc,
3463 shot_ext->dis_bypass,
3464 shot_ext->dnr_bypass, sizeof(camera2_shot));
3465
3466 ALOGV("(%s): DM aa(%d) aemode(%d) awb(%d) afmode(%d)", __FUNCTION__,
3467 (int)(shot_ext->shot.dm.aa.mode), (int)(shot_ext->shot.dm.aa.aeMode),
3468 (int)(shot_ext->shot.dm.aa.awbMode),
3469 (int)(shot_ext->shot.dm.aa.afMode));
3470
3471 #ifndef ENABLE_FRAME_SYNC
3472 m_currentOutputStreams = shot_ext->shot.ctl.request.outputStreams[0];
3473 #endif
3474
3475 if (!shot_ext->fd_bypass) {
3476 /* FD orientation axis transformation */
3477 for (int i=0; i < CAMERA2_MAX_FACES; i++) {
3478 if (shot_ext->shot.dm.stats.faceRectangles[i][0] > 0)
3479 shot_ext->shot.dm.stats.faceRectangles[i][0] = (m_camera2->m_curCameraInfo->sensorW
3480 * shot_ext->shot.dm.stats.faceRectangles[i][0])
3481 / m_streamThreads[0].get()->m_parameters.width;
3482 if (shot_ext->shot.dm.stats.faceRectangles[i][1] > 0)
3483 shot_ext->shot.dm.stats.faceRectangles[i][1] = (m_camera2->m_curCameraInfo->sensorH
3484 * shot_ext->shot.dm.stats.faceRectangles[i][1])
3485 / m_streamThreads[0].get()->m_parameters.height;
3486 if (shot_ext->shot.dm.stats.faceRectangles[i][2] > 0)
3487 shot_ext->shot.dm.stats.faceRectangles[i][2] = (m_camera2->m_curCameraInfo->sensorW
3488 * shot_ext->shot.dm.stats.faceRectangles[i][2])
3489 / m_streamThreads[0].get()->m_parameters.width;
3490 if (shot_ext->shot.dm.stats.faceRectangles[i][3] > 0)
3491 shot_ext->shot.dm.stats.faceRectangles[i][3] = (m_camera2->m_curCameraInfo->sensorH
3492 * shot_ext->shot.dm.stats.faceRectangles[i][3])
3493 / m_streamThreads[0].get()->m_parameters.height;
3494 }
3495 }
3496 // aeState control
3497 if (shot_ext->shot.ctl.aa.sceneMode != AA_SCENE_MODE_NIGHT)
3498 m_preCaptureAeState(shot_ext);
3499
3500 // At scene mode face priority
3501 if (shot_ext->shot.dm.aa.afMode == AA_AFMODE_CONTINUOUS_PICTURE_FACE)
3502 shot_ext->shot.dm.aa.afMode = AA_AFMODE_CONTINUOUS_PICTURE;
3503
3504 if (matchedFrameCnt != -1 && m_nightCaptureCnt == 0 && (m_ctlInfo.flash.m_flashCnt < IS_FLASH_STATE_CAPTURE)) {
3505 m_requestManager->ApplyDynamicMetadata(shot_ext);
3506 }
3507
3508 if (current_scc != shot_ext->request_scc) {
3509 ALOGD("(%s): scc frame drop1 request_scc(%d to %d)",
3510 __FUNCTION__, current_scc, shot_ext->request_scc);
3511 m_requestManager->NotifyStreamOutput(shot_ext->shot.ctl.request.frameCount);
3512 }
3513 if (shot_ext->request_scc) {
3514 ALOGV("send SIGNAL_STREAM_DATA_COMING (SCC)");
3515 if (shot_ext->shot.ctl.request.outputStreams[0] & STREAM_MASK_JPEG) {
3516 if (m_ctlInfo.flash.m_flashCnt < IS_FLASH_STATE_CAPTURE)
3517 memcpy(&m_jpegMetadata, (void*)(m_requestManager->GetInternalShotExtByFrameCnt(shot_ext->shot.ctl.request.frameCount)),
3518 sizeof(struct camera2_shot_ext));
3519 else
3520 memcpy(&m_jpegMetadata, (void*)shot_ext, sizeof(struct camera2_shot_ext));
3521 }
3522 m_streamThreads[1]->SetSignal(SIGNAL_STREAM_DATA_COMING);
3523 }
3524 if (current_scp != shot_ext->request_scp) {
3525 ALOGD("(%s): scp frame drop1 request_scp(%d to %d)",
3526 __FUNCTION__, current_scp, shot_ext->request_scp);
3527 m_requestManager->NotifyStreamOutput(shot_ext->shot.ctl.request.frameCount);
3528 }
3529 if (shot_ext->request_scp) {
3530 ALOGV("send SIGNAL_STREAM_DATA_COMING (SCP)");
3531 m_streamThreads[0]->SetSignal(SIGNAL_STREAM_DATA_COMING);
3532 }
3533
3534 ALOGV("(%s): SCP_CLOSING check sensor(%d) scc(%d) scp(%d) ", __FUNCTION__,
3535 shot_ext->request_sensor, shot_ext->request_scc, shot_ext->request_scp);
3536 if (shot_ext->request_scc + shot_ext->request_scp + shot_ext->request_sensor == 0) {
3537 ALOGV("(%s): SCP_CLOSING check OK ", __FUNCTION__);
3538 m_scp_closed = true;
3539 }
3540 else
3541 m_scp_closed = false;
3542
3543 OnAfNotification(shot_ext->shot.dm.aa.afState);
3544 OnPrecaptureMeteringNotificationISP();
3545 } else {
3546 memcpy(&shot_ext->shot.ctl, &m_camera_info.dummy_shot.shot.ctl, sizeof(struct camera2_ctl));
3547 shot_ext->shot.ctl.request.frameCount = 0xfffffffe;
3548 shot_ext->request_sensor = 1;
3549 shot_ext->dis_bypass = 1;
3550 shot_ext->dnr_bypass = 1;
3551 shot_ext->fd_bypass = 1;
3552 shot_ext->drc_bypass = 1;
3553 shot_ext->request_scc = 0;
3554 shot_ext->request_scp = 0;
3555 if (m_wideAspect) {
3556 shot_ext->setfile = ISS_SUB_SCENARIO_VIDEO;
3557 } else {
3558 shot_ext->setfile = ISS_SUB_SCENARIO_STILL;
3559 }
3560 shot_ext->shot.ctl.aa.sceneMode = (enum aa_scene_mode)m_lastSceneMode;
3561 if (shot_ext->shot.ctl.aa.sceneMode == AA_SCENE_MODE_NIGHT_CAPTURE || shot_ext->shot.ctl.aa.sceneMode == AA_SCENE_MODE_NIGHT) {
3562 shot_ext->shot.ctl.aa.aeTargetFpsRange[0] = 8;
3563 shot_ext->shot.ctl.aa.aeTargetFpsRange[1] = 30;
3564 }
3565 shot_ext->shot.ctl.aa.aeflashMode = AA_FLASHMODE_OFF;
3566 shot_ext->shot.ctl.flash.flashMode = CAM2_FLASH_MODE_OFF;
3567 ALOGV("### isp QBUF start (bubble)");
3568 ALOGV("bubble: queued aa(%d) aemode(%d) awb(%d) afmode(%d) trigger(%d)",
3569 (int)(shot_ext->shot.ctl.aa.mode), (int)(shot_ext->shot.ctl.aa.aeMode),
3570 (int)(shot_ext->shot.ctl.aa.awbMode), (int)(shot_ext->shot.ctl.aa.afMode),
3571 (int)(shot_ext->shot.ctl.aa.afTrigger));
3572
3573 cam_int_qbuf(&(m_camera_info.isp), index);
3574 ALOGV("### isp DQBUF start (bubble)");
3575 index_isp = cam_int_dqbuf(&(m_camera_info.isp));
3576 shot_ext = (struct camera2_shot_ext *)(m_camera_info.isp.buffer[index_isp].virt.extP[1]);
3577 ALOGV("bubble: DM aa(%d) aemode(%d) awb(%d) afmode(%d)",
3578 (int)(shot_ext->shot.dm.aa.mode), (int)(shot_ext->shot.dm.aa.aeMode),
3579 (int)(shot_ext->shot.dm.aa.awbMode),
3580 (int)(shot_ext->shot.dm.aa.afMode));
3581
3582 OnAfNotification(shot_ext->shot.dm.aa.afState);
3583 }
3584
3585 index = m_requestManager->popSensorQ();
3586 if(index < 0){
3587 ALOGE("sensorQ is empty");
3588 return;
3589 }
3590
3591 processingReqIndex = m_requestManager->MarkProcessingRequest(&(m_camera_info.sensor.buffer[index]));
3592 shot_ext = (struct camera2_shot_ext *)(m_camera_info.sensor.buffer[index].virt.extP[1]);
3593 if (m_scp_closing || m_scp_closed) {
3594 ALOGD("(%s): SCP_CLOSING(%d) SCP_CLOSED(%d)", __FUNCTION__, m_scp_closing, m_scp_closed);
3595 shot_ext->request_scc = 0;
3596 shot_ext->request_scp = 0;
3597 shot_ext->request_sensor = 0;
3598 }
3599 cam_int_qbuf(&(m_camera_info.sensor), index);
3600 ALOGV("Sensor Qbuf done(%d)", index);
3601
3602 if (!m_scp_closing
3603 && ((matchedFrameCnt == -1) || (processingReqIndex == -1))){
3604 ALOGV("make bubble shot: matchedFramcnt(%d) processingReqIndex(%d)",
3605 matchedFrameCnt, processingReqIndex);
3606 selfThread->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING);
3607 }
3608 }
3609 return;
3610 }
3611
3612 void ExynosCameraHWInterface2::m_streamBufferInit(SignalDrivenThread *self)
3613 {
3614 uint32_t currentSignal = self->GetProcessingSignal();
3615 StreamThread * selfThread = ((StreamThread*)self);
3616 stream_parameters_t *selfStreamParms = &(selfThread->m_parameters);
3617 node_info_t *currentNode = selfStreamParms->node;
3618 substream_parameters_t *subParms;
3619 buffer_handle_t * buf = NULL;
3620 status_t res;
3621 void *virtAddr[3];
3622 int i, j;
3623 int index;
3624 nsecs_t timestamp;
3625
3626 if (!(selfThread->m_isBufferInit))
3627 {
3628 for ( i=0 ; i < selfStreamParms->numSvcBuffers; i++) {
3629 res = selfStreamParms->streamOps->dequeue_buffer(selfStreamParms->streamOps, &buf);
3630 if (res != NO_ERROR || buf == NULL) {
3631 ALOGE("ERR(%s): Init: unable to dequeue buffer : %d",__FUNCTION__ , res);
3632 return;
3633 }
3634 ALOGV("DEBUG(%s): got buf(%x) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf),
3635 ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
3636
3637 index = selfThread->findBufferIndex(buf);
3638 if (index == -1) {
3639 ALOGE("ERR(%s): could not find buffer index", __FUNCTION__);
3640 }
3641 else {
3642 ALOGV("DEBUG(%s): found buffer index[%d] - status(%d)",
3643 __FUNCTION__, index, selfStreamParms->svcBufStatus[index]);
3644 if (selfStreamParms->svcBufStatus[index]== REQUIRES_DQ_FROM_SVC)
3645 selfStreamParms->svcBufStatus[index] = ON_DRIVER;
3646 else if (selfStreamParms->svcBufStatus[index]== ON_SERVICE)
3647 selfStreamParms->svcBufStatus[index] = ON_HAL;
3648 else {
3649 ALOGV("DBG(%s): buffer status abnormal (%d) "
3650 , __FUNCTION__, selfStreamParms->svcBufStatus[index]);
3651 }
3652 selfStreamParms->numSvcBufsInHal++;
3653 }
3654 selfStreamParms->bufIndex = 0;
3655 }
3656 selfThread->m_isBufferInit = true;
3657 }
3658 for (int i = 0 ; i < NUM_MAX_SUBSTREAM ; i++) {
3659 if (selfThread->m_attachedSubStreams[i].streamId == -1)
3660 continue;
3661
3662 subParms = &m_subStreams[selfThread->m_attachedSubStreams[i].streamId];
3663 if (subParms->type && subParms->needBufferInit) {
3664 ALOGV("(%s): [subStream] (id:%d) Buffer Initialization numsvcbuf(%d)",
3665 __FUNCTION__, selfThread->m_attachedSubStreams[i].streamId, subParms->numSvcBuffers);
3666 int checkingIndex = 0;
3667 bool found = false;
3668 for ( i = 0 ; i < subParms->numSvcBuffers; i++) {
3669 res = subParms->streamOps->dequeue_buffer(subParms->streamOps, &buf);
3670 if (res != NO_ERROR || buf == NULL) {
3671 ALOGE("ERR(%s): Init: unable to dequeue buffer : %d",__FUNCTION__ , res);
3672 return;
3673 }
3674 subParms->numSvcBufsInHal++;
3675 ALOGV("DEBUG(%s): [subStream] got buf(%x) bufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf),
3676 subParms->numSvcBufsInHal, ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
3677
3678 if (m_grallocHal->lock(m_grallocHal, *buf,
3679 subParms->usage, 0, 0,
3680 subParms->width, subParms->height, virtAddr) != 0) {
3681 ALOGE("ERR(%s): could not obtain gralloc buffer", __FUNCTION__);
3682 }
3683 else {
3684 ALOGV("DEBUG(%s): [subStream] locked img buf plane0(%x) plane1(%x) plane2(%x)",
3685 __FUNCTION__, (unsigned int)virtAddr[0], (unsigned int)virtAddr[1], (unsigned int)virtAddr[2]);
3686 }
3687 found = false;
3688 for (checkingIndex = 0; checkingIndex < subParms->numSvcBuffers ; checkingIndex++) {
3689 if (subParms->svcBufHandle[checkingIndex] == *buf ) {
3690 found = true;
3691 break;
3692 }
3693 }
3694 ALOGV("DEBUG(%s): [subStream] found(%d) - index[%d]", __FUNCTION__, found, checkingIndex);
3695 if (!found) break;
3696
3697 index = checkingIndex;
3698
3699 if (index == -1) {
3700 ALOGV("ERR(%s): could not find buffer index", __FUNCTION__);
3701 }
3702 else {
3703 ALOGV("DEBUG(%s): found buffer index[%d] - status(%d)",
3704 __FUNCTION__, index, subParms->svcBufStatus[index]);
3705 if (subParms->svcBufStatus[index]== ON_SERVICE)
3706 subParms->svcBufStatus[index] = ON_HAL;
3707 else {
3708 ALOGV("DBG(%s): buffer status abnormal (%d) "
3709 , __FUNCTION__, subParms->svcBufStatus[index]);
3710 }
3711 if (*buf != subParms->svcBufHandle[index])
3712 ALOGV("DBG(%s): different buf_handle index ", __FUNCTION__);
3713 else
3714 ALOGV("DEBUG(%s): same buf_handle index", __FUNCTION__);
3715 }
3716 subParms->svcBufIndex = 0;
3717 }
3718 if (subParms->type == SUBSTREAM_TYPE_JPEG) {
3719 m_resizeBuf.size.extS[0] = ALIGN(subParms->width, 16) * ALIGN(subParms->height, 16) * 2;
3720 m_resizeBuf.size.extS[1] = 0;
3721 m_resizeBuf.size.extS[2] = 0;
3722
3723 if (allocCameraMemory(m_ionCameraClient, &m_resizeBuf, 1) == -1) {
3724 ALOGE("ERR(%s): Failed to allocate resize buf", __FUNCTION__);
3725 }
3726 }
3727 if (subParms->type == SUBSTREAM_TYPE_PRVCB) {
3728 m_getAlignedYUVSize(HAL_PIXEL_FORMAT_2_V4L2_PIX(subParms->internalFormat), subParms->width,
3729 subParms->height, &m_previewCbBuf);
3730
3731 if (allocCameraMemory(m_ionCameraClient, &m_previewCbBuf, subParms->internalPlanes) == -1) {
3732 ALOGE("ERR(%s): Failed to allocate prvcb buf", __FUNCTION__);
3733 }
3734 }
3735 subParms->needBufferInit= false;
3736 }
3737 }
3738 }
3739
3740 void ExynosCameraHWInterface2::m_streamThreadInitialize(SignalDrivenThread * self)
3741 {
3742 StreamThread * selfThread = ((StreamThread*)self);
3743 ALOGV("DEBUG(%s): ", __FUNCTION__ );
3744 memset(&(selfThread->m_parameters), 0, sizeof(stream_parameters_t));
3745 selfThread->m_isBufferInit = false;
3746 for (int i = 0 ; i < NUM_MAX_SUBSTREAM ; i++) {
3747 selfThread->m_attachedSubStreams[i].streamId = -1;
3748 selfThread->m_attachedSubStreams[i].priority = 0;
3749 }
3750 return;
3751 }
3752
3753 int ExynosCameraHWInterface2::m_runSubStreamFunc(StreamThread *selfThread, ExynosBuffer *srcImageBuf,
3754 int stream_id, nsecs_t frameTimeStamp)
3755 {
3756 substream_parameters_t *subParms = &m_subStreams[stream_id];
3757
3758 switch (stream_id) {
3759
3760 case STREAM_ID_JPEG:
3761 return m_jpegCreator(selfThread, srcImageBuf, frameTimeStamp);
3762
3763 case STREAM_ID_RECORD:
3764 return m_recordCreator(selfThread, srcImageBuf, frameTimeStamp);
3765
3766 case STREAM_ID_PRVCB:
3767 return m_prvcbCreator(selfThread, srcImageBuf, frameTimeStamp);
3768
3769 default:
3770 return 0;
3771 }
3772 }
3773 void ExynosCameraHWInterface2::m_streamFunc_direct(SignalDrivenThread *self)
3774 {
3775 uint32_t currentSignal = self->GetProcessingSignal();
3776 StreamThread * selfThread = ((StreamThread*)self);
3777 stream_parameters_t *selfStreamParms = &(selfThread->m_parameters);
3778 node_info_t *currentNode = selfStreamParms->node;
3779 int i = 0;
3780 nsecs_t frameTimeStamp;
3781
3782 if (currentSignal & SIGNAL_THREAD_RELEASE) {
3783 CAM_LOGD("(%s): [%d] START SIGNAL_THREAD_RELEASE", __FUNCTION__, selfThread->m_index);
3784
3785 if (selfThread->m_isBufferInit) {
3786 if (!(currentNode->fd == m_camera_info.capture.fd && m_camera_info.capture.status == false)) {
3787 ALOGV("(%s): [%d] calling streamoff (fd:%d)", __FUNCTION__,
3788 selfThread->m_index, currentNode->fd);
3789 if (cam_int_streamoff(currentNode) < 0 ) {
3790 ALOGE("ERR(%s): stream off fail", __FUNCTION__);
3791 }
3792 ALOGV("(%s): [%d] streamoff done and calling reqbuf 0 (fd:%d)", __FUNCTION__,
3793 selfThread->m_index, currentNode->fd);
3794 currentNode->buffers = 0;
3795 cam_int_reqbufs(currentNode);
3796 ALOGV("(%s): [%d] reqbuf 0 DONE (fd:%d)", __FUNCTION__,
3797 selfThread->m_index, currentNode->fd);
3798 }
3799 }
3800 #ifdef ENABLE_FRAME_SYNC
3801 // free metabuffers
3802 for (i = 0; i < NUM_MAX_CAMERA_BUFFERS; i++)
3803 if (selfStreamParms->metaBuffers[i].fd.extFd[0] != 0) {
3804 freeCameraMemory(&(selfStreamParms->metaBuffers[i]), 1);
3805 selfStreamParms->metaBuffers[i].fd.extFd[0] = 0;
3806 selfStreamParms->metaBuffers[i].size.extS[0] = 0;
3807 }
3808 #endif
3809 selfThread->m_isBufferInit = false;
3810 selfThread->m_releasing = false;
3811 selfThread->m_activated = false;
3812 ALOGV("(%s): [%d] END SIGNAL_THREAD_RELEASE", __FUNCTION__, selfThread->m_index);
3813 return;
3814 }
3815 if (currentSignal & SIGNAL_STREAM_REPROCESSING_START) {
3816 status_t res;
3817 buffer_handle_t * buf = NULL;
3818 bool found = false;
3819 ALOGV("(%s): streamthread[%d] START SIGNAL_STREAM_REPROCESSING_START",
3820 __FUNCTION__, selfThread->m_index);
3821 res = m_reprocessOps->acquire_buffer(m_reprocessOps, &buf);
3822 if (res != NO_ERROR || buf == NULL) {
3823 ALOGE("ERR(%s): [reprocess] unable to acquire_buffer : %d",__FUNCTION__ , res);
3824 return;
3825 }
3826 const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf);
3827 int checkingIndex = 0;
3828 for (checkingIndex = 0; checkingIndex < selfStreamParms->numSvcBuffers ; checkingIndex++) {
3829 if (priv_handle->fd == selfStreamParms->svcBuffers[checkingIndex].fd.extFd[0] ) {
3830 found = true;
3831 break;
3832 }
3833 }
3834 ALOGV("DEBUG(%s): dequeued buf %x => found(%d) index(%d) ",
3835 __FUNCTION__, (unsigned int)buf, found, checkingIndex);
3836
3837 if (!found) return;
3838
3839 for (int i = 0 ; i < NUM_MAX_SUBSTREAM ; i++) {
3840 if (selfThread->m_attachedSubStreams[i].streamId == -1)
3841 continue;
3842
3843 #ifdef ENABLE_FRAME_SYNC
3844 frameTimeStamp = m_requestManager->GetTimestampByFrameCnt(m_reprocessingFrameCnt);
3845 m_requestManager->NotifyStreamOutput(m_reprocessingFrameCnt);
3846 #else
3847 frameTimeStamp = m_requestManager->GetTimestamp(m_requestManager->GetFrameIndex());
3848 #endif
3849 if (m_currentReprocessOutStreams & (1<<selfThread->m_attachedSubStreams[i].streamId))
3850 m_runSubStreamFunc(selfThread, &(selfStreamParms->svcBuffers[checkingIndex]),
3851 selfThread->m_attachedSubStreams[i].streamId, frameTimeStamp);
3852 }
3853
3854 res = m_reprocessOps->release_buffer(m_reprocessOps, buf);
3855 if (res != NO_ERROR) {
3856 ALOGE("ERR(%s): [reprocess] unable to release_buffer : %d",__FUNCTION__ , res);
3857 return;
3858 }
3859 ALOGV("(%s): streamthread[%d] END SIGNAL_STREAM_REPROCESSING_START",
3860 __FUNCTION__,selfThread->m_index);
3861
3862 return;
3863 }
3864 if (currentSignal & SIGNAL_STREAM_DATA_COMING) {
3865 buffer_handle_t * buf = NULL;
3866 status_t res = 0;
3867 int i, j;
3868 int index;
3869 nsecs_t timestamp;
3870 #ifdef ENABLE_FRAME_SYNC
3871 camera2_stream *frame;
3872 uint8_t currentOutputStreams;
3873 bool directOutputEnabled = false;
3874 #endif
3875 int numOfUndqbuf = 0;
3876
3877 ALOGV("(%s): streamthread[%d] START SIGNAL_STREAM_DATA_COMING", __FUNCTION__,selfThread->m_index);
3878
3879 m_streamBufferInit(self);
3880
3881 do {
3882 ALOGV("DEBUG(%s): streamthread[%d] type(%d) DQBUF START ",__FUNCTION__,
3883 selfThread->m_index, selfThread->streamType);
3884
3885 #ifdef ENABLE_FRAME_SYNC
3886 selfStreamParms->bufIndex = cam_int_dqbuf(currentNode, selfStreamParms->planes + selfStreamParms->metaPlanes);
3887 frame = (struct camera2_stream *)(selfStreamParms->metaBuffers[selfStreamParms->bufIndex].virt.extP[0]);
3888 frameTimeStamp = m_requestManager->GetTimestampByFrameCnt(frame->rcount);
3889 currentOutputStreams = m_requestManager->GetOutputStreamByFrameCnt(frame->rcount);
3890 ALOGV("frame count streamthread[%d] : %d, outputStream(%x)", selfThread->m_index, frame->rcount, currentOutputStreams);
3891 if (((currentOutputStreams & STREAM_MASK_PREVIEW) && selfThread->m_index == 0)||
3892 ((currentOutputStreams & STREAM_MASK_ZSL) && selfThread->m_index == 1)) {
3893 directOutputEnabled = true;
3894 }
3895 if (!directOutputEnabled) {
3896 if (!m_nightCaptureFrameCnt)
3897 m_requestManager->NotifyStreamOutput(frame->rcount);
3898 }
3899 #else
3900 selfStreamParms->bufIndex = cam_int_dqbuf(currentNode);
3901 frameTimeStamp = m_requestManager->GetTimestamp(m_requestManager->GetFrameIndex())
3902 #endif
3903 ALOGV("DEBUG(%s): streamthread[%d] DQBUF done index(%d) sigcnt(%d)",__FUNCTION__,
3904 selfThread->m_index, selfStreamParms->bufIndex, m_scpOutputSignalCnt);
3905
3906 if (selfStreamParms->svcBufStatus[selfStreamParms->bufIndex] != ON_DRIVER)
3907 ALOGV("DBG(%s): DQed buffer status abnormal (%d) ",
3908 __FUNCTION__, selfStreamParms->svcBufStatus[selfStreamParms->bufIndex]);
3909 selfStreamParms->svcBufStatus[selfStreamParms->bufIndex] = ON_HAL;
3910
3911 for (int i = 0 ; i < NUM_MAX_SUBSTREAM ; i++) {
3912 if (selfThread->m_attachedSubStreams[i].streamId == -1)
3913 continue;
3914 #ifdef ENABLE_FRAME_SYNC
3915 if (currentOutputStreams & (1<<selfThread->m_attachedSubStreams[i].streamId)) {
3916 m_runSubStreamFunc(selfThread, &(selfStreamParms->svcBuffers[selfStreamParms->bufIndex]),
3917 selfThread->m_attachedSubStreams[i].streamId, frameTimeStamp);
3918 }
3919 #else
3920 if (m_currentOutputStreams & (1<<selfThread->m_attachedSubStreams[i].streamId)) {
3921 m_runSubStreamFunc(selfThread, &(selfStreamParms->svcBuffers[selfStreamParms->bufIndex]),
3922 selfThread->m_attachedSubStreams[i].streamId, frameTimeStamp);
3923 }
3924 #endif
3925 }
3926
3927 if (m_requestManager->GetSkipCnt() <= 0) {
3928 #ifdef ENABLE_FRAME_SYNC
3929 if ((currentOutputStreams & STREAM_MASK_PREVIEW) && selfThread->m_index == 0) {
3930 ALOGV("** Display Preview(frameCnt:%d)", frame->rcount);
3931 res = selfStreamParms->streamOps->enqueue_buffer(selfStreamParms->streamOps,
3932 frameTimeStamp,
3933 &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex]));
3934 }
3935 else if ((currentOutputStreams & STREAM_MASK_ZSL) && selfThread->m_index == 1) {
3936 ALOGV("** SCC output (frameCnt:%d)", frame->rcount);
3937 res = selfStreamParms->streamOps->enqueue_buffer(selfStreamParms->streamOps,
3938 frameTimeStamp,
3939 &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex]));
3940 }
3941 else {
3942 res = selfStreamParms->streamOps->cancel_buffer(selfStreamParms->streamOps,
3943 &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex]));
3944 ALOGV("DEBUG(%s): streamthread[%d] cancel_buffer to svc done res(%d)", __FUNCTION__, selfThread->m_index, res);
3945 }
3946 #else
3947 if ((m_currentOutputStreams & STREAM_MASK_PREVIEW) && selfThread->m_index == 0) {
3948 ALOGV("** Display Preview(frameCnt:%d)", m_requestManager->GetFrameIndex());
3949 res = selfStreamParms->streamOps->enqueue_buffer(selfStreamParms->streamOps,
3950 frameTimeStamp,
3951 &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex]));
3952 }
3953 else if ((m_currentOutputStreams & STREAM_MASK_ZSL) && selfThread->m_index == 1) {
3954 ALOGV("** SCC output (frameCnt:%d), last(%d)", m_requestManager->GetFrameIndex());
3955 res = selfStreamParms->streamOps->enqueue_buffer(selfStreamParms->streamOps,
3956 frameTimeStamp,
3957 &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex]));
3958 }
3959 #endif
3960 ALOGV("DEBUG(%s): streamthread[%d] enqueue_buffer to svc done res(%d)", __FUNCTION__, selfThread->m_index, res);
3961 }
3962 else {
3963 res = selfStreamParms->streamOps->cancel_buffer(selfStreamParms->streamOps,
3964 &(selfStreamParms->svcBufHandle[selfStreamParms->bufIndex]));
3965 ALOGV("DEBUG(%s): streamthread[%d] cancel_buffer to svc done res(%d)", __FUNCTION__, selfThread->m_index, res);
3966 }
3967 #ifdef ENABLE_FRAME_SYNC
3968 if (directOutputEnabled) {
3969 if (!m_nightCaptureFrameCnt)
3970 m_requestManager->NotifyStreamOutput(frame->rcount);
3971 }
3972 #endif
3973 if (res == 0) {
3974 selfStreamParms->svcBufStatus[selfStreamParms->bufIndex] = ON_SERVICE;
3975 selfStreamParms->numSvcBufsInHal--;
3976 }
3977 else {
3978 selfStreamParms->svcBufStatus[selfStreamParms->bufIndex] = ON_HAL;
3979 }
3980
3981 }
3982 while(0);
3983
3984 while ((selfStreamParms->numSvcBufsInHal - (selfStreamParms->numSvcBuffers - NUM_SCP_BUFFERS))
3985 < selfStreamParms->minUndequedBuffer) {
3986 res = selfStreamParms->streamOps->dequeue_buffer(selfStreamParms->streamOps, &buf);
3987 if (res != NO_ERROR || buf == NULL) {
3988 ALOGV("DEBUG(%s): streamthread[%d] dequeue_buffer fail res(%d) numInHal(%d)",__FUNCTION__ , selfThread->m_index, res, selfStreamParms->numSvcBufsInHal);
3989 break;
3990 }
3991 selfStreamParms->numSvcBufsInHal++;
3992 ALOGV("DEBUG(%s): streamthread[%d] got buf(%x) numInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__,
3993 selfThread->m_index, (uint32_t)(*buf), selfStreamParms->numSvcBufsInHal,
3994 ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
3995 const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf);
3996
3997 bool found = false;
3998 int checkingIndex = 0;
3999 for (checkingIndex = 0; checkingIndex < selfStreamParms->numSvcBuffers ; checkingIndex++) {
4000 if (priv_handle->fd == selfStreamParms->svcBuffers[checkingIndex].fd.extFd[0] ) {
4001 found = true;
4002 break;
4003 }
4004 }
4005 if (!found) break;
4006 selfStreamParms->bufIndex = checkingIndex;
4007 if (selfStreamParms->bufIndex < selfStreamParms->numHwBuffers) {
4008 uint32_t plane_index = 0;
4009 ExynosBuffer* currentBuf = &(selfStreamParms->svcBuffers[selfStreamParms->bufIndex]);
4010 struct v4l2_buffer v4l2_buf;
4011 struct v4l2_plane planes[VIDEO_MAX_PLANES];
4012
4013 v4l2_buf.m.planes = planes;
4014 v4l2_buf.type = currentNode->type;
4015 v4l2_buf.memory = currentNode->memory;
4016 v4l2_buf.index = selfStreamParms->bufIndex;
4017 v4l2_buf.length = currentNode->planes;
4018
4019 v4l2_buf.m.planes[0].m.fd = priv_handle->fd;
4020 v4l2_buf.m.planes[2].m.fd = priv_handle->fd1;
4021 v4l2_buf.m.planes[1].m.fd = priv_handle->fd2;
4022 for (plane_index=0 ; plane_index < v4l2_buf.length ; plane_index++) {
4023 v4l2_buf.m.planes[plane_index].length = currentBuf->size.extS[plane_index];
4024 }
4025 #ifdef ENABLE_FRAME_SYNC
4026 /* add plane for metadata*/
4027 v4l2_buf.length += selfStreamParms->metaPlanes;
4028 v4l2_buf.m.planes[v4l2_buf.length-1].m.fd = selfStreamParms->metaBuffers[selfStreamParms->bufIndex].fd.extFd[0];
4029 v4l2_buf.m.planes[v4l2_buf.length-1].length = selfStreamParms->metaBuffers[selfStreamParms->bufIndex].size.extS[0];
4030 #endif
4031 if (exynos_v4l2_qbuf(currentNode->fd, &v4l2_buf) < 0) {
4032 ALOGE("ERR(%s): streamthread[%d] exynos_v4l2_qbuf() fail",
4033 __FUNCTION__, selfThread->m_index);
4034 return;
4035 }
4036 selfStreamParms->svcBufStatus[selfStreamParms->bufIndex] = ON_DRIVER;
4037 ALOGV("DEBUG(%s): streamthread[%d] QBUF done index(%d)",
4038 __FUNCTION__, selfThread->m_index, selfStreamParms->bufIndex);
4039 }
4040 }
4041
4042 ALOGV("(%s): streamthread[%d] END SIGNAL_STREAM_DATA_COMING", __FUNCTION__,selfThread->m_index);
4043 }
4044 return;
4045 }
4046
4047 void ExynosCameraHWInterface2::m_streamFunc_indirect(SignalDrivenThread *self)
4048 {
4049 uint32_t currentSignal = self->GetProcessingSignal();
4050 StreamThread * selfThread = ((StreamThread*)self);
4051 stream_parameters_t *selfStreamParms = &(selfThread->m_parameters);
4052 node_info_t *currentNode = selfStreamParms->node;
4053
4054
4055 if (currentSignal & SIGNAL_THREAD_RELEASE) {
4056 CAM_LOGV("(%s): [%d] START SIGNAL_THREAD_RELEASE", __FUNCTION__, selfThread->m_index);
4057
4058 if (selfThread->m_isBufferInit) {
4059 if (currentNode->fd == m_camera_info.capture.fd) {
4060 if (m_camera_info.capture.status == true) {
4061 ALOGV("DEBUG(%s): calling streamthread[%d] streamoff (fd:%d)", __FUNCTION__,
4062 selfThread->m_index, currentNode->fd);
4063 if (cam_int_streamoff(currentNode) < 0 ){
4064 ALOGE("ERR(%s): stream off fail", __FUNCTION__);
4065 } else {
4066 m_camera_info.capture.status = false;
4067 }
4068 }
4069 } else {
4070 ALOGV("DEBUG(%s): calling streamthread[%d] streamoff (fd:%d)", __FUNCTION__,
4071 selfThread->m_index, currentNode->fd);
4072 if (cam_int_streamoff(currentNode) < 0 ){
4073 ALOGE("ERR(%s): stream off fail", __FUNCTION__);
4074 }
4075 }
4076 ALOGV("DEBUG(%s): calling streamthread[%d] streamoff done", __FUNCTION__, selfThread->m_index);
4077 ALOGV("DEBUG(%s): calling streamthread[%d] reqbuf 0 (fd:%d)", __FUNCTION__,
4078 selfThread->m_index, currentNode->fd);
4079 currentNode->buffers = 0;
4080 cam_int_reqbufs(currentNode);
4081 ALOGV("DEBUG(%s): calling streamthread[%d] reqbuf 0 DONE(fd:%d)", __FUNCTION__,
4082 selfThread->m_index, currentNode->fd);
4083 }
4084
4085 selfThread->m_isBufferInit = false;
4086 selfThread->m_releasing = false;
4087 selfThread->m_activated = false;
4088 ALOGV("(%s): [%d] END SIGNAL_THREAD_RELEASE", __FUNCTION__, selfThread->m_index);
4089 return;
4090 }
4091
4092 if (currentSignal & SIGNAL_STREAM_DATA_COMING) {
4093 #ifdef ENABLE_FRAME_SYNC
4094 camera2_stream *frame;
4095 uint8_t currentOutputStreams;
4096 #endif
4097 nsecs_t frameTimeStamp;
4098
4099 ALOGV("DEBUG(%s): streamthread[%d] processing SIGNAL_STREAM_DATA_COMING",
4100 __FUNCTION__,selfThread->m_index);
4101
4102 m_streamBufferInit(self);
4103
4104 ALOGV("DEBUG(%s): streamthread[%d] DQBUF START", __FUNCTION__, selfThread->m_index);
4105 selfStreamParms->bufIndex = cam_int_dqbuf(currentNode);
4106 ALOGV("DEBUG(%s): streamthread[%d] DQBUF done index(%d)",__FUNCTION__,
4107 selfThread->m_index, selfStreamParms->bufIndex);
4108
4109 #ifdef ENABLE_FRAME_SYNC
4110 frame = (struct camera2_stream *)(currentNode->buffer[selfStreamParms->bufIndex].virt.extP[selfStreamParms->planes -1]);
4111 frameTimeStamp = m_requestManager->GetTimestampByFrameCnt(frame->rcount);
4112 currentOutputStreams = m_requestManager->GetOutputStreamByFrameCnt(frame->rcount);
4113 ALOGV("frame count(SCC) : %d outputStream(%x)", frame->rcount, currentOutputStreams);
4114 #else
4115 frameTimeStamp = m_requestManager->GetTimestamp(m_requestManager->GetFrameIndex());
4116 #endif
4117
4118 for (int i = 0 ; i < NUM_MAX_SUBSTREAM ; i++) {
4119 if (selfThread->m_attachedSubStreams[i].streamId == -1)
4120 continue;
4121 #ifdef ENABLE_FRAME_SYNC
4122 if (currentOutputStreams & (1<<selfThread->m_attachedSubStreams[i].streamId)) {
4123 m_requestManager->NotifyStreamOutput(frame->rcount);
4124 m_runSubStreamFunc(selfThread, &(currentNode->buffer[selfStreamParms->bufIndex]),
4125 selfThread->m_attachedSubStreams[i].streamId, frameTimeStamp);
4126 }
4127 #else
4128 if (m_currentOutputStreams & (1<<selfThread->m_attachedSubStreams[i].streamId)) {
4129 m_runSubStreamFunc(selfThread, &(currentNode->buffer[selfStreamParms->bufIndex]),
4130 selfThread->m_attachedSubStreams[i].streamId, frameTimeStamp);
4131 }
4132 #endif
4133 }
4134 cam_int_qbuf(currentNode, selfStreamParms->bufIndex);
4135 ALOGV("DEBUG(%s): streamthread[%d] QBUF DONE", __FUNCTION__, selfThread->m_index);
4136
4137
4138
4139 ALOGV("DEBUG(%s): streamthread[%d] processing SIGNAL_STREAM_DATA_COMING DONE",
4140 __FUNCTION__, selfThread->m_index);
4141 }
4142
4143
4144 return;
4145 }
4146
4147 void ExynosCameraHWInterface2::m_streamThreadFunc(SignalDrivenThread * self)
4148 {
4149 uint32_t currentSignal = self->GetProcessingSignal();
4150 StreamThread * selfThread = ((StreamThread*)self);
4151 stream_parameters_t *selfStreamParms = &(selfThread->m_parameters);
4152 node_info_t *currentNode = selfStreamParms->node;
4153
4154 ALOGV("DEBUG(%s): m_streamThreadFunc[%d] (%x)", __FUNCTION__, selfThread->m_index, currentSignal);
4155
4156 // Do something in Child thread handler
4157 // Should change function to class that inherited StreamThread class to support dynamic stream allocation
4158 if (selfThread->streamType == STREAM_TYPE_DIRECT) {
4159 m_streamFunc_direct(self);
4160 } else if (selfThread->streamType == STREAM_TYPE_INDIRECT) {
4161 m_streamFunc_indirect(self);
4162 }
4163
4164 return;
4165 }
4166 int ExynosCameraHWInterface2::m_jpegCreator(StreamThread *selfThread, ExynosBuffer *srcImageBuf, nsecs_t frameTimeStamp)
4167 {
4168 stream_parameters_t *selfStreamParms = &(selfThread->m_parameters);
4169 substream_parameters_t *subParms = &m_subStreams[STREAM_ID_JPEG];
4170 status_t res;
4171 ExynosRect jpegRect;
4172 bool found = false;
4173 int srcW, srcH, srcCropX, srcCropY;
4174 int pictureW, pictureH, pictureFramesize = 0;
4175 int pictureFormat;
4176 int cropX, cropY, cropW, cropH = 0;
4177 ExynosBuffer resizeBufInfo;
4178 ExynosRect m_jpegPictureRect;
4179 buffer_handle_t * buf = NULL;
4180 camera2_jpeg_blob * jpegBlob = NULL;
4181 int jpegBufSize = 0;
4182
4183 ALOGV("DEBUG(%s): index(%d)",__FUNCTION__, subParms->svcBufIndex);
4184 for (int i = 0 ; subParms->numSvcBuffers ; i++) {
4185 if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_HAL) {
4186 found = true;
4187 break;
4188 }
4189 subParms->svcBufIndex++;
4190 if (subParms->svcBufIndex >= subParms->numSvcBuffers)
4191 subParms->svcBufIndex = 0;
4192 }
4193 if (!found) {
4194 ALOGE("(%s): cannot find free svc buffer", __FUNCTION__);
4195 subParms->svcBufIndex++;
4196 return 1;
4197 }
4198
4199 {
4200 Mutex::Autolock lock(m_jpegEncoderLock);
4201 m_jpegEncodingCount++;
4202 }
4203
4204 m_getRatioSize(selfStreamParms->width, selfStreamParms->height,
4205 m_streamThreads[0]->m_parameters.width, m_streamThreads[0]->m_parameters.height,
4206 &srcCropX, &srcCropY,
4207 &srcW, &srcH,
4208 0);
4209
4210 m_jpegPictureRect.w = subParms->width;
4211 m_jpegPictureRect.h = subParms->height;
4212
4213 ALOGV("DEBUG(%s):w = %d, h = %d, w = %d, h = %d",
4214 __FUNCTION__, selfStreamParms->width, selfStreamParms->height,
4215 m_jpegPictureRect.w, m_jpegPictureRect.h);
4216
4217 m_getRatioSize(srcW, srcH,
4218 m_jpegPictureRect.w, m_jpegPictureRect.h,
4219 &cropX, &cropY,
4220 &pictureW, &pictureH,
4221 0);
4222 pictureFormat = V4L2_PIX_FMT_YUYV;
4223 pictureFramesize = FRAME_SIZE(V4L2_PIX_2_HAL_PIXEL_FORMAT(pictureFormat), pictureW, pictureH);
4224
4225 if (m_exynosPictureCSC) {
4226 float zoom_w = 0, zoom_h = 0;
4227 if (m_zoomRatio == 0)
4228 m_zoomRatio = 1;
4229
4230 if (m_jpegPictureRect.w >= m_jpegPictureRect.h) {
4231 zoom_w = pictureW / m_zoomRatio;
4232 zoom_h = zoom_w * m_jpegPictureRect.h / m_jpegPictureRect.w;
4233 } else {
4234 zoom_h = pictureH / m_zoomRatio;
4235 zoom_w = zoom_h * m_jpegPictureRect.w / m_jpegPictureRect.h;
4236 }
4237 cropX = (srcW - zoom_w) / 2;
4238 cropY = (srcH - zoom_h) / 2;
4239 cropW = zoom_w;
4240 cropH = zoom_h;
4241
4242 ALOGV("DEBUG(%s):cropX = %d, cropY = %d, cropW = %d, cropH = %d",
4243 __FUNCTION__, cropX, cropY, cropW, cropH);
4244
4245 csc_set_src_format(m_exynosPictureCSC,
4246 ALIGN(srcW, 16), ALIGN(srcH, 16),
4247 cropX, cropY, cropW, cropH,
4248 V4L2_PIX_2_HAL_PIXEL_FORMAT(pictureFormat),
4249 0);
4250
4251 csc_set_dst_format(m_exynosPictureCSC,
4252 m_jpegPictureRect.w, m_jpegPictureRect.h,
4253 0, 0, m_jpegPictureRect.w, m_jpegPictureRect.h,
4254 V4L2_PIX_2_HAL_PIXEL_FORMAT(V4L2_PIX_FMT_NV16),
4255 0);
4256 for (int i = 0 ; i < 3 ; i++)
4257 ALOGV("DEBUG(%s): m_pictureBuf.fd.extFd[%d]=%d ",
4258 __FUNCTION__, i, srcImageBuf->fd.extFd[i]);
4259 csc_set_src_buffer(m_exynosPictureCSC,
4260 (void **)&srcImageBuf->fd.fd);
4261
4262 csc_set_dst_buffer(m_exynosPictureCSC,
4263 (void **)&m_resizeBuf.fd.fd);
4264 for (int i = 0 ; i < 3 ; i++)
4265 ALOGV("DEBUG(%s): m_resizeBuf.virt.extP[%d]=%d m_resizeBuf.size.extS[%d]=%d",
4266 __FUNCTION__, i, m_resizeBuf.fd.extFd[i], i, m_resizeBuf.size.extS[i]);
4267
4268 if (csc_convert(m_exynosPictureCSC) != 0)
4269 ALOGE("ERR(%s): csc_convert() fail", __FUNCTION__);
4270
4271 }
4272 else {
4273 ALOGE("ERR(%s): m_exynosPictureCSC == NULL", __FUNCTION__);
4274 }
4275
4276 resizeBufInfo = m_resizeBuf;
4277
4278 m_getAlignedYUVSize(V4L2_PIX_FMT_NV16, m_jpegPictureRect.w, m_jpegPictureRect.h, &m_resizeBuf);
4279
4280 for (int i = 1; i < 3; i++) {
4281 if (m_resizeBuf.size.extS[i] != 0)
4282 m_resizeBuf.fd.extFd[i] = m_resizeBuf.fd.extFd[i-1] + m_resizeBuf.size.extS[i-1];
4283
4284 ALOGV("(%s): m_resizeBuf.size.extS[%d] = %d", __FUNCTION__, i, m_resizeBuf.size.extS[i]);
4285 }
4286
4287 jpegRect.w = m_jpegPictureRect.w;
4288 jpegRect.h = m_jpegPictureRect.h;
4289 jpegRect.colorFormat = V4L2_PIX_FMT_NV16;
4290
4291 for (int j = 0 ; j < 3 ; j++)
4292 ALOGV("DEBUG(%s): dest buf node fd.extFd[%d]=%d size=%d virt=%x ",
4293 __FUNCTION__, j, subParms->svcBuffers[subParms->svcBufIndex].fd.extFd[j],
4294 (unsigned int)subParms->svcBuffers[subParms->svcBufIndex].size.extS[j],
4295 (unsigned int)subParms->svcBuffers[subParms->svcBufIndex].virt.extP[j]);
4296
4297 jpegBufSize = subParms->svcBuffers[subParms->svcBufIndex].size.extS[0];
4298 if (yuv2Jpeg(&m_resizeBuf, &subParms->svcBuffers[subParms->svcBufIndex], &jpegRect) == false) {
4299 ALOGE("ERR(%s):yuv2Jpeg() fail", __FUNCTION__);
4300 } else {
4301 m_resizeBuf = resizeBufInfo;
4302
4303 int jpegSize = subParms->svcBuffers[subParms->svcBufIndex].size.s;
4304 ALOGD("(%s): (%d x %d) jpegbuf size(%d) encoded size(%d)", __FUNCTION__,
4305 m_jpegPictureRect.w, m_jpegPictureRect.h, jpegBufSize, jpegSize);
4306 char * jpegBuffer = (char*)(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0]);
4307 jpegBlob = (camera2_jpeg_blob*)(&jpegBuffer[jpegBufSize - sizeof(camera2_jpeg_blob)]);
4308
4309 if (jpegBuffer[jpegSize-1] == 0)
4310 jpegSize--;
4311 jpegBlob->jpeg_size = jpegSize;
4312 jpegBlob->jpeg_blob_id = CAMERA2_JPEG_BLOB_ID;
4313 }
4314 subParms->svcBuffers[subParms->svcBufIndex].size.extS[0] = jpegBufSize;
4315 res = subParms->streamOps->enqueue_buffer(subParms->streamOps, frameTimeStamp, &(subParms->svcBufHandle[subParms->svcBufIndex]));
4316
4317 ALOGV("DEBUG(%s): streamthread[%d] enqueue_buffer index(%d) to svc done res(%d)",
4318 __FUNCTION__, selfThread->m_index, subParms->svcBufIndex, res);
4319 if (res == 0) {
4320 subParms->svcBufStatus[subParms->svcBufIndex] = ON_SERVICE;
4321 subParms->numSvcBufsInHal--;
4322 }
4323 else {
4324 subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL;
4325 }
4326
4327 while (subParms->numSvcBufsInHal <= subParms->minUndequedBuffer)
4328 {
4329 bool found = false;
4330 int checkingIndex = 0;
4331
4332 ALOGV("DEBUG(%s): jpeg currentBuf#(%d)", __FUNCTION__ , subParms->numSvcBufsInHal);
4333
4334 res = subParms->streamOps->dequeue_buffer(subParms->streamOps, &buf);
4335 if (res != NO_ERROR || buf == NULL) {
4336 ALOGV("DEBUG(%s): jpeg stream(%d) dequeue_buffer fail res(%d)",__FUNCTION__ , selfThread->m_index, res);
4337 break;
4338 }
4339 const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf);
4340 subParms->numSvcBufsInHal ++;
4341 ALOGV("DEBUG(%s): jpeg got buf(%x) numBufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf),
4342 subParms->numSvcBufsInHal, ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
4343
4344
4345 for (checkingIndex = 0; checkingIndex < subParms->numSvcBuffers ; checkingIndex++) {
4346 if (priv_handle->fd == subParms->svcBuffers[checkingIndex].fd.extFd[0] ) {
4347 found = true;
4348 break;
4349 }
4350 }
4351 ALOGV("DEBUG(%s): jpeg dequeueed_buffer found index(%d)", __FUNCTION__, found);
4352
4353 if (!found) {
4354 break;
4355 }
4356
4357 subParms->svcBufIndex = checkingIndex;
4358 if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_SERVICE) {
4359 subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL;
4360 }
4361 else {
4362 ALOGV("DEBUG(%s): jpeg bufstatus abnormal [%d] status = %d", __FUNCTION__,
4363 subParms->svcBufIndex, subParms->svcBufStatus[subParms->svcBufIndex]);
4364 }
4365 }
4366 {
4367 Mutex::Autolock lock(m_jpegEncoderLock);
4368 m_jpegEncodingCount--;
4369 }
4370 return 0;
4371 }
4372
4373 int ExynosCameraHWInterface2::m_recordCreator(StreamThread *selfThread, ExynosBuffer *srcImageBuf, nsecs_t frameTimeStamp)
4374 {
4375 stream_parameters_t *selfStreamParms = &(selfThread->m_parameters);
4376 substream_parameters_t *subParms = &m_subStreams[STREAM_ID_RECORD];
4377 status_t res;
4378 ExynosRect jpegRect;
4379 bool found = false;
4380 int cropX, cropY, cropW, cropH = 0;
4381 buffer_handle_t * buf = NULL;
4382
4383 ALOGV("DEBUG(%s): index(%d)",__FUNCTION__, subParms->svcBufIndex);
4384 for (int i = 0 ; subParms->numSvcBuffers ; i++) {
4385 if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_HAL) {
4386 found = true;
4387 break;
4388 }
4389 subParms->svcBufIndex++;
4390 if (subParms->svcBufIndex >= subParms->numSvcBuffers)
4391 subParms->svcBufIndex = 0;
4392 }
4393 if (!found) {
4394 ALOGE("(%s): cannot find free svc buffer", __FUNCTION__);
4395 subParms->svcBufIndex++;
4396 return 1;
4397 }
4398
4399 if (m_exynosVideoCSC) {
4400 int videoW = subParms->width, videoH = subParms->height;
4401 int cropX, cropY, cropW, cropH = 0;
4402 int previewW = selfStreamParms->width, previewH = selfStreamParms->height;
4403 m_getRatioSize(previewW, previewH,
4404 videoW, videoH,
4405 &cropX, &cropY,
4406 &cropW, &cropH,
4407 0);
4408
4409 ALOGV("DEBUG(%s):cropX = %d, cropY = %d, cropW = %d, cropH = %d",
4410 __FUNCTION__, cropX, cropY, cropW, cropH);
4411
4412 csc_set_src_format(m_exynosVideoCSC,
4413 ALIGN(previewW, 32), previewH,
4414 cropX, cropY, cropW, cropH,
4415 selfStreamParms->format,
4416 0);
4417
4418 csc_set_dst_format(m_exynosVideoCSC,
4419 videoW, videoH,
4420 0, 0, videoW, videoH,
4421 subParms->format,
4422 1);
4423
4424 csc_set_src_buffer(m_exynosVideoCSC,
4425 (void **)&srcImageBuf->fd.fd);
4426
4427 csc_set_dst_buffer(m_exynosVideoCSC,
4428 (void **)(&(subParms->svcBuffers[subParms->svcBufIndex].fd.fd)));
4429
4430 if (csc_convert(m_exynosVideoCSC) != 0) {
4431 ALOGE("ERR(%s):csc_convert() fail", __FUNCTION__);
4432 }
4433 else {
4434 ALOGV("(%s):csc_convert() SUCCESS", __FUNCTION__);
4435 }
4436 }
4437 else {
4438 ALOGE("ERR(%s):m_exynosVideoCSC == NULL", __FUNCTION__);
4439 }
4440
4441 res = subParms->streamOps->enqueue_buffer(subParms->streamOps, frameTimeStamp, &(subParms->svcBufHandle[subParms->svcBufIndex]));
4442
4443 ALOGV("DEBUG(%s): streamthread[%d] enqueue_buffer index(%d) to svc done res(%d)",
4444 __FUNCTION__, selfThread->m_index, subParms->svcBufIndex, res);
4445 if (res == 0) {
4446 subParms->svcBufStatus[subParms->svcBufIndex] = ON_SERVICE;
4447 subParms->numSvcBufsInHal--;
4448 }
4449 else {
4450 subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL;
4451 }
4452
4453 while (subParms->numSvcBufsInHal <= subParms->minUndequedBuffer)
4454 {
4455 bool found = false;
4456 int checkingIndex = 0;
4457
4458 ALOGV("DEBUG(%s): record currentBuf#(%d)", __FUNCTION__ , subParms->numSvcBufsInHal);
4459
4460 res = subParms->streamOps->dequeue_buffer(subParms->streamOps, &buf);
4461 if (res != NO_ERROR || buf == NULL) {
4462 ALOGV("DEBUG(%s): record stream(%d) dequeue_buffer fail res(%d)",__FUNCTION__ , selfThread->m_index, res);
4463 break;
4464 }
4465 const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf);
4466 subParms->numSvcBufsInHal ++;
4467 ALOGV("DEBUG(%s): record got buf(%x) numBufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf),
4468 subParms->numSvcBufsInHal, ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
4469
4470 for (checkingIndex = 0; checkingIndex < subParms->numSvcBuffers ; checkingIndex++) {
4471 if (priv_handle->fd == subParms->svcBuffers[checkingIndex].fd.extFd[0] ) {
4472 found = true;
4473 break;
4474 }
4475 }
4476 ALOGV("DEBUG(%s): record dequeueed_buffer found(%d) index = %d", __FUNCTION__, found, checkingIndex);
4477
4478 if (!found) {
4479 break;
4480 }
4481
4482 subParms->svcBufIndex = checkingIndex;
4483 if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_SERVICE) {
4484 subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL;
4485 }
4486 else {
4487 ALOGV("DEBUG(%s): record bufstatus abnormal [%d] status = %d", __FUNCTION__,
4488 subParms->svcBufIndex, subParms->svcBufStatus[subParms->svcBufIndex]);
4489 }
4490 }
4491 return 0;
4492 }
4493
4494 int ExynosCameraHWInterface2::m_prvcbCreator(StreamThread *selfThread, ExynosBuffer *srcImageBuf, nsecs_t frameTimeStamp)
4495 {
4496 stream_parameters_t *selfStreamParms = &(selfThread->m_parameters);
4497 substream_parameters_t *subParms = &m_subStreams[STREAM_ID_PRVCB];
4498 status_t res;
4499 bool found = false;
4500 int cropX, cropY, cropW, cropH = 0;
4501 buffer_handle_t * buf = NULL;
4502
4503 ALOGV("DEBUG(%s): index(%d)",__FUNCTION__, subParms->svcBufIndex);
4504 for (int i = 0 ; subParms->numSvcBuffers ; i++) {
4505 if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_HAL) {
4506 found = true;
4507 break;
4508 }
4509 subParms->svcBufIndex++;
4510 if (subParms->svcBufIndex >= subParms->numSvcBuffers)
4511 subParms->svcBufIndex = 0;
4512 }
4513 if (!found) {
4514 ALOGE("(%s): cannot find free svc buffer", __FUNCTION__);
4515 subParms->svcBufIndex++;
4516 return 1;
4517 }
4518
4519 if (subParms->format == HAL_PIXEL_FORMAT_YCrCb_420_SP) {
4520 if (m_exynosVideoCSC) {
4521 int previewCbW = subParms->width, previewCbH = subParms->height;
4522 int cropX, cropY, cropW, cropH = 0;
4523 int previewW = selfStreamParms->width, previewH = selfStreamParms->height;
4524 m_getRatioSize(previewW, previewH,
4525 previewCbW, previewCbH,
4526 &cropX, &cropY,
4527 &cropW, &cropH,
4528 0);
4529
4530 ALOGV("DEBUG(%s):cropX = %d, cropY = %d, cropW = %d, cropH = %d",
4531 __FUNCTION__, cropX, cropY, cropW, cropH);
4532 csc_set_src_format(m_exynosVideoCSC,
4533 ALIGN(previewW, 32), previewH,
4534 cropX, cropY, cropW, cropH,
4535 selfStreamParms->format,
4536 0);
4537
4538 csc_set_dst_format(m_exynosVideoCSC,
4539 previewCbW, previewCbH,
4540 0, 0, previewCbW, previewCbH,
4541 subParms->internalFormat,
4542 1);
4543
4544 csc_set_src_buffer(m_exynosVideoCSC,
4545 (void **)&srcImageBuf->fd.fd);
4546
4547 csc_set_dst_buffer(m_exynosVideoCSC,
4548 (void **)(&(m_previewCbBuf.fd.fd)));
4549
4550 if (csc_convert(m_exynosVideoCSC) != 0) {
4551 ALOGE("ERR(%s):previewcb csc_convert() fail", __FUNCTION__);
4552 }
4553 else {
4554 ALOGV("(%s):previewcb csc_convert() SUCCESS", __FUNCTION__);
4555 }
4556 if (previewCbW == ALIGN(previewCbW, 16)) {
4557 memcpy(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0],
4558 m_previewCbBuf.virt.extP[0], previewCbW * previewCbH);
4559 memcpy(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0] + previewCbW * previewCbH,
4560 m_previewCbBuf.virt.extP[1], previewCbW * previewCbH / 2 );
4561 }
4562 else {
4563 // TODO : copy line by line ?
4564 }
4565 }
4566 else {
4567 ALOGE("ERR(%s):m_exynosVideoCSC == NULL", __FUNCTION__);
4568 }
4569 }
4570 else if (subParms->format == HAL_PIXEL_FORMAT_YV12) {
4571 int previewCbW = subParms->width, previewCbH = subParms->height;
4572 int stride = ALIGN(previewCbW, 16);
4573 int uv_stride = ALIGN(previewCbW/2, 16);
4574 int c_stride = ALIGN(stride / 2, 16);
4575
4576 if (previewCbW == ALIGN(previewCbW, 32)) {
4577 memcpy(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0],
4578 srcImageBuf->virt.extP[0], stride * previewCbH);
4579 memcpy(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0] + stride * previewCbH,
4580 srcImageBuf->virt.extP[1], c_stride * previewCbH / 2 );
4581 memcpy(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0] + (stride * previewCbH) + (c_stride * previewCbH / 2),
4582 srcImageBuf->virt.extP[2], c_stride * previewCbH / 2 );
4583 } else {
4584 char * dstAddr = (char *)(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0]);
4585 char * srcAddr = (char *)(srcImageBuf->virt.extP[0]);
4586 for (int i = 0 ; i < previewCbH ; i++) {
4587 memcpy(dstAddr, srcAddr, previewCbW);
4588 dstAddr += stride;
4589 srcAddr += ALIGN(stride, 32);
4590 }
4591 dstAddr = (char *)(subParms->svcBuffers[subParms->svcBufIndex].virt.extP[0] + stride * previewCbH);
4592 srcAddr = (char *)(srcImageBuf->virt.extP[1]);
4593 for (int i = 0 ; i < previewCbH/2 ; i++) {
4594 memcpy(dstAddr, srcAddr, previewCbW/2);
4595 dstAddr += c_stride;
4596 srcAddr += uv_stride;
4597 }
4598 srcAddr = (char *)(srcImageBuf->virt.extP[2]);
4599 for (int i = 0 ; i < previewCbH/2 ; i++) {
4600 memcpy(dstAddr, srcAddr, previewCbW/2);
4601 dstAddr += c_stride;
4602 srcAddr += uv_stride;
4603 }
4604 }
4605 }
4606 res = subParms->streamOps->enqueue_buffer(subParms->streamOps, frameTimeStamp, &(subParms->svcBufHandle[subParms->svcBufIndex]));
4607
4608 ALOGV("DEBUG(%s): streamthread[%d] enqueue_buffer index(%d) to svc done res(%d)",
4609 __FUNCTION__, selfThread->m_index, subParms->svcBufIndex, res);
4610 if (res == 0) {
4611 subParms->svcBufStatus[subParms->svcBufIndex] = ON_SERVICE;
4612 subParms->numSvcBufsInHal--;
4613 }
4614 else {
4615 subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL;
4616 }
4617
4618 while (subParms->numSvcBufsInHal <= subParms->minUndequedBuffer)
4619 {
4620 bool found = false;
4621 int checkingIndex = 0;
4622
4623 ALOGV("DEBUG(%s): prvcb currentBuf#(%d)", __FUNCTION__ , subParms->numSvcBufsInHal);
4624
4625 res = subParms->streamOps->dequeue_buffer(subParms->streamOps, &buf);
4626 if (res != NO_ERROR || buf == NULL) {
4627 ALOGV("DEBUG(%s): prvcb stream(%d) dequeue_buffer fail res(%d)",__FUNCTION__ , selfThread->m_index, res);
4628 break;
4629 }
4630 const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf);
4631 subParms->numSvcBufsInHal ++;
4632 ALOGV("DEBUG(%s): prvcb got buf(%x) numBufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf),
4633 subParms->numSvcBufsInHal, ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
4634
4635
4636 for (checkingIndex = 0; checkingIndex < subParms->numSvcBuffers ; checkingIndex++) {
4637 if (priv_handle->fd == subParms->svcBuffers[checkingIndex].fd.extFd[0] ) {
4638 found = true;
4639 break;
4640 }
4641 }
4642 ALOGV("DEBUG(%s): prvcb dequeueed_buffer found(%d) index = %d", __FUNCTION__, found, checkingIndex);
4643
4644 if (!found) {
4645 break;
4646 }
4647
4648 subParms->svcBufIndex = checkingIndex;
4649 if (subParms->svcBufStatus[subParms->svcBufIndex] == ON_SERVICE) {
4650 subParms->svcBufStatus[subParms->svcBufIndex] = ON_HAL;
4651 }
4652 else {
4653 ALOGV("DEBUG(%s): prvcb bufstatus abnormal [%d] status = %d", __FUNCTION__,
4654 subParms->svcBufIndex, subParms->svcBufStatus[subParms->svcBufIndex]);
4655 }
4656 }
4657 return 0;
4658 }
4659
4660 bool ExynosCameraHWInterface2::m_checkThumbnailSize(int w, int h)
4661 {
4662 int sizeOfSupportList;
4663
4664 //REAR Camera
4665 if(this->getCameraId() == 0) {
4666 sizeOfSupportList = sizeof(SUPPORT_THUMBNAIL_REAR_SIZE) / (sizeof(int)*2);
4667
4668 for(int i = 0; i < sizeOfSupportList; i++) {
4669 if((SUPPORT_THUMBNAIL_REAR_SIZE[i][0] == w) &&(SUPPORT_THUMBNAIL_REAR_SIZE[i][1] == h))
4670 return true;
4671 }
4672
4673 }
4674 else {
4675 sizeOfSupportList = sizeof(SUPPORT_THUMBNAIL_FRONT_SIZE) / (sizeof(int)*2);
4676
4677 for(int i = 0; i < sizeOfSupportList; i++) {
4678 if((SUPPORT_THUMBNAIL_FRONT_SIZE[i][0] == w) &&(SUPPORT_THUMBNAIL_FRONT_SIZE[i][1] == h))
4679 return true;
4680 }
4681 }
4682
4683 return false;
4684 }
4685 bool ExynosCameraHWInterface2::yuv2Jpeg(ExynosBuffer *yuvBuf,
4686 ExynosBuffer *jpegBuf,
4687 ExynosRect *rect)
4688 {
4689 unsigned char *addr;
4690
4691 ExynosJpegEncoderForCamera jpegEnc;
4692 bool ret = false;
4693 int res = 0;
4694
4695 unsigned int *yuvSize = yuvBuf->size.extS;
4696
4697 if (jpegEnc.create()) {
4698 ALOGE("ERR(%s):jpegEnc.create() fail", __FUNCTION__);
4699 goto jpeg_encode_done;
4700 }
4701
4702 if (jpegEnc.setQuality(m_jpegMetadata.shot.ctl.jpeg.quality)) {
4703 ALOGE("ERR(%s):jpegEnc.setQuality() fail", __FUNCTION__);
4704 goto jpeg_encode_done;
4705 }
4706
4707 if (jpegEnc.setSize(rect->w, rect->h)) {
4708 ALOGE("ERR(%s):jpegEnc.setSize() fail", __FUNCTION__);
4709 goto jpeg_encode_done;
4710 }
4711 ALOGV("%s : width = %d , height = %d\n", __FUNCTION__, rect->w, rect->h);
4712
4713 if (jpegEnc.setColorFormat(rect->colorFormat)) {
4714 ALOGE("ERR(%s):jpegEnc.setColorFormat() fail", __FUNCTION__);
4715 goto jpeg_encode_done;
4716 }
4717
4718 if (jpegEnc.setJpegFormat(V4L2_PIX_FMT_JPEG_422)) {
4719 ALOGE("ERR(%s):jpegEnc.setJpegFormat() fail", __FUNCTION__);
4720 goto jpeg_encode_done;
4721 }
4722
4723 if((m_jpegMetadata.shot.ctl.jpeg.thumbnailSize[0] != 0) && (m_jpegMetadata.shot.ctl.jpeg.thumbnailSize[1] != 0)) {
4724 mExifInfo.enableThumb = true;
4725 if(!m_checkThumbnailSize(m_jpegMetadata.shot.ctl.jpeg.thumbnailSize[0], m_jpegMetadata.shot.ctl.jpeg.thumbnailSize[1])) {
4726 // in the case of unsupported parameter, disable thumbnail
4727 mExifInfo.enableThumb = false;
4728 } else {
4729 m_thumbNailW = m_jpegMetadata.shot.ctl.jpeg.thumbnailSize[0];
4730 m_thumbNailH = m_jpegMetadata.shot.ctl.jpeg.thumbnailSize[1];
4731 }
4732
4733 ALOGV("(%s) m_thumbNailW = %d, m_thumbNailH = %d", __FUNCTION__, m_thumbNailW, m_thumbNailH);
4734
4735 } else {
4736 mExifInfo.enableThumb = false;
4737 }
4738
4739 if (jpegEnc.setThumbnailSize(m_thumbNailW, m_thumbNailH)) {
4740 ALOGE("ERR(%s):jpegEnc.setThumbnailSize(%d, %d) fail", __FUNCTION__, m_thumbNailH, m_thumbNailH);
4741 goto jpeg_encode_done;
4742 }
4743
4744 ALOGV("(%s):jpegEnc.setThumbnailSize(%d, %d) ", __FUNCTION__, m_thumbNailW, m_thumbNailW);
4745 if (jpegEnc.setThumbnailQuality(m_jpegMetadata.shot.ctl.jpeg.thumbnailQuality)) {
4746 ALOGE("ERR(%s):jpegEnc.setThumbnailQuality fail", __FUNCTION__);
4747 goto jpeg_encode_done;
4748 }
4749
4750 m_setExifChangedAttribute(&mExifInfo, rect, &m_jpegMetadata);
4751 ALOGV("DEBUG(%s):calling jpegEnc.setInBuf() yuvSize(%d)", __FUNCTION__, *yuvSize);
4752 if (jpegEnc.setInBuf((int *)&(yuvBuf->fd.fd), &(yuvBuf->virt.p), (int *)yuvSize)) {
4753 ALOGE("ERR(%s):jpegEnc.setInBuf() fail", __FUNCTION__);
4754 goto jpeg_encode_done;
4755 }
4756 if (jpegEnc.setOutBuf(jpegBuf->fd.fd, jpegBuf->virt.p, jpegBuf->size.extS[0] + jpegBuf->size.extS[1] + jpegBuf->size.extS[2])) {
4757 ALOGE("ERR(%s):jpegEnc.setOutBuf() fail", __FUNCTION__);
4758 goto jpeg_encode_done;
4759 }
4760
4761 if (jpegEnc.updateConfig()) {
4762 ALOGE("ERR(%s):jpegEnc.updateConfig() fail", __FUNCTION__);
4763 goto jpeg_encode_done;
4764 }
4765
4766 if ((res = jpegEnc.encode((int *)&jpegBuf->size.s, &mExifInfo))) {
4767 ALOGE("ERR(%s):jpegEnc.encode() fail ret(%d)", __FUNCTION__, res);
4768 goto jpeg_encode_done;
4769 }
4770
4771 ret = true;
4772
4773 jpeg_encode_done:
4774
4775 if (jpegEnc.flagCreate() == true)
4776 jpegEnc.destroy();
4777
4778 return ret;
4779 }
4780
4781 void ExynosCameraHWInterface2::OnPrecaptureMeteringTriggerStart(int id)
4782 {
4783 m_ctlInfo.flash.m_precaptureTriggerId = id;
4784 m_ctlInfo.ae.aeStateNoti = AE_STATE_INACTIVE;
4785 if ((m_ctlInfo.flash.i_flashMode >= AA_AEMODE_ON_AUTO_FLASH) && (m_cameraId == 0)) {
4786 // flash is required
4787 switch (m_ctlInfo.flash.m_flashCnt) {
4788 case IS_FLASH_STATE_AUTO_DONE:
4789 case IS_FLASH_STATE_AUTO_OFF:
4790 // Flash capture sequence, AF flash was executed before
4791 break;
4792 default:
4793 // Full flash sequence
4794 m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON;
4795 m_ctlInfo.flash.m_flashEnableFlg = true;
4796 m_ctlInfo.flash.m_flashTimeOut = 0;
4797 }
4798 } else {
4799 // Skip pre-capture in case of non-flash.
4800 ALOGV("[PreCap] Flash OFF mode ");
4801 m_ctlInfo.flash.m_flashEnableFlg = false;
4802 m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_NONE;
4803 }
4804 ALOGV("[PreCap] OnPrecaptureMeteringTriggerStart (ID %d) (flag : %d) (cnt : %d)", id, m_ctlInfo.flash.m_flashEnableFlg, m_ctlInfo.flash.m_flashCnt);
4805 OnPrecaptureMeteringNotificationSensor();
4806 }
4807
4808 void ExynosCameraHWInterface2::OnAfTrigger(int id)
4809 {
4810 m_afTriggerId = id;
4811
4812 switch (m_afMode) {
4813 case AA_AFMODE_AUTO:
4814 case AA_AFMODE_MACRO:
4815 case AA_AFMODE_MANUAL:
4816 ALOGV("[AF] OnAfTrigger - AUTO,MACRO,OFF (Mode %d) ", m_afMode);
4817 // If flash is enable, Flash operation is executed before triggering AF
4818 if ((m_ctlInfo.flash.i_flashMode >= AA_AEMODE_ON_AUTO_FLASH)
4819 && (m_ctlInfo.flash.m_flashEnableFlg == false)
4820 && (m_cameraId == 0)) {
4821 ALOGV("[Flash] AF Flash start with Mode (%d)", m_afMode);
4822 m_ctlInfo.flash.m_flashEnableFlg = true;
4823 m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON;
4824 m_ctlInfo.flash.m_flashDecisionResult = false;
4825 m_ctlInfo.flash.m_afFlashDoneFlg = true;
4826 }
4827 OnAfTriggerAutoMacro(id);
4828 break;
4829 case AA_AFMODE_CONTINUOUS_VIDEO:
4830 ALOGV("[AF] OnAfTrigger - AA_AFMODE_CONTINUOUS_VIDEO (Mode %d) ", m_afMode);
4831 OnAfTriggerCAFVideo(id);
4832 break;
4833 case AA_AFMODE_CONTINUOUS_PICTURE:
4834 ALOGV("[AF] OnAfTrigger - AA_AFMODE_CONTINUOUS_PICTURE (Mode %d) ", m_afMode);
4835 OnAfTriggerCAFPicture(id);
4836 break;
4837
4838 case AA_AFMODE_OFF:
4839 default:
4840 break;
4841 }
4842 }
4843
4844 void ExynosCameraHWInterface2::OnAfTriggerAutoMacro(int /*id*/)
4845 {
4846 int nextState = NO_TRANSITION;
4847
4848 switch (m_afState) {
4849 case HAL_AFSTATE_INACTIVE:
4850 case HAL_AFSTATE_PASSIVE_FOCUSED:
4851 case HAL_AFSTATE_SCANNING:
4852 nextState = HAL_AFSTATE_NEEDS_COMMAND;
4853 m_IsAfTriggerRequired = true;
4854 break;
4855 case HAL_AFSTATE_NEEDS_COMMAND:
4856 nextState = NO_TRANSITION;
4857 break;
4858 case HAL_AFSTATE_STARTED:
4859 nextState = NO_TRANSITION;
4860 break;
4861 case HAL_AFSTATE_LOCKED:
4862 nextState = HAL_AFSTATE_NEEDS_COMMAND;
4863 m_IsAfTriggerRequired = true;
4864 break;
4865 case HAL_AFSTATE_FAILED:
4866 nextState = HAL_AFSTATE_NEEDS_COMMAND;
4867 m_IsAfTriggerRequired = true;
4868 break;
4869 default:
4870 break;
4871 }
4872 ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
4873 if (nextState != NO_TRANSITION)
4874 m_afState = nextState;
4875 }
4876
4877 void ExynosCameraHWInterface2::OnAfTriggerCAFPicture(int id)
4878 {
4879 int nextState = NO_TRANSITION;
4880
4881 switch (m_afState) {
4882 case HAL_AFSTATE_INACTIVE:
4883 nextState = HAL_AFSTATE_FAILED;
4884 SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
4885 break;
4886 case HAL_AFSTATE_NEEDS_COMMAND:
4887 // not used
4888 break;
4889 case HAL_AFSTATE_STARTED:
4890 nextState = HAL_AFSTATE_NEEDS_DETERMINATION;
4891 m_AfHwStateFailed = false;
4892 break;
4893 case HAL_AFSTATE_SCANNING:
4894 nextState = HAL_AFSTATE_NEEDS_DETERMINATION;
4895 m_AfHwStateFailed = false;
4896 // If flash is enable, Flash operation is executed before triggering AF
4897 if ((m_ctlInfo.flash.i_flashMode >= AA_AEMODE_ON_AUTO_FLASH)
4898 && (m_ctlInfo.flash.m_flashEnableFlg == false)
4899 && (m_cameraId == 0)) {
4900 ALOGV("[AF Flash] AF Flash start with Mode (%d) state (%d) id (%d)", m_afMode, m_afState, id);
4901 m_ctlInfo.flash.m_flashEnableFlg = true;
4902 m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_ON;
4903 m_ctlInfo.flash.m_flashDecisionResult = false;
4904 m_ctlInfo.flash.m_afFlashDoneFlg = true;
4905 }
4906 break;
4907 case HAL_AFSTATE_NEEDS_DETERMINATION:
4908 nextState = NO_TRANSITION;
4909 break;
4910 case HAL_AFSTATE_PASSIVE_FOCUSED:
4911 m_IsAfLockRequired = true;
4912 if (m_AfHwStateFailed) {
4913 ALOGE("(%s): [CAF] LAST : fail", __FUNCTION__);
4914 SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
4915 nextState = HAL_AFSTATE_FAILED;
4916 }
4917 else {
4918 ALOGV("(%s): [CAF] LAST : success", __FUNCTION__);
4919 SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
4920 nextState = HAL_AFSTATE_LOCKED;
4921 }
4922 m_AfHwStateFailed = false;
4923 break;
4924 case HAL_AFSTATE_LOCKED:
4925 nextState = NO_TRANSITION;
4926 break;
4927 case HAL_AFSTATE_FAILED:
4928 nextState = NO_TRANSITION;
4929 break;
4930 default:
4931 break;
4932 }
4933 ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
4934 if (nextState != NO_TRANSITION)
4935 m_afState = nextState;
4936 }
4937
4938
4939 void ExynosCameraHWInterface2::OnAfTriggerCAFVideo(int /*id*/)
4940 {
4941 int nextState = NO_TRANSITION;
4942
4943 switch (m_afState) {
4944 case HAL_AFSTATE_INACTIVE:
4945 nextState = HAL_AFSTATE_FAILED;
4946 SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
4947 break;
4948 case HAL_AFSTATE_NEEDS_COMMAND:
4949 // not used
4950 break;
4951 case HAL_AFSTATE_STARTED:
4952 m_IsAfLockRequired = true;
4953 nextState = HAL_AFSTATE_FAILED;
4954 SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
4955 break;
4956 case HAL_AFSTATE_SCANNING:
4957 m_IsAfLockRequired = true;
4958 nextState = HAL_AFSTATE_FAILED;
4959 SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
4960 break;
4961 case HAL_AFSTATE_NEEDS_DETERMINATION:
4962 // not used
4963 break;
4964 case HAL_AFSTATE_PASSIVE_FOCUSED:
4965 m_IsAfLockRequired = true;
4966 SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
4967 nextState = HAL_AFSTATE_LOCKED;
4968 break;
4969 case HAL_AFSTATE_LOCKED:
4970 nextState = NO_TRANSITION;
4971 break;
4972 case HAL_AFSTATE_FAILED:
4973 nextState = NO_TRANSITION;
4974 break;
4975 default:
4976 break;
4977 }
4978 ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
4979 if (nextState != NO_TRANSITION)
4980 m_afState = nextState;
4981 }
4982
4983 void ExynosCameraHWInterface2::OnPrecaptureMeteringNotificationSensor()
4984 {
4985 if (m_ctlInfo.flash.m_precaptureTriggerId > 0) {
4986 // Just noti of pre-capture start
4987 if (m_ctlInfo.ae.aeStateNoti != AE_STATE_PRECAPTURE) {
4988 m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE,
4989 ANDROID_CONTROL_AE_STATE_PRECAPTURE,
4990 m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
4991 ALOGV("(%s) ANDROID_CONTROL_AE_STATE_PRECAPTURE (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt);
4992 m_notifyCb(CAMERA2_MSG_AUTOWB,
4993 ANDROID_CONTROL_AWB_STATE_CONVERGED,
4994 m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
4995 m_ctlInfo.ae.aeStateNoti = AE_STATE_PRECAPTURE;
4996 }
4997 }
4998 }
4999
5000 void ExynosCameraHWInterface2::OnPrecaptureMeteringNotificationISP()
5001 {
5002 if (m_ctlInfo.flash.m_precaptureTriggerId > 0) {
5003 if (m_ctlInfo.flash.m_flashEnableFlg) {
5004 // flash case
5005 switch (m_ctlInfo.flash.m_flashCnt) {
5006 case IS_FLASH_STATE_AUTO_DONE:
5007 case IS_FLASH_STATE_AUTO_OFF:
5008 if (m_ctlInfo.ae.aeStateNoti == AE_STATE_PRECAPTURE) {
5009 // End notification
5010 m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE,
5011 ANDROID_CONTROL_AE_STATE_CONVERGED,
5012 m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
5013 ALOGV("(%s) ANDROID_CONTROL_AE_STATE_CONVERGED (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt);
5014 m_notifyCb(CAMERA2_MSG_AUTOWB,
5015 ANDROID_CONTROL_AWB_STATE_CONVERGED,
5016 m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
5017 m_ctlInfo.flash.m_precaptureTriggerId = 0;
5018 } else {
5019 m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE,
5020 ANDROID_CONTROL_AE_STATE_PRECAPTURE,
5021 m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
5022 ALOGV("(%s) ANDROID_CONTROL_AE_STATE_PRECAPTURE (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt);
5023 m_notifyCb(CAMERA2_MSG_AUTOWB,
5024 ANDROID_CONTROL_AWB_STATE_CONVERGED,
5025 m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
5026 m_ctlInfo.ae.aeStateNoti = AE_STATE_PRECAPTURE;
5027 }
5028 break;
5029 case IS_FLASH_STATE_CAPTURE:
5030 case IS_FLASH_STATE_CAPTURE_WAIT:
5031 case IS_FLASH_STATE_CAPTURE_JPEG:
5032 case IS_FLASH_STATE_CAPTURE_END:
5033 ALOGV("(%s) INVALID flash state count. (%d)", __FUNCTION__, (int)m_ctlInfo.flash.m_flashCnt);
5034 m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_DONE;
5035 m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE,
5036 ANDROID_CONTROL_AE_STATE_CONVERGED,
5037 m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
5038 m_notifyCb(CAMERA2_MSG_AUTOWB,
5039 ANDROID_CONTROL_AWB_STATE_CONVERGED,
5040 m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
5041 m_ctlInfo.flash.m_precaptureTriggerId = 0;
5042 break;
5043 }
5044 } else {
5045 // non-flash case
5046 if (m_ctlInfo.ae.aeStateNoti == AE_STATE_PRECAPTURE) {
5047 m_notifyCb(CAMERA2_MSG_AUTOEXPOSURE,
5048 ANDROID_CONTROL_AE_STATE_CONVERGED,
5049 m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
5050 ALOGV("(%s) ANDROID_CONTROL_AE_STATE_CONVERGED (%d)", __FUNCTION__, m_ctlInfo.flash.m_flashCnt);
5051 m_notifyCb(CAMERA2_MSG_AUTOWB,
5052 ANDROID_CONTROL_AWB_STATE_CONVERGED,
5053 m_ctlInfo.flash.m_precaptureTriggerId, 0, m_callbackCookie);
5054 m_ctlInfo.flash.m_precaptureTriggerId = 0;
5055 }
5056 }
5057 }
5058 }
5059
5060 void ExynosCameraHWInterface2::OnAfNotification(enum aa_afstate noti)
5061 {
5062 switch (m_afMode) {
5063 case AA_AFMODE_AUTO:
5064 case AA_AFMODE_MACRO:
5065 OnAfNotificationAutoMacro(noti);
5066 break;
5067 case AA_AFMODE_CONTINUOUS_VIDEO:
5068 OnAfNotificationCAFVideo(noti);
5069 break;
5070 case AA_AFMODE_CONTINUOUS_PICTURE:
5071 OnAfNotificationCAFPicture(noti);
5072 break;
5073 case AA_AFMODE_OFF:
5074 default:
5075 break;
5076 }
5077 }
5078
5079 void ExynosCameraHWInterface2::OnAfNotificationAutoMacro(enum aa_afstate noti)
5080 {
5081 int nextState = NO_TRANSITION;
5082 bool bWrongTransition = false;
5083
5084 if (m_afState == HAL_AFSTATE_INACTIVE || m_afState == HAL_AFSTATE_NEEDS_COMMAND) {
5085 switch (noti) {
5086 case AA_AFSTATE_INACTIVE:
5087 case AA_AFSTATE_ACTIVE_SCAN:
5088 case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5089 case AA_AFSTATE_AF_FAILED_FOCUS:
5090 default:
5091 nextState = NO_TRANSITION;
5092 break;
5093 }
5094 }
5095 else if (m_afState == HAL_AFSTATE_STARTED) {
5096 switch (noti) {
5097 case AA_AFSTATE_INACTIVE:
5098 nextState = NO_TRANSITION;
5099 break;
5100 case AA_AFSTATE_ACTIVE_SCAN:
5101 nextState = HAL_AFSTATE_SCANNING;
5102 SetAfStateForService(ANDROID_CONTROL_AF_STATE_ACTIVE_SCAN);
5103 break;
5104 case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5105 nextState = NO_TRANSITION;
5106 break;
5107 case AA_AFSTATE_AF_FAILED_FOCUS:
5108 nextState = NO_TRANSITION;
5109 break;
5110 default:
5111 bWrongTransition = true;
5112 break;
5113 }
5114 }
5115 else if (m_afState == HAL_AFSTATE_SCANNING) {
5116 switch (noti) {
5117 case AA_AFSTATE_INACTIVE:
5118 bWrongTransition = true;
5119 break;
5120 case AA_AFSTATE_ACTIVE_SCAN:
5121 nextState = NO_TRANSITION;
5122 break;
5123 case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5124 // If Flash mode is enable, after AF execute pre-capture metering
5125 if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
5126 switch (m_ctlInfo.flash.m_flashCnt) {
5127 case IS_FLASH_STATE_ON_DONE:
5128 m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_AE_AWB_LOCK;
5129 nextState = NO_TRANSITION;
5130 break;
5131 case IS_FLASH_STATE_AUTO_DONE:
5132 m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF;
5133 nextState = HAL_AFSTATE_LOCKED;
5134 SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
5135 break;
5136 default:
5137 nextState = NO_TRANSITION;
5138 }
5139 } else {
5140 nextState = HAL_AFSTATE_LOCKED;
5141 SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
5142 }
5143 break;
5144 case AA_AFSTATE_AF_FAILED_FOCUS:
5145 // If Flash mode is enable, after AF execute pre-capture metering
5146 if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
5147 switch (m_ctlInfo.flash.m_flashCnt) {
5148 case IS_FLASH_STATE_ON_DONE:
5149 m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_AE_AWB_LOCK;
5150 nextState = NO_TRANSITION;
5151 break;
5152 case IS_FLASH_STATE_AUTO_DONE:
5153 m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF;
5154 nextState = HAL_AFSTATE_FAILED;
5155 SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
5156 break;
5157 default:
5158 nextState = NO_TRANSITION;
5159 }
5160 } else {
5161 nextState = HAL_AFSTATE_FAILED;
5162 SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
5163 }
5164 break;
5165 default:
5166 bWrongTransition = true;
5167 break;
5168 }
5169 }
5170 else if (m_afState == HAL_AFSTATE_LOCKED) {
5171 switch (noti) {
5172 case AA_AFSTATE_INACTIVE:
5173 case AA_AFSTATE_ACTIVE_SCAN:
5174 bWrongTransition = true;
5175 break;
5176 case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5177 nextState = NO_TRANSITION;
5178 break;
5179 case AA_AFSTATE_AF_FAILED_FOCUS:
5180 default:
5181 bWrongTransition = true;
5182 break;
5183 }
5184 }
5185 else if (m_afState == HAL_AFSTATE_FAILED) {
5186 switch (noti) {
5187 case AA_AFSTATE_INACTIVE:
5188 case AA_AFSTATE_ACTIVE_SCAN:
5189 case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5190 bWrongTransition = true;
5191 break;
5192 case AA_AFSTATE_AF_FAILED_FOCUS:
5193 nextState = NO_TRANSITION;
5194 break;
5195 default:
5196 bWrongTransition = true;
5197 break;
5198 }
5199 }
5200 if (bWrongTransition) {
5201 ALOGV("(%s): Wrong Transition state(%d) noti(%d)", __FUNCTION__, m_afState, noti);
5202 return;
5203 }
5204 ALOGV("(%s): State (%d) -> (%d) by (%d)", __FUNCTION__, m_afState, nextState, noti);
5205 if (nextState != NO_TRANSITION)
5206 m_afState = nextState;
5207 }
5208
5209 void ExynosCameraHWInterface2::OnAfNotificationCAFPicture(enum aa_afstate noti)
5210 {
5211 int nextState = NO_TRANSITION;
5212 bool bWrongTransition = false;
5213
5214 if (m_afState == HAL_AFSTATE_INACTIVE) {
5215 switch (noti) {
5216 case AA_AFSTATE_INACTIVE:
5217 case AA_AFSTATE_ACTIVE_SCAN:
5218 case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5219 case AA_AFSTATE_AF_FAILED_FOCUS:
5220 default:
5221 nextState = NO_TRANSITION;
5222 break;
5223 }
5224 // Check AF notification after triggering
5225 if (m_ctlInfo.af.m_afTriggerTimeOut > 0) {
5226 if (m_ctlInfo.af.m_afTriggerTimeOut > 5) {
5227 ALOGE("(%s) AF notification error - try to re-trigger mode (%)", __FUNCTION__, m_afMode);
5228 SetAfMode(AA_AFMODE_OFF);
5229 SetAfMode(m_afMode);
5230 m_ctlInfo.af.m_afTriggerTimeOut = 0;
5231 } else {
5232 m_ctlInfo.af.m_afTriggerTimeOut++;
5233 }
5234 }
5235 }
5236 else if (m_afState == HAL_AFSTATE_STARTED) {
5237 switch (noti) {
5238 case AA_AFSTATE_INACTIVE:
5239 nextState = NO_TRANSITION;
5240 break;
5241 case AA_AFSTATE_ACTIVE_SCAN:
5242 nextState = HAL_AFSTATE_SCANNING;
5243 SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN);
5244 m_ctlInfo.af.m_afTriggerTimeOut = 0;
5245 break;
5246 case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5247 nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
5248 SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
5249 m_ctlInfo.af.m_afTriggerTimeOut = 0;
5250 break;
5251 case AA_AFSTATE_AF_FAILED_FOCUS:
5252 //nextState = HAL_AFSTATE_FAILED;
5253 //SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
5254 nextState = NO_TRANSITION;
5255 break;
5256 default:
5257 bWrongTransition = true;
5258 break;
5259 }
5260 }
5261 else if (m_afState == HAL_AFSTATE_SCANNING) {
5262 switch (noti) {
5263 case AA_AFSTATE_INACTIVE:
5264 nextState = NO_TRANSITION;
5265 break;
5266 case AA_AFSTATE_ACTIVE_SCAN:
5267 nextState = NO_TRANSITION;
5268 m_AfHwStateFailed = false;
5269 break;
5270 case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5271 nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
5272 m_AfHwStateFailed = false;
5273 SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
5274 break;
5275 case AA_AFSTATE_AF_FAILED_FOCUS:
5276 nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
5277 m_AfHwStateFailed = true;
5278 SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
5279 break;
5280 default:
5281 bWrongTransition = true;
5282 break;
5283 }
5284 }
5285 else if (m_afState == HAL_AFSTATE_PASSIVE_FOCUSED) {
5286 switch (noti) {
5287 case AA_AFSTATE_INACTIVE:
5288 nextState = NO_TRANSITION;
5289 break;
5290 case AA_AFSTATE_ACTIVE_SCAN:
5291 nextState = HAL_AFSTATE_SCANNING;
5292 m_AfHwStateFailed = false;
5293 SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN);
5294 break;
5295 case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5296 nextState = NO_TRANSITION;
5297 m_AfHwStateFailed = false;
5298 break;
5299 case AA_AFSTATE_AF_FAILED_FOCUS:
5300 nextState = NO_TRANSITION;
5301 m_AfHwStateFailed = true;
5302 break;
5303 default:
5304 bWrongTransition = true;
5305 break;
5306 }
5307 }
5308 else if (m_afState == HAL_AFSTATE_NEEDS_DETERMINATION) {
5309 //Skip notification in case of flash, wait the end of flash on
5310 if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
5311 if (m_ctlInfo.flash.m_flashCnt < IS_FLASH_STATE_ON_DONE)
5312 return;
5313 }
5314 switch (noti) {
5315 case AA_AFSTATE_INACTIVE:
5316 nextState = NO_TRANSITION;
5317 break;
5318 case AA_AFSTATE_ACTIVE_SCAN:
5319 nextState = NO_TRANSITION;
5320 break;
5321 case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5322 // If Flash mode is enable, after AF execute pre-capture metering
5323 if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
5324 switch (m_ctlInfo.flash.m_flashCnt) {
5325 case IS_FLASH_STATE_ON_DONE:
5326 ALOGV("[AF Flash] AUTO start with Mode (%d) state (%d) noti (%d)", m_afMode, m_afState, (int)noti);
5327 m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_AE_AWB_LOCK;
5328 nextState = NO_TRANSITION;
5329 break;
5330 case IS_FLASH_STATE_AUTO_DONE:
5331 ALOGV("[AF Flash] AUTO end with Mode (%d) state (%d) noti (%d)", m_afMode, m_afState, (int)noti);
5332 m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF;
5333 m_IsAfLockRequired = true;
5334 nextState = HAL_AFSTATE_LOCKED;
5335 SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
5336 break;
5337 default:
5338 nextState = NO_TRANSITION;
5339 }
5340 } else {
5341 m_IsAfLockRequired = true;
5342 nextState = HAL_AFSTATE_LOCKED;
5343 SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
5344 }
5345 break;
5346 case AA_AFSTATE_AF_FAILED_FOCUS:
5347 // If Flash mode is enable, after AF execute pre-capture metering
5348 if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
5349 switch (m_ctlInfo.flash.m_flashCnt) {
5350 case IS_FLASH_STATE_ON_DONE:
5351 ALOGV("[AF Flash] AUTO start with Mode (%d) state (%d) noti (%d)", m_afMode, m_afState, (int)noti);
5352 m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_AE_AWB_LOCK;
5353 nextState = NO_TRANSITION;
5354 break;
5355 case IS_FLASH_STATE_AUTO_DONE:
5356 ALOGV("[AF Flash] AUTO end with Mode (%d) state (%d) noti (%d)", m_afMode, m_afState, (int)noti);
5357 m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF;
5358 m_IsAfLockRequired = true;
5359 nextState = HAL_AFSTATE_FAILED;
5360 SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
5361 break;
5362 default:
5363 nextState = NO_TRANSITION;
5364 }
5365 } else {
5366 m_IsAfLockRequired = true;
5367 nextState = HAL_AFSTATE_FAILED;
5368 SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
5369 }
5370 break;
5371 default:
5372 bWrongTransition = true;
5373 break;
5374 }
5375 }
5376 else if (m_afState == HAL_AFSTATE_LOCKED) {
5377 switch (noti) {
5378 case AA_AFSTATE_INACTIVE:
5379 nextState = NO_TRANSITION;
5380 break;
5381 case AA_AFSTATE_ACTIVE_SCAN:
5382 bWrongTransition = true;
5383 break;
5384 case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5385 nextState = NO_TRANSITION;
5386 break;
5387 case AA_AFSTATE_AF_FAILED_FOCUS:
5388 default:
5389 bWrongTransition = true;
5390 break;
5391 }
5392 }
5393 else if (m_afState == HAL_AFSTATE_FAILED) {
5394 switch (noti) {
5395 case AA_AFSTATE_INACTIVE:
5396 bWrongTransition = true;
5397 break;
5398 case AA_AFSTATE_ACTIVE_SCAN:
5399 nextState = HAL_AFSTATE_SCANNING;
5400 break;
5401 case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5402 bWrongTransition = true;
5403 break;
5404 case AA_AFSTATE_AF_FAILED_FOCUS:
5405 nextState = NO_TRANSITION;
5406 break;
5407 default:
5408 bWrongTransition = true;
5409 break;
5410 }
5411 }
5412 if (bWrongTransition) {
5413 ALOGV("(%s): Wrong Transition state(%d) noti(%d)", __FUNCTION__, m_afState, noti);
5414 return;
5415 }
5416 ALOGV("(%s): State (%d) -> (%d) by (%d)", __FUNCTION__, m_afState, nextState, noti);
5417 if (nextState != NO_TRANSITION)
5418 m_afState = nextState;
5419 }
5420
5421 void ExynosCameraHWInterface2::OnAfNotificationCAFVideo(enum aa_afstate noti)
5422 {
5423 int nextState = NO_TRANSITION;
5424 bool bWrongTransition = false;
5425
5426 if (m_afState == HAL_AFSTATE_INACTIVE) {
5427 switch (noti) {
5428 case AA_AFSTATE_INACTIVE:
5429 case AA_AFSTATE_ACTIVE_SCAN:
5430 case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5431 case AA_AFSTATE_AF_FAILED_FOCUS:
5432 default:
5433 nextState = NO_TRANSITION;
5434 break;
5435 }
5436 }
5437 else if (m_afState == HAL_AFSTATE_STARTED) {
5438 switch (noti) {
5439 case AA_AFSTATE_INACTIVE:
5440 nextState = NO_TRANSITION;
5441 break;
5442 case AA_AFSTATE_ACTIVE_SCAN:
5443 nextState = HAL_AFSTATE_SCANNING;
5444 SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN);
5445 break;
5446 case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5447 nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
5448 SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
5449 break;
5450 case AA_AFSTATE_AF_FAILED_FOCUS:
5451 nextState = HAL_AFSTATE_FAILED;
5452 SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
5453 break;
5454 default:
5455 bWrongTransition = true;
5456 break;
5457 }
5458 }
5459 else if (m_afState == HAL_AFSTATE_SCANNING) {
5460 switch (noti) {
5461 case AA_AFSTATE_INACTIVE:
5462 bWrongTransition = true;
5463 break;
5464 case AA_AFSTATE_ACTIVE_SCAN:
5465 nextState = NO_TRANSITION;
5466 break;
5467 case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5468 nextState = HAL_AFSTATE_PASSIVE_FOCUSED;
5469 SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_FOCUSED);
5470 break;
5471 case AA_AFSTATE_AF_FAILED_FOCUS:
5472 nextState = NO_TRANSITION;
5473 break;
5474 default:
5475 bWrongTransition = true;
5476 break;
5477 }
5478 }
5479 else if (m_afState == HAL_AFSTATE_PASSIVE_FOCUSED) {
5480 switch (noti) {
5481 case AA_AFSTATE_INACTIVE:
5482 bWrongTransition = true;
5483 break;
5484 case AA_AFSTATE_ACTIVE_SCAN:
5485 nextState = HAL_AFSTATE_SCANNING;
5486 SetAfStateForService(ANDROID_CONTROL_AF_STATE_PASSIVE_SCAN);
5487 break;
5488 case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5489 nextState = NO_TRANSITION;
5490 break;
5491 case AA_AFSTATE_AF_FAILED_FOCUS:
5492 nextState = HAL_AFSTATE_FAILED;
5493 SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
5494 // TODO : needs NO_TRANSITION ?
5495 break;
5496 default:
5497 bWrongTransition = true;
5498 break;
5499 }
5500 }
5501 else if (m_afState == HAL_AFSTATE_NEEDS_DETERMINATION) {
5502 switch (noti) {
5503 case AA_AFSTATE_INACTIVE:
5504 bWrongTransition = true;
5505 break;
5506 case AA_AFSTATE_ACTIVE_SCAN:
5507 nextState = NO_TRANSITION;
5508 break;
5509 case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5510 m_IsAfLockRequired = true;
5511 nextState = HAL_AFSTATE_LOCKED;
5512 SetAfStateForService(ANDROID_CONTROL_AF_STATE_FOCUSED_LOCKED);
5513 break;
5514 case AA_AFSTATE_AF_FAILED_FOCUS:
5515 nextState = HAL_AFSTATE_FAILED;
5516 SetAfStateForService(ANDROID_CONTROL_AF_STATE_NOT_FOCUSED_LOCKED);
5517 break;
5518 default:
5519 bWrongTransition = true;
5520 break;
5521 }
5522 }
5523 else if (m_afState == HAL_AFSTATE_LOCKED) {
5524 switch (noti) {
5525 case AA_AFSTATE_INACTIVE:
5526 nextState = NO_TRANSITION;
5527 break;
5528 case AA_AFSTATE_ACTIVE_SCAN:
5529 bWrongTransition = true;
5530 break;
5531 case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5532 nextState = NO_TRANSITION;
5533 break;
5534 case AA_AFSTATE_AF_FAILED_FOCUS:
5535 default:
5536 bWrongTransition = true;
5537 break;
5538 }
5539 }
5540 else if (m_afState == HAL_AFSTATE_FAILED) {
5541 switch (noti) {
5542 case AA_AFSTATE_INACTIVE:
5543 case AA_AFSTATE_ACTIVE_SCAN:
5544 case AA_AFSTATE_AF_ACQUIRED_FOCUS:
5545 bWrongTransition = true;
5546 break;
5547 case AA_AFSTATE_AF_FAILED_FOCUS:
5548 nextState = NO_TRANSITION;
5549 break;
5550 default:
5551 bWrongTransition = true;
5552 break;
5553 }
5554 }
5555 if (bWrongTransition) {
5556 ALOGV("(%s): Wrong Transition state(%d) noti(%d)", __FUNCTION__, m_afState, noti);
5557 return;
5558 }
5559 ALOGV("(%s): State (%d) -> (%d) by (%d)", __FUNCTION__, m_afState, nextState, noti);
5560 if (nextState != NO_TRANSITION)
5561 m_afState = nextState;
5562 }
5563
5564 void ExynosCameraHWInterface2::OnAfCancel(int id)
5565 {
5566 m_afTriggerId = id;
5567
5568 switch (m_afMode) {
5569 case AA_AFMODE_AUTO:
5570 case AA_AFMODE_MACRO:
5571 case AA_AFMODE_OFF:
5572 case AA_AFMODE_MANUAL:
5573 OnAfCancelAutoMacro(id);
5574 break;
5575 case AA_AFMODE_CONTINUOUS_VIDEO:
5576 OnAfCancelCAFVideo(id);
5577 break;
5578 case AA_AFMODE_CONTINUOUS_PICTURE:
5579 OnAfCancelCAFPicture(id);
5580 break;
5581 default:
5582 break;
5583 }
5584 }
5585
5586 void ExynosCameraHWInterface2::OnAfCancelAutoMacro(int /*id*/)
5587 {
5588 int nextState = NO_TRANSITION;
5589
5590 if (m_ctlInfo.flash.m_flashEnableFlg && m_ctlInfo.flash.m_afFlashDoneFlg) {
5591 m_ctlInfo.flash.m_flashCnt = IS_FLASH_STATE_AUTO_OFF;
5592 }
5593 switch (m_afState) {
5594 case HAL_AFSTATE_INACTIVE:
5595 nextState = NO_TRANSITION;
5596 SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE);
5597 break;
5598 case HAL_AFSTATE_NEEDS_COMMAND:
5599 case HAL_AFSTATE_STARTED:
5600 case HAL_AFSTATE_SCANNING:
5601 case HAL_AFSTATE_LOCKED:
5602 case HAL_AFSTATE_FAILED:
5603 SetAfMode(AA_AFMODE_OFF);
5604 SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE);
5605 nextState = HAL_AFSTATE_INACTIVE;
5606 break;
5607 default:
5608 break;
5609 }
5610 ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
5611 if (nextState != NO_TRANSITION)
5612 m_afState = nextState;
5613 }
5614
5615 void ExynosCameraHWInterface2::OnAfCancelCAFPicture(int /*id*/)
5616 {
5617 int nextState = NO_TRANSITION;
5618
5619 switch (m_afState) {
5620 case HAL_AFSTATE_INACTIVE:
5621 nextState = NO_TRANSITION;
5622 break;
5623 case HAL_AFSTATE_NEEDS_COMMAND:
5624 case HAL_AFSTATE_STARTED:
5625 case HAL_AFSTATE_SCANNING:
5626 case HAL_AFSTATE_LOCKED:
5627 case HAL_AFSTATE_FAILED:
5628 case HAL_AFSTATE_NEEDS_DETERMINATION:
5629 case HAL_AFSTATE_PASSIVE_FOCUSED:
5630 SetAfMode(AA_AFMODE_OFF);
5631 SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE);
5632 SetAfMode(AA_AFMODE_CONTINUOUS_PICTURE);
5633 nextState = HAL_AFSTATE_INACTIVE;
5634 break;
5635 default:
5636 break;
5637 }
5638 ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
5639 if (nextState != NO_TRANSITION)
5640 m_afState = nextState;
5641 }
5642
5643 void ExynosCameraHWInterface2::OnAfCancelCAFVideo(int /*id*/)
5644 {
5645 int nextState = NO_TRANSITION;
5646
5647 switch (m_afState) {
5648 case HAL_AFSTATE_INACTIVE:
5649 nextState = NO_TRANSITION;
5650 break;
5651 case HAL_AFSTATE_NEEDS_COMMAND:
5652 case HAL_AFSTATE_STARTED:
5653 case HAL_AFSTATE_SCANNING:
5654 case HAL_AFSTATE_LOCKED:
5655 case HAL_AFSTATE_FAILED:
5656 case HAL_AFSTATE_NEEDS_DETERMINATION:
5657 case HAL_AFSTATE_PASSIVE_FOCUSED:
5658 SetAfMode(AA_AFMODE_OFF);
5659 SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE);
5660 SetAfMode(AA_AFMODE_CONTINUOUS_VIDEO);
5661 nextState = HAL_AFSTATE_INACTIVE;
5662 break;
5663 default:
5664 break;
5665 }
5666 ALOGV("(%s): State (%d) -> (%d)", __FUNCTION__, m_afState, nextState);
5667 if (nextState != NO_TRANSITION)
5668 m_afState = nextState;
5669 }
5670
5671 void ExynosCameraHWInterface2::SetAfStateForService(int newState)
5672 {
5673 if (m_serviceAfState != newState || newState == 0)
5674 m_notifyCb(CAMERA2_MSG_AUTOFOCUS, newState, m_afTriggerId, 0, m_callbackCookie);
5675 m_serviceAfState = newState;
5676 }
5677
5678 int ExynosCameraHWInterface2::GetAfStateForService()
5679 {
5680 return m_serviceAfState;
5681 }
5682
5683 void ExynosCameraHWInterface2::SetAfMode(enum aa_afmode afMode)
5684 {
5685 if (m_afMode != afMode) {
5686 if (m_IsAfModeUpdateRequired && m_afMode != AA_AFMODE_OFF) {
5687 m_afMode2 = afMode;
5688 ALOGV("(%s): pending(%d) and new(%d)", __FUNCTION__, m_afMode, afMode);
5689 }
5690 else {
5691 ALOGV("(%s): current(%d) new(%d)", __FUNCTION__, m_afMode, afMode);
5692 m_IsAfModeUpdateRequired = true;
5693 m_afMode = afMode;
5694 SetAfStateForService(ANDROID_CONTROL_AF_STATE_INACTIVE);
5695 m_afState = HAL_AFSTATE_INACTIVE;
5696 }
5697 }
5698 }
5699
5700 void ExynosCameraHWInterface2::m_setExifFixedAttribute(void)
5701 {
5702 char property[PROPERTY_VALUE_MAX];
5703
5704 //2 0th IFD TIFF Tags
5705 //3 Maker
5706 property_get("ro.product.brand", property, EXIF_DEF_MAKER);
5707 strncpy((char *)mExifInfo.maker, property,
5708 sizeof(mExifInfo.maker) - 1);
5709 mExifInfo.maker[sizeof(mExifInfo.maker) - 1] = '\0';
5710 //3 Model
5711 property_get("ro.product.model", property, EXIF_DEF_MODEL);
5712 strncpy((char *)mExifInfo.model, property,
5713 sizeof(mExifInfo.model) - 1);
5714 mExifInfo.model[sizeof(mExifInfo.model) - 1] = '\0';
5715 //3 Software
5716 property_get("ro.build.id", property, EXIF_DEF_SOFTWARE);
5717 strncpy((char *)mExifInfo.software, property,
5718 sizeof(mExifInfo.software) - 1);
5719 mExifInfo.software[sizeof(mExifInfo.software) - 1] = '\0';
5720
5721 //3 YCbCr Positioning
5722 mExifInfo.ycbcr_positioning = EXIF_DEF_YCBCR_POSITIONING;
5723
5724 //2 0th IFD Exif Private Tags
5725 //3 F Number
5726 mExifInfo.fnumber.num = (uint32_t)(m_camera2->m_curCameraInfo->fnumber * EXIF_DEF_FNUMBER_DEN);
5727 mExifInfo.fnumber.den = EXIF_DEF_FNUMBER_DEN;
5728 //3 Exposure Program
5729 mExifInfo.exposure_program = EXIF_DEF_EXPOSURE_PROGRAM;
5730 //3 Exif Version
5731 memcpy(mExifInfo.exif_version, EXIF_DEF_EXIF_VERSION, sizeof(mExifInfo.exif_version));
5732 //3 Aperture
5733 double av = APEX_FNUM_TO_APERTURE((double)mExifInfo.fnumber.num/mExifInfo.fnumber.den);
5734 mExifInfo.aperture.num = (uint32_t)(av*EXIF_DEF_APEX_DEN);
5735 mExifInfo.aperture.den = EXIF_DEF_APEX_DEN;
5736 //3 Maximum lens aperture
5737 mExifInfo.max_aperture.num = mExifInfo.aperture.num;
5738 mExifInfo.max_aperture.den = mExifInfo.aperture.den;
5739 //3 Lens Focal Length
5740 mExifInfo.focal_length.num = (uint32_t)(m_camera2->m_curCameraInfo->focalLength * 100);
5741
5742 mExifInfo.focal_length.den = EXIF_DEF_FOCAL_LEN_DEN;
5743 //3 User Comments
5744 strcpy((char *)mExifInfo.user_comment, EXIF_DEF_USERCOMMENTS);
5745 //3 Color Space information
5746 mExifInfo.color_space = EXIF_DEF_COLOR_SPACE;
5747 //3 Exposure Mode
5748 mExifInfo.exposure_mode = EXIF_DEF_EXPOSURE_MODE;
5749
5750 //2 0th IFD GPS Info Tags
5751 unsigned char gps_version[4] = { 0x02, 0x02, 0x00, 0x00 };
5752 memcpy(mExifInfo.gps_version_id, gps_version, sizeof(gps_version));
5753
5754 //2 1th IFD TIFF Tags
5755 mExifInfo.compression_scheme = EXIF_DEF_COMPRESSION;
5756 mExifInfo.x_resolution.num = EXIF_DEF_RESOLUTION_NUM;
5757 mExifInfo.x_resolution.den = EXIF_DEF_RESOLUTION_DEN;
5758 mExifInfo.y_resolution.num = EXIF_DEF_RESOLUTION_NUM;
5759 mExifInfo.y_resolution.den = EXIF_DEF_RESOLUTION_DEN;
5760 mExifInfo.resolution_unit = EXIF_DEF_RESOLUTION_UNIT;
5761 }
5762
5763 void ExynosCameraHWInterface2::m_setExifChangedAttribute(exif_attribute_t *exifInfo, ExynosRect *rect,
5764 camera2_shot_ext *currentEntry)
5765 {
5766 camera2_dm *dm = &(currentEntry->shot.dm);
5767 camera2_ctl *ctl = &(currentEntry->shot.ctl);
5768
5769 ALOGV("(%s): framecnt(%d) exp(%lld) iso(%d)", __FUNCTION__, ctl->request.frameCount, dm->sensor.exposureTime,dm->aa.isoValue );
5770 if (!ctl->request.frameCount)
5771 return;
5772 //2 0th IFD TIFF Tags
5773 //3 Width
5774 exifInfo->width = rect->w;
5775 //3 Height
5776 exifInfo->height = rect->h;
5777 //3 Orientation
5778 switch (ctl->jpeg.orientation) {
5779 case 90:
5780 exifInfo->orientation = EXIF_ORIENTATION_90;
5781 break;
5782 case 180:
5783 exifInfo->orientation = EXIF_ORIENTATION_180;
5784 break;
5785 case 270:
5786 exifInfo->orientation = EXIF_ORIENTATION_270;
5787 break;
5788 case 0:
5789 default:
5790 exifInfo->orientation = EXIF_ORIENTATION_UP;
5791 break;
5792 }
5793
5794 //3 Date time
5795 time_t rawtime;
5796 struct tm *timeinfo;
5797 time(&rawtime);
5798 timeinfo = localtime(&rawtime);
5799 strftime((char *)exifInfo->date_time, 20, "%Y:%m:%d %H:%M:%S", timeinfo);
5800
5801 //2 0th IFD Exif Private Tags
5802 //3 Exposure Time
5803 int shutterSpeed = (dm->sensor.exposureTime/1000);
5804
5805 // To display exposure time just above 500ms as 1/2sec, not 1 sec.
5806 if (shutterSpeed > 500000)
5807 shutterSpeed -= 100000;
5808
5809 if (shutterSpeed < 0) {
5810 shutterSpeed = 100;
5811 }
5812
5813 exifInfo->exposure_time.num = 1;
5814 // x us -> 1/x s */
5815 //exifInfo->exposure_time.den = (uint32_t)(1000000 / shutterSpeed);
5816 exifInfo->exposure_time.den = (uint32_t)((double)1000000 / shutterSpeed);
5817
5818 //3 ISO Speed Rating
5819 exifInfo->iso_speed_rating = dm->aa.isoValue;
5820
5821 uint32_t av, tv, bv, sv, ev;
5822 av = APEX_FNUM_TO_APERTURE((double)exifInfo->fnumber.num / exifInfo->fnumber.den);
5823 tv = APEX_EXPOSURE_TO_SHUTTER((double)exifInfo->exposure_time.num / exifInfo->exposure_time.den);
5824 sv = APEX_ISO_TO_FILMSENSITIVITY(exifInfo->iso_speed_rating);
5825 bv = av + tv - sv;
5826 ev = av + tv;
5827 //ALOGD("Shutter speed=%d us, iso=%d", shutterSpeed, exifInfo->iso_speed_rating);
5828 ALOGV("AV=%d, TV=%d, SV=%d", av, tv, sv);
5829
5830 //3 Shutter Speed
5831 exifInfo->shutter_speed.num = tv * EXIF_DEF_APEX_DEN;
5832 exifInfo->shutter_speed.den = EXIF_DEF_APEX_DEN;
5833 //3 Brightness
5834 exifInfo->brightness.num = bv*EXIF_DEF_APEX_DEN;
5835 exifInfo->brightness.den = EXIF_DEF_APEX_DEN;
5836 //3 Exposure Bias
5837 if (ctl->aa.sceneMode== AA_SCENE_MODE_BEACH||
5838 ctl->aa.sceneMode== AA_SCENE_MODE_SNOW) {
5839 exifInfo->exposure_bias.num = EXIF_DEF_APEX_DEN;
5840 exifInfo->exposure_bias.den = EXIF_DEF_APEX_DEN;
5841 } else {
5842 exifInfo->exposure_bias.num = 0;
5843 exifInfo->exposure_bias.den = 0;
5844 }
5845 //3 Metering Mode
5846 /*switch (m_curCameraInfo->metering) {
5847 case METERING_MODE_CENTER:
5848 exifInfo->metering_mode = EXIF_METERING_CENTER;
5849 break;
5850 case METERING_MODE_MATRIX:
5851 exifInfo->metering_mode = EXIF_METERING_MULTISPOT;
5852 break;
5853 case METERING_MODE_SPOT:
5854 exifInfo->metering_mode = EXIF_METERING_SPOT;
5855 break;
5856 case METERING_MODE_AVERAGE:
5857 default:
5858 exifInfo->metering_mode = EXIF_METERING_AVERAGE;
5859 break;
5860 }*/
5861 exifInfo->metering_mode = EXIF_METERING_CENTER;
5862
5863 //3 Flash
5864 if (m_ctlInfo.flash.m_flashDecisionResult)
5865 exifInfo->flash = 1;
5866 else
5867 exifInfo->flash = EXIF_DEF_FLASH;
5868
5869 //3 White Balance
5870 if (currentEntry->awb_mode_dm == AA_AWBMODE_WB_AUTO)
5871 exifInfo->white_balance = EXIF_WB_AUTO;
5872 else
5873 exifInfo->white_balance = EXIF_WB_MANUAL;
5874
5875 //3 Scene Capture Type
5876 switch (ctl->aa.sceneMode) {
5877 case AA_SCENE_MODE_PORTRAIT:
5878 exifInfo->scene_capture_type = EXIF_SCENE_PORTRAIT;
5879 break;
5880 case AA_SCENE_MODE_LANDSCAPE:
5881 exifInfo->scene_capture_type = EXIF_SCENE_LANDSCAPE;
5882 break;
5883 case AA_SCENE_MODE_NIGHT_PORTRAIT:
5884 exifInfo->scene_capture_type = EXIF_SCENE_NIGHT;
5885 break;
5886 default:
5887 exifInfo->scene_capture_type = EXIF_SCENE_STANDARD;
5888 break;
5889 }
5890
5891 //2 0th IFD GPS Info Tags
5892 if (ctl->jpeg.gpsCoordinates[0] != 0 && ctl->jpeg.gpsCoordinates[1] != 0) {
5893
5894 if (ctl->jpeg.gpsCoordinates[0] > 0)
5895 strcpy((char *)exifInfo->gps_latitude_ref, "N");
5896 else
5897 strcpy((char *)exifInfo->gps_latitude_ref, "S");
5898
5899 if (ctl->jpeg.gpsCoordinates[1] > 0)
5900 strcpy((char *)exifInfo->gps_longitude_ref, "E");
5901 else
5902 strcpy((char *)exifInfo->gps_longitude_ref, "W");
5903
5904 if (ctl->jpeg.gpsCoordinates[2] > 0)
5905 exifInfo->gps_altitude_ref = 0;
5906 else
5907 exifInfo->gps_altitude_ref = 1;
5908
5909 double latitude = fabs(ctl->jpeg.gpsCoordinates[0]);
5910 double longitude = fabs(ctl->jpeg.gpsCoordinates[1]);
5911 double altitude = fabs(ctl->jpeg.gpsCoordinates[2]);
5912
5913 exifInfo->gps_latitude[0].num = (uint32_t)latitude;
5914 exifInfo->gps_latitude[0].den = 1;
5915 exifInfo->gps_latitude[1].num = (uint32_t)((latitude - exifInfo->gps_latitude[0].num) * 60);
5916 exifInfo->gps_latitude[1].den = 1;
5917 exifInfo->gps_latitude[2].num = (uint32_t)round((((latitude - exifInfo->gps_latitude[0].num) * 60)
5918 - exifInfo->gps_latitude[1].num) * 60);
5919 exifInfo->gps_latitude[2].den = 1;
5920
5921 exifInfo->gps_longitude[0].num = (uint32_t)longitude;
5922 exifInfo->gps_longitude[0].den = 1;
5923 exifInfo->gps_longitude[1].num = (uint32_t)((longitude - exifInfo->gps_longitude[0].num) * 60);
5924 exifInfo->gps_longitude[1].den = 1;
5925 exifInfo->gps_longitude[2].num = (uint32_t)round((((longitude - exifInfo->gps_longitude[0].num) * 60)
5926 - exifInfo->gps_longitude[1].num) * 60);
5927 exifInfo->gps_longitude[2].den = 1;
5928
5929 exifInfo->gps_altitude.num = (uint32_t)round(altitude);
5930 exifInfo->gps_altitude.den = 1;
5931
5932 struct tm tm_data;
5933 long timestamp;
5934 timestamp = (long)ctl->jpeg.gpsTimestamp;
5935 gmtime_r(&timestamp, &tm_data);
5936 exifInfo->gps_timestamp[0].num = tm_data.tm_hour;
5937 exifInfo->gps_timestamp[0].den = 1;
5938 exifInfo->gps_timestamp[1].num = tm_data.tm_min;
5939 exifInfo->gps_timestamp[1].den = 1;
5940 exifInfo->gps_timestamp[2].num = tm_data.tm_sec;
5941 exifInfo->gps_timestamp[2].den = 1;
5942 snprintf((char*)exifInfo->gps_datestamp, sizeof(exifInfo->gps_datestamp),
5943 "%04d:%02d:%02d", tm_data.tm_year + 1900, tm_data.tm_mon + 1, tm_data.tm_mday);
5944
5945 memset(exifInfo->gps_processing_method, 0, 100);
5946 memcpy(exifInfo->gps_processing_method, currentEntry->gpsProcessingMethod, 32);
5947 exifInfo->enableGps = true;
5948 } else {
5949 exifInfo->enableGps = false;
5950 }
5951
5952 //2 1th IFD TIFF Tags
5953 exifInfo->widthThumb = ctl->jpeg.thumbnailSize[0];
5954 exifInfo->heightThumb = ctl->jpeg.thumbnailSize[1];
5955 }
5956
5957 ExynosCameraHWInterface2::MainThread::~MainThread()
5958 {
5959 ALOGV("(%s):", __FUNCTION__);
5960 }
5961
5962 void ExynosCameraHWInterface2::MainThread::release()
5963 {
5964 ALOGV("(%s):", __func__);
5965 SetSignal(SIGNAL_THREAD_RELEASE);
5966 }
5967
5968 ExynosCameraHWInterface2::SensorThread::~SensorThread()
5969 {
5970 ALOGV("(%s):", __FUNCTION__);
5971 }
5972
5973 void ExynosCameraHWInterface2::SensorThread::release()
5974 {
5975 ALOGV("(%s):", __func__);
5976 SetSignal(SIGNAL_THREAD_RELEASE);
5977 }
5978
5979 ExynosCameraHWInterface2::StreamThread::~StreamThread()
5980 {
5981 ALOGV("(%s):", __FUNCTION__);
5982 }
5983
5984 void ExynosCameraHWInterface2::StreamThread::setParameter(stream_parameters_t * new_parameters)
5985 {
5986 ALOGV("DEBUG(%s):", __FUNCTION__);
5987 memcpy(&m_parameters, new_parameters, sizeof(stream_parameters_t));
5988 }
5989
5990 void ExynosCameraHWInterface2::StreamThread::release()
5991 {
5992 ALOGV("(%s):", __func__);
5993 SetSignal(SIGNAL_THREAD_RELEASE);
5994 }
5995
5996 int ExynosCameraHWInterface2::StreamThread::findBufferIndex(void * bufAddr)
5997 {
5998 int index;
5999 for (index = 0 ; index < m_parameters.numSvcBuffers ; index++) {
6000 if (m_parameters.svcBuffers[index].virt.extP[0] == bufAddr)
6001 return index;
6002 }
6003 return -1;
6004 }
6005
6006 int ExynosCameraHWInterface2::StreamThread::findBufferIndex(buffer_handle_t * bufHandle)
6007 {
6008 int index;
6009 for (index = 0 ; index < m_parameters.numSvcBuffers ; index++) {
6010 if (m_parameters.svcBufHandle[index] == *bufHandle)
6011 return index;
6012 }
6013 return -1;
6014 }
6015
6016 status_t ExynosCameraHWInterface2::StreamThread::attachSubStream(int stream_id, int priority)
6017 {
6018 ALOGV("(%s): substream_id(%d)", __FUNCTION__, stream_id);
6019 int index, vacantIndex;
6020 bool vacancy = false;
6021
6022 for (index = 0 ; index < NUM_MAX_SUBSTREAM ; index++) {
6023 if (!vacancy && m_attachedSubStreams[index].streamId == -1) {
6024 vacancy = true;
6025 vacantIndex = index;
6026 } else if (m_attachedSubStreams[index].streamId == stream_id) {
6027 return BAD_VALUE;
6028 }
6029 }
6030 if (!vacancy)
6031 return NO_MEMORY;
6032 m_attachedSubStreams[vacantIndex].streamId = stream_id;
6033 m_attachedSubStreams[vacantIndex].priority = priority;
6034 m_numRegisteredStream++;
6035 return NO_ERROR;
6036 }
6037
6038 status_t ExynosCameraHWInterface2::StreamThread::detachSubStream(int stream_id)
6039 {
6040 ALOGV("(%s): substream_id(%d)", __FUNCTION__, stream_id);
6041 int index;
6042 bool found = false;
6043
6044 for (index = 0 ; index < NUM_MAX_SUBSTREAM ; index++) {
6045 if (m_attachedSubStreams[index].streamId == stream_id) {
6046 found = true;
6047 break;
6048 }
6049 }
6050 if (!found)
6051 return BAD_VALUE;
6052 m_attachedSubStreams[index].streamId = -1;
6053 m_attachedSubStreams[index].priority = 0;
6054 m_numRegisteredStream--;
6055 return NO_ERROR;
6056 }
6057
6058 int ExynosCameraHWInterface2::createIonClient(ion_client ionClient)
6059 {
6060 if (ionClient == 0) {
6061 ionClient = ion_client_create();
6062 if (ionClient < 0) {
6063 ALOGE("[%s]src ion client create failed, value = %d\n", __FUNCTION__, ionClient);
6064 return 0;
6065 }
6066 }
6067 return ionClient;
6068 }
6069
6070 int ExynosCameraHWInterface2::deleteIonClient(ion_client ionClient)
6071 {
6072 if (ionClient != 0) {
6073 if (ionClient > 0) {
6074 ion_client_destroy(ionClient);
6075 }
6076 ionClient = 0;
6077 }
6078 return ionClient;
6079 }
6080
6081 int ExynosCameraHWInterface2::allocCameraMemory(ion_client ionClient, ExynosBuffer *buf, int iMemoryNum)
6082 {
6083 return allocCameraMemory(ionClient, buf, iMemoryNum, 0);
6084 }
6085
6086 int ExynosCameraHWInterface2::allocCameraMemory(ion_client ionClient, ExynosBuffer *buf, int iMemoryNum, int cacheFlag)
6087 {
6088 int ret = 0;
6089 int i = 0;
6090 int flag = 0;
6091
6092 if (ionClient == 0) {
6093 ALOGE("[%s] ionClient is zero (%d)\n", __FUNCTION__, ionClient);
6094 return -1;
6095 }
6096
6097 for (i = 0 ; i < iMemoryNum ; i++) {
6098 if (buf->size.extS[i] == 0) {
6099 break;
6100 }
6101 if (1 << i & cacheFlag)
6102 flag = ION_FLAG_CACHED | ION_FLAG_CACHED_NEEDS_SYNC;
6103 else
6104 flag = 0;
6105 buf->fd.extFd[i] = ion_alloc(ionClient, \
6106 buf->size.extS[i], 0, ION_HEAP_SYSTEM_MASK, flag);
6107 if ((buf->fd.extFd[i] == -1) ||(buf->fd.extFd[i] == 0)) {
6108 ALOGE("[%s]ion_alloc(%d) failed\n", __FUNCTION__, buf->size.extS[i]);
6109 buf->fd.extFd[i] = -1;
6110 freeCameraMemory(buf, iMemoryNum);
6111 return -1;
6112 }
6113
6114 buf->virt.extP[i] = (char *)ion_map(buf->fd.extFd[i], \
6115 buf->size.extS[i], 0);
6116 if ((buf->virt.extP[i] == (char *)MAP_FAILED) || (buf->virt.extP[i] == NULL)) {
6117 ALOGE("[%s]src ion map failed(%d)\n", __FUNCTION__, buf->size.extS[i]);
6118 buf->virt.extP[i] = (char *)MAP_FAILED;
6119 freeCameraMemory(buf, iMemoryNum);
6120 return -1;
6121 }
6122 ALOGV("allocCameraMem : [%d][0x%08x] size(%d) flag(%d)", i, (unsigned int)(buf->virt.extP[i]), buf->size.extS[i], flag);
6123 }
6124
6125 return ret;
6126 }
6127
6128 void ExynosCameraHWInterface2::freeCameraMemory(ExynosBuffer *buf, int iMemoryNum)
6129 {
6130
6131 int i = 0 ;
6132 int ret = 0;
6133
6134 for (i=0;i<iMemoryNum;i++) {
6135 if (buf->fd.extFd[i] != -1) {
6136 if (buf->virt.extP[i] != (char *)MAP_FAILED) {
6137 ret = ion_unmap(buf->virt.extP[i], buf->size.extS[i]);
6138 if (ret < 0)
6139 ALOGE("ERR(%s)", __FUNCTION__);
6140 }
6141 ion_free(buf->fd.extFd[i]);
6142 ALOGV("freeCameraMemory : [%d][0x%08x] size(%d)", i, (unsigned int)(buf->virt.extP[i]), buf->size.extS[i]);
6143 }
6144 buf->fd.extFd[i] = -1;
6145 buf->virt.extP[i] = (char *)MAP_FAILED;
6146 buf->size.extS[i] = 0;
6147 }
6148 }
6149
6150 void ExynosCameraHWInterface2::initCameraMemory(ExynosBuffer *buf, int iMemoryNum)
6151 {
6152 int i =0 ;
6153 for (i=0;i<iMemoryNum;i++) {
6154 buf->virt.extP[i] = (char *)MAP_FAILED;
6155 buf->fd.extFd[i] = -1;
6156 buf->size.extS[i] = 0;
6157 }
6158 }
6159
6160
6161
6162
6163 static camera2_device_t *g_cam2_device = NULL;
6164 static bool g_camera_vaild = false;
6165 static Mutex g_camera_mutex;
6166 ExynosCamera2 * g_camera2[2] = { NULL, NULL };
6167
6168 static int HAL2_camera_device_close(struct hw_device_t* device)
6169 {
6170 Mutex::Autolock lock(g_camera_mutex);
6171 ALOGD("(%s): ENTER", __FUNCTION__);
6172 if (device) {
6173
6174 camera2_device_t *cam_device = (camera2_device_t *)device;
6175 ALOGV("cam_device(0x%08x):", (unsigned int)cam_device);
6176 ALOGV("g_cam2_device(0x%08x):", (unsigned int)g_cam2_device);
6177 delete static_cast<ExynosCameraHWInterface2 *>(cam_device->priv);
6178 free(cam_device);
6179 g_camera_vaild = false;
6180 g_cam2_device = NULL;
6181 }
6182
6183 ALOGD("(%s): EXIT", __FUNCTION__);
6184 return 0;
6185 }
6186
6187 static inline ExynosCameraHWInterface2 *obj(const struct camera2_device *dev)
6188 {
6189 return reinterpret_cast<ExynosCameraHWInterface2 *>(dev->priv);
6190 }
6191
6192 static int HAL2_device_set_request_queue_src_ops(const struct camera2_device *dev,
6193 const camera2_request_queue_src_ops_t *request_src_ops)
6194 {
6195 ALOGV("DEBUG(%s):", __FUNCTION__);
6196 return obj(dev)->setRequestQueueSrcOps(request_src_ops);
6197 }
6198
6199 static int HAL2_device_notify_request_queue_not_empty(const struct camera2_device *dev)
6200 {
6201 ALOGV("DEBUG(%s):", __FUNCTION__);
6202 return obj(dev)->notifyRequestQueueNotEmpty();
6203 }
6204
6205 static int HAL2_device_set_frame_queue_dst_ops(const struct camera2_device *dev,
6206 const camera2_frame_queue_dst_ops_t *frame_dst_ops)
6207 {
6208 ALOGV("DEBUG(%s):", __FUNCTION__);
6209 return obj(dev)->setFrameQueueDstOps(frame_dst_ops);
6210 }
6211
6212 static int HAL2_device_get_in_progress_count(const struct camera2_device *dev)
6213 {
6214 ALOGV("DEBUG(%s):", __FUNCTION__);
6215 return obj(dev)->getInProgressCount();
6216 }
6217
6218 static int HAL2_device_flush_captures_in_progress(const struct camera2_device *dev)
6219 {
6220 ALOGV("DEBUG(%s):", __FUNCTION__);
6221 return obj(dev)->flushCapturesInProgress();
6222 }
6223
6224 static int HAL2_device_construct_default_request(const struct camera2_device *dev,
6225 int request_template, camera_metadata_t **request)
6226 {
6227 ALOGV("DEBUG(%s):", __FUNCTION__);
6228 return obj(dev)->constructDefaultRequest(request_template, request);
6229 }
6230
6231 static int HAL2_device_allocate_stream(
6232 const struct camera2_device *dev,
6233 // inputs
6234 uint32_t width,
6235 uint32_t height,
6236 int format,
6237 const camera2_stream_ops_t *stream_ops,
6238 // outputs
6239 uint32_t *stream_id,
6240 uint32_t *format_actual,
6241 uint32_t *usage,
6242 uint32_t *max_buffers)
6243 {
6244 ALOGV("(%s): ", __FUNCTION__);
6245 return obj(dev)->allocateStream(width, height, format, stream_ops,
6246 stream_id, format_actual, usage, max_buffers);
6247 }
6248
6249 static int HAL2_device_register_stream_buffers(const struct camera2_device *dev,
6250 uint32_t stream_id,
6251 int num_buffers,
6252 buffer_handle_t *buffers)
6253 {
6254 ALOGV("DEBUG(%s):", __FUNCTION__);
6255 return obj(dev)->registerStreamBuffers(stream_id, num_buffers, buffers);
6256 }
6257
6258 static int HAL2_device_release_stream(
6259 const struct camera2_device *dev,
6260 uint32_t stream_id)
6261 {
6262 ALOGV("DEBUG(%s)(id: %d):", __FUNCTION__, stream_id);
6263 if (!g_camera_vaild)
6264 return 0;
6265 return obj(dev)->releaseStream(stream_id);
6266 }
6267
6268 static int HAL2_device_allocate_reprocess_stream(
6269 const struct camera2_device *dev,
6270 uint32_t width,
6271 uint32_t height,
6272 uint32_t format,
6273 const camera2_stream_in_ops_t *reprocess_stream_ops,
6274 // outputs
6275 uint32_t *stream_id,
6276 uint32_t *consumer_usage,
6277 uint32_t *max_buffers)
6278 {
6279 ALOGV("DEBUG(%s):", __FUNCTION__);
6280 return obj(dev)->allocateReprocessStream(width, height, format, reprocess_stream_ops,
6281 stream_id, consumer_usage, max_buffers);
6282 }
6283
6284 static int HAL2_device_allocate_reprocess_stream_from_stream(
6285 const struct camera2_device *dev,
6286 uint32_t output_stream_id,
6287 const camera2_stream_in_ops_t *reprocess_stream_ops,
6288 // outputs
6289 uint32_t *stream_id)
6290 {
6291 ALOGV("DEBUG(%s):", __FUNCTION__);
6292 return obj(dev)->allocateReprocessStreamFromStream(output_stream_id,
6293 reprocess_stream_ops, stream_id);
6294 }
6295
6296 static int HAL2_device_release_reprocess_stream(
6297 const struct camera2_device *dev,
6298 uint32_t stream_id)
6299 {
6300 ALOGV("DEBUG(%s):", __FUNCTION__);
6301 return obj(dev)->releaseReprocessStream(stream_id);
6302 }
6303
6304 static int HAL2_device_trigger_action(const struct camera2_device *dev,
6305 uint32_t trigger_id,
6306 int ext1,
6307 int ext2)
6308 {
6309 ALOGV("DEBUG(%s):", __FUNCTION__);
6310 if (!g_camera_vaild)
6311 return 0;
6312 return obj(dev)->triggerAction(trigger_id, ext1, ext2);
6313 }
6314
6315 static int HAL2_device_set_notify_callback(const struct camera2_device *dev,
6316 camera2_notify_callback notify_cb,
6317 void *user)
6318 {
6319 ALOGV("DEBUG(%s):", __FUNCTION__);
6320 return obj(dev)->setNotifyCallback(notify_cb, user);
6321 }
6322
6323 static int HAL2_device_get_metadata_vendor_tag_ops(const struct camera2_device*dev,
6324 vendor_tag_query_ops_t **ops)
6325 {
6326 ALOGV("DEBUG(%s):", __FUNCTION__);
6327 return obj(dev)->getMetadataVendorTagOps(ops);
6328 }
6329
6330 static int HAL2_device_dump(const struct camera2_device *dev, int fd)
6331 {
6332 ALOGV("DEBUG(%s):", __FUNCTION__);
6333 return obj(dev)->dump(fd);
6334 }
6335
6336
6337
6338
6339
6340 static int HAL2_getNumberOfCameras()
6341 {
6342 ALOGV("(%s): returning 2", __FUNCTION__);
6343 return 2;
6344 }
6345
6346
6347 static int HAL2_getCameraInfo(int cameraId, struct camera_info *info)
6348 {
6349 ALOGV("DEBUG(%s): cameraID: %d", __FUNCTION__, cameraId);
6350 static camera_metadata_t * mCameraInfo[2] = {NULL, NULL};
6351
6352 status_t res;
6353
6354 if (cameraId == 0) {
6355 info->facing = CAMERA_FACING_BACK;
6356 if (!g_camera2[0])
6357 g_camera2[0] = new ExynosCamera2(0);
6358 }
6359 else if (cameraId == 1) {
6360 info->facing = CAMERA_FACING_FRONT;
6361 if (!g_camera2[1])
6362 g_camera2[1] = new ExynosCamera2(1);
6363 }
6364 else
6365 return BAD_VALUE;
6366
6367 info->orientation = 0;
6368 info->device_version = HARDWARE_DEVICE_API_VERSION(2, 0);
6369 if (mCameraInfo[cameraId] == NULL) {
6370 res = g_camera2[cameraId]->constructStaticInfo(&(mCameraInfo[cameraId]), cameraId, true);
6371 if (res != OK) {
6372 ALOGE("%s: Unable to allocate static info: %s (%d)",
6373 __FUNCTION__, strerror(-res), res);
6374 return res;
6375 }
6376 res = g_camera2[cameraId]->constructStaticInfo(&(mCameraInfo[cameraId]), cameraId, false);
6377 if (res != OK) {
6378 ALOGE("%s: Unable to fill in static info: %s (%d)",
6379 __FUNCTION__, strerror(-res), res);
6380 return res;
6381 }
6382 }
6383 info->static_camera_characteristics = mCameraInfo[cameraId];
6384 return NO_ERROR;
6385 }
6386
6387 #define SET_METHOD(m) m : HAL2_device_##m
6388
6389 static camera2_device_ops_t camera2_device_ops = {
6390 SET_METHOD(set_request_queue_src_ops),
6391 SET_METHOD(notify_request_queue_not_empty),
6392 SET_METHOD(set_frame_queue_dst_ops),
6393 SET_METHOD(get_in_progress_count),
6394 SET_METHOD(flush_captures_in_progress),
6395 SET_METHOD(construct_default_request),
6396 SET_METHOD(allocate_stream),
6397 SET_METHOD(register_stream_buffers),
6398 SET_METHOD(release_stream),
6399 SET_METHOD(allocate_reprocess_stream),
6400 SET_METHOD(allocate_reprocess_stream_from_stream),
6401 SET_METHOD(release_reprocess_stream),
6402 SET_METHOD(trigger_action),
6403 SET_METHOD(set_notify_callback),
6404 SET_METHOD(get_metadata_vendor_tag_ops),
6405 SET_METHOD(dump),
6406 get_instance_metadata : NULL
6407 };
6408
6409 #undef SET_METHOD
6410
6411
6412 static int HAL2_camera_device_open(const struct hw_module_t* module,
6413 const char *id,
6414 struct hw_device_t** device)
6415 {
6416 int cameraId = atoi(id);
6417 int openInvalid = 0;
6418
6419 Mutex::Autolock lock(g_camera_mutex);
6420 if (g_camera_vaild) {
6421 ALOGE("ERR(%s): Can't open, other camera is in use", __FUNCTION__);
6422 return -EUSERS;
6423 }
6424 g_camera_vaild = false;
6425 ALOGD("\n\n>>> I'm Samsung's CameraHAL_2(ID:%d) <<<\n\n", cameraId);
6426 if (cameraId < 0 || cameraId >= HAL2_getNumberOfCameras()) {
6427 ALOGE("ERR(%s):Invalid camera ID %s", __FUNCTION__, id);
6428 return -EINVAL;
6429 }
6430
6431 ALOGD("g_cam2_device : 0x%08x", (unsigned int)g_cam2_device);
6432 if (g_cam2_device) {
6433 if (obj(g_cam2_device)->getCameraId() == cameraId) {
6434 ALOGD("DEBUG(%s):returning existing camera ID %s", __FUNCTION__, id);
6435 goto done;
6436 } else {
6437 ALOGD("(%s): START waiting for cam device free", __FUNCTION__);
6438 while (g_cam2_device)
6439 usleep(SIG_WAITING_TICK);
6440 ALOGD("(%s): END waiting for cam device free", __FUNCTION__);
6441 }
6442 }
6443
6444 g_cam2_device = (camera2_device_t *)malloc(sizeof(camera2_device_t));
6445 ALOGV("g_cam2_device : 0x%08x", (unsigned int)g_cam2_device);
6446
6447 if (!g_cam2_device)
6448 return -ENOMEM;
6449
6450 g_cam2_device->common.tag = HARDWARE_DEVICE_TAG;
6451 g_cam2_device->common.version = CAMERA_DEVICE_API_VERSION_2_0;
6452 g_cam2_device->common.module = const_cast<hw_module_t *>(module);
6453 g_cam2_device->common.close = HAL2_camera_device_close;
6454
6455 g_cam2_device->ops = &camera2_device_ops;
6456
6457 ALOGV("DEBUG(%s):open camera2 %s", __FUNCTION__, id);
6458
6459 g_cam2_device->priv = new ExynosCameraHWInterface2(cameraId, g_cam2_device, g_camera2[cameraId], &openInvalid);
6460 if (!openInvalid) {
6461 ALOGE("DEBUG(%s): ExynosCameraHWInterface2 creation failed", __FUNCTION__);
6462 return -ENODEV;
6463 }
6464 done:
6465 *device = (hw_device_t *)g_cam2_device;
6466 ALOGV("DEBUG(%s):opened camera2 %s (%p)", __FUNCTION__, id, *device);
6467 g_camera_vaild = true;
6468
6469 return 0;
6470 }
6471
6472
6473 static hw_module_methods_t camera_module_methods = {
6474 open : HAL2_camera_device_open
6475 };
6476
6477 extern "C" {
6478 struct camera_module HAL_MODULE_INFO_SYM = {
6479 common : {
6480 tag : HARDWARE_MODULE_TAG,
6481 module_api_version : CAMERA_MODULE_API_VERSION_2_0,
6482 hal_api_version : HARDWARE_HAL_API_VERSION,
6483 id : CAMERA_HARDWARE_MODULE_ID,
6484 name : "Exynos Camera HAL2",
6485 author : "Samsung Corporation",
6486 methods : &camera_module_methods,
6487 dso : NULL,
6488 reserved : {0},
6489 },
6490 get_number_of_cameras : HAL2_getNumberOfCameras,
6491 get_camera_info : HAL2_getCameraInfo,
6492 set_callbacks : NULL,
6493 get_vendor_tag_ops : NULL,
6494 open_legacy : NULL,
6495 reserved : {0}
6496 };
6497 }
6498
6499 }; // namespace android