hardware: samsung_slsi: libcamera2: Add support for metadata
[GitHub/LineageOS/android_hardware_samsung_slsi_exynos5.git] / libcamera2 / ExynosCameraHWInterface2.cpp
1/*
2**
3** Copyright 2008, The Android Open Source Project
4** Copyright 2012, Samsung Electronics Co. LTD
5**
6** Licensed under the Apache License, Version 2.0 (the "License");
7** you may not use this file except in compliance with the License.
8** You may obtain a copy of the License at
9**
10** http://www.apache.org/licenses/LICENSE-2.0
11**
12** Unless required by applicable law or agreed to in writing, software
13** distributed under the License is distributed on an "AS IS" BASIS,
14** WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
15** See the License for the specific language governing permissions and
16** limitations under the License.
17*/
18
19/*!
20 * \file ExynosCameraHWInterface2.cpp
21 * \brief source file for Android Camera API 2.0 HAL
22 * \author Sungjoong Kang(sj3.kang@samsung.com)
23 * \date 2012/07/10
24 *
25 * <b>Revision History: </b>
26 * - 2012/05/31 : Sungjoong Kang(sj3.kang@samsung.com) \n
27 * Initial Release
28 *
29 * - 2012/07/10 : Sungjoong Kang(sj3.kang@samsung.com) \n
30 * 2nd Release
31 *
32 */
33
34//#define LOG_NDEBUG 0
35#define LOG_TAG "ExynosCameraHAL2"
36#include <utils/Log.h>
37
38#include "ExynosCameraHWInterface2.h"
39#include "exynos_format.h"
40
41
42
43namespace android {
44
45
46void m_savePostView(const char *fname, uint8_t *buf, uint32_t size)
47{
48 int nw;
49 int cnt = 0;
50 uint32_t written = 0;
51
52 ALOGD("opening file [%s], address[%x], size(%d)", fname, (unsigned int)buf, size);
53 int fd = open(fname, O_RDWR | O_CREAT, 0644);
54 if (fd < 0) {
55 ALOGE("failed to create file [%s]: %s", fname, strerror(errno));
56 return;
57 }
58
59 ALOGD("writing %d bytes to file [%s]", size, fname);
60 while (written < size) {
61 nw = ::write(fd, buf + written, size - written);
62 if (nw < 0) {
63 ALOGE("failed to write to file [%s] at offset %d: %s", fname, written, strerror(errno));
64 break;
65 }
66 written += nw;
67 cnt++;
68 }
69 ALOGD("done writing %d bytes to file [%s] in %d passes",size, fname, cnt);
70 ::close(fd);
71}
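/*
 * Illustrative usage (added for documentation; not part of the original file,
 * and the path below is purely hypothetical): dump the first plane of a
 * filled buffer so it can be inspected offline.
 *
 *   ExynosBuffer buf;   // assumed to be allocated and filled elsewhere
 *   m_savePostView("/data/camera_postview.yuv",
 *                  (uint8_t *)buf.virt.extP[0], buf.size.extS[0]);
 */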
72
73int get_pixel_depth(uint32_t fmt)
74{
75 int depth = 0;
76
77 switch (fmt) {
78 case V4L2_PIX_FMT_JPEG:
79 depth = 8;
80 break;
81
82 case V4L2_PIX_FMT_NV12:
83 case V4L2_PIX_FMT_NV21:
84 case V4L2_PIX_FMT_YUV420:
85 case V4L2_PIX_FMT_YVU420M:
86 case V4L2_PIX_FMT_NV12M:
87 case V4L2_PIX_FMT_NV12MT:
88 depth = 12;
89 break;
90
91 case V4L2_PIX_FMT_RGB565:
92 case V4L2_PIX_FMT_YUYV:
93 case V4L2_PIX_FMT_YVYU:
94 case V4L2_PIX_FMT_UYVY:
95 case V4L2_PIX_FMT_VYUY:
96 case V4L2_PIX_FMT_NV16:
97 case V4L2_PIX_FMT_NV61:
98 case V4L2_PIX_FMT_YUV422P:
99 case V4L2_PIX_FMT_SBGGR10:
100 case V4L2_PIX_FMT_SBGGR12:
101 case V4L2_PIX_FMT_SBGGR16:
102 depth = 16;
103 break;
104
105 case V4L2_PIX_FMT_RGB32:
106 depth = 32;
107 break;
108 default:
109 ALOGE("Get depth failed(format : %d)", fmt);
110 break;
111 }
112
113 return depth;
13d8c7b4 114}
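/*
 * Illustrative sketch (added for documentation, not part of the original
 * file): the bit depth returned above is turned into a frame size in bytes
 * exactly the way cam_int_s_fmt() does below. Width and height are assumed
 * to be already aligned as the hardware expects.
 */
static inline unsigned int sketch_frame_size(unsigned int width, unsigned int height, int bpp)
{
    /* e.g. NV12 (12 bpp) at 1920x1080 -> 1920 * 1080 * 12 / 8 = 3110400 bytes */
    return (width * height * (unsigned int)bpp) / 8;
}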
115
116int cam_int_s_fmt(node_info_t *node)
117{
118 struct v4l2_format v4l2_fmt;
119 unsigned int framesize;
120 int ret;
121
122 memset(&v4l2_fmt, 0, sizeof(struct v4l2_format));
123
124 v4l2_fmt.type = node->type;
125 framesize = (node->width * node->height * get_pixel_depth(node->format)) / 8;
126
127 if (node->planes >= 1) {
128 v4l2_fmt.fmt.pix_mp.width = node->width;
129 v4l2_fmt.fmt.pix_mp.height = node->height;
130 v4l2_fmt.fmt.pix_mp.pixelformat = node->format;
131 v4l2_fmt.fmt.pix_mp.field = V4L2_FIELD_ANY;
132 } else {
133 ALOGE("%s: S_FMT: number of planes out of bounds", __FUNCTION__);
134 }
135
136 /* Set up for capture */
137 ret = exynos_v4l2_s_fmt(node->fd, &v4l2_fmt);
138
139 if (ret < 0)
13d8c7b4 140 ALOGE("%s: exynos_v4l2_s_fmt fail (%d)",__FUNCTION__, ret);
142 node->streamOn = false;
143
144 return ret;
145}
146
147int cam_int_reqbufs(node_info_t *node)
148{
149 struct v4l2_requestbuffers req;
150 int ret;
151
152 req.count = node->buffers;
153 req.type = node->type;
154 req.memory = node->memory;
155
156 ret = exynos_v4l2_reqbufs(node->fd, &req);
157
158 if (ret < 0)
13d8c7b4 159 ALOGE("%s: VIDIOC_REQBUFS (fd:%d) failed (%d)",__FUNCTION__,node->fd, ret);
160
161 return req.count;
162}
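/*
 * Illustrative sketch (added for documentation, not part of the original
 * file). Assumption: the exynos_v4l2_reqbufs() wrapper used above reduces to
 * a plain VIDIOC_REQBUFS ioctl, shown here with the standard V4L2 API
 * (<sys/ioctl.h>, <linux/videodev2.h>); the HAL itself keeps using the wrapper.
 */
static int sketch_reqbufs(int fd, unsigned int count, enum v4l2_buf_type type, enum v4l2_memory memory)
{
    struct v4l2_requestbuffers req;

    memset(&req, 0, sizeof(req));
    req.count  = count;
    req.type   = type;
    req.memory = memory;

    if (ioctl(fd, VIDIOC_REQBUFS, &req) < 0)
        return -errno;      /* driver rejected the request */

    return (int)req.count;  /* the driver may grant fewer buffers than requested */
}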
163
164int cam_int_qbuf(node_info_t *node, int index)
165{
166 struct v4l2_buffer v4l2_buf;
167 struct v4l2_plane planes[VIDEO_MAX_PLANES];
168 int i;
169 int ret = 0;
170
171 v4l2_buf.m.planes = planes;
172 v4l2_buf.type = node->type;
173 v4l2_buf.memory = node->memory;
174 v4l2_buf.index = index;
175 v4l2_buf.length = node->planes;
176
177 for(i = 0; i < node->planes; i++){
178 v4l2_buf.m.planes[i].m.fd = (int)(node->buffer[index].fd.extFd[i]);
179 v4l2_buf.m.planes[i].length = (unsigned long)(node->buffer[index].size.extS[i]);
180 }
181
182 ret = exynos_v4l2_qbuf(node->fd, &v4l2_buf);
183
184 if (ret < 0)
13d8c7b4 185 ALOGE("%s: cam_int_qbuf failed (index:%d)(ret:%d)",__FUNCTION__, index, ret);
186
187 return ret;
188}
189
190int cam_int_streamon(node_info_t *node)
191{
192 enum v4l2_buf_type type = node->type;
193 int ret;
194
195 if (node->streamOn)
196 return 0;
197
198 ret = exynos_v4l2_streamon(node->fd, type);
199
200 if (ret < 0)
13d8c7b4 201 ALOGE("%s: VIDIOC_STREAMON failed (%d)",__FUNCTION__, ret);
202 else
203 node->streamOn = true;
204
205 ALOGV("On streaming I/O... ... fd(%d)", node->fd);
206
207 return ret;
208}
209
210int cam_int_streamoff(node_info_t *node)
211{
212 enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
213 int ret;
214
215 if (!node->streamOn)
216 return 0;
217
218 ALOGV("Off streaming I/O... fd(%d)", node->fd);
219 ret = exynos_v4l2_streamoff(node->fd, type);
220
221 if (ret < 0)
222 ALOGE("%s: VIDIOC_STREAMOFF failed (%d)",__FUNCTION__, ret);
223 else
224 node->streamOn = false;
225
226 return ret;
227}
228
229int isp_int_streamoff(node_info_t *node)
230{
231 enum v4l2_buf_type type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
232 int ret;
233
234 ALOGV("Off streaming I/O... fd(%d)", node->fd);
235 ret = exynos_v4l2_streamoff(node->fd, type);
236
237 if (ret < 0)
238 ALOGE("%s: VIDIOC_STREAMOFF failed (%d)",__FUNCTION__, ret);
239
240 return ret;
241}
242
243int cam_int_dqbuf(node_info_t *node)
244{
245 struct v4l2_buffer v4l2_buf;
246 struct v4l2_plane planes[VIDEO_MAX_PLANES];
247 int ret;
248
249 v4l2_buf.type = node->type;
250 v4l2_buf.memory = node->memory;
251 v4l2_buf.m.planes = planes;
252 v4l2_buf.length = node->planes;
253
254 ret = exynos_v4l2_dqbuf(node->fd, &v4l2_buf);
255 if (ret < 0)
13d8c7b4 256 ALOGE("%s: VIDIOC_DQBUF failed (%d)",__FUNCTION__, ret);
257
258 return v4l2_buf.index;
259}
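/*
 * Illustrative sketch (added for documentation, not part of the original
 * file): a defensive variant of cam_int_dqbuf(). The function above returns
 * v4l2_buf.index even when the DQBUF call fails, in which case the index is
 * not meaningful; a caller-friendly variant would surface the error instead.
 */
static int sketch_dqbuf_checked(node_info_t *node)
{
    struct v4l2_buffer v4l2_buf;
    struct v4l2_plane planes[VIDEO_MAX_PLANES];

    memset(&v4l2_buf, 0, sizeof(v4l2_buf));
    v4l2_buf.type     = node->type;
    v4l2_buf.memory   = node->memory;
    v4l2_buf.m.planes = planes;
    v4l2_buf.length   = node->planes;

    if (exynos_v4l2_dqbuf(node->fd, &v4l2_buf) < 0)
        return -1;                  /* propagate the failure instead of a stale index */

    return (int)v4l2_buf.index;     /* index of the dequeued buffer */
}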
260
261int cam_int_s_input(node_info_t *node, int index)
262{
263 int ret;
265 ret = exynos_v4l2_s_input(node->fd, index);
266 if (ret < 0)
13d8c7b4 267 ALOGE("%s: VIDIOC_S_INPUT failed (%d)",__FUNCTION__, ret);
268
269 return ret;
270}
271
272
273gralloc_module_t const* ExynosCameraHWInterface2::m_grallocHal;
274
275RequestManager::RequestManager(SignalDrivenThread* main_thread):
276 m_numOfEntries(0),
277 m_entryInsertionIndex(0),
278 m_entryProcessingIndex(0),
279 m_entryFrameOutputIndex(0)
280{
281 m_metadataConverter = new MetadataConverter;
282 m_mainThread = main_thread;
13d8c7b4 283 for (int i=0 ; i<NUM_MAX_REQUEST_MGR_ENTRY; i++) {
13d8c7b4 284 memset(&(entries[i]), 0x00, sizeof(request_manager_entry_t));
be494d19 285 entries[i].internal_shot.shot.ctl.request.frameCount = -1;
13d8c7b4 286 }
b5237e6b 287 m_sensorPipelineSkipCnt = 8;
288 return;
289}
290
291RequestManager::~RequestManager()
292{
293 return;
294}
295
296int RequestManager::GetNumEntries()
297{
298 return m_numOfEntries;
299}
300
301void RequestManager::SetDefaultParameters(int cropX)
302{
303 m_cropX = cropX;
304}
305
306bool RequestManager::IsRequestQueueFull()
307{
308 Mutex::Autolock lock(m_requestMutex);
309 if (m_numOfEntries>=NUM_MAX_REQUEST_MGR_ENTRY)
310 return true;
311 else
312 return false;
313}
314
315void RequestManager::RegisterRequest(camera_metadata_t * new_request)
316{
317 ALOGV("DEBUG(%s):", __FUNCTION__);
318
c15a6b00 319 Mutex::Autolock lock(m_requestMutex);
c15a6b00 321 request_manager_entry * newEntry = NULL;
9dd63e1f 322 int newInsertionIndex = GetNextIndex(m_entryInsertionIndex);
323 ALOGV("DEBUG(%s): got lock, new insertIndex(%d), cnt before reg(%d)", __FUNCTION__,newInsertionIndex,m_numOfEntries );
324
326 newEntry = &(entries[newInsertionIndex]);
327
328 if (newEntry->status!=EMPTY) {
329 ALOGV("DEBUG(%s): Circular buffer abnormal ", __FUNCTION__);
330 return;
331 }
332 newEntry->status = REGISTERED;
333 newEntry->original_request = new_request;
334 // TODO : allocate internal_request dynamically
13d8c7b4 335 m_metadataConverter->ToInternalShot(new_request, &(newEntry->internal_shot));
b56dcc00 336 newEntry->output_stream_count = newEntry->internal_shot.shot.ctl.request.outputStreams[15];
337
338 m_numOfEntries++;
339 m_entryInsertionIndex = newInsertionIndex;
340
c15a6b00 342 ALOGV("## RegisterReq DONE num(%d), insert(%d), processing(%d), frame(%d), (frameCnt(%d))",
be494d19 343 m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex, newEntry->internal_shot.shot.ctl.request.frameCount);
344}
345
346void RequestManager::DeregisterRequest(camera_metadata_t ** deregistered_request)
347{
13d8c7b4 348 ALOGV("DEBUG(%s):", __FUNCTION__);
349 Mutex::Autolock lock(m_requestMutex);
350
351 request_manager_entry * currentEntry = &(entries[m_entryFrameOutputIndex]);
be494d19 353 if (currentEntry->status != CAPTURED) {
13d8c7b4 354 ALOGD("DBG(%s): Circular buffer abnormal. processing(%d), frame(%d), status(%d) ", __FUNCTION__
c15a6b00 355 , m_entryProcessingIndex, m_entryFrameOutputIndex,(int)(currentEntry->status));
13d8c7b4 356 return;
c15a6b00 357 }
358 if (deregistered_request) *deregistered_request = currentEntry->original_request;
359
360 currentEntry->status = EMPTY;
361 currentEntry->original_request = NULL;
362 memset(&(currentEntry->internal_shot), 0, sizeof(struct camera2_shot_ext));
363 currentEntry->internal_shot.shot.ctl.request.frameCount = -1;
c15a6b00 364 currentEntry->output_stream_count = 0;
13d8c7b4 365 currentEntry->dynamic_meta_vaild = false;
c15a6b00 366 m_numOfEntries--;
9dd63e1f 367 // Dump();
368 ALOGV("## DeRegistReq DONE num(%d), insert(%d), processing(%d), frame(%d)",
369 m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex);
c15a6b00 371 return;
372}
373
13d8c7b4 374bool RequestManager::PrepareFrame(size_t* num_entries, size_t* frame_size,
375 camera_metadata_t ** prepared_frame)
376{
13d8c7b4 377 ALOGV("DEBUG(%s):", __FUNCTION__);
378 Mutex::Autolock lock(m_requestMutex);
379 status_t res = NO_ERROR;
9dd63e1f 380 int tempFrameOutputIndex = GetNextIndex(m_entryFrameOutputIndex);
381 request_manager_entry * currentEntry = &(entries[tempFrameOutputIndex]);
382 ALOGV("DEBUG(%s): processing(%d), frameOut(%d), insert(%d) recentlycompleted(%d)", __FUNCTION__,
383 m_entryProcessingIndex, m_entryFrameOutputIndex, m_entryInsertionIndex, m_completedIndex);
384
385 if (m_completedIndex != tempFrameOutputIndex) {
386 ALOGV("DEBUG(%s): frame left behind : completed(%d), preparing(%d)", __FUNCTION__, m_completedIndex,tempFrameOutputIndex);
387
388 request_manager_entry * currentEntry2 = &(entries[tempFrameOutputIndex]);
389 currentEntry2->status = EMPTY;
390 currentEntry2->original_request = NULL;
391 memset(&(currentEntry2->internal_shot), 0, sizeof(struct camera2_shot_ext));
392 currentEntry2->internal_shot.shot.ctl.request.frameCount = -1;
393 currentEntry2->output_stream_count = 0;
394 currentEntry2->dynamic_meta_vaild = false;
395 m_numOfEntries--;
9dd63e1f 396 // Dump();
397 tempFrameOutputIndex = m_completedIndex;
398 currentEntry = &(entries[tempFrameOutputIndex]);
399 }
400
401 if (currentEntry->output_stream_count!=0) {
402 ALOGD("DBG(%s): Circular buffer has remaining output : stream_count(%d)", __FUNCTION__, currentEntry->output_stream_count);
403 return false;
404 }
405
be494d19 406 if (currentEntry->status != CAPTURED) {
407 ALOGD("DBG(%s): Circular buffer abnormal status(%d)", __FUNCTION__, (int)(currentEntry->status));
408
409 return false;
410 }
411 m_entryFrameOutputIndex = tempFrameOutputIndex;
b56dcc00 412 m_tempFrameMetadata = place_camera_metadata(m_tempFrameMetadataBuf, 2000, 15, 500); //estimated
13d8c7b4 413 res = m_metadataConverter->ToDynamicMetadata(&(currentEntry->internal_shot),
414 m_tempFrameMetadata);
415 if (res!=NO_ERROR) {
416 ALOGE("ERROR(%s): ToDynamicMetadata (%d) ", __FUNCTION__, res);
417 return false;
418 }
419 *num_entries = get_camera_metadata_entry_count(m_tempFrameMetadata);
420 *frame_size = get_camera_metadata_size(m_tempFrameMetadata);
421 *prepared_frame = m_tempFrameMetadata;
13d8c7b4 422 ALOGV("## PrepareFrame DONE: frameOut(%d) frameCnt-req(%d)", m_entryFrameOutputIndex,
be494d19 423 currentEntry->internal_shot.shot.ctl.request.frameCount);
9dd63e1f 424 // Dump();
13d8c7b4 425 return true;
426}
427
13d8c7b4 428int RequestManager::MarkProcessingRequest(ExynosBuffer* buf)
c15a6b00 429{
13d8c7b4 430 ALOGV("DEBUG(%s):", __FUNCTION__);
c15a6b00 431 Mutex::Autolock lock(m_requestMutex);
13d8c7b4 432 struct camera2_shot_ext * shot_ext;
b56dcc00 433 struct camera2_shot_ext * request_shot;
434 int targetStreamIndex = 0;
435
436 if (m_numOfEntries == 0) {
437 ALOGV("DEBUG(%s): Request Manager Empty ", __FUNCTION__);
438 return -1;
439 }
440
441 if ((m_entryProcessingIndex == m_entryInsertionIndex)
be494d19 442 && (entries[m_entryProcessingIndex].status == REQUESTED || entries[m_entryProcessingIndex].status == CAPTURED)) {
443 ALOGV("## MarkProcReq skipping(request underrun) - num(%d), insert(%d), processing(%d), frame(%d)",
444 m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex);
445 return -1;
446 }
447
448 request_manager_entry * newEntry = NULL;
9dd63e1f 449 int newProcessingIndex = GetNextIndex(m_entryProcessingIndex);
c15a6b00 451 newEntry = &(entries[newProcessingIndex]);
b56dcc00 452 request_shot = &newEntry->internal_shot;
be494d19 453 if (newEntry->status != REGISTERED) {
13d8c7b4 454 ALOGV("DEBUG(%s): Circular buffer abnormal ", __FUNCTION__);
13d8c7b4 455 return -1;
c15a6b00 456 }
be494d19 457 newEntry->status = REQUESTED;
13d8c7b4 459 shot_ext = (struct camera2_shot_ext *)(buf->virt.extP[1]);
460 ALOGV("DEBUG(%s):Writing the info of Framecnt(%d)", __FUNCTION__, request_shot->shot.ctl.request.frameCount);
461 memcpy(shot_ext, &newEntry->internal_shot, sizeof(struct camera2_shot_ext));
462
463 shot_ext->request_sensor = 1;
464 shot_ext->dis_bypass = 1;
465 shot_ext->dnr_bypass = 1;
13d8c7b4 466 for (int i = 0; i < newEntry->output_stream_count; i++) {
be494d19 467 targetStreamIndex = newEntry->internal_shot.shot.ctl.request.outputStreams[i];
468
469 if (targetStreamIndex==0) {
470 ALOGV("DEBUG(%s): outputstreams(%d) is for scalerP", __FUNCTION__, i);
471 shot_ext->request_scp = 1;
9dd63e1f 472 shot_ext->shot.ctl.request.outputStreams[0] = 1;
13d8c7b4 473 }
9dd63e1f 474 else if (targetStreamIndex == 1) {
475 ALOGV("DEBUG(%s): outputstreams(%d) is for scalerC", __FUNCTION__, i);
476 shot_ext->request_scc = 1;
477 shot_ext->shot.ctl.request.outputStreams[1] = 1;
478 }
479 else if (targetStreamIndex == 2) {
480 ALOGV("DEBUG(%s): outputstreams(%d) is for scalerP (record)", __FUNCTION__, i);
481 shot_ext->request_scp = 1;
482 shot_ext->shot.ctl.request.outputStreams[2] = 1;
483 }
484 else {
485 ALOGV("DEBUG(%s): outputstreams(%d) has abnormal value(%d)", __FUNCTION__, i, targetStreamIndex);
486 }
487 }
488 if (shot_ext->shot.ctl.aa.aeMode == AA_AEMODE_ON) {
489 ALOGV("(%s): AE_ON => ignoring some params", __FUNCTION__);
490 shot_ext->shot.ctl.sensor.exposureTime = 0;
491 shot_ext->shot.ctl.sensor.sensitivity = 0;
492 shot_ext->shot.ctl.sensor.frameDuration = 33*1000*1000;
493 // TODO : check frameDuration
494 }
495 m_entryProcessingIndex = newProcessingIndex;
496
9dd63e1f 497 // Dump();
13d8c7b4 498 ALOGV("## MarkProcReq DONE totalentry(%d), insert(%d), processing(%d), frame(%d) frameCnt(%d)",
be494d19 499 m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex, newEntry->internal_shot.shot.ctl.request.frameCount);
500
501 return m_entryProcessingIndex;
502}
503
9dd63e1f 504void RequestManager::NotifyStreamOutput(int frameCnt, int stream_id)
c15a6b00 505{
506 int index;
507
508 ALOGV("DEBUG(%s): frameCnt(%d), stream_id(%d)", __FUNCTION__, frameCnt, stream_id);
509
510 index = FindEntryIndexByFrameCnt(frameCnt);
511 if (index == -1) {
512 ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
513 return;
514 }
515 ALOGV("DEBUG(%s): frameCnt(%d), stream_id(%d) last cnt (%d)", __FUNCTION__, frameCnt, stream_id, entries[index].output_stream_count);
516
517 entries[index].output_stream_count--; //TODO : match stream id also
518 CheckCompleted(index);
519 return;
520}
521
522void RequestManager::CheckCompleted(int index)
523{
9dd63e1f 524 ALOGV("DEBUG(%s): reqIndex(%d) current Count(%d)", __FUNCTION__, index, entries[index].output_stream_count);
b5237e6b 525 if (entries[index].output_stream_count == 0 && entries[index].dynamic_meta_vaild) {
13d8c7b4 526 ALOGV("DEBUG(%s): index[%d] completed and sending SIGNAL_MAIN_STREAM_OUTPUT_DONE", __FUNCTION__, index);
b5237e6b 527 // Dump();
528 m_completedIndex = index;
529 m_mainThread->SetSignal(SIGNAL_MAIN_STREAM_OUTPUT_DONE);
530 }
531 return;
532}
533
534void RequestManager::ApplyDynamicMetadata(struct camera2_shot_ext *shot_ext, int frameCnt)
13d8c7b4 535{
9dd63e1f 536 int index;
537 struct camera2_shot_ext * request_shot;
538 nsecs_t timeStamp;
540 ALOGV("DEBUG(%s): frameCnt(%d)", __FUNCTION__, frameCnt);
541
542 index = FindEntryIndexByFrameCnt(frameCnt);
543 if (index == -1) {
544 ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
545 return;
13d8c7b4 546 }
547
548 request_manager_entry * newEntry = &(entries[index]);
b56dcc00 549 request_shot = &(newEntry->internal_shot);
be494d19 551 newEntry->dynamic_meta_vaild = true;
552 timeStamp = request_shot->shot.dm.sensor.timeStamp;
553 memcpy(&request_shot->shot.dm, &shot_ext->shot.dm, sizeof(struct camera2_dm));
554 request_shot->shot.dm.sensor.timeStamp = timeStamp;
be494d19 555 CheckCompleted(index);
556}
557
558void RequestManager::DumpInfoWithIndex(int index)
559{
be494d19 560 struct camera2_shot_ext * currMetadata = &(entries[index].internal_shot);
561
562 ALOGV("#### frameCount(%d) exposureTime(%lld) ISO(%d)",
563 currMetadata->shot.ctl.request.frameCount,
564 currMetadata->shot.ctl.sensor.exposureTime,
565 currMetadata->shot.ctl.sensor.sensitivity);
b56dcc00 566 if (currMetadata->shot.ctl.request.outputStreams[15] == 0)
13d8c7b4 567 ALOGV("#### No output stream selected");
b56dcc00 568 else if (currMetadata->shot.ctl.request.outputStreams[15] == 1)
be494d19 569 ALOGV("#### OutputStreamId : %d", currMetadata->shot.ctl.request.outputStreams[0]);
b56dcc00 570 else if (currMetadata->shot.ctl.request.outputStreams[15] == 2)
571 ALOGV("#### OutputStreamId : %d, %d", currMetadata->shot.ctl.request.outputStreams[0],
572 currMetadata->shot.ctl.request.outputStreams[1]);
13d8c7b4 573 else
b56dcc00 574 ALOGV("#### OutputStream num (%d) abnormal ", currMetadata->shot.ctl.request.outputStreams[15]);
575}
576
b56dcc00 577void RequestManager::UpdateIspParameters(struct camera2_shot_ext *shot_ext, int frameCnt)
13d8c7b4 578{
9dd63e1f 579 int index, targetStreamIndex;
b56dcc00 580 struct camera2_shot_ext * request_shot;
581
582 ALOGV("DEBUG(%s): updating info with frameCnt(%d)", __FUNCTION__, frameCnt);
583 if (frameCnt < 0)
13d8c7b4 584 return;
585
586 index = FindEntryIndexByFrameCnt(frameCnt);
587 if (index == -1) {
588 ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
589 return;
590 }
591
13d8c7b4 592 request_manager_entry * newEntry = &(entries[index]);
b56dcc00 593 request_shot = &newEntry->internal_shot;
594 shot_ext->request_sensor = 1;
595 shot_ext->request_scc = 0;
596 shot_ext->request_scp = 0;
597 shot_ext->shot.ctl.request.outputStreams[0] = 0;
598 shot_ext->shot.ctl.request.outputStreams[1] = 0;
599 shot_ext->shot.ctl.request.outputStreams[2] = 0;
600
b56dcc00 601 memcpy(&shot_ext->shot.ctl, &request_shot->shot.ctl, sizeof(struct camera2_ctl));
602 for (int i = 0; i < newEntry->output_stream_count; i++) {
603 // TODO : match with actual stream index;
b56dcc00 604 targetStreamIndex = request_shot->shot.ctl.request.outputStreams[i];
605
606 if (targetStreamIndex==0) {
9dd63e1f 607 ALOGV("DEBUG(%s): outputstreams item[%d] is for scalerP", __FUNCTION__, i);
13d8c7b4 608 shot_ext->request_scp = 1;
9dd63e1f 609 shot_ext->shot.ctl.request.outputStreams[0] = 1;
13d8c7b4 610 }
611 else if (targetStreamIndex == 1) {
612 ALOGV("DEBUG(%s): outputstreams item[%d] is for scalerC", __FUNCTION__, i);
13d8c7b4 613 shot_ext->request_scc = 1;
614 shot_ext->shot.ctl.request.outputStreams[1] = 1;
615 }
616 else if (targetStreamIndex == 2) {
617 ALOGV("DEBUG(%s): outputstreams item[%d] is for scalerP (record)", __FUNCTION__, i);
618 shot_ext->request_scp = 1;
619 shot_ext->shot.ctl.request.outputStreams[2] = 1;
620 }
621 else {
9dd63e1f 622 ALOGV("DEBUG(%s): outputstreams item[%d] has abnormal value(%d)", __FUNCTION__, i, targetStreamIndex);
623 }
624 }
625}
626
627int RequestManager::FindEntryIndexByFrameCnt(int frameCnt)
628{
629 for (int i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
be494d19 630 if (entries[i].internal_shot.shot.ctl.request.frameCount == frameCnt)
631 return i;
632 }
633 return -1;
634}
635
636void RequestManager::RegisterTimestamp(int frameCnt, nsecs_t * frameTime)
13d8c7b4 637{
638 int index = FindEntryIndexByFrameCnt(frameCnt);
639 if (index == -1) {
640 ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
641 return;
642 }
643
13d8c7b4 644 request_manager_entry * currentEntry = &(entries[index]);
be494d19 645 currentEntry->internal_shot.shot.dm.sensor.timeStamp = *((uint64_t*)frameTime);
9dd63e1f 646 ALOGV("DEBUG(%s): applied timestamp for reqIndex(%d) frameCnt(%d) (%lld)", __FUNCTION__,
be494d19 647 index, frameCnt, currentEntry->internal_shot.shot.dm.sensor.timeStamp);
648}
649
9dd63e1f 650uint64_t RequestManager::GetTimestamp(int frameCnt)
13d8c7b4 651{
652 int index = FindEntryIndexByFrameCnt(frameCnt);
653 if (index == -1) {
654 ALOGE("ERR(%s): Cannot find entry for frameCnt(%d)", __FUNCTION__, frameCnt);
655 return 0;
656 }
657
13d8c7b4 658 request_manager_entry * currentEntry = &(entries[index]);
be494d19 659 uint64_t frameTime = currentEntry->internal_shot.shot.dm.sensor.timeStamp;
9dd63e1f 660 ALOGV("DEBUG(%s): Returning timestamp for reqIndex(%d) (%lld)", __FUNCTION__, index, frameTime);
661 return frameTime;
662}
663
664int RequestManager::FindFrameCnt(struct camera2_shot_ext * shot_ext)
665{
be494d19 666 int tempIndex, i;
667 if (m_sensorPipelineSkipCnt > 0) {
668 m_sensorPipelineSkipCnt--;
669 return -1;
670 }
671 if (m_numOfEntries == 0) {
672 ALOGD("(%s): No Entry found", __FUNCTION__);
673 return -1;
674 }
9dd63e1f 675 tempIndex = GetNextIndex(m_entryFrameOutputIndex);
676 for (i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
677 if (entries[tempIndex].status == REQUESTED) {
678 entries[tempIndex].status = CAPTURED;
679 return entries[tempIndex].internal_shot.shot.ctl.request.frameCount;
680 }
681 else if (entries[tempIndex].status == CAPTURED) {
682 tempIndex = GetNextIndex(tempIndex);
683 continue;
684 }
685 else {
686 ALOGE("(%s): entry state abnormal status(%d)", __FUNCTION__, entries[tempIndex].status);
687 Dump();
688 return -1;
689 }
690 }
691 return -1;
9dd63e1f 692}
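/*
 * Summary of the request entry life cycle implemented above (added for
 * readability, derived from the methods of this class):
 *
 *   EMPTY      -> REGISTERED : RegisterRequest()       (service metadata converted to internal_shot)
 *   REGISTERED -> REQUESTED  : MarkProcessingRequest() (shot handed to the sensor/ISP path)
 *   REQUESTED  -> CAPTURED   : FindFrameCnt()          (sensor output matched to the request)
 *   CAPTURED   -> EMPTY      : DeregisterRequest() / PrepareFrame() (frame delivered or dropped)
 */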
694void RequestManager::SetInitialSkip(int count)
695{
696 ALOGV("(%s): Pipeline Restarting. setting cnt(%d) - current(%d)", __FUNCTION__, count, m_sensorPipelineSkipCnt);
697 if (count > m_sensorPipelineSkipCnt)
698 m_sensorPipelineSkipCnt = count;
699}
700
701void RequestManager::Dump(void)
702{
703 int i = 0;
704 request_manager_entry * currentEntry;
705 ALOGV("## Dump totalentry(%d), insert(%d), processing(%d), frame(%d)",
706 m_numOfEntries,m_entryInsertionIndex,m_entryProcessingIndex, m_entryFrameOutputIndex);
707
708 for (i = 0 ; i < NUM_MAX_REQUEST_MGR_ENTRY ; i++) {
709 currentEntry = &(entries[i]);
710 ALOGV("[%2d] status[%d] frameCnt[%3d] numOutput[%d]", i,
be494d19 711 currentEntry->status, currentEntry->internal_shot.shot.ctl.request.frameCount,
712 currentEntry->output_stream_count);
713 }
714}
716int RequestManager::GetNextIndex(int index)
717{
718 index++;
719 if (index >= NUM_MAX_REQUEST_MGR_ENTRY)
720 index = 0;
721
722 return index;
723}
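/*
 * Note (added for clarity): GetNextIndex() is a wrap-around increment over the
 * fixed-size entries[] array, i.e. equivalent to
 *
 *   next = (index + 1) % NUM_MAX_REQUEST_MGR_ENTRY;
 *
 * which is what makes the request manager behave as a circular buffer.
 */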
724
daa1fcd6 725ExynosCameraHWInterface2::ExynosCameraHWInterface2(int cameraId, camera2_device_t *dev, ExynosCamera2 * camera):
726 m_requestQueueOps(NULL),
727 m_frameQueueOps(NULL),
728 m_callbackCookie(NULL),
729 m_numOfRemainingReqInSvc(0),
730 m_isRequestQueuePending(false),
13d8c7b4 731 m_isRequestQueueNull(true),
c15a6b00 732 m_isSensorThreadOn(false),
733 m_isSensorStarted(false),
734 m_ionCameraClient(0),
735 m_initFlag1(false),
736 m_initFlag2(false),
737 m_scp_flushing(false),
738 m_closing(false),
739 m_recordingEnabled(false),
740 m_needsRecordBufferInit(false),
741 lastFrameCnt(-1),
742 m_scp_closing(false),
743 m_scp_closed(false),
9dd63e1f 744 m_halDevice(dev),
be494d19 745 m_cameraId(cameraId)
746{
747 ALOGV("DEBUG(%s):", __FUNCTION__);
748 int ret = 0;
749
13d8c7b4 750 m_exynosPictureCSC = NULL;
9dd63e1f 751 m_exynosVideoCSC = NULL;
753 if (!m_grallocHal) {
754 ret = hw_get_module(GRALLOC_HARDWARE_MODULE_ID, (const hw_module_t **)&m_grallocHal);
755 if (ret)
756 ALOGE("ERR(%s):Fail on loading gralloc HAL", __FUNCTION__);
757 }
daa1fcd6 759 m_camera2 = camera;
760 m_ionCameraClient = createIonClient(m_ionCameraClient);
761 if(m_ionCameraClient == 0)
13d8c7b4 762 ALOGE("ERR(%s):Fail on ion_client_create", __FUNCTION__);
764
765 m_BayerManager = new BayerBufManager();
766 m_mainThread = new MainThread(this);
767 m_sensorThread = new SensorThread(this);
768 m_ispThread = new IspThread(this);
769 m_mainThread->Start("MainThread", PRIORITY_DEFAULT, 0);
770 ALOGV("DEBUG(%s): created sensorthread ################", __FUNCTION__);
771 usleep(1600000);
772
c15a6b00 773 m_requestManager = new RequestManager((SignalDrivenThread*)(m_mainThread.get()));
774 CSC_METHOD cscMethod = CSC_METHOD_HW;
775 m_exynosPictureCSC = csc_init(cscMethod);
776 if (m_exynosPictureCSC == NULL)
777 ALOGE("ERR(%s): csc_init() fail", __FUNCTION__);
778 csc_set_hw_property(m_exynosPictureCSC, CSC_HW_PROPERTY_FIXED_NODE, PICTURE_GSC_NODE_NUM);
780 m_exynosVideoCSC = csc_init(cscMethod);
781 if (m_exynosVideoCSC == NULL)
782 ALOGE("ERR(%s): csc_init() fail", __FUNCTION__);
783 csc_set_hw_property(m_exynosVideoCSC, CSC_HW_PROPERTY_FIXED_NODE, PREVIEW_GSC_NODE_NUM);
784
13d8c7b4 785 ALOGV("DEBUG(%s): END", __FUNCTION__);
786}
787
788ExynosCameraHWInterface2::~ExynosCameraHWInterface2()
789{
9dd63e1f 790 ALOGD("%s: ENTER", __FUNCTION__);
c15a6b00 791 this->release();
9dd63e1f 792 ALOGD("%s: EXIT", __FUNCTION__);
793}
794
795void ExynosCameraHWInterface2::release()
796{
13d8c7b4 797 int i, res;
9dd63e1f 798 ALOGD("%s: ENTER", __func__);
13d8c7b4 799 m_closing = true;
800
801 while (!m_scp_closed)
802 usleep(1000);
803 if (m_ispThread != NULL) {
804 m_ispThread->release();
805 }
806
807 if (m_sensorThread != NULL) {
808 m_sensorThread->release();
13d8c7b4 809 }
810
811 if (m_mainThread != NULL) {
13d8c7b4 812 m_mainThread->release();
813 }
814
815 if (m_streamThreads[0] != NULL) {
816 m_streamThreads[0]->release();
9dd63e1f 817 m_streamThreads[0]->SetSignal(SIGNAL_THREAD_TERMINATE);
c15a6b00 818 }
820 if (m_streamThreads[1] != NULL) {
821 m_streamThreads[1]->release();
9dd63e1f 822 m_streamThreads[1]->SetSignal(SIGNAL_THREAD_TERMINATE);
823 }
824
825
826 if (m_exynosPictureCSC)
827 csc_deinit(m_exynosPictureCSC);
828 m_exynosPictureCSC = NULL;
829
830 if (m_exynosVideoCSC)
831 csc_deinit(m_exynosVideoCSC);
832 m_exynosVideoCSC = NULL;
833
834 if (m_ispThread != NULL) {
835 while (!m_ispThread->IsTerminated())
836 usleep(1000);
837 m_ispThread = NULL;
838 }
839
840 if (m_sensorThread != NULL) {
841 while (!m_sensorThread->IsTerminated())
842 usleep(1000);
843 m_sensorThread = NULL;
844 }
845
846 if (m_mainThread != NULL) {
847 while (!m_mainThread->IsTerminated())
848 usleep(1000);
849 m_mainThread = NULL;
850 }
851
852 if (m_streamThreads[0] != NULL) {
853 while (!m_streamThreads[0]->IsTerminated())
854 usleep(1000);
855 m_streamThreads[0] = NULL;
856 }
857
858 if (m_streamThreads[1] != NULL) {
859 while (!m_streamThreads[1]->IsTerminated())
860 usleep(1000);
861 m_streamThreads[1] = NULL;
862 }
863
864 for(i = 0; i < m_camera_info.sensor.buffers; i++)
865 freeCameraMemory(&m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes);
866
867 for(i = 0; i < m_camera_info.capture.buffers; i++)
868 freeCameraMemory(&m_camera_info.capture.buffer[i], m_camera_info.capture.planes);
869
9dd63e1f 870 ALOGV("DEBUG(%s): calling exynos_v4l2_close - sensor", __FUNCTION__);
871 res = exynos_v4l2_close(m_camera_info.sensor.fd);
872 if (res != NO_ERROR ) {
9dd63e1f 873 ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
874 }
875
9dd63e1f 876 ALOGV("DEBUG(%s): calling exynos_v4l2_close - isp", __FUNCTION__);
877 res = exynos_v4l2_close(m_camera_info.isp.fd);
878 if (res != NO_ERROR ) {
9dd63e1f 879 ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
880 }
881
9dd63e1f 882 ALOGV("DEBUG(%s): calling exynos_v4l2_close - capture", __FUNCTION__);
883 res = exynos_v4l2_close(m_camera_info.capture.fd);
884 if (res != NO_ERROR ) {
9dd63e1f 885 ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
886 }
887
888 ALOGV("DEBUG(%s): calling exynos_v4l2_close - scp", __FUNCTION__);
889 res = exynos_v4l2_close(m_fd_scp);
13d8c7b4 890 if (res != NO_ERROR ) {
9dd63e1f 891 ALOGE("ERR(%s): exynos_v4l2_close failed(%d)",__FUNCTION__ , res);
13d8c7b4 892 }
9dd63e1f 893 ALOGV("DEBUG(%s): calling deleteIonClient", __FUNCTION__);
c15a6b00 894 deleteIonClient(m_ionCameraClient);
895
896 ALOGD("%s: EXIT", __func__);
897}
898
899int ExynosCameraHWInterface2::getCameraId() const
900{
9dd63e1f 901 return m_cameraId;
c15a6b00 902}
903
904int ExynosCameraHWInterface2::setRequestQueueSrcOps(const camera2_request_queue_src_ops_t *request_src_ops)
905{
13d8c7b4 906 ALOGV("DEBUG(%s):", __FUNCTION__);
907 if ((NULL != request_src_ops) && (NULL != request_src_ops->dequeue_request)
908 && (NULL != request_src_ops->free_request) && (NULL != request_src_ops->request_count)) {
909 m_requestQueueOps = (camera2_request_queue_src_ops_t*)request_src_ops;
910 return 0;
911 }
912 else {
13d8c7b4 913 ALOGE("DEBUG(%s):setRequestQueueSrcOps : NULL arguments", __FUNCTION__);
914 return 1;
915 }
916}
917
918int ExynosCameraHWInterface2::notifyRequestQueueNotEmpty()
919{
b5237e6b 920 ALOGV("DEBUG(%s):setting [SIGNAL_MAIN_REQ_Q_NOT_EMPTY] current(%d)", __FUNCTION__, m_requestManager->GetNumEntries());
c15a6b00 921 if ((NULL==m_frameQueueOps)|| (NULL==m_requestQueueOps)) {
13d8c7b4 922 ALOGE("DEBUG(%s):queue ops NULL. ignoring request", __FUNCTION__);
923 return 0;
924 }
13d8c7b4 925 m_isRequestQueueNull = false;
926 if (m_requestManager->GetNumEntries() == 0)
927 m_requestManager->SetInitialSkip(5);
928 m_mainThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY);
929 return 0;
930}
931
932int ExynosCameraHWInterface2::setFrameQueueDstOps(const camera2_frame_queue_dst_ops_t *frame_dst_ops)
933{
13d8c7b4 934 ALOGV("DEBUG(%s):", __FUNCTION__);
935 if ((NULL != frame_dst_ops) && (NULL != frame_dst_ops->dequeue_frame)
936 && (NULL != frame_dst_ops->cancel_frame) && (NULL !=frame_dst_ops->enqueue_frame)) {
937 m_frameQueueOps = (camera2_frame_queue_dst_ops_t *)frame_dst_ops;
938 return 0;
939 }
940 else {
13d8c7b4 941 ALOGE("DEBUG(%s):setFrameQueueDstOps : NULL arguments", __FUNCTION__);
942 return 1;
943 }
944}
945
946int ExynosCameraHWInterface2::getInProgressCount()
947{
948 int inProgressCount = m_requestManager->GetNumEntries();
13d8c7b4 949 ALOGV("DEBUG(%s): # of dequeued req (%d)", __FUNCTION__, inProgressCount);
950 return inProgressCount;
951}
952
953int ExynosCameraHWInterface2::flushCapturesInProgress()
954{
955 return 0;
956}
957
958int ExynosCameraHWInterface2::constructDefaultRequest(int request_template, camera_metadata_t **request)
959{
13d8c7b4 960 ALOGV("DEBUG(%s): making template (%d) ", __FUNCTION__, request_template);
961
962 if (request == NULL) return BAD_VALUE;
963 if (request_template < 0 || request_template >= CAMERA2_TEMPLATE_COUNT) {
964 return BAD_VALUE;
965 }
966 status_t res;
967 // Pass 1, calculate size and allocate
daa1fcd6 968 res = m_camera2->constructDefaultRequest(request_template,
969 request,
970 true);
971 if (res != OK) {
972 return res;
973 }
974 // Pass 2, build request
daa1fcd6 975 res = m_camera2->constructDefaultRequest(request_template,
976 request,
977 false);
978 if (res != OK) {
979 ALOGE("Unable to populate new request for template %d",
980 request_template);
981 }
982
983 return res;
984}
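/*
 * Note (added for clarity): the two constructDefaultRequest() calls above
 * follow the usual "measure then fill" pattern for camera_metadata buffers -
 * the first pass sizes and allocates the template, the second pass writes the
 * default settings into it.
 */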
985
986int ExynosCameraHWInterface2::allocateStream(uint32_t width, uint32_t height, int format, const camera2_stream_ops_t *stream_ops,
987 uint32_t *stream_id, uint32_t *format_actual, uint32_t *usage, uint32_t *max_buffers)
988{
13d8c7b4 989 ALOGD("DEBUG(%s): allocate stream width(%d) height(%d) format(%x)", __FUNCTION__, width, height, format);
c15a6b00 990 char node_name[30];
9dd63e1f 991 int fd = 0, allocCase = 0;
992 StreamThread *AllocatedStream;
993 stream_parameters_t newParameters;
c15a6b00 994
995 if (format == CAMERA2_HAL_PIXEL_FORMAT_OPAQUE &&
996 m_camera2->isSupportedResolution(width, height)) {
997 if (!(m_streamThreads[0].get())) {
998 ALOGV("DEBUG(%s): stream 0 not exist", __FUNCTION__);
999 allocCase = 0;
1000 }
1001 else {
6bbb593a 1002 if ((m_streamThreads[0].get())->m_activated == true) {
1003 ALOGV("DEBUG(%s): stream 0 exists and activated.", __FUNCTION__);
1004 allocCase = 1;
1005 }
1006 else {
1007 ALOGV("DEBUG(%s): stream 0 exists and deactivated.", __FUNCTION__);
1008 allocCase = 2;
1009 }
13d8c7b4 1010 }
1011 if (allocCase == 0 || allocCase == 2) {
1012 *stream_id = 0;
1013
1014 if (allocCase == 0) {
1015 m_streamThreads[0] = new StreamThread(this, *stream_id);
1016
1017
1018 memset(&node_name, 0x00, sizeof(char[30]));
1019 sprintf(node_name, "%s%d", NODE_PREFIX, 44);
1020 fd = exynos_v4l2_open(node_name, O_RDWR, 0);
1021 if (fd < 0) {
1022 ALOGE("DEBUG(%s): failed to open preview video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
1023 }
1024 else {
1025 ALOGV("DEBUG(%s): preview video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
1026 }
1027 m_fd_scp = fd;
1028 }
1029 AllocatedStream = (StreamThread*)(m_streamThreads[0].get());
1030 m_scp_flushing = false;
1031 m_scp_closing = false;
1032 m_scp_closed = false;
1033 usleep(100000); // TODO : guarantee the codes below will be run after readyToRunInternal()
1034
1035 *format_actual = HAL_PIXEL_FORMAT_YV12;
6bbb593a 1036 *usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
9dd63e1f
SK
1037 *max_buffers = 8;
1038
1039 newParameters.streamType = 0;
1040 newParameters.outputWidth = width;
1041 newParameters.outputHeight = height;
1042 newParameters.nodeWidth = width;
1043 newParameters.nodeHeight = height;
1044 newParameters.outputFormat = *format_actual;
1045 newParameters.nodeFormat = HAL_PIXEL_FORMAT_2_V4L2_PIX(*format_actual);
1046 newParameters.streamOps = stream_ops;
1047 newParameters.usage = *usage;
1048 newParameters.numHwBuffers = 8;
1049 newParameters.numOwnSvcBuffers = *max_buffers;
1050 newParameters.fd = m_fd_scp;
1051 newParameters.nodePlanes = 3;
1052 newParameters.svcPlanes = 3;
1053 newParameters.halBuftype = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1054 newParameters.memory = V4L2_MEMORY_DMABUF;
1055 newParameters.ionClient = m_ionCameraClient;
be494d19 1056 newParameters.numSvcBufsInHal = 0;
1057 AllocatedStream->m_index = *stream_id;
1058 AllocatedStream->setParameter(&newParameters);
1059 AllocatedStream->m_activated = true;
1061 m_scp_flushing = false;
1062 m_scp_closing = false;
1063 m_scp_closed = false;
1064 m_requestManager->SetDefaultParameters(width);
1065 m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[2] = width;
1066 return 0;
1067 }
1068 else if (allocCase == 1) {
1069 record_parameters_t recordParameters;
1070 StreamThread *parentStream;
1071 parentStream = (StreamThread*)(m_streamThreads[0].get());
1072 if (!parentStream) {
1073 return 1;
1074 // TODO
1075 }
1076 *stream_id = 2;
1077 usleep(100000); // TODO : guarantee the codes below will be run after readyToRunInternal()
1078
804236a7 1079 *format_actual = HAL_PIXEL_FORMAT_YCbCr_420_SP; // NV12M
6bbb593a 1080 *usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
1081 *max_buffers = 10;
1082
1083 recordParameters.outputWidth = width;
1084 recordParameters.outputHeight = height;
1085 recordParameters.outputFormat = *format_actual;
804236a7 1086 recordParameters.svcPlanes = NUM_PLANES(*format_actual);
1087 recordParameters.streamOps = stream_ops;
1088 recordParameters.usage = *usage;
1089 recordParameters.numOwnSvcBuffers = *max_buffers;
1090 recordParameters.numSvcBufsInHal = 0;
1091
1092 parentStream->setRecordingParameter(&recordParameters);
1093 m_scp_flushing = false;
1094 m_scp_closing = false;
1095 m_scp_closed = false;
1096 m_recordingEnabled = true;
1097 return 0;
1098 }
13d8c7b4 1099 }
9dd63e1f 1100 else if (format == HAL_PIXEL_FORMAT_BLOB
daa1fcd6 1101 && m_camera2->isSupportedJpegResolution(width, height)) {
1102
1103 *stream_id = 1;
1104
1105 m_streamThreads[1] = new StreamThread(this, *stream_id);
1106 AllocatedStream = (StreamThread*)(m_streamThreads[1].get());
1108 fd = m_camera_info.capture.fd;
1109 usleep(100000); // TODO : guarantee the codes below will be run after readyToRunInternal()
1110
1111 *format_actual = HAL_PIXEL_FORMAT_BLOB;
1112
1113 *usage = GRALLOC_USAGE_SW_WRITE_OFTEN;
1114 *max_buffers = 8;
1115
1116 newParameters.streamType = 1;
1117 newParameters.outputWidth = width;
1118 newParameters.outputHeight = height;
1120 newParameters.nodeWidth = m_camera2->getSensorW();
1121 newParameters.nodeHeight = m_camera2->getSensorH();
1123 newParameters.outputFormat = *format_actual;
1124 newParameters.nodeFormat = V4L2_PIX_FMT_YUYV;
1125 newParameters.streamOps = stream_ops;
1126 newParameters.usage = *usage;
1127 newParameters.numHwBuffers = 8;
1128 newParameters.numOwnSvcBuffers = *max_buffers;
1129 newParameters.fd = fd;
1130 newParameters.nodePlanes = 1;
1131 newParameters.svcPlanes = 1;
1132 newParameters.halBuftype = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
1133 newParameters.memory = V4L2_MEMORY_DMABUF;
1134 newParameters.ionClient = m_ionCameraClient;
be494d19 1135 newParameters.numSvcBufsInHal = 0;
9dd63e1f 1136 AllocatedStream->m_index = *stream_id;
1137 AllocatedStream->setParameter(&newParameters);
1138 return 0;
1139 }
1140 ALOGE("DEBUG(%s): Unsupported Pixel Format", __FUNCTION__);
1141 return 1; // TODO : check proper error code
1142}
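/*
 * Summary (added for clarity, derived from the branches above): the stream ids
 * returned to the framework map onto the hardware paths as follows.
 *
 *   stream 0 : preview   - scaler-P video node (NODE_PREFIX, index 44), YV12, up to 8 buffers
 *   stream 1 : still     - capture node, HAL_PIXEL_FORMAT_BLOB (JPEG), up to 8 buffers
 *   stream 2 : recording - shares the scaler-P stream thread, NV12M gralloc buffers, up to 10 buffers
 */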
1143
1144int ExynosCameraHWInterface2::registerStreamBuffers(uint32_t stream_id,
1145 int num_buffers, buffer_handle_t *registeringBuffers)
c15a6b00 1146{
1147 int i,j;
1148 void *virtAddr[3];
1149 uint32_t plane_index = 0;
1150 stream_parameters_t *targetStreamParms;
9dd63e1f 1151 record_parameters_t *targetRecordParms;
1152 node_info_t *currentNode;
1153
1154 struct v4l2_buffer v4l2_buf;
1155 struct v4l2_plane planes[VIDEO_MAX_PLANES];
1156
1157 ALOGV("DEBUG(%s): streamID (%d), num_buff(%d), handle(%x) ", __FUNCTION__,
1158 stream_id, num_buffers, (uint32_t)registeringBuffers);
1159
c15a6b00 1160 if (stream_id == 0) {
1161 targetStreamParms = &(m_streamThreads[0]->m_parameters);
1162 }
1163 else if (stream_id == 1) {
1164 targetStreamParms = &(m_streamThreads[1]->m_parameters);
1165 }
1166 else if (stream_id == 2) {
1167 targetRecordParms = &(m_streamThreads[0]->m_recordParameters);
1168
1169 targetRecordParms->numSvcBuffers = num_buffers;
1170
1171 for (i = 0 ; i<targetRecordParms->numSvcBuffers ; i++) {
1172 ALOGV("DEBUG(%s): registering Stream Buffers[%d] (%x) ", __FUNCTION__,
1173 i, (uint32_t)(registeringBuffers[i]));
1174 if (m_grallocHal) {
1175 if (m_grallocHal->lock(m_grallocHal, registeringBuffers[i],
1176 targetRecordParms->usage, 0, 0,
1177 targetRecordParms->outputWidth, targetRecordParms->outputHeight, virtAddr) != 0) {
1178 ALOGE("ERR(%s): could not obtain gralloc buffer", __FUNCTION__);
1179 }
1180 else {
1181 ExynosBuffer currentBuf;
1182 const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(registeringBuffers[i]);
9dd63e1f 1183 currentBuf.fd.extFd[0] = priv_handle->fd;
1184 currentBuf.fd.extFd[1] = priv_handle->fd1;
1185 currentBuf.fd.extFd[2] = priv_handle->fd2;
1186 for (plane_index=0 ; plane_index < targetRecordParms->svcPlanes ; plane_index++) {
1187 currentBuf.virt.extP[plane_index] = (char *)virtAddr[plane_index];
1188 ALOGV("DEBUG(%s): plane(%d): fd(%d) addr(%x)",
804236a7 1189 __FUNCTION__, plane_index, currentBuf.fd.extFd[plane_index],
9dd63e1f
SK
1190 (unsigned int)currentBuf.virt.extP[plane_index]);
1191 }
1192 targetRecordParms->svcBufStatus[i] = ON_SERVICE;
1193 targetRecordParms->svcBuffers[i] = currentBuf;
1194 targetRecordParms->svcBufHandle[i] = registeringBuffers[i];
1195 }
1196 }
1197 }
1198 m_needsRecordBufferInit = true;
1199 return 0;
1200 }
1201 else {
1202 ALOGE("ERR(%s) unregistered stream id (%d)", __FUNCTION__, stream_id);
be494d19 1203 return 1;
13d8c7b4 1204 }
be494d19 1206 if (targetStreamParms->streamType == 0) {
1207 if (num_buffers < targetStreamParms->numHwBuffers) {
1208 ALOGE("ERR(%s) registering insufficient num of buffers (%d) < (%d)",
1209 __FUNCTION__, num_buffers, targetStreamParms->numHwBuffers);
be494d19 1210 return 1;
1211 }
1212 }
1213 ALOGV("DEBUG(%s): format(%x) width(%d), height(%d) svcPlanes(%d)",
1214 __FUNCTION__, targetStreamParms->outputFormat, targetStreamParms->outputWidth,
1215 targetStreamParms->outputHeight, targetStreamParms->svcPlanes);
1216
1217 targetStreamParms->numSvcBuffers = num_buffers;
1218 currentNode = &(targetStreamParms->node); // TO Remove
1219
1220 currentNode->fd = targetStreamParms->fd;
1221 currentNode->width = targetStreamParms->nodeWidth;
1222 currentNode->height = targetStreamParms->nodeHeight;
1223 currentNode->format = targetStreamParms->nodeFormat;
1224 currentNode->planes = targetStreamParms->nodePlanes;
1225 currentNode->buffers = targetStreamParms->numHwBuffers;
1226 currentNode->type = targetStreamParms->halBuftype;
1227 currentNode->memory = targetStreamParms->memory;
1228 currentNode->ionClient = targetStreamParms->ionClient;
1229
1230 if (targetStreamParms->streamType == 0) {
1231 cam_int_s_input(currentNode, m_camera_info.sensor_id);
1232 cam_int_s_fmt(currentNode);
1233 cam_int_reqbufs(currentNode);
1234 }
1235 else if (targetStreamParms->streamType == 1) {
1236 for(i = 0; i < currentNode->buffers; i++){
1237 memcpy(&(currentNode->buffer[i]), &(m_camera_info.capture.buffer[i]), sizeof(ExynosBuffer));
1238 }
1239 }
1240
1241 for (i = 0 ; i<targetStreamParms->numSvcBuffers ; i++) {
1242 ALOGV("DEBUG(%s): registering Stream Buffers[%d] (%x) ", __FUNCTION__,
1243 i, (uint32_t)(registeringBuffers[i]));
1244 if (m_grallocHal) {
1245 if (m_grallocHal->lock(m_grallocHal, registeringBuffers[i],
1246 targetStreamParms->usage, 0, 0,
1247 currentNode->width, currentNode->height, virtAddr) != 0) {
1248 ALOGE("ERR(%s): could not obtain gralloc buffer", __FUNCTION__);
1249 }
1250 else {
1251 v4l2_buf.m.planes = planes;
1252 v4l2_buf.type = currentNode->type;
1253 v4l2_buf.memory = currentNode->memory;
1254 v4l2_buf.index = i;
1255 v4l2_buf.length = currentNode->planes;
1257 ExynosBuffer currentBuf;
1258 const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(registeringBuffers[i]);
1259
1260 m_getAlignedYUVSize(currentNode->format,
1261 currentNode->width, currentNode->height, &currentBuf);
9dd63e1f 1263 v4l2_buf.m.planes[0].m.fd = priv_handle->fd;
1264 v4l2_buf.m.planes[2].m.fd = priv_handle->fd1;
1265 v4l2_buf.m.planes[1].m.fd = priv_handle->fd2;
9dd63e1f 1266 currentBuf.fd.extFd[0] = priv_handle->fd;
1267 currentBuf.fd.extFd[2] = priv_handle->fd1;
1268 currentBuf.fd.extFd[1] = priv_handle->fd2;
1269 ALOGV("DEBUG(%s): ion_size(%d), stride(%d), ", __FUNCTION__,priv_handle->size, priv_handle->stride);
1270
1271
be494d19 1272 for (plane_index = 0 ; plane_index < v4l2_buf.length ; plane_index++) {
1273 currentBuf.virt.extP[plane_index] = (char *)virtAddr[plane_index];
1274 v4l2_buf.m.planes[plane_index].length = currentBuf.size.extS[plane_index];
1275 ALOGV("DEBUG(%s): plane(%d): fd(%d) addr(%x), length(%d)",
1276 __FUNCTION__, plane_index, v4l2_buf.m.planes[plane_index].m.fd,
1277 (unsigned int)currentBuf.virt.extP[plane_index],
1278 v4l2_buf.m.planes[plane_index].length);
1279 }
1281 if (targetStreamParms->streamType == 0) {
1282 if (i < currentNode->buffers) {
1283 if (exynos_v4l2_qbuf(currentNode->fd, &v4l2_buf) < 0) {
1284 ALOGE("ERR(%s): stream id(%d) exynos_v4l2_qbuf() fail fd(%d)",
1285 __FUNCTION__, stream_id, currentNode->fd);
1286 //return false;
13d8c7b4 1287 }
1288 ALOGV("DEBUG(%s): stream id(%d) exynos_v4l2_qbuf() success fd(%d)",
1289 __FUNCTION__, stream_id, currentNode->fd);
1290 targetStreamParms->svcBufStatus[i] = REQUIRES_DQ_FROM_SVC;
1291 }
1292 else {
1293 targetStreamParms->svcBufStatus[i] = ON_SERVICE;
1294 }
1295 }
1296 else if (targetStreamParms->streamType == 1) {
1297 targetStreamParms->svcBufStatus[i] = ON_SERVICE;
c15a6b00 1298 }
1299 targetStreamParms->svcBuffers[i] = currentBuf;
1300 targetStreamParms->svcBufHandle[i] = registeringBuffers[i];
1301 }
c15a6b00 1302 }
c15a6b00 1303 }
1304 ALOGV("DEBUG(%s): calling streamon", __FUNCTION__);
1305 cam_int_streamon(&(targetStreamParms->node));
1306 ALOGV("DEBUG(%s): calling streamon END", __FUNCTION__);
13d8c7b4 1307 ALOGV("DEBUG(%s): END registerStreamBuffers", __FUNCTION__);
1308 return 0;
1309}
1310
1311int ExynosCameraHWInterface2::releaseStream(uint32_t stream_id)
1312{
1313 StreamThread *targetStream;
1314 ALOGV("DEBUG(%s):", __FUNCTION__);
1315
b5237e6b 1316 if (stream_id == 0) {
13d8c7b4 1317 targetStream = (StreamThread*)(m_streamThreads[0].get());
9dd63e1f 1318 m_scp_flushing = true;
13d8c7b4 1319 }
b5237e6b 1320 else if (stream_id == 1) {
1321 targetStream = (StreamThread*)(m_streamThreads[1].get());
1322 }
1323 else if (stream_id == 2 && m_recordingEnabled) {
1324 m_recordingEnabled = false;
1325 return 0;
1326 }
1327 else {
1328 ALOGE("ERR:(%s): wrong stream id (%d)", __FUNCTION__, stream_id);
be494d19 1329 return 1;
13d8c7b4
SK
1330 }
1331
b5237e6b 1332 targetStream->m_releasing = true;
13d8c7b4 1333 targetStream->release();
1334 while (targetStream->m_releasing)
1335 usleep(2000);
9dd63e1f 1336 targetStream->m_activated = false;
13d8c7b4 1337 ALOGV("DEBUG(%s): DONE", __FUNCTION__);
1338 return 0;
1339}
1340
1341int ExynosCameraHWInterface2::allocateReprocessStream(
1342 uint32_t width, uint32_t height, uint32_t format,
1343 const camera2_stream_in_ops_t *reprocess_stream_ops,
1344 uint32_t *stream_id, uint32_t *consumer_usage, uint32_t *max_buffers)
1345{
13d8c7b4 1346 ALOGV("DEBUG(%s):", __FUNCTION__);
1347 return 0;
1348}
1349
1350int ExynosCameraHWInterface2::releaseReprocessStream(uint32_t stream_id)
1351{
13d8c7b4 1352 ALOGV("DEBUG(%s):", __FUNCTION__);
1353 return 0;
1354}
1355
1356int ExynosCameraHWInterface2::triggerAction(uint32_t trigger_id, int ext1, int ext2)
1357{
13d8c7b4 1358 ALOGV("DEBUG(%s):", __FUNCTION__);
1359 return 0;
1360}
1361
1362int ExynosCameraHWInterface2::setNotifyCallback(camera2_notify_callback notify_cb, void *user)
1363{
13d8c7b4 1364 ALOGV("DEBUG(%s):", __FUNCTION__);
1365 m_notifyCb = notify_cb;
1366 m_callbackCookie = user;
1367 return 0;
1368}
1369
1370int ExynosCameraHWInterface2::getMetadataVendorTagOps(vendor_tag_query_ops_t **ops)
1371{
13d8c7b4 1372 ALOGV("DEBUG(%s):", __FUNCTION__);
1373 return 0;
1374}
1375
1376int ExynosCameraHWInterface2::dump(int fd)
1377{
13d8c7b4 1378 ALOGV("DEBUG(%s):", __FUNCTION__);
1379 return 0;
1380}
1381
1382void ExynosCameraHWInterface2::m_getAlignedYUVSize(int colorFormat, int w, int h, ExynosBuffer *buf)
1383{
1384 switch (colorFormat) {
1385 // 1p
1386 case V4L2_PIX_FMT_RGB565 :
1387 case V4L2_PIX_FMT_YUYV :
1388 case V4L2_PIX_FMT_UYVY :
1389 case V4L2_PIX_FMT_VYUY :
1390 case V4L2_PIX_FMT_YVYU :
1391 buf->size.extS[0] = FRAME_SIZE(V4L2_PIX_2_HAL_PIXEL_FORMAT(colorFormat), w, h);
1392 buf->size.extS[1] = 0;
1393 buf->size.extS[2] = 0;
1394 break;
1395 // 2p
1396 case V4L2_PIX_FMT_NV12 :
1397 case V4L2_PIX_FMT_NV12T :
1398 case V4L2_PIX_FMT_NV21 :
1399 buf->size.extS[0] = ALIGN(w, 16) * ALIGN(h, 16);
1400 buf->size.extS[1] = ALIGN(w/2, 16) * ALIGN(h/2, 16);
1401 buf->size.extS[2] = 0;
1402 break;
1403 case V4L2_PIX_FMT_NV12M :
1404 case V4L2_PIX_FMT_NV12MT_16X16 :
9dd63e1f 1405 case V4L2_PIX_FMT_NV21M:
13d8c7b4
SK
1406 buf->size.extS[0] = ALIGN(w, 16) * ALIGN(h, 16);
1407 buf->size.extS[1] = ALIGN(buf->size.extS[0] / 2, 256);
1408 buf->size.extS[2] = 0;
1409 break;
1410 case V4L2_PIX_FMT_NV16 :
1411 case V4L2_PIX_FMT_NV61 :
1412 buf->size.extS[0] = ALIGN(w, 16) * ALIGN(h, 16);
1413 buf->size.extS[1] = ALIGN(w, 16) * ALIGN(h, 16);
1414 buf->size.extS[2] = 0;
1415 break;
1416 // 3p
1417 case V4L2_PIX_FMT_YUV420 :
1418 case V4L2_PIX_FMT_YVU420 :
1419 buf->size.extS[0] = (w * h);
1420 buf->size.extS[1] = (w * h) >> 2;
1421 buf->size.extS[2] = (w * h) >> 2;
1422 break;
1423 case V4L2_PIX_FMT_YUV420M:
1424 case V4L2_PIX_FMT_YVU420M :
1425 case V4L2_PIX_FMT_YUV422P :
1426 buf->size.extS[0] = ALIGN(w, 32) * ALIGN(h, 16);
1427 buf->size.extS[1] = ALIGN(w/2, 16) * ALIGN(h/2, 8);
1428 buf->size.extS[2] = ALIGN(w/2, 16) * ALIGN(h/2, 8);
1429 break;
1430 default:
1431 ALOGE("ERR(%s):unmatched colorFormat(%d)", __FUNCTION__, colorFormat);
1432 return;
1433 break;
1434 }
1435}
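/*
 * Illustrative sketch (added for documentation; an assumption, since the macro
 * is defined elsewhere): ALIGN(x, a) as used above is taken to round x up to
 * the next multiple of a power-of-two alignment, e.g. ALIGN(1080, 16) == 1088.
 */
static inline unsigned int sketch_align_up(unsigned int x, unsigned int a)
{
    return (x + a - 1) & ~(a - 1);  /* valid when 'a' is a power of two */
}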
1437bool ExynosCameraHWInterface2::m_getRatioSize(int src_w, int src_h,
1438 int dst_w, int dst_h,
1439 int *crop_x, int *crop_y,
1440 int *crop_w, int *crop_h,
1441 int zoom)
1443 *crop_w = src_w;
1444 *crop_h = src_h;
1445
1446 if ( src_w != dst_w
1447 || src_h != dst_h) {
1448 float src_ratio = 1.0f;
1449 float dst_ratio = 1.0f;
1450
1451 // ex : 1024 / 768
1452 src_ratio = (float)src_w / (float)src_h;
1453
1454 // ex : 352 / 288
1455 dst_ratio = (float)dst_w / (float)dst_h;
1456
1457 if (dst_w * dst_h < src_w * src_h) {
1458 if (dst_ratio <= src_ratio) {
1459 // shrink w
1460 *crop_w = src_h * dst_ratio;
1461 *crop_h = src_h;
1462 } else {
1463 // shrink h
1464 *crop_w = src_w;
1465 *crop_h = src_w / dst_ratio;
c15a6b00 1466 }
1467 } else {
1468 if (dst_ratio <= src_ratio) {
1469 // shrink w
1470 *crop_w = src_h * dst_ratio;
1471 *crop_h = src_h;
1472 } else {
1473 // shrink h
1474 *crop_w = src_w;
1475 *crop_h = src_w / dst_ratio;
1476 }
1477 }
1478 }
1479
1480 if (zoom != 0) {
1481 float zoomLevel = ((float)zoom + 10.0) / 10.0;
1482 *crop_w = (int)((float)*crop_w / zoomLevel);
1483 *crop_h = (int)((float)*crop_h / zoomLevel);
1484 }
1485
1486 #define CAMERA_CROP_WIDTH_RESTRAIN_NUM (0x2)
1487 unsigned int w_align = (*crop_w & (CAMERA_CROP_WIDTH_RESTRAIN_NUM - 1));
1488 if (w_align != 0) {
1489 if ( (CAMERA_CROP_WIDTH_RESTRAIN_NUM >> 1) <= w_align
1490 && *crop_w + (CAMERA_CROP_WIDTH_RESTRAIN_NUM - w_align) <= dst_w) {
1491 *crop_w += (CAMERA_CROP_WIDTH_RESTRAIN_NUM - w_align);
1492 }
1493 else
1494 *crop_w -= w_align;
1495 }
1496
1497 #define CAMERA_CROP_HEIGHT_RESTRAIN_NUM (0x2)
1498 unsigned int h_align = (*crop_h & (CAMERA_CROP_HEIGHT_RESTRAIN_NUM - 1));
1499 if (h_align != 0) {
1500 if ( (CAMERA_CROP_HEIGHT_RESTRAIN_NUM >> 1) <= h_align
1501 && *crop_h + (CAMERA_CROP_HEIGHT_RESTRAIN_NUM - h_align) <= dst_h) {
1502 *crop_h += (CAMERA_CROP_HEIGHT_RESTRAIN_NUM - h_align);
1503 }
1504 else
1505 *crop_h -= h_align;
1506 }
1507
1508 *crop_x = (src_w - *crop_w) >> 1;
1509 *crop_y = (src_h - *crop_h) >> 1;
1510
1511 if (*crop_x & (CAMERA_CROP_WIDTH_RESTRAIN_NUM >> 1))
1512 *crop_x -= 1;
1513
1514 if (*crop_y & (CAMERA_CROP_HEIGHT_RESTRAIN_NUM >> 1))
1515 *crop_y -= 1;
1516
1517 return true;
1518}
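/*
 * Worked example (added for clarity): cropping a 1920x1080 source for a
 * 640x480 (4:3) target with zoom == 0 gives crop_w = 1080 * (640/480) = 1440
 * and crop_h = 1080, centered at crop_x = (1920 - 1440) / 2 = 240, crop_y = 0.
 */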
1519
9dd63e1f 1520BayerBufManager::BayerBufManager()
13d8c7b4 1521{
1522 ALOGV("DEBUG(%s): ", __FUNCTION__);
1523 for (int i = 0; i < NUM_BAYER_BUFFERS ; i++) {
1524 entries[i].status = BAYER_ON_HAL_EMPTY;
1525 entries[i].reqFrameCnt = 0;
13d8c7b4 1526 }
1527 sensorEnqueueHead = 0;
1528 sensorDequeueHead = 0;
1529 ispEnqueueHead = 0;
1530 ispDequeueHead = 0;
1531 numOnSensor = 0;
1532 numOnIsp = 0;
1533 numOnHalFilled = 0;
1534 numOnHalEmpty = NUM_BAYER_BUFFERS;
1535}
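/*
 * Buffer state machine implemented by the Mark*() methods below (added for
 * readability):
 *
 *   BAYER_ON_HAL_EMPTY  --MarkSensorEnqueue()-->  BAYER_ON_SENSOR
 *   BAYER_ON_SENSOR     --MarkSensorDequeue()-->  BAYER_ON_HAL_FILLED
 *   BAYER_ON_HAL_FILLED --MarkIspEnqueue()---->   BAYER_ON_ISP
 *   BAYER_ON_ISP        --MarkIspDequeue()---->   BAYER_ON_HAL_EMPTY
 */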
1536
9dd63e1f 1537int BayerBufManager::GetIndexForSensorEnqueue()
13d8c7b4 1538{
1539 int ret = 0;
1540 if (numOnHalEmpty == 0)
1541 ret = -1;
1542 else
1543 ret = sensorEnqueueHead;
1544 ALOGV("DEBUG(%s): returning (%d)", __FUNCTION__, ret);
1545 return ret;
1546}
1547
9dd63e1f 1548int BayerBufManager::MarkSensorEnqueue(int index)
13d8c7b4 1549{
1550 ALOGV("DEBUG(%s) : BayerIndex[%d] ", __FUNCTION__, index);
1551
1552 // sanity check
1553 if (index != sensorEnqueueHead) {
1554 ALOGV("DEBUG(%s) : Abnormal BayerIndex[%d] - expected[%d]", __FUNCTION__, index, sensorEnqueueHead);
1555 return -1;
1556 }
1557 if (entries[index].status != BAYER_ON_HAL_EMPTY) {
1558 ALOGV("DEBUG(%s) : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
1559 index, entries[index].status, BAYER_ON_HAL_EMPTY);
1560 return -1;
13d8c7b4 1561 }
1563 entries[index].status = BAYER_ON_SENSOR;
1564 entries[index].reqFrameCnt = 0;
1565 numOnHalEmpty--;
1566 numOnSensor++;
1567 sensorEnqueueHead = GetNextIndex(index);
1568 ALOGV("DEBUG(%s) END: HAL-e(%d) HAL-f(%d) Sensor(%d) ISP(%d) ",
1569 __FUNCTION__, numOnHalEmpty, numOnHalFilled, numOnSensor, numOnIsp);
1570 return 0;
1571}
13d8c7b4 1572
9dd63e1f 1573int BayerBufManager::MarkSensorDequeue(int index, int reqFrameCnt, nsecs_t *timeStamp)
13d8c7b4 1574{
9dd63e1f
SK
1575 ALOGV("DEBUG(%s) : BayerIndex[%d] reqFrameCnt(%d)", __FUNCTION__, index, reqFrameCnt);
1576
1577 // sanity check
1578 if (index != sensorDequeueHead) {
1579 ALOGV("DEBUG(%s) : Abnormal BayerIndex[%d] - expected[%d]", __FUNCTION__, index, sensorDequeueHead);
13d8c7b4
SK
1580 return -1;
1581 }
9dd63e1f
SK
1582 if (entries[index].status != BAYER_ON_SENSOR) {
1583 ALOGV("DEBUG(%s) : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
1584 index, entries[index].status, BAYER_ON_SENSOR);
1585 return -1;
13d8c7b4 1586 }
13d8c7b4 1587
9dd63e1f
SK
1588 entries[index].status = BAYER_ON_HAL_FILLED;
1589 entries[index].reqFrameCnt = reqFrameCnt;
1590 entries[index].timeStamp = *timeStamp;
1591 numOnHalFilled++;
1592 numOnSensor--;
1593 sensorDequeueHead = GetNextIndex(index);
1594 ALOGV("DEBUG(%s) END: HAL-e(%d) HAL-f(%d) Sensor(%d) ISP(%d) ",
1595 __FUNCTION__, numOnHalEmpty, numOnHalFilled, numOnSensor, numOnIsp);
1596 return 0;
1597}
1598
1599int BayerBufManager::GetIndexForIspEnqueue(int *reqFrameCnt)
1600{
1601 int ret = 0;
1602 if (numOnHalFilled == 0)
1603 ret = -1;
1604 else {
1605 *reqFrameCnt = entries[ispEnqueueHead].reqFrameCnt;
1606 ret = ispEnqueueHead;
13d8c7b4 1607 }
9dd63e1f 1608 ALOGV("DEBUG(%s): returning BayerIndex[%d]", __FUNCTION__, ret);
13d8c7b4
SK
1609 return ret;
1610}
1611
9dd63e1f
SK
1612int BayerBufManager::GetIndexForIspDequeue(int *reqFrameCnt)
1613{
1614 int ret = 0;
1615 if (numOnIsp == 0)
1616 ret = -1;
1617 else {
1618 *reqFrameCnt = entries[ispDequeueHead].reqFrameCnt;
1619 ret = ispDequeueHead;
1620 }
1621 ALOGV("DEBUG(%s): returning BayerIndex[%d]", __FUNCTION__, ret);
1622 return ret;
1623}
13d8c7b4 1624
9dd63e1f 1625int BayerBufManager::MarkIspEnqueue(int index)
13d8c7b4 1626{
9dd63e1f
SK
1627 ALOGV("DEBUG(%s) : BayerIndex[%d] ", __FUNCTION__, index);
1628
1629 // sanity check
1630 if (index != ispEnqueueHead) {
1631 ALOGV("DEBUG(%s) : Abnormal BayerIndex[%d] - expected[%d]", __FUNCTION__, index, ispEnqueueHead);
13d8c7b4
SK
1632 return -1;
1633 }
9dd63e1f
SK
1634 if (entries[index].status != BAYER_ON_HAL_FILLED) {
1635 ALOGV("DEBUG(%s) : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
1636 index, entries[index].status, BAYER_ON_HAL_FILLED);
1637 return -1;
13d8c7b4
SK
1638 }
1639
9dd63e1f
SK
1640 entries[index].status = BAYER_ON_ISP;
1641 numOnHalFilled--;
1642 numOnIsp++;
1643 ispEnqueueHead = GetNextIndex(index);
1644 ALOGV("DEBUG(%s) END: HAL-e(%d) HAL-f(%d) Sensor(%d) ISP(%d) ",
1645 __FUNCTION__, numOnHalEmpty, numOnHalFilled, numOnSensor, numOnIsp);
1646 return 0;
1647}
1648
1649int BayerBufManager::MarkIspDequeue(int index)
1650{
1651 ALOGV("DEBUG(%s) : BayerIndex[%d]", __FUNCTION__, index);
1652
1653 // sanity check
1654 if (index != ispDequeueHead) {
1655 ALOGV("DEBUG(%s) : Abnormal BayerIndex[%d] - expected[%d]", __FUNCTION__, index, ispDequeueHead);
1656 return -1;
13d8c7b4 1657 }
9dd63e1f
SK
1658 if (entries[index].status != BAYER_ON_ISP) {
1659 ALOGV("DEBUG(%s) : Abnormal status in BayerIndex[%d] = (%d) expected (%d)", __FUNCTION__,
1660 index, entries[index].status, BAYER_ON_ISP);
13d8c7b4
SK
1661 return -1;
1662 }
1663
9dd63e1f
SK
1664 entries[index].status = BAYER_ON_HAL_EMPTY;
1665 entries[index].reqFrameCnt = 0;
1666 numOnHalEmpty++;
1667 numOnIsp--;
1668 ispDequeueHead = GetNextIndex(index);
1669 ALOGV("DEBUG(%s) END: HAL-e(%d) HAL-f(%d) Sensor(%d) ISP(%d) ",
1670 __FUNCTION__, numOnHalEmpty, numOnHalFilled, numOnSensor, numOnIsp);
1671 return 0;
1672}
13d8c7b4 1673
9dd63e1f
SK
1674int BayerBufManager::GetNumOnSensor()
1675{
1676 return numOnSensor;
13d8c7b4
SK
1677}
1678
9dd63e1f 1679int BayerBufManager::GetNumOnHalFilled()
13d8c7b4 1680{
9dd63e1f
SK
1681 return numOnHalFilled;
1682}
1683
1684int BayerBufManager::GetNumOnIsp()
1685{
1686 return numOnIsp;
1687}
1688
1689int BayerBufManager::GetNextIndex(int index)
1690{
1691 index++;
1692 if (index >= NUM_BAYER_BUFFERS)
1693 index = 0;
1694
1695 return index;
1696}
1697
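// Editor's note (overview, inferred from the signal handling below): the main
// thread pulls requests from the service request queue, registers them with
// m_requestManager, and kicks the sensor thread; when a stream reports output
// done, it appends the prepared metadata to a dequeued service frame, frees the
// deregistered request, and re-arms itself if more requests are pending.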
1698void ExynosCameraHWInterface2::m_mainThreadFunc(SignalDrivenThread * self)
1699{
1700 camera_metadata_t *currentRequest = NULL;
1701 camera_metadata_t *currentFrame = NULL;
1702 size_t numEntries = 0;
1703 size_t frameSize = 0;
1704 camera_metadata_t * preparedFrame = NULL;
13d8c7b4
SK
1705 camera_metadata_t *deregisteredRequest = NULL;
1706 uint32_t currentSignal = self->GetProcessingSignal();
1707 MainThread * selfThread = ((MainThread*)self);
1708 int res = 0;
1709
1710 ALOGV("DEBUG(%s): m_mainThreadFunc (%x)", __FUNCTION__, currentSignal);
1711
1712 if (currentSignal & SIGNAL_THREAD_RELEASE) {
1713 ALOGV("DEBUG(%s): processing SIGNAL_THREAD_RELEASE", __FUNCTION__);
1714
1715 ALOGV("DEBUG(%s): processing SIGNAL_THREAD_RELEASE DONE", __FUNCTION__);
1716 selfThread->SetSignal(SIGNAL_THREAD_TERMINATE);
1717 return;
1718 }
1719
1720 if (currentSignal & SIGNAL_MAIN_REQ_Q_NOT_EMPTY) {
1721 ALOGV("DEBUG(%s): MainThread processing SIGNAL_MAIN_REQ_Q_NOT_EMPTY", __FUNCTION__);
1722 if (m_requestManager->IsRequestQueueFull()==false
1723 && m_requestManager->GetNumEntries()<NUM_MAX_DEQUEUED_REQUEST) {
1724 m_requestQueueOps->dequeue_request(m_requestQueueOps, &currentRequest);
1725 if (NULL == currentRequest) {
1726 ALOGV("DEBUG(%s): dequeue_request returned NULL ", __FUNCTION__);
1727 m_isRequestQueueNull = true;
1728 }
1729 else {
1730 m_requestManager->RegisterRequest(currentRequest);
1731
1732 m_numOfRemainingReqInSvc = m_requestQueueOps->request_count(m_requestQueueOps);
1733 ALOGV("DEBUG(%s): remaining req cnt (%d)", __FUNCTION__, m_numOfRemainingReqInSvc);
1734 if (m_requestManager->IsRequestQueueFull()==false
1735 && m_requestManager->GetNumEntries()<NUM_MAX_DEQUEUED_REQUEST)
1736 selfThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY); // dequeue repeatedly
9dd63e1f 1737
13d8c7b4
SK
1738 m_sensorThread->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING);
1739 }
c15a6b00
JS
1740 }
1741 else {
13d8c7b4
SK
1742 m_isRequestQueuePending = true;
1743 }
1744 }
1745
1746 if (currentSignal & SIGNAL_MAIN_STREAM_OUTPUT_DONE) {
1747 ALOGV("DEBUG(%s): MainThread processing SIGNAL_MAIN_STREAM_OUTPUT_DONE", __FUNCTION__);
1748 /*while (1)*/ {
13d8c7b4
SK
1749 m_requestManager->PrepareFrame(&numEntries, &frameSize, &preparedFrame);
1750 m_requestManager->DeregisterRequest(&deregisteredRequest);
1751 m_requestQueueOps->free_request(m_requestQueueOps, deregisteredRequest);
1752 m_frameQueueOps->dequeue_frame(m_frameQueueOps, numEntries, frameSize, &currentFrame);
1753 if (currentFrame==NULL) {
1754 ALOGD("DBG(%s): frame dequeue returned NULL",__FUNCTION__ );
1755 }
1756 else {
daa1fcd6 1757 ALOGV("DEBUG(%s): frame dequeue done. numEntries(%d) frameSize(%d)",__FUNCTION__ , numEntries, frameSize);
13d8c7b4
SK
1758 }
1759 res = append_camera_metadata(currentFrame, preparedFrame);
1760 if (res==0) {
1761 ALOGV("DEBUG(%s): frame metadata append success",__FUNCTION__);
1762 m_frameQueueOps->enqueue_frame(m_frameQueueOps, currentFrame);
1763 }
1764 else {
1765 ALOGE("ERR(%s): frame metadata append fail (%d)",__FUNCTION__, res);
1766 }
1767 }
1768 if (!m_isRequestQueueNull) {
1769 selfThread->SetSignal(SIGNAL_MAIN_REQ_Q_NOT_EMPTY);
1770 }
9dd63e1f 1771
13d8c7b4
SK
1772 if (getInProgressCount()>0) {
1773 ALOGV("DEBUG(%s): STREAM_OUTPUT_DONE and signalling REQ_PROCESSING",__FUNCTION__);
1774 m_sensorThread->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING);
c15a6b00 1775 }
c15a6b00 1776 }
13d8c7b4 1777 ALOGV("DEBUG(%s): MainThread Exit", __FUNCTION__);
c15a6b00
JS
1778 return;
1779}
13d8c7b4 1780
c15a6b00
JS
1781void ExynosCameraHWInterface2::m_sensorThreadInitialize(SignalDrivenThread * self)
1782{
13d8c7b4
SK
1783 ALOGV("DEBUG(%s): ", __FUNCTION__ );
1784 SensorThread * selfThread = ((SensorThread*)self);
c15a6b00
JS
1785 char node_name[30];
1786 int fd = 0;
1787 int i =0, j=0;
1788
9dd63e1f
SK
1789 if(m_cameraId == 0)
1790 m_camera_info.sensor_id = SENSOR_NAME_S5K4E5;
1791 else
1792 m_camera_info.sensor_id = SENSOR_NAME_S5K6A3;
1793
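    // Editor's note (assumption based on its use in m_sensorThreadFunc): dummy_shot
    // is the "bubble" shot_ext copied into a bayer buffer whenever no real request
    // is available, keeping the sensor/ISP pipeline running with only the sensor
    // request enabled and no SCC/SCP output.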
13d8c7b4
SK
1794 memset(&m_camera_info.dummy_shot, 0x00, sizeof(struct camera2_shot_ext));
1795 m_camera_info.dummy_shot.shot.ctl.request.metadataMode = METADATA_MODE_FULL;
1796 m_camera_info.dummy_shot.shot.magicNumber = 0x23456789;
1797
9dd63e1f
SK
1798 m_camera_info.dummy_shot.dis_bypass = 1;
1799 m_camera_info.dummy_shot.dnr_bypass = 1;
1800
13d8c7b4
SK
1801 /*sensor setting*/
1802 m_camera_info.dummy_shot.shot.ctl.sensor.exposureTime = 0;
1803 m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 0;
1804 m_camera_info.dummy_shot.shot.ctl.sensor.sensitivity = 0;
c15a6b00 1805
13d8c7b4
SK
1806 m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[0] = 0;
1807 m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[1] = 0;
9dd63e1f 1808 //m_camera_info.dummy_shot.shot.ctl.scaler.cropRegion[2] = 1920;
c15a6b00 1809
13d8c7b4
SK
1810 /*request setting*/
1811 m_camera_info.dummy_shot.request_sensor = 1;
1812 m_camera_info.dummy_shot.request_scc = 0;
1813 m_camera_info.dummy_shot.request_scp = 0;
9dd63e1f
SK
1814 m_camera_info.dummy_shot.shot.ctl.request.outputStreams[0] = 0;
1815 m_camera_info.dummy_shot.shot.ctl.request.outputStreams[1] = 0;
1816 m_camera_info.dummy_shot.shot.ctl.request.outputStreams[2] = 0;
13d8c7b4
SK
1817
1818 /*sensor init*/
c15a6b00
JS
1819 memset(&node_name, 0x00, sizeof(char[30]));
1820 sprintf(node_name, "%s%d", NODE_PREFIX, 40);
1821 fd = exynos_v4l2_open(node_name, O_RDWR, 0);
13d8c7b4 1822
c15a6b00 1823 if (fd < 0) {
13d8c7b4 1824 ALOGE("ERR(%s): failed to open sensor video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
c15a6b00
JS
1825 }
1826 else {
13d8c7b4 1827 ALOGV("DEBUG(%s): sensor video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
c15a6b00
JS
1828 }
1829 m_camera_info.sensor.fd = fd;
9dd63e1f 1830
daa1fcd6
SK
1831 m_camera_info.sensor.width = m_camera2->getSensorRawW();
1832 m_camera_info.sensor.height = m_camera2->getSensorRawH();
9dd63e1f 1833
c15a6b00
JS
1834 m_camera_info.sensor.format = V4L2_PIX_FMT_SBGGR16;
1835 m_camera_info.sensor.planes = 2;
13d8c7b4 1836 m_camera_info.sensor.buffers = NUM_BAYER_BUFFERS;
c15a6b00 1837 m_camera_info.sensor.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
24231221 1838 m_camera_info.sensor.memory = V4L2_MEMORY_DMABUF;
c15a6b00
JS
1839 m_camera_info.sensor.ionClient = m_ionCameraClient;
1840
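    // Editor's note (layout as implied by the sizes below): each sensor buffer has
    // two planes - plane 0 holds the raw Bayer payload (width * height * 2 bytes
    // for SBGGR16) and plane 1 is a small per-frame metadata area carrying the
    // struct camera2_shot_ext exchanged with the driver.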
1841 for(i = 0; i < m_camera_info.sensor.buffers; i++){
1842 initCameraMemory(&m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes);
13d8c7b4
SK
1843 m_camera_info.sensor.buffer[i].size.extS[0] = m_camera_info.sensor.width*m_camera_info.sensor.height*2;
 1844         m_camera_info.sensor.buffer[i].size.extS[1] = 8*1024; // HACK: the driver uses 8*1024 here; this should come from a predefined constant
c15a6b00
JS
1845 allocCameraMemory(m_camera_info.sensor.ionClient, &m_camera_info.sensor.buffer[i], m_camera_info.sensor.planes);
1846 }
1847
13d8c7b4
SK
1848 m_initFlag1 = true;
1849
13d8c7b4
SK
1850
1851 while (!m_initFlag2) // temp
1852 usleep(100000);
1853 ALOGV("DEBUG(%s): END of SensorThreadInitialize ", __FUNCTION__);
1854 return;
1855}
1856
1857
13d8c7b4
SK
1858void ExynosCameraHWInterface2::DumpInfoWithShot(struct camera2_shot_ext * shot_ext)
1859{
1860 ALOGV("#### common Section");
1861 ALOGV("#### magic(%x) ",
1862 shot_ext->shot.magicNumber);
1863 ALOGV("#### ctl Section");
b56dcc00 1864 ALOGV("#### meta(%d) aper(%f) exp(%lld) duration(%lld) ISO(%d) AWB(%d)",
13d8c7b4 1865 shot_ext->shot.ctl.request.metadataMode,
b56dcc00 1866 shot_ext->shot.ctl.lens.aperture,
13d8c7b4
SK
1867 shot_ext->shot.ctl.sensor.exposureTime,
1868 shot_ext->shot.ctl.sensor.frameDuration,
b56dcc00
SK
1869 shot_ext->shot.ctl.sensor.sensitivity,
1870 shot_ext->shot.ctl.aa.awbMode);
13d8c7b4 1871
9dd63e1f
SK
1872 ALOGV("#### OutputStream Sensor(%d) SCP(%d) SCC(%d) pv(%d) rec(%d)",
1873 shot_ext->request_sensor, shot_ext->request_scp, shot_ext->request_scc,
1874 shot_ext->shot.ctl.request.outputStreams[0],
1875 shot_ext->shot.ctl.request.outputStreams[2]);
13d8c7b4
SK
1876
1877 ALOGV("#### DM Section");
b56dcc00 1878 ALOGV("#### meta(%d) aper(%f) exp(%lld) duration(%lld) ISO(%d) timestamp(%lld) AWB(%d) cnt(%d)",
13d8c7b4 1879 shot_ext->shot.dm.request.metadataMode,
b56dcc00 1880 shot_ext->shot.dm.lens.aperture,
13d8c7b4
SK
1881 shot_ext->shot.dm.sensor.exposureTime,
1882 shot_ext->shot.dm.sensor.frameDuration,
1883 shot_ext->shot.dm.sensor.sensitivity,
b56dcc00
SK
1884 shot_ext->shot.dm.sensor.timeStamp,
1885 shot_ext->shot.dm.aa.awbMode,
1886 shot_ext->shot.dm.request.frameCount );
13d8c7b4
SK
1887}
1888
1889void ExynosCameraHWInterface2::m_sensorThreadFunc(SignalDrivenThread * self)
1890{
1891 uint32_t currentSignal = self->GetProcessingSignal();
1892 SensorThread * selfThread = ((SensorThread*)self);
1893 int index;
1894 status_t res;
1895 nsecs_t frameTime;
1896 int bayersOnSensor = 0, bayersOnIsp = 0;
1897 ALOGV("DEBUG(%s): m_sensorThreadFunc (%x)", __FUNCTION__, currentSignal);
1898
1899 if (currentSignal & SIGNAL_THREAD_RELEASE) {
9dd63e1f 1900 ALOGD("(%s): ENTER processing SIGNAL_THREAD_RELEASE", __FUNCTION__);
13d8c7b4 1901
9dd63e1f 1902 ALOGV("(%s): calling sensor streamoff", __FUNCTION__);
13d8c7b4 1903 cam_int_streamoff(&(m_camera_info.sensor));
9dd63e1f 1904 ALOGV("(%s): calling sensor streamoff done", __FUNCTION__);
b5237e6b
SK
1905
1906 m_camera_info.sensor.buffers = 0;
1907 ALOGV("DEBUG(%s): sensor calling reqbuf 0 ", __FUNCTION__);
1908 cam_int_reqbufs(&(m_camera_info.sensor));
1909 ALOGV("DEBUG(%s): sensor calling reqbuf 0 done", __FUNCTION__);
9dd63e1f
SK
1910
1911 ALOGV("(%s): calling ISP streamoff", __FUNCTION__);
1912 isp_int_streamoff(&(m_camera_info.isp));
1913 ALOGV("(%s): calling ISP streamoff done", __FUNCTION__);
1914
b5237e6b
SK
1915 m_camera_info.isp.buffers = 0;
1916 ALOGV("DEBUG(%s): isp calling reqbuf 0 ", __FUNCTION__);
1917 cam_int_reqbufs(&(m_camera_info.isp));
1918 ALOGV("DEBUG(%s): isp calling reqbuf 0 done", __FUNCTION__);
1919
13d8c7b4 1920 exynos_v4l2_s_ctrl(m_camera_info.sensor.fd, V4L2_CID_IS_S_STREAM, IS_DISABLE_STREAM);
13d8c7b4 1921
9dd63e1f 1922 ALOGD("(%s): EXIT processing SIGNAL_THREAD_RELEASE", __FUNCTION__);
13d8c7b4
SK
1923 selfThread->SetSignal(SIGNAL_THREAD_TERMINATE);
1924 return;
1925 }
1926
1927 if (currentSignal & SIGNAL_SENSOR_START_REQ_PROCESSING)
1928 {
1929 ALOGV("DEBUG(%s): SensorThread processing SIGNAL_SENSOR_START_REQ_PROCESSING", __FUNCTION__);
9dd63e1f
SK
1930 int targetStreamIndex = 0, i=0;
1931 int matchedFrameCnt, processingReqIndex;
13d8c7b4
SK
1932 struct camera2_shot_ext *shot_ext;
1933 if (!m_isSensorStarted)
1934 {
1935 m_isSensorStarted = true;
b5237e6b 1936 ALOGD("(%s): calling preview streamon", __FUNCTION__);
13d8c7b4 1937 cam_int_streamon(&(m_streamThreads[0]->m_parameters.node));
b5237e6b 1938 ALOGD("(%s): calling isp streamon done", __FUNCTION__);
9dd63e1f
SK
1939 for (i = 0; i < m_camera_info.isp.buffers; i++) {
1940 ALOGV("DEBUG(%s): isp initial QBUF [%d]", __FUNCTION__, i);
1941 cam_int_qbuf(&(m_camera_info.isp), i);
1942 }
1943
1944 cam_int_streamon(&(m_camera_info.isp));
1945
1946 for (i = 0; i < m_camera_info.isp.buffers; i++) {
1947 ALOGV("DEBUG(%s): isp initial DQBUF [%d]", __FUNCTION__, i);
1948 cam_int_dqbuf(&(m_camera_info.isp));
1949 }
1950
13d8c7b4
SK
1951 ALOGV("DEBUG(%s): calling isp sctrl done", __FUNCTION__);
1952 exynos_v4l2_s_ctrl(m_camera_info.sensor.fd, V4L2_CID_IS_S_STREAM, IS_ENABLE_STREAM);
1953 ALOGV("DEBUG(%s): calling sensor sctrl done", __FUNCTION__);
1954
1955 }
13d8c7b4 1956
3f0357b8 1957 ALOGV("### Sensor DQBUF start");
13d8c7b4
SK
1958 index = cam_int_dqbuf(&(m_camera_info.sensor));
1959 frameTime = systemTime();
3f0357b8 1960 ALOGV("### Sensor DQBUF done BayerIndex(%d)", index);
9dd63e1f
SK
1961 shot_ext = (struct camera2_shot_ext *)(m_camera_info.sensor.buffer[index].virt.extP[1]);
1962 matchedFrameCnt = m_requestManager->FindFrameCnt(shot_ext);
be494d19
SK
1963 ALOGV("### Matched(%d) last(%d), dqbuf timestamp(%lld)", matchedFrameCnt, lastFrameCnt
1964 , shot_ext->shot.dm.sensor.timeStamp);
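        // Editor's note (behaviour of the duplicate check below, as read from the
        // code): if the matched request frame count equals the one already in flight
        // (lastFrameCnt), the frame is treated as a repeat - the buffer is marked
        // dequeued with reqFrameCnt -1 (a bubble) and the sensor is dequeued again
        // until a new frame count, or no match, appears.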
a07cbd98
SK
1965 if (matchedFrameCnt != -1) {
1966 while (matchedFrameCnt == lastFrameCnt) {
1967 m_BayerManager->MarkSensorDequeue(index, -1, &frameTime);
3f0357b8 1968 ALOGV("### Sensor DQBUF start");
a07cbd98
SK
1969 index = cam_int_dqbuf(&(m_camera_info.sensor));
1970 frameTime = systemTime();
3f0357b8 1971 ALOGV("### Sensor DQBUF done BayerIndex(%d)", index);
a07cbd98
SK
1972 shot_ext = (struct camera2_shot_ext *)(m_camera_info.sensor.buffer[index].virt.extP[1]);
1973 matchedFrameCnt = m_requestManager->FindFrameCnt(shot_ext);
3f0357b8 1974 ALOGV("### Matched(%d) last(%d)", matchedFrameCnt, lastFrameCnt);
a07cbd98
SK
1975 }
1976 lastFrameCnt = matchedFrameCnt;
9dd63e1f
SK
1977 m_scp_closing = false;
1978 m_scp_closed = false;
1979 }
9dd63e1f
SK
1980 m_BayerManager->MarkSensorDequeue(index, matchedFrameCnt, &frameTime);
1981
1982 m_requestManager->RegisterTimestamp(matchedFrameCnt, &frameTime);
386436c9 1983 ALOGV("### Sensor DQed BayerIndex[%d] passing to ISP. frameCnt(%d) timestamp(%lld)",
9dd63e1f
SK
1984 index, matchedFrameCnt, frameTime);
1985
1986 if (!(m_ispThread.get()))
1987 return;
13d8c7b4 1988
9dd63e1f 1989 m_ispThread->SetSignal(SIGNAL_ISP_START_BAYER_INPUT);
13d8c7b4 1990
9dd63e1f 1991 while (m_BayerManager->GetNumOnSensor() <= NUM_SENSOR_QBUF) {
13d8c7b4 1992
9dd63e1f 1993 index = m_BayerManager->GetIndexForSensorEnqueue();
13d8c7b4
SK
1994 if (index == -1) {
1995 ALOGE("ERR(%s) No free Bayer buffer", __FUNCTION__);
1996 break;
1997 }
1998 processingReqIndex = m_requestManager->MarkProcessingRequest(&(m_camera_info.sensor.buffer[index]));
1999
9dd63e1f 2000 shot_ext = (struct camera2_shot_ext *)(m_camera_info.sensor.buffer[index].virt.extP[1]);
13d8c7b4 2001 if (processingReqIndex == -1) {
9dd63e1f 2002 ALOGV("DEBUG(%s) req underrun => inserting bubble to BayerIndex(%d)", __FUNCTION__, index);
13d8c7b4
SK
2003 memcpy(shot_ext, &(m_camera_info.dummy_shot), sizeof(struct camera2_shot_ext));
2004 }
2005
9dd63e1f
SK
2006 m_BayerManager->MarkSensorEnqueue(index);
2007 if (m_scp_closing || m_scp_closed) {
2008 ALOGV("(%s): SCP_CLOSING(%d) SCP_CLOSED(%d)", __FUNCTION__, m_scp_closing, m_scp_closed);
2009 shot_ext->request_scc = 0;
2010 shot_ext->request_scp = 0;
2011 shot_ext->request_sensor = 0;
2012 }
386436c9 2013 ALOGV("### Sensor QBUF start BayerIndex[%d]", index);
13d8c7b4 2014 cam_int_qbuf(&(m_camera_info.sensor), index);
386436c9 2015 ALOGV("### Sensor QBUF done");
13d8c7b4 2016 }
9dd63e1f 2017 if (!m_closing){
13d8c7b4 2018 selfThread->SetSignal(SIGNAL_SENSOR_START_REQ_PROCESSING);
13d8c7b4 2019 }
9dd63e1f
SK
2020 return;
2021 }
13d8c7b4
SK
2022 return;
2023}
2024
13d8c7b4
SK
2025void ExynosCameraHWInterface2::m_ispThreadInitialize(SignalDrivenThread * self)
2026{
2027 ALOGV("DEBUG(%s): ", __FUNCTION__ );
2028 IspThread * selfThread = ((IspThread*)self);
2029 char node_name[30];
2030 int fd = 0;
2031 int i =0, j=0;
2032
2033
2034 while (!m_initFlag1) //temp
2035 usleep(100000);
2036
2037 /*isp init*/
2038 memset(&node_name, 0x00, sizeof(char[30]));
2039 sprintf(node_name, "%s%d", NODE_PREFIX, 41);
2040 fd = exynos_v4l2_open(node_name, O_RDWR, 0);
2041
2042 if (fd < 0) {
2043 ALOGE("ERR(%s): failed to open isp video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
2044 }
2045 else {
2046 ALOGV("DEBUG(%s): isp video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
2047 }
2048 m_camera_info.isp.fd = fd;
2049
2050 m_camera_info.isp.width = m_camera_info.sensor.width;
2051 m_camera_info.isp.height = m_camera_info.sensor.height;
2052 m_camera_info.isp.format = m_camera_info.sensor.format;
2053 m_camera_info.isp.planes = m_camera_info.sensor.planes;
2054 m_camera_info.isp.buffers = m_camera_info.sensor.buffers;
2055 m_camera_info.isp.type = V4L2_BUF_TYPE_VIDEO_OUTPUT_MPLANE;
2056 m_camera_info.isp.memory = V4L2_MEMORY_DMABUF;
13d8c7b4
SK
2057
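    // Editor's note (zero-copy sharing, as set up below): the ISP output node does
    // not get its own allocation - each ISP buffer aliases the corresponding sensor
    // buffer's dmabuf fds and mapped addresses, so bayer data flows from the sensor
    // node into the ISP node without an extra copy.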
2058 for(i = 0; i < m_camera_info.isp.buffers; i++){
2059 initCameraMemory(&m_camera_info.isp.buffer[i], m_camera_info.isp.planes);
2060 m_camera_info.isp.buffer[i].size.extS[0] = m_camera_info.sensor.buffer[i].size.extS[0];
2061 m_camera_info.isp.buffer[i].size.extS[1] = m_camera_info.sensor.buffer[i].size.extS[1];
2062 m_camera_info.isp.buffer[i].fd.extFd[0] = m_camera_info.sensor.buffer[i].fd.extFd[0];
2063 m_camera_info.isp.buffer[i].fd.extFd[1] = m_camera_info.sensor.buffer[i].fd.extFd[1];
2064 m_camera_info.isp.buffer[i].virt.extP[0] = m_camera_info.sensor.buffer[i].virt.extP[0];
2065 m_camera_info.isp.buffer[i].virt.extP[1] = m_camera_info.sensor.buffer[i].virt.extP[1];
2066 };
2067
9dd63e1f
SK
2068 cam_int_s_input(&(m_camera_info.isp), m_camera_info.sensor_id);
2069 cam_int_s_fmt(&(m_camera_info.isp));
2070 ALOGV("DEBUG(%s): isp calling reqbuf", __FUNCTION__);
2071 cam_int_reqbufs(&(m_camera_info.isp));
2072 ALOGV("DEBUG(%s): isp calling querybuf", __FUNCTION__);
13d8c7b4 2073 ALOGV("DEBUG(%s): isp mem alloc done", __FUNCTION__);
9dd63e1f 2074
13d8c7b4
SK
2075 cam_int_s_input(&(m_camera_info.sensor), m_camera_info.sensor_id);
2076 ALOGV("DEBUG(%s): sensor s_input done", __FUNCTION__);
13d8c7b4
SK
2077 if (cam_int_s_fmt(&(m_camera_info.sensor))< 0) {
2078 ALOGE("ERR(%s): sensor s_fmt fail", __FUNCTION__);
2079 }
2080 ALOGV("DEBUG(%s): sensor s_fmt done", __FUNCTION__);
2081 cam_int_reqbufs(&(m_camera_info.sensor));
2082 ALOGV("DEBUG(%s): sensor reqbuf done", __FUNCTION__);
2083 for (i = 0; i < m_camera_info.sensor.buffers; i++) {
2084 ALOGV("DEBUG(%s): sensor initial QBUF [%d]", __FUNCTION__, i);
2085 memcpy( m_camera_info.sensor.buffer[i].virt.extP[1], &(m_camera_info.dummy_shot),
2086 sizeof(struct camera2_shot_ext));
2087 m_camera_info.dummy_shot.shot.ctl.sensor.frameDuration = 33*1000*1000; // apply from frame #1
2088
2089 cam_int_qbuf(&(m_camera_info.sensor), i);
9dd63e1f 2090 m_BayerManager->MarkSensorEnqueue(i);
c15a6b00 2091 }
9dd63e1f 2092     ALOGD("== stream_on :: m_camera_info.sensor");
13d8c7b4 2093 cam_int_streamon(&(m_camera_info.sensor));
c15a6b00 2094
c15a6b00
JS
2095
2096
13d8c7b4 2097/*capture init*/
c15a6b00
JS
2098 memset(&node_name, 0x00, sizeof(char[30]));
2099 sprintf(node_name, "%s%d", NODE_PREFIX, 42);
2100 fd = exynos_v4l2_open(node_name, O_RDWR, 0);
13d8c7b4 2101
c15a6b00 2102 if (fd < 0) {
13d8c7b4 2103 ALOGE("ERR(%s): failed to open capture video node (%s) fd (%d)", __FUNCTION__,node_name, fd);
c15a6b00
JS
2104 }
2105 else {
13d8c7b4 2106 ALOGV("DEBUG(%s): capture video node opened(%s) fd (%d)", __FUNCTION__,node_name, fd);
c15a6b00 2107 }
c15a6b00 2108 m_camera_info.capture.fd = fd;
9dd63e1f 2109
daa1fcd6
SK
2110 m_camera_info.capture.width = m_camera2->getSensorW();
2111 m_camera_info.capture.height = m_camera2->getSensorH();
c15a6b00
JS
2112 m_camera_info.capture.format = V4L2_PIX_FMT_YUYV;
2113 m_camera_info.capture.planes = 1;
2114 m_camera_info.capture.buffers = 8;
2115 m_camera_info.capture.type = V4L2_BUF_TYPE_VIDEO_CAPTURE_MPLANE;
24231221 2116 m_camera_info.capture.memory = V4L2_MEMORY_DMABUF;
c15a6b00
JS
2117 m_camera_info.capture.ionClient = m_ionCameraClient;
2118
2119 for(i = 0; i < m_camera_info.capture.buffers; i++){
2120 initCameraMemory(&m_camera_info.capture.buffer[i], m_camera_info.capture.planes);
13d8c7b4 2121 m_camera_info.capture.buffer[i].size.extS[0] = m_camera_info.capture.width*m_camera_info.capture.height*2;
c15a6b00
JS
2122 allocCameraMemory(m_camera_info.capture.ionClient, &m_camera_info.capture.buffer[i], m_camera_info.capture.planes);
2123 }
2124
13d8c7b4
SK
2125 cam_int_s_input(&(m_camera_info.capture), m_camera_info.sensor_id);
2126 cam_int_s_fmt(&(m_camera_info.capture));
2127 ALOGV("DEBUG(%s): capture calling reqbuf", __FUNCTION__);
2128 cam_int_reqbufs(&(m_camera_info.capture));
2129 ALOGV("DEBUG(%s): capture calling querybuf", __FUNCTION__);
c15a6b00 2130
13d8c7b4
SK
2131 for (i = 0; i < m_camera_info.capture.buffers; i++) {
2132 ALOGV("DEBUG(%s): capture initial QBUF [%d]", __FUNCTION__, i);
2133 cam_int_qbuf(&(m_camera_info.capture), i);
c15a6b00 2134 }
9dd63e1f
SK
2135
 2136     ALOGD("== stream_on :: m_camera_info.capture");
13d8c7b4 2137 cam_int_streamon(&(m_camera_info.capture));
c15a6b00 2138
13d8c7b4
SK
2139 m_initFlag2 = true;
2140 ALOGV("DEBUG(%s): END of IspThreadInitialize ", __FUNCTION__);
c15a6b00
JS
2141 return;
2142}
2143
13d8c7b4
SK
2144
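// Editor's note (overview, inferred from the two signal handlers below): the ISP
// thread alternates between BAYER_INPUT (queue a filled bayer buffer with its
// per-frame shot settings into the ISP node) and BAYER_DEQUEUE (collect a
// processed buffer, publish its dynamic metadata, and wake the stream threads);
// input is given priority whenever more filled buffers are waiting.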
2145void ExynosCameraHWInterface2::m_ispThreadFunc(SignalDrivenThread * self)
c15a6b00 2146{
13d8c7b4
SK
2147 uint32_t currentSignal = self->GetProcessingSignal();
2148 IspThread * selfThread = ((IspThread*)self);
c15a6b00 2149 int index;
13d8c7b4
SK
2150 status_t res;
2151 ALOGV("DEBUG(%s): m_ispThreadFunc (%x)", __FUNCTION__, currentSignal);
2152
2153 if (currentSignal & SIGNAL_THREAD_RELEASE) {
9dd63e1f 2154 ALOGD("(%s): ENTER processing SIGNAL_THREAD_RELEASE", __FUNCTION__);
13d8c7b4 2155
9dd63e1f 2156 ALOGV("(%s): calling capture streamoff", __FUNCTION__);
13d8c7b4 2157 cam_int_streamoff(&(m_camera_info.capture));
9dd63e1f
SK
2158 ALOGV("(%s): calling capture streamoff done", __FUNCTION__);
2159
b5237e6b
SK
2160 m_camera_info.capture.buffers = 0;
2161 ALOGV("DEBUG(%s): capture calling reqbuf 0 ", __FUNCTION__);
2162 cam_int_reqbufs(&(m_camera_info.capture));
2163 ALOGV("DEBUG(%s): capture calling reqbuf 0 done", __FUNCTION__);
2164
9dd63e1f 2165 ALOGD("(%s): EXIT processing SIGNAL_THREAD_RELEASE ", __FUNCTION__);
13d8c7b4
SK
2166 selfThread->SetSignal(SIGNAL_THREAD_TERMINATE);
2167 return;
2168 }
9dd63e1f 2169
13d8c7b4 2170 if (currentSignal & SIGNAL_ISP_START_BAYER_INPUT)
c15a6b00 2171 {
13d8c7b4 2172 struct camera2_shot_ext *shot_ext;
9dd63e1f
SK
2173 int bayerIndexToEnqueue = 0;
2174 int processingFrameCnt = 0;
13d8c7b4 2175
13d8c7b4 2176 ALOGV("DEBUG(%s): IspThread processing SIGNAL_ISP_START_BAYER_INPUT", __FUNCTION__);
9dd63e1f
SK
2177
2178 bayerIndexToEnqueue = m_BayerManager->GetIndexForIspEnqueue(&processingFrameCnt);
2179 shot_ext = (struct camera2_shot_ext *)(m_camera_info.sensor.buffer[bayerIndexToEnqueue].virt.extP[1]);
2180
2181 ALOGV("### isp QBUF start bayerIndex[%d] for frameCnt(%d)", bayerIndexToEnqueue, processingFrameCnt);
2182
2183 if (processingFrameCnt != -1) {
2184 ALOGV("### writing output stream info");
b56dcc00 2185 m_requestManager->UpdateIspParameters(shot_ext, processingFrameCnt);
9dd63e1f
SK
2186 }
2187 else {
2188 memcpy(shot_ext, &(m_camera_info.dummy_shot), sizeof(struct camera2_shot_ext));
c15a6b00 2189 }
13d8c7b4
SK
2190 if (m_scp_flushing) {
2191 shot_ext->request_scp = 1;
2192 }
9dd63e1f
SK
2193 if (m_scp_closing || m_scp_closed) {
2194 ALOGV("(%s): SCP_CLOSING(%d) SCP_CLOSED(%d)", __FUNCTION__, m_scp_closing, m_scp_closed);
2195 shot_ext->request_scc = 0;
2196 shot_ext->request_scp = 0;
2197 shot_ext->request_sensor = 0;
c15a6b00 2198 }
9dd63e1f
SK
2199 cam_int_qbuf(&(m_camera_info.isp), bayerIndexToEnqueue);
2200 ALOGV("### isp QBUF done bayerIndex[%d] scp(%d)", bayerIndexToEnqueue, shot_ext->request_scp);
2201 m_BayerManager->MarkIspEnqueue(bayerIndexToEnqueue);
13d8c7b4 2202
9dd63e1f
SK
2203 if (m_BayerManager->GetNumOnHalFilled() != 0) {
2204 // input has priority
2205 selfThread->SetSignal(SIGNAL_ISP_START_BAYER_INPUT);
2206 return;
13d8c7b4 2207 }
9dd63e1f
SK
2208 else {
2209 selfThread->SetSignal(SIGNAL_ISP_START_BAYER_DEQUEUE);
13d8c7b4 2210 }
9dd63e1f 2211 }
13d8c7b4 2212
9dd63e1f
SK
2213 if (currentSignal & SIGNAL_ISP_START_BAYER_DEQUEUE)
2214 {
2215 struct camera2_shot_ext *shot_ext;
2216 int bayerIndexToDequeue = 0;
2217 int processingFrameCnt = 0;
2218 ALOGV("DEBUG(%s): IspThread processing SIGNAL_ISP_START_BAYER_DEQUEUE", __FUNCTION__);
9dd63e1f
SK
2219 bayerIndexToDequeue = m_BayerManager->GetIndexForIspDequeue(&processingFrameCnt);
2220 m_ispProcessingFrameCnt = processingFrameCnt;
2221 m_previewOutput = 0;
2222 m_recordOutput = 0;
2223 shot_ext = (struct camera2_shot_ext *)(m_camera_info.sensor.buffer[bayerIndexToDequeue].virt.extP[1]);
 2224         if (processingFrameCnt != -1 || m_scp_flushing) // real frame (not a bubble), or SCP flush in progress
2225 {
2226 if (shot_ext->request_scc) {
2227 m_streamThreads[1]->SetSignal(SIGNAL_STREAM_DATA_COMING);
13d8c7b4 2228 }
9dd63e1f
SK
2229 m_previewOutput = shot_ext->shot.ctl.request.outputStreams[0];
2230 m_recordOutput = shot_ext->shot.ctl.request.outputStreams[2];
2231 if (m_previewOutput || m_recordOutput) {
2232 m_streamThreads[0]->SetSignal(SIGNAL_STREAM_DATA_COMING);
2233 }
13d8c7b4 2234 }
3f0357b8 2235 ALOGV("### isp DQBUF start");
13d8c7b4 2236 index = cam_int_dqbuf(&(m_camera_info.isp));
3f0357b8 2237 ALOGV("### isp DQBUF done bayerIndex(%d) for frameCnt(%d)", index, processingFrameCnt);
9dd63e1f
SK
2238 shot_ext = (struct camera2_shot_ext *)(m_camera_info.sensor.buffer[index].virt.extP[1]);
2239 ALOGV("(%s): SCP_CLOSING check sensor(%d) scc(%d) scp(%d) ", __FUNCTION__,
2240 shot_ext->request_sensor, shot_ext->request_scc, shot_ext->request_scp);
2241 if (shot_ext->request_scc + shot_ext->request_scp + shot_ext->request_sensor == 0) {
2242 ALOGV("(%s): SCP_CLOSING check OK ", __FUNCTION__);
2243 m_scp_closed = true;
2244 }
2245 else
2246 m_scp_closed = false;
2247 if (processingFrameCnt != -1) {
9dd63e1f
SK
2248 m_requestManager->ApplyDynamicMetadata(shot_ext, processingFrameCnt);
2249 }
2250 m_BayerManager->MarkIspDequeue(index);
2251 if (m_BayerManager->GetNumOnIsp() != 0) {
2252 selfThread->SetSignal(SIGNAL_ISP_START_BAYER_DEQUEUE);
13d8c7b4 2253 }
c15a6b00 2254 }
9dd63e1f 2255
c15a6b00
JS
2256 return;
2257}
2258
2259void ExynosCameraHWInterface2::m_streamThreadInitialize(SignalDrivenThread * self)
2260{
13d8c7b4
SK
2261 StreamThread * selfThread = ((StreamThread*)self);
2262 ALOGV("DEBUG(%s): ", __FUNCTION__ );
2263 memset(&(selfThread->m_parameters), 0, sizeof(stream_parameters_t));
2264 selfThread->m_isBufferInit = false;
2265
c15a6b00
JS
2266 return;
2267}
2268
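// Editor's note (role of the two stream threads, inferred from the streamType
// checks below): stream 0 serves the preview/record path (SCP), optionally
// feeding the recording stream through the video CSC, while stream 1 serves the
// still-capture path (SCC), converting the captured YUYV frame and JPEG-encoding
// it before handing it back to the service.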
c15a6b00
JS
2269void ExynosCameraHWInterface2::m_streamThreadFunc(SignalDrivenThread * self)
2270{
13d8c7b4
SK
2271 uint32_t currentSignal = self->GetProcessingSignal();
2272 StreamThread * selfThread = ((StreamThread*)self);
2273 stream_parameters_t *selfStreamParms = &(selfThread->m_parameters);
9dd63e1f 2274 record_parameters_t *selfRecordParms = &(selfThread->m_recordParameters);
13d8c7b4 2275 node_info_t *currentNode = &(selfStreamParms->node);
c15a6b00 2276
13d8c7b4 2277 ALOGV("DEBUG(%s): m_streamThreadFunc[%d] (%x)", __FUNCTION__, selfThread->m_index, currentSignal);
c15a6b00 2278
13d8c7b4
SK
2279 if (currentSignal & SIGNAL_STREAM_CHANGE_PARAMETER) {
2280 ALOGV("DEBUG(%s): processing SIGNAL_STREAM_CHANGE_PARAMETER", __FUNCTION__);
2281 selfThread->applyChange();
2282 if (selfStreamParms->streamType==1) {
2283 m_resizeBuf.size.extS[0] = ALIGN(selfStreamParms->outputWidth, 16) * ALIGN(selfStreamParms->outputHeight, 16) * 2;
2284 m_resizeBuf.size.extS[1] = 0;
2285 m_resizeBuf.size.extS[2] = 0;
c15a6b00 2286
13d8c7b4
SK
2287 if (allocCameraMemory(selfStreamParms->ionClient, &m_resizeBuf, 1) == -1) {
2288 ALOGE("ERR(%s): Failed to allocate resize buf", __FUNCTION__);
2289 }
2290 }
2291 ALOGV("DEBUG(%s): processing SIGNAL_STREAM_CHANGE_PARAMETER DONE", __FUNCTION__);
c15a6b00 2292 }
13d8c7b4
SK
2293
2294 if (currentSignal & SIGNAL_THREAD_RELEASE) {
9dd63e1f 2295 int i, index = -1, cnt_to_dq = 0;
13d8c7b4
SK
2296 status_t res;
2297 ALOGV("DEBUG(%s): processing SIGNAL_THREAD_RELEASE", __FUNCTION__);
2298
2299
2300
2301 if (selfThread->m_isBufferInit) {
2302 for ( i=0 ; i < selfStreamParms->numSvcBuffers; i++) {
2303 ALOGV("DEBUG(%s): checking buffer index[%d] - status(%d)",
2304 __FUNCTION__, i, selfStreamParms->svcBufStatus[i]);
2305 if (selfStreamParms->svcBufStatus[i] ==ON_DRIVER) cnt_to_dq++;
2306 }
b5237e6b 2307
9dd63e1f 2308 m_scp_closing = true;
13d8c7b4
SK
2309 ALOGV("DEBUG(%s): calling stream(%d) streamoff (fd:%d)", __FUNCTION__,
2310 selfThread->m_index, selfStreamParms->fd);
2311 cam_int_streamoff(&(selfStreamParms->node));
2312 ALOGV("DEBUG(%s): calling stream(%d) streamoff done", __FUNCTION__, selfThread->m_index);
b5237e6b
SK
2313 if (selfStreamParms->streamType == 0) {
2314 ALOGV("DEBUG(%s): calling stream(%d) reqbuf 0 (fd:%d)", __FUNCTION__,
2315 selfThread->m_index, selfStreamParms->fd);
2316 currentNode->buffers = 0;
2317 cam_int_reqbufs(currentNode);
2318 ALOGV("DEBUG(%s): calling stream(%d) reqbuf 0 DONE(fd:%d)", __FUNCTION__,
2319 selfThread->m_index, selfStreamParms->fd);
13d8c7b4 2320 }
b5237e6b 2321 selfThread->m_releasing = false;
13d8c7b4 2322 }
b5237e6b 2323 if (selfThread->m_index == 1 && m_resizeBuf.size.s != 0) {
9dd63e1f 2324 freeCameraMemory(&m_resizeBuf, 1);
13d8c7b4 2325 }
9dd63e1f 2326 selfThread->m_isBufferInit = false;
13d8c7b4 2327 selfThread->m_index = 255;
9dd63e1f 2328
13d8c7b4 2329 ALOGV("DEBUG(%s): processing SIGNAL_THREAD_RELEASE DONE", __FUNCTION__);
9dd63e1f 2330
13d8c7b4
SK
2331 return;
2332 }
2333
2334 if (currentSignal & SIGNAL_STREAM_DATA_COMING) {
c15a6b00
JS
2335 buffer_handle_t * buf = NULL;
2336 status_t res;
2337 void *virtAddr[3];
2338 int i, j;
2339 int index;
13d8c7b4
SK
2340 ALOGV("DEBUG(%s): stream(%d) processing SIGNAL_STREAM_DATA_COMING",
2341 __FUNCTION__,selfThread->m_index);
2342 if (!(selfThread->m_isBufferInit)) {
2343 for ( i=0 ; i < selfStreamParms->numSvcBuffers; i++) {
2344 res = selfStreamParms->streamOps->dequeue_buffer(selfStreamParms->streamOps, &buf);
c15a6b00 2345 if (res != NO_ERROR || buf == NULL) {
13d8c7b4 2346 ALOGE("ERR(%s): Init: unable to dequeue buffer : %d",__FUNCTION__ , res);
c15a6b00
JS
2347 return;
2348 }
13d8c7b4 2349 ALOGV("DEBUG(%s): got buf(%x) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf),
c15a6b00 2350 ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
13d8c7b4 2351
c15a6b00 2352 if (m_grallocHal->lock(m_grallocHal, *buf,
13d8c7b4
SK
2353 selfStreamParms->usage,
2354 0, 0, selfStreamParms->outputWidth, selfStreamParms->outputHeight, virtAddr) != 0) {
2355 ALOGE("ERR(%s): could not obtain gralloc buffer", __FUNCTION__);
2356 return;
c15a6b00 2357 }
13d8c7b4
SK
2358 ALOGV("DEBUG(%s): locked img buf plane0(%x) plane1(%x) plane2(%x)",
2359 __FUNCTION__, (unsigned int)virtAddr[0], (unsigned int)virtAddr[1], (unsigned int)virtAddr[2]);
2360
2361 index = selfThread->findBufferIndex(virtAddr[0]);
2362 if (index == -1) {
2363 ALOGE("ERR(%s): could not find buffer index", __FUNCTION__);
c15a6b00
JS
2364 }
2365 else {
13d8c7b4
SK
2366 ALOGV("DEBUG(%s): found buffer index[%d] - status(%d)",
2367 __FUNCTION__, index, selfStreamParms->svcBufStatus[index]);
2368 if (selfStreamParms->svcBufStatus[index]== REQUIRES_DQ_FROM_SVC)
2369 selfStreamParms->svcBufStatus[index] = ON_DRIVER;
2370 else if (selfStreamParms->svcBufStatus[index]== ON_SERVICE)
2371 selfStreamParms->svcBufStatus[index] = ON_HAL;
2372 else {
9dd63e1f 2373 ALOGV("DBG(%s): buffer status abnormal (%d) "
13d8c7b4
SK
2374 , __FUNCTION__, selfStreamParms->svcBufStatus[index]);
2375 }
be494d19 2376 selfStreamParms->numSvcBufsInHal++;
13d8c7b4 2377 if (*buf != selfStreamParms->svcBufHandle[index])
9dd63e1f 2378 ALOGV("DBG(%s): different buf_handle index ", __FUNCTION__);
13d8c7b4
SK
2379 else
2380 ALOGV("DEBUG(%s): same buf_handle index", __FUNCTION__);
c15a6b00 2381 }
be494d19 2382 selfStreamParms->svcBufIndex = 0;
c15a6b00 2383 }
13d8c7b4 2384 selfThread->m_isBufferInit = true;
c15a6b00 2385 }
9dd63e1f
SK
2386
2387 if (m_recordingEnabled && m_needsRecordBufferInit) {
2388 ALOGV("DEBUG(%s): Recording Buffer Initialization numsvcbuf(%d)",
2389 __FUNCTION__, selfRecordParms->numSvcBuffers);
9dd63e1f
SK
2390 int checkingIndex = 0;
2391 bool found = false;
2392 for ( i=0 ; i < selfRecordParms->numSvcBuffers; i++) {
2393 res = selfRecordParms->streamOps->dequeue_buffer(selfRecordParms->streamOps, &buf);
2394 if (res != NO_ERROR || buf == NULL) {
2395 ALOGE("ERR(%s): Init: unable to dequeue buffer : %d",__FUNCTION__ , res);
2396 return;
2397 }
be494d19 2398 selfRecordParms->numSvcBufsInHal++;
9dd63e1f 2399 ALOGV("DEBUG(%s): [record] got buf(%x) bufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf),
be494d19 2400 selfRecordParms->numSvcBufsInHal, ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
9dd63e1f
SK
2401
2402 if (m_grallocHal->lock(m_grallocHal, *buf,
2403 selfRecordParms->usage, 0, 0,
2404 selfRecordParms->outputWidth, selfRecordParms->outputHeight, virtAddr) != 0) {
2405 ALOGE("ERR(%s): could not obtain gralloc buffer", __FUNCTION__);
2406 }
2407 else {
2408 ALOGV("DEBUG(%s): [record] locked img buf plane0(%x) plane1(%x) plane2(%x)",
2409 __FUNCTION__, (unsigned int)virtAddr[0], (unsigned int)virtAddr[1], (unsigned int)virtAddr[2]);
2410
2411 }
2412 found = false;
2413 for (checkingIndex = 0; checkingIndex < selfRecordParms->numSvcBuffers ; checkingIndex++) {
9dd63e1f
SK
2414 if (selfRecordParms->svcBufHandle[checkingIndex] == *buf ) {
2415 found = true;
2416 break;
2417 }
2418 }
2419 ALOGV("DEBUG(%s): [record] found(%d) - index[%d]", __FUNCTION__, found, checkingIndex);
2420 if (!found) break;
2421 index = checkingIndex;
2422
2423
2424 if (index == -1) {
b5237e6b 2425                 ALOGE("ERR(%s): could not find buffer index", __FUNCTION__);
9dd63e1f
SK
2426 }
2427 else {
2428 ALOGV("DEBUG(%s): found buffer index[%d] - status(%d)",
2429 __FUNCTION__, index, selfRecordParms->svcBufStatus[index]);
2430 if (selfRecordParms->svcBufStatus[index]== ON_SERVICE)
2431 selfRecordParms->svcBufStatus[index] = ON_HAL;
2432 else {
2433 ALOGV("DBG(%s): buffer status abnormal (%d) "
2434 , __FUNCTION__, selfRecordParms->svcBufStatus[index]);
2435 }
2436 if (*buf != selfRecordParms->svcBufHandle[index])
2437 ALOGV("DBG(%s): different buf_handle index ", __FUNCTION__);
2438 else
2439 ALOGV("DEBUG(%s): same buf_handle index", __FUNCTION__);
2440 }
be494d19 2441 selfRecordParms->svcBufIndex = 0;
9dd63e1f
SK
2442 }
2443 m_needsRecordBufferInit = false;
2444 }
c15a6b00 2445
b5237e6b
SK
2446 do {
2447 if (selfStreamParms->streamType == 0) {
2448 ALOGV("DEBUG(%s): stream(%d) type(%d) DQBUF START ",__FUNCTION__,
2449 selfThread->m_index, selfStreamParms->streamType);
c15a6b00 2450
b5237e6b
SK
2451 index = cam_int_dqbuf(&(selfStreamParms->node));
2452 ALOGV("DEBUG(%s): stream(%d) type(%d) DQBUF done index(%d)",__FUNCTION__,
2453 selfThread->m_index, selfStreamParms->streamType, index);
9dd63e1f 2454
9dd63e1f 2455
b5237e6b
SK
2456 if (selfStreamParms->svcBufStatus[index] != ON_DRIVER)
2457 ALOGD("DBG(%s): DQed buffer status abnormal (%d) ",
2458 __FUNCTION__, selfStreamParms->svcBufStatus[index]);
2459 selfStreamParms->svcBufStatus[index] = ON_HAL;
9dd63e1f 2460
b5237e6b 2461 if (m_recordOutput && m_recordingEnabled) {
be494d19 2462 ALOGV("DEBUG(%s): Entering record frame creator, index(%d)",__FUNCTION__, selfRecordParms->svcBufIndex);
b5237e6b
SK
2463 bool found = false;
 2464                     for (int i = 0 ; i < selfRecordParms->numSvcBuffers ; i++) {
be494d19 2465 if (selfRecordParms->svcBufStatus[selfRecordParms->svcBufIndex] == ON_HAL) {
b5237e6b
SK
2466 found = true;
2467 break;
2468 }
be494d19
SK
2469 selfRecordParms->svcBufIndex++;
2470 if (selfRecordParms->svcBufIndex >= selfRecordParms->numSvcBuffers)
2471 selfRecordParms->svcBufIndex = 0;
b5237e6b
SK
2472 }
2473 if (!found) {
2474 ALOGE("(%s): cannot find free recording buffer", __FUNCTION__);
be494d19 2475 selfRecordParms->svcBufIndex++;
b5237e6b
SK
2476 break;
2477 }
9dd63e1f 2478
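                    // Editor's note (what the CSC block below does, as read from the
                    // calls): the preview-sized YV12 frame just dequeued from the driver
                    // is cropped to the video aspect ratio (m_getRatioSize) and then
                    // converted/scaled by libcsc into the recording gralloc buffer
                    // selected above, before that buffer is timestamped and enqueued to
                    // the recording stream.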
b5237e6b
SK
2479 if (m_exynosVideoCSC) {
2480 int videoW = selfRecordParms->outputWidth, videoH = selfRecordParms->outputHeight;
2481 int cropX, cropY, cropW, cropH = 0;
2482 int previewW = selfStreamParms->outputWidth, previewH = selfStreamParms->outputHeight;
b5237e6b 2483 m_getRatioSize(previewW, previewH,
804236a7 2484 videoW, videoH,
b5237e6b
SK
2485 &cropX, &cropY,
2486 &cropW, &cropH,
9dd63e1f
SK
2487 0);
2488
b5237e6b
SK
2489 ALOGV("DEBUG(%s):cropX = %d, cropY = %d, cropW = %d, cropH = %d",
2490 __FUNCTION__, cropX, cropY, cropW, cropH);
2491
b5237e6b 2492 csc_set_src_format(m_exynosVideoCSC,
b5237e6b
SK
2493 previewW, previewH,
2494 cropX, cropY, cropW, cropH,
2495 HAL_PIXEL_FORMAT_YV12,
2496 0);
2497
2498 csc_set_dst_format(m_exynosVideoCSC,
804236a7 2499 videoW, videoH,
b5237e6b 2500 0, 0, videoW, videoH,
be494d19 2501 selfRecordParms->outputFormat,
b5237e6b
SK
2502 1);
2503
b5237e6b
SK
2504 csc_set_src_buffer(m_exynosVideoCSC,
2505 (void **)(&(selfStreamParms->svcBuffers[index].fd.fd)));
804236a7 2506
b5237e6b 2507 csc_set_dst_buffer(m_exynosVideoCSC,
804236a7 2508 (void **)(&(selfRecordParms->svcBuffers[selfRecordParms->svcBufIndex].fd.fd)));
b5237e6b
SK
2509
2510 if (csc_convert(m_exynosVideoCSC) != 0) {
2511 ALOGE("ERR(%s):csc_convert() fail", __FUNCTION__);
2512 }
2513 else {
be494d19 2514 ALOGV("(%s):csc_convert() SUCCESS", __FUNCTION__);
b5237e6b 2515 }
9dd63e1f
SK
2516 }
2517 else {
b5237e6b 2518 ALOGE("ERR(%s):m_exynosVideoCSC == NULL", __FUNCTION__);
9dd63e1f 2519 }
b5237e6b 2520
b5237e6b 2521 res = selfRecordParms->streamOps->enqueue_buffer(selfRecordParms->streamOps,
be494d19
SK
2522 m_requestManager->GetTimestamp(m_ispProcessingFrameCnt),
2523 &(selfRecordParms->svcBufHandle[selfRecordParms->svcBufIndex]));
b5237e6b
SK
2524 ALOGV("DEBUG(%s): stream(%d) record enqueue_buffer to svc done res(%d)", __FUNCTION__,
2525 selfThread->m_index, res);
2526 if (res == 0) {
be494d19
SK
2527 selfRecordParms->svcBufStatus[selfRecordParms->svcBufIndex] = ON_SERVICE;
2528 selfRecordParms->numSvcBufsInHal--;
b5237e6b 2529 }
be494d19 2530
b5237e6b
SK
2531 m_requestManager->NotifyStreamOutput(m_ispProcessingFrameCnt, 2);
2532
2533 }
2534 if (m_previewOutput) {
2535 res = selfStreamParms->streamOps->enqueue_buffer(selfStreamParms->streamOps,
2536 m_requestManager->GetTimestamp(m_ispProcessingFrameCnt), &(selfStreamParms->svcBufHandle[index]));
2537 ALOGV("DEBUG(%s): stream(%d) enqueue_buffer to svc done res(%d)", __FUNCTION__, selfThread->m_index, res);
2538 }
9dd63e1f 2539 else {
b5237e6b
SK
2540 res = selfStreamParms->streamOps->cancel_buffer(selfStreamParms->streamOps,
2541 &(selfStreamParms->svcBufHandle[index]));
2542 ALOGV("DEBUG(%s): stream(%d) cancel_buffer to svc done res(%d)", __FUNCTION__, selfThread->m_index, res);
9dd63e1f 2543 }
9dd63e1f 2544 if (res == 0) {
b5237e6b 2545 selfStreamParms->svcBufStatus[index] = ON_SERVICE;
be494d19 2546 selfStreamParms->numSvcBufsInHal--;
9dd63e1f 2547 }
b5237e6b
SK
2548 else {
2549 selfStreamParms->svcBufStatus[index] = ON_HAL;
2550 }
2551 m_requestManager->NotifyStreamOutput(m_ispProcessingFrameCnt, selfThread->m_index);
13d8c7b4 2552 }
b5237e6b
SK
2553 else if (selfStreamParms->streamType == 1) {
2554 ALOGV("DEBUG(%s): stream(%d) type(%d) DQBUF START ",__FUNCTION__,
2555 selfThread->m_index, selfStreamParms->streamType);
2556 index = cam_int_dqbuf(&(selfStreamParms->node));
2557 ALOGV("DEBUG(%s): stream(%d) type(%d) DQBUF done index(%d)",__FUNCTION__,
2558 selfThread->m_index, selfStreamParms->streamType, index);
13d8c7b4 2559
b5237e6b 2560 m_jpegEncodingFrameCnt = m_ispProcessingFrameCnt;
13d8c7b4 2561
b5237e6b
SK
2562 bool ret = false;
2563 int pictureW, pictureH, pictureFramesize = 0;
2564 int pictureFormat;
2565 int cropX, cropY, cropW, cropH = 0;
13d8c7b4 2566
13d8c7b4 2567
b5237e6b 2568 ExynosBuffer jpegBuf, resizeBufInfo;
13d8c7b4 2569
b5237e6b 2570 ExynosRect m_orgPictureRect;
13d8c7b4 2571
b5237e6b
SK
2572 m_orgPictureRect.w = selfStreamParms->outputWidth;
2573 m_orgPictureRect.h = selfStreamParms->outputHeight;
13d8c7b4 2574
b5237e6b 2575 ExynosBuffer* m_pictureBuf = &(m_camera_info.capture.buffer[index]);
13d8c7b4 2576
daa1fcd6
SK
2577 pictureW = selfStreamParms->nodeWidth;
2578 pictureH = selfStreamParms->nodeHeight;
b5237e6b
SK
2579 pictureFormat = V4L2_PIX_FMT_YUYV;
2580 pictureFramesize = FRAME_SIZE(V4L2_PIX_2_HAL_PIXEL_FORMAT(pictureFormat), pictureW, pictureH);
13d8c7b4 2581
b5237e6b
SK
2582 if (m_exynosPictureCSC) {
2583 m_getRatioSize(pictureW, pictureH,
2584 m_orgPictureRect.w, m_orgPictureRect.h,
2585 &cropX, &cropY,
2586 &cropW, &cropH,
2587 0);
13d8c7b4 2588
b5237e6b
SK
2589 ALOGV("DEBUG(%s):cropX = %d, cropY = %d, cropW = %d, cropH = %d",
2590 __FUNCTION__, cropX, cropY, cropW, cropH);
13d8c7b4 2591
b5237e6b
SK
2592 csc_set_src_format(m_exynosPictureCSC,
2593 ALIGN(pictureW, 16), ALIGN(pictureH, 16),
2594 cropX, cropY, cropW, cropH,
2595 V4L2_PIX_2_HAL_PIXEL_FORMAT(pictureFormat),
2596 0);
13d8c7b4 2597
b5237e6b
SK
2598 csc_set_dst_format(m_exynosPictureCSC,
2599 m_orgPictureRect.w, m_orgPictureRect.h,
2600 0, 0, m_orgPictureRect.w, m_orgPictureRect.h,
2601 V4L2_PIX_2_HAL_PIXEL_FORMAT(V4L2_PIX_FMT_NV16),
2602 0);
2603 csc_set_src_buffer(m_exynosPictureCSC,
2604 (void **)&m_pictureBuf->fd.fd);
13d8c7b4 2605
b5237e6b
SK
2606 csc_set_dst_buffer(m_exynosPictureCSC,
2607 (void **)&m_resizeBuf.fd.fd);
2608 for (int i=0 ; i < 3 ; i++)
2609 ALOGV("DEBUG(%s): m_resizeBuf.virt.extP[%d]=%d m_resizeBuf.size.extS[%d]=%d",
13d8c7b4 2610 __FUNCTION__, i, m_resizeBuf.fd.extFd[i], i, m_resizeBuf.size.extS[i]);
13d8c7b4 2611
b5237e6b
SK
2612 if (csc_convert(m_exynosPictureCSC) != 0)
2613 ALOGE("ERR(%s): csc_convert() fail", __FUNCTION__);
13d8c7b4 2614
c15a6b00 2615
b5237e6b
SK
2616 }
2617 else {
2618 ALOGE("ERR(%s): m_exynosPictureCSC == NULL", __FUNCTION__);
2619 }
2620
2621 resizeBufInfo = m_resizeBuf;
c15a6b00 2622
b5237e6b 2623 m_getAlignedYUVSize(V4L2_PIX_FMT_NV16, m_orgPictureRect.w, m_orgPictureRect.h, &m_resizeBuf);
a8b0b07d 2624
b5237e6b
SK
2625 for (int i = 1; i < 3; i++) {
2626 if (m_resizeBuf.size.extS[i] != 0)
2627 m_resizeBuf.fd.extFd[i] = m_resizeBuf.fd.extFd[i-1] + m_resizeBuf.size.extS[i-1];
c15a6b00 2628
b5237e6b
SK
2629 ALOGV("(%s): m_resizeBuf.size.extS[%d] = %d", __FUNCTION__, i, m_resizeBuf.size.extS[i]);
2630 }
c15a6b00 2631
c15a6b00 2632
b5237e6b
SK
2633 ExynosRect jpegRect;
2634 bool found = false;
2635 jpegRect.w = m_orgPictureRect.w;
2636 jpegRect.h = m_orgPictureRect.h;
2637 jpegRect.colorFormat = V4L2_PIX_FMT_NV16;
a8b0b07d 2638
b5237e6b
SK
2639 jpegBuf.size.extS[0] = 5*1024*1024;
2640 jpegBuf.size.extS[1] = 0;
2641 jpegBuf.size.extS[2] = 0;
c15a6b00 2642
b5237e6b 2643 allocCameraMemory(currentNode->ionClient, &jpegBuf, 1);
13d8c7b4 2644
b5237e6b
SK
2645 ALOGV("DEBUG(%s): jpegBuf.size.s = %d , jpegBuf.virt.p = %x", __FUNCTION__,
2646 jpegBuf.size.s, (unsigned int)jpegBuf.virt.p);
13d8c7b4 2647
be494d19 2648 m_requestManager->NotifyStreamOutput(m_jpegEncodingFrameCnt, selfThread->m_index);
b5237e6b
SK
2649 if (yuv2Jpeg(&m_resizeBuf, &jpegBuf, &jpegRect) == false)
2650 ALOGE("ERR(%s):yuv2Jpeg() fail", __FUNCTION__);
2651 cam_int_qbuf(&(selfStreamParms->node), index);
2652 ALOGV("DEBUG(%s): stream(%d) type(%d) QBUF DONE ",__FUNCTION__,
2653 selfThread->m_index, selfStreamParms->streamType);
2654
2655 m_resizeBuf = resizeBufInfo;
2656
2657 for (int i = 0; i < selfStreamParms->numSvcBuffers ; i++) {
be494d19 2658 if (selfStreamParms->svcBufStatus[selfStreamParms->svcBufIndex] == ON_HAL) {
b5237e6b
SK
2659 found = true;
2660 break;
2661 }
be494d19
SK
2662 selfStreamParms->svcBufIndex++;
2663 if (selfStreamParms->svcBufIndex >= selfStreamParms->numSvcBuffers)
2664 selfStreamParms->svcBufIndex = 0;
b5237e6b
SK
2665 }
2666 if (!found) {
2667 ALOGE("ERR(%s): NO free SVC buffer for JPEG", __FUNCTION__);
13d8c7b4
SK
2668 }
2669 else {
be494d19 2670 memcpy(selfStreamParms->svcBuffers[selfStreamParms->svcBufIndex].virt.extP[0], jpegBuf.virt.extP[0], 5*1024*1024);
b5237e6b
SK
2671
2672 res = selfStreamParms->streamOps->enqueue_buffer(selfStreamParms->streamOps,
be494d19 2673 m_requestManager->GetTimestamp(m_jpegEncodingFrameCnt), &(selfStreamParms->svcBufHandle[selfStreamParms->svcBufIndex]));
b5237e6b
SK
2674
2675 freeCameraMemory(&jpegBuf, 1);
2676 ALOGV("DEBUG(%s): stream(%d) enqueue_buffer index(%d) to svc done res(%d)",
be494d19 2677 __FUNCTION__, selfThread->m_index, selfStreamParms->svcBufIndex, res);
b5237e6b 2678 if (res == 0) {
be494d19
SK
2679 selfStreamParms->svcBufStatus[selfStreamParms->svcBufIndex] = ON_SERVICE;
2680 selfStreamParms->numSvcBufsInHal--;
b5237e6b
SK
2681 }
2682 else {
be494d19 2683 selfStreamParms->svcBufStatus[selfStreamParms->svcBufIndex] = ON_HAL;
b5237e6b 2684 }
be494d19 2685
13d8c7b4 2686 }
c15a6b00 2687
b5237e6b 2688 }
c15a6b00 2689 }
b5237e6b
SK
2690 while (0);
2691
9dd63e1f
SK
2692 if (selfStreamParms->streamType==0 && m_recordOutput && m_recordingEnabled) {
2693 do {
be494d19
SK
2694 ALOGV("DEBUG(%s): record currentBuf#(%d)", __FUNCTION__ , selfRecordParms->numSvcBufsInHal);
2695 if (selfRecordParms->numSvcBufsInHal >= 1)
9dd63e1f
SK
2696 {
2697 ALOGV("DEBUG(%s): breaking", __FUNCTION__);
2698 break;
2699 }
2700 res = selfRecordParms->streamOps->dequeue_buffer(selfRecordParms->streamOps, &buf);
2701 if (res != NO_ERROR || buf == NULL) {
2702 ALOGV("DEBUG(%s): record stream(%d) dequeue_buffer fail res(%d)",__FUNCTION__ , selfThread->m_index, res);
2703 break;
2704 }
be494d19 2705 selfRecordParms->numSvcBufsInHal ++;
9dd63e1f 2706 ALOGV("DEBUG(%s): record got buf(%x) numBufInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__, (uint32_t)(*buf),
be494d19 2707 selfRecordParms->numSvcBufsInHal, ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
9dd63e1f
SK
2708 const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf);
2709
2710 bool found = false;
2711 int checkingIndex = 0;
2712 for (checkingIndex = 0; checkingIndex < selfRecordParms->numSvcBuffers ; checkingIndex++) {
2713 if (priv_handle->fd == selfRecordParms->svcBuffers[checkingIndex].fd.extFd[0] ) {
2714 found = true;
2715 break;
2716 }
2717 }
 2718                 ALOGV("DEBUG(%s): recording dequeued_buffer found(%d)", __FUNCTION__, found);
2719 if (!found) break;
2720 index = checkingIndex;
2721 if (selfRecordParms->svcBufStatus[index] == ON_SERVICE) {
2722 selfRecordParms->svcBufStatus[index] = ON_HAL;
2723 }
2724 else {
2725 ALOGV("DEBUG(%s): record bufstatus abnormal [%d] status = %d", __FUNCTION__,
2726 index, selfRecordParms->svcBufStatus[index]);
2727 }
2728 } while (0);
2729 }
be494d19
SK
2730 if (selfStreamParms->streamType == 0) {
2731 while (selfStreamParms->numSvcBufsInHal < selfStreamParms->numOwnSvcBuffers) {
2732 res = selfStreamParms->streamOps->dequeue_buffer(selfStreamParms->streamOps, &buf);
2733 if (res != NO_ERROR || buf == NULL) {
2734 ALOGV("DEBUG(%s): stream(%d) dequeue_buffer fail res(%d)",__FUNCTION__ , selfThread->m_index, res);
9dd63e1f
SK
2735 break;
2736 }
be494d19
SK
2737 selfStreamParms->numSvcBufsInHal++;
2738 ALOGV("DEBUG(%s): stream(%d) got buf(%x) numInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__,
2739 selfThread->m_index, (uint32_t)(*buf), selfStreamParms->numSvcBufsInHal,
2740 ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
2741 const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf);
13d8c7b4 2742
be494d19
SK
2743 bool found = false;
2744 int checkingIndex = 0;
2745 for (checkingIndex = 0; checkingIndex < selfStreamParms->numSvcBuffers ; checkingIndex++) {
2746 if (priv_handle->fd == selfStreamParms->svcBuffers[checkingIndex].fd.extFd[0] ) {
2747 found = true;
2748 break;
2749 }
9dd63e1f 2750 }
be494d19
SK
2751 ALOGV("DEBUG(%s): post_dequeue_buffer found(%d)", __FUNCTION__, found);
2752 if (!found) break;
2753 ALOGV("DEBUG(%s): preparing to qbuf [%d]", __FUNCTION__, checkingIndex);
2754 index = checkingIndex;
2755 if (index < selfStreamParms->numHwBuffers) {
2756 uint32_t plane_index = 0;
2757 ExynosBuffer* currentBuf = &(selfStreamParms->svcBuffers[index]);
2758 struct v4l2_buffer v4l2_buf;
2759 struct v4l2_plane planes[VIDEO_MAX_PLANES];
2760
2761 v4l2_buf.m.planes = planes;
2762 v4l2_buf.type = currentNode->type;
2763 v4l2_buf.memory = currentNode->memory;
2764 v4l2_buf.index = index;
2765 v4l2_buf.length = currentNode->planes;
2766
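                // Editor's note (assumption, not verified against the gralloc module):
                // planes[1] and planes[2] are deliberately cross-wired to fd2/fd1 below,
                // presumably to reconcile the YV12 plane order (Y, Cr, Cb) exposed by
                // gralloc with the U/V order the driver expects.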
2767 v4l2_buf.m.planes[0].m.fd = priv_handle->fd;
2768 v4l2_buf.m.planes[2].m.fd = priv_handle->fd1;
2769 v4l2_buf.m.planes[1].m.fd = priv_handle->fd2;
2770 for (plane_index=0 ; plane_index < v4l2_buf.length ; plane_index++) {
2771 v4l2_buf.m.planes[plane_index].length = currentBuf->size.extS[plane_index];
2772 ALOGV("DEBUG(%s): plane(%d): fd(%d) length(%d)",
2773 __FUNCTION__, plane_index, v4l2_buf.m.planes[plane_index].m.fd,
2774 v4l2_buf.m.planes[plane_index].length);
2775 }
9dd63e1f
SK
2776 if (exynos_v4l2_qbuf(currentNode->fd, &v4l2_buf) < 0) {
2777 ALOGE("ERR(%s): stream id(%d) exynos_v4l2_qbuf() fail",
2778 __FUNCTION__, selfThread->m_index);
2779 return;
13d8c7b4 2780 }
9dd63e1f
SK
2781 selfStreamParms->svcBufStatus[index] = ON_DRIVER;
2782 ALOGV("DEBUG(%s): stream id(%d) type0 QBUF done index(%d)",
2783 __FUNCTION__, selfThread->m_index, index);
2784 }
be494d19
SK
2785 }
2786 }
2787 else if (selfStreamParms->streamType == 1) {
2788 while (selfStreamParms->numSvcBufsInHal < selfStreamParms->numOwnSvcBuffers) {
2789 res = selfStreamParms->streamOps->dequeue_buffer(selfStreamParms->streamOps, &buf);
2790 if (res != NO_ERROR || buf == NULL) {
2791 ALOGV("DEBUG(%s): stream(%d) dequeue_buffer fail res(%d)",__FUNCTION__ , selfThread->m_index, res);
2792 break;
2793 }
2794
2795 ALOGV("DEBUG(%s): stream(%d) got buf(%x) numInHal(%d) version(%d), numFds(%d), numInts(%d)", __FUNCTION__,
2796 selfThread->m_index, (uint32_t)(*buf), selfStreamParms->numSvcBufsInHal,
2797 ((native_handle_t*)(*buf))->version, ((native_handle_t*)(*buf))->numFds, ((native_handle_t*)(*buf))->numInts);
2798
2799 const private_handle_t *priv_handle = reinterpret_cast<const private_handle_t *>(*buf);
2800
2801 bool found = false;
2802 int checkingIndex = 0;
2803 for (checkingIndex = 0; checkingIndex < selfStreamParms->numSvcBuffers ; checkingIndex++) {
2804 if (priv_handle->fd == selfStreamParms->svcBuffers[checkingIndex].fd.extFd[0] ) {
2805 found = true;
2806 break;
2807 }
13d8c7b4 2808 }
be494d19
SK
2809 if (!found) break;
2810 selfStreamParms->svcBufStatus[checkingIndex] = ON_HAL;
2811 selfStreamParms->numSvcBufsInHal++;
13d8c7b4 2812 }
be494d19 2813
13d8c7b4
SK
2814 }
2815 ALOGV("DEBUG(%s): stream(%d) processing SIGNAL_STREAM_DATA_COMING DONE",
2816 __FUNCTION__,selfThread->m_index);
c15a6b00 2817 }
c15a6b00
JS
2818 return;
2819}
2820
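// Editor's note (summary of the helper below): yuv2Jpeg wraps
// ExynosJpegEncoderForCamera - it creates an encoder, fixes quality at 100,
// programs the source size/colour format and the ion-backed input/output
// buffer fds, and runs a single encode; thumbnail generation is compiled out
// (#if 0), so mExifInfo.enableThumb is always false on this path.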
13d8c7b4
SK
2821bool ExynosCameraHWInterface2::yuv2Jpeg(ExynosBuffer *yuvBuf,
2822 ExynosBuffer *jpegBuf,
2823 ExynosRect *rect)
2824{
2825 unsigned char *addr;
2826
2827 ExynosJpegEncoderForCamera jpegEnc;
2828 bool ret = false;
2829 int res = 0;
2830
2831 unsigned int *yuvSize = yuvBuf->size.extS;
2832
2833 if (jpegEnc.create()) {
9dd63e1f 2834 ALOGE("ERR(%s):jpegEnc.create() fail", __FUNCTION__);
13d8c7b4
SK
2835 goto jpeg_encode_done;
2836 }
2837
2838 if (jpegEnc.setQuality(100)) {
9dd63e1f 2839 ALOGE("ERR(%s):jpegEnc.setQuality() fail", __FUNCTION__);
13d8c7b4
SK
2840 goto jpeg_encode_done;
2841 }
2842
2843 if (jpegEnc.setSize(rect->w, rect->h)) {
9dd63e1f 2844 ALOGE("ERR(%s):jpegEnc.setSize() fail", __FUNCTION__);
13d8c7b4
SK
2845 goto jpeg_encode_done;
2846 }
2847 ALOGV("%s : width = %d , height = %d\n", __FUNCTION__, rect->w, rect->h);
2848
2849 if (jpegEnc.setColorFormat(rect->colorFormat)) {
9dd63e1f 2850 ALOGE("ERR(%s):jpegEnc.setColorFormat() fail", __FUNCTION__);
13d8c7b4
SK
2851 goto jpeg_encode_done;
2852 }
13d8c7b4
SK
2853
2854 if (jpegEnc.setJpegFormat(V4L2_PIX_FMT_JPEG_422)) {
9dd63e1f 2855 ALOGE("ERR(%s):jpegEnc.setJpegFormat() fail", __FUNCTION__);
13d8c7b4
SK
2856 goto jpeg_encode_done;
2857 }
2858#if 0
2859 if (m_curCameraInfo->thumbnailW != 0 && m_curCameraInfo->thumbnailH != 0) {
2860 int thumbW = 0, thumbH = 0;
2861 mExifInfo.enableThumb = true;
2862 if (rect->w < 320 || rect->h < 240) {
2863 thumbW = 160;
2864 thumbH = 120;
2865 } else {
2866 thumbW = m_curCameraInfo->thumbnailW;
2867 thumbH = m_curCameraInfo->thumbnailH;
2868 }
2869 if (jpegEnc.setThumbnailSize(thumbW, thumbH)) {
9dd63e1f 2870             ALOGE("ERR(%s):jpegEnc.setThumbnailSize(%d, %d) fail", __FUNCTION__, thumbW, thumbH);
13d8c7b4
SK
2871 goto jpeg_encode_done;
2872 }
2873
2874 if (0 < m_jpegThumbnailQuality && m_jpegThumbnailQuality <= 100) {
2875 if (jpegEnc.setThumbnailQuality(m_jpegThumbnailQuality)) {
9dd63e1f 2876                 ALOGE("ERR(%s):jpegEnc.setThumbnailQuality(%d) fail", __FUNCTION__, m_jpegThumbnailQuality);
13d8c7b4
SK
2877 goto jpeg_encode_done;
2878 }
2879 }
2880
2881 m_setExifChangedAttribute(&mExifInfo, rect);
2882 } else
2883#endif
2884 {
2885 mExifInfo.enableThumb = false;
2886 }
9dd63e1f 2887 ALOGV("DEBUG(%s):calling jpegEnc.setInBuf() yuvSize(%d)", __FUNCTION__, *yuvSize);
13d8c7b4 2888 if (jpegEnc.setInBuf((int *)&(yuvBuf->fd.fd), (int *)yuvSize)) {
9dd63e1f 2889 ALOGE("ERR(%s):jpegEnc.setInBuf() fail", __FUNCTION__);
13d8c7b4
SK
2890 goto jpeg_encode_done;
2891 }
2892
2893 if (jpegEnc.setOutBuf(jpegBuf->fd.fd, jpegBuf->size.extS[0] + jpegBuf->size.extS[1] + jpegBuf->size.extS[2])) {
9dd63e1f 2894 ALOGE("ERR(%s):jpegEnc.setOutBuf() fail", __FUNCTION__);
13d8c7b4
SK
2895 goto jpeg_encode_done;
2896 }
13d8c7b4
SK
2897 memset(jpegBuf->virt.p,0,jpegBuf->size.extS[0] + jpegBuf->size.extS[1] + jpegBuf->size.extS[2]);
2898
2899 if (jpegEnc.updateConfig()) {
9dd63e1f 2900 ALOGE("ERR(%s):jpegEnc.updateConfig() fail", __FUNCTION__);
13d8c7b4
SK
2901 goto jpeg_encode_done;
2902 }
2903
2904 if (res = jpegEnc.encode((int *)&jpegBuf->size.s, NULL)) {
9dd63e1f 2905 ALOGE("ERR(%s):jpegEnc.encode() fail ret(%d)", __FUNCTION__, res);
13d8c7b4
SK
2906 goto jpeg_encode_done;
2907 }
2908
2909 ret = true;
2910
2911jpeg_encode_done:
2912
2913 if (jpegEnc.flagCreate() == true)
2914 jpegEnc.destroy();
2915
2916 return ret;
2917}

ExynosCameraHWInterface2::MainThread::~MainThread()
{
    ALOGD("(%s):", __FUNCTION__);
}

void ExynosCameraHWInterface2::MainThread::release()
{
    ALOGD("(%s):", __FUNCTION__);
    SetSignal(SIGNAL_THREAD_RELEASE);
}

ExynosCameraHWInterface2::SensorThread::~SensorThread()
{
    ALOGD("(%s):", __FUNCTION__);
}

void ExynosCameraHWInterface2::SensorThread::release()
{
    ALOGD("(%s):", __FUNCTION__);
    SetSignal(SIGNAL_THREAD_RELEASE);
}

ExynosCameraHWInterface2::IspThread::~IspThread()
{
    ALOGD("(%s):", __FUNCTION__);
}

void ExynosCameraHWInterface2::IspThread::release()
{
    ALOGD("(%s):", __FUNCTION__);
    SetSignal(SIGNAL_THREAD_RELEASE);
}

ExynosCameraHWInterface2::StreamThread::~StreamThread()
{
    ALOGD("(%s):", __FUNCTION__);
}

void ExynosCameraHWInterface2::StreamThread::setParameter(stream_parameters_t * new_parameters)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);

    m_tempParameters = new_parameters;

    SetSignal(SIGNAL_STREAM_CHANGE_PARAMETER);

    // TODO : return synchronously (after setting parameters asynchronously)
    usleep(2000);
}

void ExynosCameraHWInterface2::StreamThread::applyChange()
{
    memcpy(&m_parameters, m_tempParameters, sizeof(stream_parameters_t));

    ALOGV("DEBUG(%s): Applying stream parameters width(%d), height(%d)",
        __FUNCTION__, m_parameters.outputWidth, m_parameters.outputHeight);
}

void ExynosCameraHWInterface2::StreamThread::release()
{
    ALOGV("(%s):", __FUNCTION__);
    SetSignal(SIGNAL_THREAD_RELEASE);
}

int ExynosCameraHWInterface2::StreamThread::findBufferIndex(void * bufAddr)
{
    int index;
    for (index = 0; index < m_parameters.numSvcBuffers; index++) {
        if (m_parameters.svcBuffers[index].virt.extP[0] == bufAddr)
            return index;
    }
    return -1;
}

void ExynosCameraHWInterface2::StreamThread::setRecordingParameter(record_parameters_t * recordParm)
{
    memcpy(&m_recordParameters, recordParm, sizeof(record_parameters_t));
}

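/*
 * ION memory helpers: camera buffers are allocated from the Exynos ION heap
 * and mapped into the HAL process, with the per-plane fd, virtual address and
 * size kept in the ExynosBuffer structure.
 */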
int ExynosCameraHWInterface2::createIonClient(ion_client ionClient)
{
    if (ionClient == 0) {
        ionClient = ion_client_create();
        if (ionClient < 0) {
            ALOGE("[%s] ion client create failed, value = %d\n", __FUNCTION__, ionClient);
            return 0;
        }
    }

    return ionClient;
}

int ExynosCameraHWInterface2::deleteIonClient(ion_client ionClient)
{
    if (ionClient != 0) {
        if (ionClient > 0) {
            ion_client_destroy(ionClient);
        }
        ionClient = 0;
    }

    return ionClient;
}

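/*
 * Allocate and map up to iMemoryNum planes for 'buf'. A plane size of zero
 * ends the loop early; on any allocation or mapping failure the planes
 * acquired so far are released via freeCameraMemory() and -1 is returned.
 */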
int ExynosCameraHWInterface2::allocCameraMemory(ion_client ionClient, ExynosBuffer *buf, int iMemoryNum)
{
    int ret = 0;
    int i = 0;

    if (ionClient == 0) {
        ALOGE("[%s] ionClient is zero (%d)\n", __FUNCTION__, ionClient);
        return -1;
    }

    for (i = 0; i < iMemoryNum; i++) {
        if (buf->size.extS[i] == 0) {
            break;
        }

        buf->fd.extFd[i] = ion_alloc(ionClient,
                        buf->size.extS[i], 0, ION_HEAP_EXYNOS_MASK, 0);
        if ((buf->fd.extFd[i] == -1) || (buf->fd.extFd[i] == 0)) {
            ALOGE("[%s] ion_alloc(%d) failed\n", __FUNCTION__, buf->size.extS[i]);
            buf->fd.extFd[i] = -1;
            freeCameraMemory(buf, iMemoryNum);
            return -1;
        }

        buf->virt.extP[i] = (char *)ion_map(buf->fd.extFd[i],
                        buf->size.extS[i], 0);
        if ((buf->virt.extP[i] == (char *)MAP_FAILED) || (buf->virt.extP[i] == NULL)) {
            ALOGE("[%s] ion map failed(%d)\n", __FUNCTION__, buf->size.extS[i]);
            buf->virt.extP[i] = (char *)MAP_FAILED;
            freeCameraMemory(buf, iMemoryNum);
            return -1;
        }
        ALOGV("allocCameraMem : [%d][0x%08x] size(%d)", i, (unsigned int)(buf->virt.extP[i]), buf->size.extS[i]);
    }

    return ret;
}

void ExynosCameraHWInterface2::freeCameraMemory(ExynosBuffer *buf, int iMemoryNum)
{
    int i = 0;

    for (i = 0; i < iMemoryNum; i++) {
        if (buf->fd.extFd[i] != -1) {
            if (buf->virt.extP[i] != (char *)MAP_FAILED) {
                ion_unmap(buf->virt.extP[i], buf->size.extS[i]);
            }
            ion_free(buf->fd.extFd[i]);
        }
        buf->fd.extFd[i] = -1;
        buf->virt.extP[i] = (char *)MAP_FAILED;
        buf->size.extS[i] = 0;
    }
}

void ExynosCameraHWInterface2::initCameraMemory(ExynosBuffer *buf, int iMemoryNum)
{
    int i = 0;
    for (i = 0; i < iMemoryNum; i++) {
        buf->virt.extP[i] = (char *)MAP_FAILED;
        buf->fd.extFd[i] = -1;
        buf->size.extS[i] = 0;
    }
}


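/*
 * Module-level state: this HAL exposes a single open camera2 device at a
 * time. g_cam2_device points at that device, g_camera_vaild is cleared while
 * the device is being opened or torn down (release_stream becomes a no-op in
 * that window), and g_camera2[] caches the per-camera ExynosCamera2 objects
 * that provide the static metadata.
 */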
static camera2_device_t *g_cam2_device = NULL;
static bool g_camera_vaild = false;
ExynosCamera2 * g_camera2[2] = { NULL, NULL };

static int HAL2_camera_device_close(struct hw_device_t* device)
{
    ALOGD("%s: ENTER", __FUNCTION__);
    if (device) {
        camera2_device_t *cam_device = (camera2_device_t *)device;
        ALOGD("cam_device(0x%08x):", (unsigned int)cam_device);
        ALOGD("g_cam2_device(0x%08x):", (unsigned int)g_cam2_device);
        delete static_cast<ExynosCameraHWInterface2 *>(cam_device->priv);
        g_cam2_device = NULL;
        free(cam_device);
        g_camera_vaild = false;
    }
    ALOGD("%s: EXIT", __FUNCTION__);
    return 0;
}

static inline ExynosCameraHWInterface2 *obj(const struct camera2_device *dev)
{
    return reinterpret_cast<ExynosCameraHWInterface2 *>(dev->priv);
}

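/*
 * camera2_device_ops entry points: each wrapper below simply forwards the
 * call to the ExynosCameraHWInterface2 instance stored in dev->priv.
 */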
static int HAL2_device_set_request_queue_src_ops(const struct camera2_device *dev,
            const camera2_request_queue_src_ops_t *request_src_ops)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->setRequestQueueSrcOps(request_src_ops);
}

static int HAL2_device_notify_request_queue_not_empty(const struct camera2_device *dev)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->notifyRequestQueueNotEmpty();
}

static int HAL2_device_set_frame_queue_dst_ops(const struct camera2_device *dev,
            const camera2_frame_queue_dst_ops_t *frame_dst_ops)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->setFrameQueueDstOps(frame_dst_ops);
}

static int HAL2_device_get_in_progress_count(const struct camera2_device *dev)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->getInProgressCount();
}

static int HAL2_device_flush_captures_in_progress(const struct camera2_device *dev)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->flushCapturesInProgress();
}

static int HAL2_device_construct_default_request(const struct camera2_device *dev,
            int request_template, camera_metadata_t **request)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->constructDefaultRequest(request_template, request);
}

static int HAL2_device_allocate_stream(
            const struct camera2_device *dev,
            // inputs
            uint32_t width,
            uint32_t height,
            int format,
            const camera2_stream_ops_t *stream_ops,
            // outputs
            uint32_t *stream_id,
            uint32_t *format_actual,
            uint32_t *usage,
            uint32_t *max_buffers)
{
    ALOGV("(%s): ", __FUNCTION__);
    return obj(dev)->allocateStream(width, height, format, stream_ops,
                                    stream_id, format_actual, usage, max_buffers);
}

static int HAL2_device_register_stream_buffers(const struct camera2_device *dev,
            uint32_t stream_id,
            int num_buffers,
            buffer_handle_t *buffers)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->registerStreamBuffers(stream_id, num_buffers, buffers);
}

static int HAL2_device_release_stream(
            const struct camera2_device *dev,
            uint32_t stream_id)
{
    ALOGD("DEBUG(%s)(id: %d):", __FUNCTION__, stream_id);
    if (!g_camera_vaild)
        return 0;
    return obj(dev)->releaseStream(stream_id);
}

static int HAL2_device_allocate_reprocess_stream(
            const struct camera2_device *dev,
            uint32_t width,
            uint32_t height,
            uint32_t format,
            const camera2_stream_in_ops_t *reprocess_stream_ops,
            // outputs
            uint32_t *stream_id,
            uint32_t *consumer_usage,
            uint32_t *max_buffers)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->allocateReprocessStream(width, height, format, reprocess_stream_ops,
                                             stream_id, consumer_usage, max_buffers);
}

static int HAL2_device_release_reprocess_stream(
            const struct camera2_device *dev,
            uint32_t stream_id)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->releaseReprocessStream(stream_id);
}

static int HAL2_device_trigger_action(const struct camera2_device *dev,
            uint32_t trigger_id,
            int ext1,
            int ext2)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->triggerAction(trigger_id, ext1, ext2);
}

static int HAL2_device_set_notify_callback(const struct camera2_device *dev,
            camera2_notify_callback notify_cb,
            void *user)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->setNotifyCallback(notify_cb, user);
}

static int HAL2_device_get_metadata_vendor_tag_ops(const struct camera2_device *dev,
            vendor_tag_query_ops_t **ops)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->getMetadataVendorTagOps(ops);
}

static int HAL2_device_dump(const struct camera2_device *dev, int fd)
{
    ALOGV("DEBUG(%s):", __FUNCTION__);
    return obj(dev)->dump(fd);
}

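/*
 * Camera enumeration: this HAL always reports two cameras (0 = back,
 * 1 = front). Static metadata is built once per camera via
 * ExynosCamera2::constructStaticInfo() and cached for later queries.
 */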
static int HAL2_getNumberOfCameras()
{
    ALOGV("(%s): returning 2", __FUNCTION__);
    return 2;
}

static int HAL2_getCameraInfo(int cameraId, struct camera_info *info)
{
    ALOGD("DEBUG(%s): cameraID: %d", __FUNCTION__, cameraId);
    static camera_metadata_t *mCameraInfo[2] = {NULL, NULL};

    status_t res;

    if (cameraId == 0) {
        info->facing = CAMERA_FACING_BACK;
        if (!g_camera2[0])
            g_camera2[0] = new ExynosCamera2(0);
    }
    else if (cameraId == 1) {
        info->facing = CAMERA_FACING_FRONT;
        if (!g_camera2[1])
            g_camera2[1] = new ExynosCamera2(1);
    }
    else
        return BAD_VALUE;

    info->orientation = 0;
    info->device_version = HARDWARE_DEVICE_API_VERSION(2, 0);
    if (mCameraInfo[cameraId] == NULL) {
        res = g_camera2[cameraId]->constructStaticInfo(&(mCameraInfo[cameraId]), cameraId, true);
        if (res != OK) {
            ALOGE("%s: Unable to allocate static info: %s (%d)",
                    __FUNCTION__, strerror(-res), res);
            return res;
        }
        res = g_camera2[cameraId]->constructStaticInfo(&(mCameraInfo[cameraId]), cameraId, false);
        if (res != OK) {
            ALOGE("%s: Unable to fill in static info: %s (%d)",
                    __FUNCTION__, strerror(-res), res);
            return res;
        }
    }
    info->static_camera_characteristics = mCameraInfo[cameraId];
    return NO_ERROR;
}

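/* Dispatch table wiring the camera2 HAL entry points to the wrappers above. */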
#define SET_METHOD(m) m : HAL2_device_##m

static camera2_device_ops_t camera2_device_ops = {
        SET_METHOD(set_request_queue_src_ops),
        SET_METHOD(notify_request_queue_not_empty),
        SET_METHOD(set_frame_queue_dst_ops),
        SET_METHOD(get_in_progress_count),
        SET_METHOD(flush_captures_in_progress),
        SET_METHOD(construct_default_request),
        SET_METHOD(allocate_stream),
        SET_METHOD(register_stream_buffers),
        SET_METHOD(release_stream),
        SET_METHOD(allocate_reprocess_stream),
        SET_METHOD(release_reprocess_stream),
        SET_METHOD(trigger_action),
        SET_METHOD(set_notify_callback),
        SET_METHOD(get_metadata_vendor_tag_ops),
        SET_METHOD(dump),
};

#undef SET_METHOD

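/*
 * Open entry point: only one camera device may be open at a time. Re-opening
 * the camera that is already open returns the existing device; opening a
 * different camera waits until the previous device has been closed before a
 * new camera2_device_t and ExynosCameraHWInterface2 instance are created.
 */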
static int HAL2_camera_device_open(const struct hw_module_t* module,
            const char *id,
            struct hw_device_t** device)
{
    int cameraId = atoi(id);

    g_camera_vaild = false;
    ALOGD("\n\n>>> I'm Samsung's CameraHAL_2(ID:%d) <<<\n\n", cameraId);
    if (cameraId < 0 || cameraId >= HAL2_getNumberOfCameras()) {
        ALOGE("ERR(%s):Invalid camera ID %s", __FUNCTION__, id);
        return -EINVAL;
    }

    ALOGD("g_cam2_device : 0x%08x", (unsigned int)g_cam2_device);
    if (g_cam2_device) {
        if (obj(g_cam2_device)->getCameraId() == cameraId) {
            ALOGV("DEBUG(%s):returning existing camera ID %s", __FUNCTION__, id);
            goto done;
        } else {
            while (g_cam2_device)
                usleep(10000);
            /*ALOGE("ERR(%s):Cannot open camera %d. camera %d is already running!",
                __FUNCTION__, cameraId, obj(g_cam2_device)->getCameraId());
            return -ENOSYS;*/
        }
    }

    g_cam2_device = (camera2_device_t *)malloc(sizeof(camera2_device_t));
    ALOGD("g_cam2_device : 0x%08x", (unsigned int)g_cam2_device);

    if (!g_cam2_device)
        return -ENOMEM;

    g_cam2_device->common.tag = HARDWARE_DEVICE_TAG;
    g_cam2_device->common.version = CAMERA_DEVICE_API_VERSION_2_0;
    g_cam2_device->common.module = const_cast<hw_module_t *>(module);
    g_cam2_device->common.close = HAL2_camera_device_close;

    g_cam2_device->ops = &camera2_device_ops;

    ALOGV("DEBUG(%s):open camera2 %s", __FUNCTION__, id);

    g_cam2_device->priv = new ExynosCameraHWInterface2(cameraId, g_cam2_device, g_camera2[cameraId]);

done:
    *device = (hw_device_t *)g_cam2_device;
    ALOGV("DEBUG(%s):opened camera2 %s (%p)", __FUNCTION__, id, *device);
    g_camera_vaild = true;

    return 0;
}

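/* HAL module descriptor exported to the Android camera service. */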
static hw_module_methods_t camera_module_methods = {
    open : HAL2_camera_device_open
};

extern "C" {
    struct camera_module HAL_MODULE_INFO_SYM = {
        common : {
            tag                : HARDWARE_MODULE_TAG,
            module_api_version : CAMERA_MODULE_API_VERSION_2_0,
            hal_api_version    : HARDWARE_HAL_API_VERSION,
            id                 : CAMERA_HARDWARE_MODULE_ID,
            name               : "Exynos Camera HAL2",
            author             : "Samsung Corporation",
            methods            : &camera_module_methods,
            dso                : NULL,
            reserved           : {0},
        },
        get_number_of_cameras : HAL2_getNumberOfCameras,
        get_camera_info       : HAL2_getCameraInfo
    };
}

}; // namespace android